def PostSlaveBuildToBuildbucket(self,
                                    build_name,
                                    build_config,
                                    master_build_id,
                                    master_buildbucket_id,
                                    dryrun=False):
        """Send a Put slave build request to Buildbucket.

    Args:
      build_name: Slave build name to put to Buildbucket.
      build_config: Slave build config to put to Buildbucket.
      master_build_id: CIDB id of the master scheduling the slave build.
      master_buildbucket_id: buildbucket id of the master scheduling the
                             slave build.
      dryrun: Whether this is a dry run; defaults to False.

    Returns:
      Tuple:
        buildbucket_id
        created_ts
    """
        requested_bot = None
        request = self._CreateRequestBuild(build_name, build_config,
                                           master_build_id,
                                           master_buildbucket_id,
                                           requested_bot)
        result = request.Submit(dryrun=dryrun)

        logging.info('Build_name %s buildbucket_id %s created_timestamp %s',
                     result.build_config, result.buildbucket_id,
                     result.created_ts)
        logging.PrintBuildbotLink(result.build_config, result.url)

        return (result.buildbucket_id, result.created_ts)
Example #2
  def PerformStage(self):
    """Schedule child builds for this buildspec."""
    # build_identifier, _ = self._run.GetCIDBHandle()
    # build_id = build_identifier.cidb_id
    # master_buildbucket_id = self._run.options.buildbucket_id
    version_info = self.GetWorkspaceVersionInfo()

    extra_args = [
        '--buildbot',
        '--version', version_info.VersionString(),
    ]

    if self._run.options.debug:
      extra_args.append('--debug')

    for child_name in self._run.config.slave_configs:
      child = request_build.RequestBuild(
          build_config=child_name,
          # See crbug.com/940969. These IDs get children killed during
          # multiple quick builds.
          # master_cidb_id=build_id,
          # master_buildbucket_id=master_buildbucket_id,
          extra_args=extra_args,
      )
      result = child.Submit(dryrun=self._run.options.debug)

      logging.info(
          'Build_name %s buildbucket_id %s created_timestamp %s',
          result.build_config, result.buildbucket_id, result.created_ts)
      logging.PrintBuildbotLink(result.build_config, result.url)
 def testPrintBuildbotFunctionsNoMarker(self):
     """PrintBuildbot* without markers should not be recognized by buildbot."""
     self.AssertLogContainsMsg(
         '@@@STEP_LINK@',
         lambda: logging.PrintBuildbotLink('name', 'url'),
         check_stderr=True,
         invert=True)
     self.AssertLogContainsMsg(
         '@@@@STEP_TEXT@',
         lambda: logging.PrintBuildbotStepText('text'),
         check_stderr=True,
         invert=True)
     self.AssertLogContainsMsg('@@@STEP_WARNINGS@@@',
                               logging.PrintBuildbotStepWarnings,
                               check_stderr=True,
                               invert=True)
     self.AssertLogContainsMsg('@@@STEP_FAILURE@@@',
                               logging.PrintBuildbotStepFailure,
                               check_stderr=True,
                               invert=True)
     self.AssertLogContainsMsg(
         '@@@BUILD_STEP',
         lambda: logging.PrintBuildbotStepName('name'),
         check_stderr=True,
         invert=True)
     self.AssertLogContainsMsg('@@@SET_BUILD_PROPERTY',
                               lambda: logging.PrintKitchenSetBuildProperty(
                                   'name', {'a': 'value'}),
                               check_stderr=True,
                               invert=True)
Example #4
    def _AnnotateFailingBuilders(self, failing, inflight, no_stat, statuses):
        """Add annotations that link to either failing or inflight builders.

    Adds buildbot links to failing builder dashboards. If no builders are
    failing, adds links to inflight builders. Adds step text for builders
    with status None.

    Args:
      failing: Set of builder names of slave builders that failed.
      inflight: Set of builder names of slave builders that are inflight.
      no_stat: Set of builder names of slave builders that had status None.
      statuses: A builder-name->status dictionary, which will provide
                the dashboard_url values for any links.
    """
        builders_to_link = set.union(failing, inflight)
        for builder in builders_to_link:
            if statuses[builder].dashboard_url:
                if statuses[builder].message:
                    text = '%s: %s' % (builder,
                                       statuses[builder].message.reason)
                else:
                    text = '%s: timed out' % builder

                logging.PrintBuildbotLink(text,
                                          statuses[builder].dashboard_url)

        for builder in no_stat:
            logging.PrintBuildbotStepText('%s did not start.' % builder)
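
A minimal, self-contained sketch of the annotation text built by _AnnotateFailingBuilders above, using hypothetical namedtuple stand-ins for the builder status objects (only the dashboard_url and message.reason attributes used above are modeled; builder names and URLs are made up for illustration):

import collections

# Hypothetical stand-ins for the slave status objects.
Message = collections.namedtuple('Message', ['reason'])
Status = collections.namedtuple('Status', ['dashboard_url', 'message'])

statuses = {
    'amd64-generic-paladin': Status('https://example.test/build/1',
                                    Message('infra failure')),
    'arm-generic-paladin': Status('https://example.test/build/2', None),
}

for builder, status in sorted(statuses.items()):
    if status.message:
        text = '%s: %s' % (builder, status.message.reason)
    else:
        text = '%s: timed out' % builder
    # The real code calls logging.PrintBuildbotLink(text, status.dashboard_url).
    print('%s -> %s' % (text, status.dashboard_url))
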
 def testPrintBuildbotFunctionsWithMarker(self):
     """PrintBuildbot* with markers should be recognized by buildbot."""
     logging.EnableBuildbotMarkers()
     self.AssertLogContainsMsg(
         '@@@STEP_LINK@name@url@@@',
         lambda: logging.PrintBuildbotLink('name', 'url'),
         check_stderr=True)
     self.AssertLogContainsMsg(
         '@@@STEP_TEXT@text@@@',
         lambda: logging.PrintBuildbotStepText('text'),
         check_stderr=True)
     self.AssertLogContainsMsg('@@@STEP_WARNINGS@@@',
                               logging.PrintBuildbotStepWarnings,
                               check_stderr=True)
     self.AssertLogContainsMsg('@@@STEP_FAILURE@@@',
                               logging.PrintBuildbotStepFailure,
                               check_stderr=True)
     self.AssertLogContainsMsg(
         '@@@BUILD_STEP@name@@@',
         lambda: logging.PrintBuildbotStepName('name'),
         check_stderr=True)
     self.AssertLogContainsMsg(
         '@@@SET_BUILD_PROPERTY@name@"value"@@@',
         lambda: logging.PrintKitchenSetBuildProperty('name', 'value'),
         check_stderr=True)
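
For reference, the strings asserted above follow the buildbot annotation format. The sketch below only reproduces the link and set-property text that the test checks for; it is not chromite's implementation, and writing to stderr is an assumption based on the check_stderr=True arguments above.

import json
import sys

def print_buildbot_link(text, url, handle=None):
    # Emits e.g. '@@@STEP_LINK@name@url@@@'.
    handle = handle or sys.stderr
    handle.write('@@@STEP_LINK@%s@%s@@@\n' % (text, url))

def print_kitchen_set_build_property(name, value, handle=None):
    # Emits e.g. '@@@SET_BUILD_PROPERTY@name@"value"@@@'; the value is
    # JSON-encoded, which is why the test expects quotes around "value".
    handle = handle or sys.stderr
    handle.write('@@@SET_BUILD_PROPERTY@%s@%s@@@\n' % (name, json.dumps(value)))

print_buildbot_link('name', 'url')
print_kitchen_set_build_property('name', 'value')
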
Example #6
      def ApplyChange(self, change):
        if isinstance(change, cros_patch.GerritPatch):
          logging.PrintBuildbotLink(str(change), change.url)
        elif isinstance(change, cros_patch.UploadedLocalPatch):
          logging.PrintBuildbotStepText(str(change))

        return patch_series.PatchSeries.ApplyChange(self, change)
Example #7
def _UploadAndLinkGomaLogIfNecessary(stage_name, cbb_config_name, goma_dir,
                                     goma_client_json, goma_tmp_dir):
    """Uploads the logs for goma, if needed. Also create a link to the visualizer.

  If |goma_tmp_dir| is given, |goma_dir| and |goma_client_json| must not be
  None.

  Args:
    stage_name: Name of the stage where goma is used.
    cbb_config_name: Name of cbb_config used for the build.
    goma_dir: Path to goma installed directory.
    goma_client_json: Path to the service account json file.
    goma_tmp_dir: Goma's working directory.
  """
    if not goma_tmp_dir:
        return

    goma = goma_util.Goma(goma_dir,
                          goma_client_json,
                          goma_tmp_dir=goma_tmp_dir)
    # Just in case, stop goma. E.g., in the case of a timeout, we do not want
    # to keep the goma compiler_proxy running.
    goma.Stop()
    goma_urls = goma.UploadLogs(cbb_config_name)
    if goma_urls:
        for label, url in goma_urls:
            logging.PrintBuildbotLink('%s %s' % (stage_name, label), url)
def PrintUprevMetadata(build_branch, stable_candidate, new_ebuild):
  """Shows metadata on buildbot page at UprevAndroid step.

  Args:
    build_branch: The branch of Android builds.
    stable_candidate: The existing stable ebuild.
    new_ebuild: The newly written ebuild.
  """
  # Examples:
  # "android-container-pi revved 6461825-r1 -> 6468247-r1"
  # "android-container-pi revved 6461825-r1 -> 6461825-r2 (ebuild update only)"
  msg = '%s revved %s -> %s' % (stable_candidate.pkgname,
                                stable_candidate.version,
                                new_ebuild.version)

  old_android = stable_candidate.version_no_rev
  new_android = new_ebuild.version_no_rev

  if old_android == new_android:
    msg += ' (ebuild update only)'
  else:
    ab_link = ('https://android-build.googleplex.com'
               '/builds/%s/branches/%s/cls?end=%s'
               % (new_android, build_branch, old_android))
    logging.PrintBuildbotLink('Android changelog', ab_link)

  logging.PrintBuildbotStepText(msg)
  logging.PrintKitchenSetBuildProperty('android_uprev', json.dumps({
      'branch': build_branch,
      'new': new_ebuild.version,
      'old': stable_candidate.version,
      'pkgname': stable_candidate.pkgname,
  }))
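
A self-contained sketch of the Android changelog URL linked above; the branch name and build ids are hypothetical, following the "6461825 -> 6468247" example in the comments.

def android_changelog_link(build_branch, old_android, new_android):
    # Same URL format as the 'Android changelog' link printed above.
    return ('https://android-build.googleplex.com'
            '/builds/%s/branches/%s/cls?end=%s'
            % (new_android, build_branch, old_android))

print(android_changelog_link('git_pi-arc', '6461825', '6468247'))
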
Example #9
 def PerformStage(self):
   for spatch in ' '.join(self._run.options.rietveld_patches).split():
     patch, colon, subdir = spatch.partition(':')
     if not colon:
       subdir = 'src'
     url = self.URL_BASE % {'id': patch}
     logging.PrintBuildbotLink(spatch, url)
     commands.PatchChrome(self._run.options.chrome_root, patch, subdir)
    def _PrintBuildMessage(self, text, url=None):
        """Print the build message.

    Args:
      text: Text (string) to print.
      url: URL (string) to link the text to; defaults to None.
    """
        if url is not None:
            logging.PrintBuildbotLink(text, url)
        else:
            logging.PrintBuildbotStepText(text)
Example #11
    def PrintDownloadLink(self, filename, prefix='', text_to_display=None):
        """Print a link to an artifact in Google Storage.

    Args:
      filename: The filename of the uploaded file.
      prefix: The prefix to put in front of the filename.
      text_to_display: Text to display. If None, use |prefix| + |filename|.
    """
        url = '%s/%s' % (self.download_url.rstrip('/'), filename)
        if not text_to_display:
            text_to_display = '%s%s' % (prefix, filename)
        logging.PrintBuildbotLink(text_to_display, url)
    def _AnnotateNoStatBuilders(self, no_stat):
        """Annotate the build statuses fetched from the Buildbucket.

    Some builds may fail to upload statuses to GS. If the builds were
    scheduled by Buildbucket, get the build statuses and annotate the results.

    Args:
      no_stat: Config names of the slave builds with None status.
    """
        buildbucket_info_dict = buildbucket_lib.GetBuildInfoDict(
            self._run.attrs.metadata)

        for config_name in no_stat:
            if config_name in buildbucket_info_dict:
                buildbucket_id = buildbucket_info_dict[
                    config_name].buildbucket_id
                assert buildbucket_id is not None, 'buildbucket_id is None'
                try:
                    content = self.buildbucket_client.GetBuildRequest(
                        buildbucket_id, self._run.options.debug)

                    status = buildbucket_lib.GetBuildStatus(content)
                    result = buildbucket_lib.GetBuildResult(content)

                    text = '%s: [status] %s [result] %s' % (config_name,
                                                            status, result)

                    if result == constants.BUILDBUCKET_BUILDER_RESULT_FAILURE:
                        failure_reason = buildbucket_lib.GetBuildFailureReason(
                            content)
                        if failure_reason:
                            text += ' [failure_reason] %s' % failure_reason
                    elif result == constants.BUILDBUCKET_BUILDER_RESULT_CANCELED:
                        cancel_reason = buildbucket_lib.GetBuildCancelationReason(
                            content)
                        if cancel_reason:
                            text += ' [cancelation_reason] %s' % cancel_reason

                    dashboard_url = buildbucket_lib.GetBuildURL(content)
                    if dashboard_url:
                        logging.PrintBuildbotLink(text, dashboard_url)
                    else:
                        logging.PrintBuildbotStepText(text)
                except buildbucket_lib.BuildbucketResponseException as e:
                    logging.error('Cannot get status for %s: %s', config_name,
                                  e)
                    logging.PrintBuildbotStepText(
                        'No status found for build %s buildbucket_id %s' %
                        (config_name, buildbucket_id))
            else:
                logging.PrintBuildbotStepText(
                    "%s wasn't scheduled by master." % config_name)
    def _UploadPrebuilt(self, package_path, url_suffix):
        """Upload host or board prebuilt files to Google Storage space.

    Args:
      package_path: The path to the packages dir.
      url_suffix: The remote subdirectory where we should upload the packages.
    """
        # Process Packages file, removing duplicates and filtered packages.
        pkg_index = binpkg.GrabLocalPackageIndex(package_path)
        pkg_index.SetUploadLocation(self._binhost_base_url, url_suffix)
        pkg_index.RemoveFilteredPackages(self._ShouldFilterPackage)
        uploads = pkg_index.ResolveDuplicateUploads(self._pkg_indexes)
        unmatched_pkgs = self._packages - self._found_packages
        if unmatched_pkgs:
            logging.warning('unable to match packages: %r' % unmatched_pkgs)

        # Write Packages file.
        pkg_index.header['TTL'] = _BINPKG_TTL
        tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()

        remote_location = '%s/%s' % (self._upload_location.rstrip('/'),
                                     url_suffix)
        assert remote_location.startswith('gs://')

        # Build list of files to upload. Manually include the dev-only files but
        # skip them if not present.
        # TODO(deymo): Upload dev-only-extras.tbz2 as dev-only-extras.tar.bz2
        # outside packages/ directory. See crbug.com/448178 for details.
        if os.path.exists(os.path.join(package_path, 'dev-only-extras.tbz2')):
            uploads.append({'CPV': 'dev-only-extras'})
        upload_files = GenerateUploadDict(package_path, remote_location,
                                          uploads)
        remote_file = '%s/Packages' % remote_location.rstrip('/')
        upload_files[tmp_packages_file.name] = remote_file

        RemoteUpload(self._gs_context, self._acl, upload_files)

        with tempfile.NamedTemporaryFile(
                prefix='chromite.upload_prebuilts.index.') as index:
            GenerateHtmlIndex(
                [x[len(remote_location) + 1:] for x in upload_files.values()],
                index.name, self._target, self._version)
            self._Upload(index.name,
                         '%s/index.html' % remote_location.rstrip('/'))

            link_name = 'Prebuilts[%s]: %s' % (self._target, self._version)
            url = '%s%s/index.html' % (gs.PUBLIC_BASE_HTTPS_URL,
                                       remote_location[len(gs.BASE_GS_URL):])
            logging.PrintBuildbotLink(link_name, url)
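
A small, self-contained sketch of the public index URL constructed at the end of _UploadPrebuilt. The two constants are assumed stand-ins for chromite's gs.PUBLIC_BASE_HTTPS_URL and gs.BASE_GS_URL (the real values may differ), and the upload location is hypothetical.

PUBLIC_BASE_HTTPS_URL = 'https://storage.googleapis.com/'  # assumed value
BASE_GS_URL = 'gs://'                                       # assumed value

remote_location = 'gs://chromeos-prebuilt/board/amd64-generic/packages'
url = '%s%s/index.html' % (PUBLIC_BASE_HTTPS_URL,
                           remote_location[len(BASE_GS_URL):])
print(url)
# https://storage.googleapis.com/chromeos-prebuilt/board/amd64-generic/packages/index.html
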
    def PerformStage(self):
        """Archive and publish the factory build artifacts."""
        logging.info('Factory version: %s', self.dummy_version)
        logging.info('Archive build as: %s', self.dummy_config)

        # Link dummy build artifacts from build.
        dummy_http_url = gs.GsUrlToHttp(self.dummy_archive_url,
                                        public=False,
                                        directory=True)

        label = '%s factory [%s]' % (self._current_board, self.dummy_version)
        logging.PrintBuildbotLink(label, dummy_http_url)

        # factory_image.zip
        self.CreateFactoryZip()
        self.CreateTestImageTar()
        self.CreateDummyMetadataJson()
        self.PushBoardImage()
    def PerformStage(self):
        """Archive and publish the firmware build artifacts."""
        logging.info('Firmware board: %s', self._current_board)
        logging.info('Firmware version: %s', self.dummy_version)
        logging.info('Archive build as: %s', self.dummy_config)

        # Link dummy build artifacts from build.
        dummy_http_url = gs.GsUrlToHttp(self.dummy_archive_url,
                                        public=False,
                                        directory=True)

        label = '%s firmware [%s]' % (self._current_board, self.dummy_version)
        logging.PrintBuildbotLink(label, dummy_http_url)

        # Upload all artifacts.
        self.CreateFirmwareArchive()
        self.CreateDummyMetadataJson()
        self.PushBoardImage()
    def PerformStage(self):
        """Sync stuff!"""
        logging.info('SubWorkspaceSync')

        cmd = [
            os.path.join(constants.CHROMITE_DIR, 'scripts',
                         'repo_sync_manifest'),
            '--repo-root',
            self._build_root,
            '--manifest-versions-int',
            self.int_manifest_versions_path,
            '--manifest-versions-ext',
            self.ext_manifest_versions_path,
        ]

        if self.external:
            cmd += ['--external']

        if self.branch and not self.version:
            cmd += ['--branch', self.branch]

        if self.version:
            logging.PrintBuildbotStepText('Version: %s' % self.version)
            cmd += ['--version', self.version]

        if self.patch_pool:
            patch_options = []
            for patch in self.patch_pool:
                logging.PrintBuildbotLink(str(patch), patch.url)
                patch_options += ['--gerrit-patches', patch.gerrit_number_str]

            cmd += patch_options

        if self.copy_repo:
            cmd += ['--copy-repo', self.copy_repo]

        assert not (self.version and self.patch_pool), (
            'Can\'t cherry-pick "%s" into an official version "%s."' %
            (patch_options, self.version))

        cros_build_lib.run(cmd)
Example #17
    def PerformStage(self):
        if not self._run.config.master:
            logging.info('This stage is only meaningful for master builds. '
                         'Doing nothing.')
            return

        build_id, db = self._run.GetCIDBHandle()

        if not db:
            logging.info('No cidb connection for this build. '
                         'Doing nothing.')
            return

        slave_failures = db.GetSlaveFailures(build_id)
        failures_by_build = cros_build_lib.GroupByKey(slave_failures,
                                                      'build_id')
        for build_id, build_failures in sorted(failures_by_build.items()):
            failures_by_stage = cros_build_lib.GroupByKey(
                build_failures, 'build_stage_id')
            # Surface a link to each slave stage that failed, in stage_id sorted
            # order.
            for stage_id in sorted(failures_by_stage):
                failure = failures_by_stage[stage_id][0]
                # Ignore failures that did not cause their enclosing stage to fail.
                # Ignore slave builds that are still inflight, because some stage logs
                # might not have been printed to buildbot yet.
                # TODO(akeshet) revisit this approach, if we seem to be suppressing
                # useful information as a result of it.
                if (failure['stage_status'] != constants.BUILDER_STATUS_FAILED
                        or failure['build_status']
                        == constants.BUILDER_STATUS_INFLIGHT):
                    continue
                waterfall_url = constants.WATERFALL_TO_DASHBOARD[
                    failure['waterfall']]
                slave_stage_url = tree_status.ConstructDashboardURL(
                    waterfall_url, failure['builder_name'],
                    failure['build_number'], failure['stage_name'])
                logging.PrintBuildbotLink(
                    '%s %s' % (failure['build_config'], failure['stage_name']),
                    slave_stage_url)
Example #18
    def PerformStage(self):
        if not self._run.config.master:
            logging.info('This stage is only meaningful for master builds. '
                         'Doing nothing.')
            return

        if not self.buildstore.AreClientsReady():
            logging.info('No buildstore connection for this build. '
                         'Doing nothing.')
            return

        child_failures = self.buildstore.GetBuildsFailures(
            self.GetScheduledSlaveBuildbucketIds())
        for failure in child_failures:
            if (failure.stage_status != constants.BUILDER_STATUS_FAILED or
                    failure.build_status == constants.BUILDER_STATUS_INFLIGHT):
                continue
            slave_stage_url = uri_lib.ConstructMiloBuildUri(
                failure.buildbucket_id)
            logging.PrintBuildbotLink(
                '%s %s' % (failure.build_config, failure.stage_name),
                slave_stage_url)
Example #19
  def GetNextManifest(self):
    """Uses the initialized manifest manager to get the next manifest."""
    assert self.manifest_manager, \
        'Must run GetStageManager before checking out the build.'

    build_id = self._run.attrs.metadata.GetDict().get('build_id')

    to_return = self.manifest_manager.GetNextBuildSpec(build_id=build_id)
    logging.info('Found next version to build: %s', to_return)
    previous_version = self.manifest_manager.GetLatestPassingSpec()
    target_version = self.manifest_manager.current_version

    # Print the Blamelist here.
    url_prefix = 'https://crosland.corp.google.com/log/'
    url = url_prefix + '%s..%s' % (previous_version, target_version)
    logging.PrintBuildbotLink('Blamelist', url)
    # The testManifestVersionedSyncOnePartBranch interacts badly with this
    # function.  It doesn't fully initialize self.manifest_manager which
    # causes target_version to be None.  Since there isn't a clean fix in
    # either direction, just throw this through str().  In the normal case,
    # it's already a string anyways.
    logging.PrintBuildbotStepText(str(target_version))

    return to_return
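
A self-contained sketch of the Blamelist URL construction used above; the version strings are hypothetical.

def blamelist_url(previous_version, target_version):
    # Same crosland changelog format as the 'Blamelist' link above.
    url_prefix = 'https://crosland.corp.google.com/log/'
    return url_prefix + '%s..%s' % (previous_version, target_version)

print(blamelist_url('12422.0.0', '12423.0.0'))
# https://crosland.corp.google.com/log/12422.0.0..12423.0.0
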
Example #20
    def PerformStage(self):
        if self._run.config['doc']:
            logging.PrintBuildbotLink('Builder documentation',
                                      self._run.config['doc'])

        WriteBasicMetadata(self._run)

        # This is a heuristic value for |important|, since patches that get applied
        # later in the build might change the config. We write it now anyway,
        # because in case the build fails before Sync, it is better to have this
        # heuristic value than None. In BuildReexecutionFinishedStage, we re-write
        # the definitive value.
        self._run.attrs.metadata.UpdateWithDict(
            {'important': self._run.config['important']})

        d = self._run.attrs.metadata.GetDict()

        # BuildStartStage should only run once per build. But just in case it
        # is somehow running a second time, we do not want to insert an additional
        # database entry. Detect if a database entry has been inserted already
        # and if so quit the stage.
        if 'build_id' in d:
            logging.info('Already have build_id %s, not inserting an entry.',
                         d['build_id'])
            return

        # Note: In other build stages we use self._run.GetCIDBHandle to fetch
        # a cidb handle. However, since we don't yet have a build_id, we can't
        # do that here.
        if self.buildstore.AreClientsReady():
            db_type = cidb.CIDBConnectionFactory.GetCIDBConnectionType()
            try:
                build_id = self.buildstore.InsertBuild(
                    builder_name=d['builder-name'],
                    build_number=d['build-number'],
                    build_config=d['bot-config'],
                    bot_hostname=d['bot-hostname'],
                    master_build_id=d['master_build_id'],
                    timeout_seconds=self._GetBuildTimeoutSeconds(),
                    important=d['important'],
                    buildbucket_id=self._run.options.buildbucket_id,
                    branch=self._run.manifest_branch)
            except Exception as e:
                logging.error(
                    'Error: %s\n If the buildbucket_id to insert is '
                    'duplicated to the buildbucket_id of an old build and '
                    'the old build was canceled because of a waterfall '
                    'master restart, please ignore this error. Else, '
                    'the error needs more investigation. More context: '
                    'crbug.com/679974 and crbug.com/685889', e)
                raise e

            self._run.attrs.metadata.UpdateWithDict({
                'build_id': build_id,
                'db_type': db_type
            })
            logging.info('Inserted build_id %s into cidb database type %s.',
                         build_id, db_type)
            logging.PrintBuildbotStepText('database: %s, build_id: %s' %
                                          (db_type, build_id))

            master_build_id = d['master_build_id']
            if master_build_id is not None:
                master_build_status = self.buildstore.GetBuildStatuses(
                    build_ids=[master_build_id])[0]

                if master_build_status['buildbucket_id']:
                    master_url = uri_lib.ConstructMiloBuildUri(
                        master_build_status['buildbucket_id'])
                else:
                    master_url = uri_lib.ConstructDashboardUri(
                        master_build_status['waterfall'],
                        master_build_status['builder_name'],
                        master_build_status['build_number'])
                logging.PrintBuildbotLink('Link to master build', master_url)

        # Set annealing snapshot revision build property for Findit integration.
        if self._run.options.cbb_snapshot_revision:
            logging.PrintKitchenSetBuildProperty(
                'GOT_REVISION', self._run.options.cbb_snapshot_revision)

        # Write the tag metadata last so that a build_id is available.
        WriteTagMetadata(self._run)
def MarkChromeEBuildAsStable(stable_candidate, unstable_ebuild, chrome_pn,
                             chrome_rev, chrome_version, commit, package_dir):
    r"""Uprevs the chrome ebuild specified by chrome_rev.

  This is the main function that uprevs the chrome_rev from a stable candidate
  to its new version.

  Args:
    stable_candidate: ebuild that corresponds to the stable ebuild we are
      revving from.  If None, builds a new ebuild given the version
      and logic for chrome_rev type with revision set to 1.
    unstable_ebuild: ebuild corresponding to the unstable ebuild for chrome.
    chrome_pn: package name.
    chrome_rev: one of constants.VALID_CHROME_REVISIONS or LOCAL
      constants.CHROME_REV_SPEC -  Requires commit value.  Revs the ebuild for
        the specified version and uses the portage suffix of _alpha.
      constants.CHROME_REV_TOT -  Requires commit value.  Revs the ebuild for
        the TOT version and uses the portage suffix of _alpha.
      constants.CHROME_REV_LOCAL - Requires a chrome_root. Revs the ebuild for
        the local version and uses the portage suffix of _alpha.
      constants.CHROME_REV_LATEST - This uses the portage suffix of _rc as they
        are release candidates for the next sticky version.
      constants.CHROME_REV_STICKY -  Revs the sticky version.
    chrome_version: The \d.\d.\d.\d version of Chrome.
    commit: Used with constants.CHROME_REV_TOT.  The git revision of chrome.
    package_dir: Path to the chromeos-chrome package dir.

  Returns:
    Full portage version atom (including rc's, etc) that was revved.
  """
    def IsTheNewEBuildRedundant(new_ebuild, stable_ebuild):
        """Returns True if the new ebuild is redundant.

    This is True if the current stable ebuild is an exact copy of the new one.
    """
        if not stable_ebuild:
            return False

        if stable_candidate.chrome_version == new_ebuild.chrome_version:
            return filecmp.cmp(new_ebuild.ebuild_path,
                               stable_ebuild.ebuild_path,
                               shallow=False)

    # Mark latest release and sticky branches as stable.
    mark_stable = chrome_rev not in [
        constants.CHROME_REV_TOT, constants.CHROME_REV_SPEC,
        constants.CHROME_REV_LOCAL
    ]

    # Case where the last stable candidate has the same version; just rev it.
    if stable_candidate and stable_candidate.chrome_version == chrome_version:
        new_ebuild_path = '%s-r%d.ebuild' % (
            stable_candidate.ebuild_path_no_revision,
            stable_candidate.current_revision + 1)
    else:
        suffix = 'rc' if mark_stable else 'alpha'
        pf = '%s-%s_%s-r1' % (chrome_pn, chrome_version, suffix)
        new_ebuild_path = os.path.join(package_dir, '%s.ebuild' % pf)

    chrome_variables = dict()
    if commit:
        chrome_variables[_CHROME_SVN_TAG] = commit

    portage_util.EBuild.MarkAsStable(unstable_ebuild.ebuild_path,
                                     new_ebuild_path,
                                     chrome_variables,
                                     make_stable=mark_stable)
    new_ebuild = ChromeEBuild(new_ebuild_path)

    # Determine whether this ebuild is redundant.
    if IsTheNewEBuildRedundant(new_ebuild, stable_candidate):
        msg = 'Previous ebuild with same version found and ebuild is redundant.'
        logging.info(msg)
        os.unlink(new_ebuild_path)
        return None

    if stable_candidate and chrome_rev in _REV_TYPES_FOR_LINKS:
        logging.PrintBuildbotLink(
            'Chromium revisions',
            GetChromeRevisionListLink(stable_candidate, new_ebuild,
                                      chrome_rev))

    git.RunGit(package_dir, ['add', new_ebuild_path])
    if stable_candidate and not stable_candidate.IsSticky():
        git.RunGit(package_dir, ['rm', stable_candidate.ebuild_path])

    portage_util.EBuild.CommitChange(
        _GIT_COMMIT_MESSAGE % {
            'chrome_pn': chrome_pn,
            'chrome_rev': chrome_rev,
            'chrome_version': chrome_version
        }, package_dir)

    return '%s-%s' % (new_ebuild.package, new_ebuild.version)
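
A self-contained sketch of the ebuild naming rule used above when no stable candidate exists for the version; the package directory and Chrome version are hypothetical.

import os

def new_chrome_ebuild_path(package_dir, chrome_pn, chrome_version, mark_stable):
    # Mirrors the suffix/pf construction above: 'rc' for stable revs,
    # 'alpha' for tot/spec/local revs.
    suffix = 'rc' if mark_stable else 'alpha'
    pf = '%s-%s_%s-r1' % (chrome_pn, chrome_version, suffix)
    return os.path.join(package_dir, '%s.ebuild' % pf)

print(new_chrome_ebuild_path('chromeos-chrome', 'chromeos-chrome',
                             '81.0.4041.0', mark_stable=True))
# chromeos-chrome/chromeos-chrome-81.0.4041.0_rc-r1.ebuild
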
Example #22
    def PerformStage(self):
        """Perform the actual work for this stage.

    This includes final metadata archival, updating CIDB with our final status,
    and producing a logged build result summary.
    """
        build_identifier, _ = self._run.GetCIDBHandle()
        build_id = build_identifier.cidb_id
        buildbucket_id = build_identifier.buildbucket_id
        if results_lib.Results.BuildSucceededSoFar(self.buildstore,
                                                   buildbucket_id, self.name):
            final_status = constants.BUILDER_STATUS_PASSED
        else:
            final_status = constants.BUILDER_STATUS_FAILED

        if not hasattr(self._run.attrs, 'release_tag'):
            # If, for some reason, the sync stage was not completed,
            # release_tag may not be set. Set it to None here because
            # ArchiveResults() depends on the existence of this attr.
            self._run.attrs.release_tag = None

        # Set up our report metadata.
        self._run.attrs.metadata.UpdateWithDict(
            self.GetReportMetadata(
                final_status=final_status,
                completion_instance=self._completion_instance))

        src_root = self._build_root
        # Workspace builders use a different buildroot for overlays.
        if self._run.config.workspace_branch and self._run.options.workspace:
            src_root = self._run.options.workspace

        # Add tags for the arches and statuses of the build.
        # arches requires crossdev which isn't available at the early part of the
        # build.
        arches = []
        for board in self._run.config['boards']:
            toolchains = toolchain.GetToolchainsForBoard(board,
                                                         buildroot=src_root)
            default = list(
                toolchain.FilterToolchains(toolchains, 'default', True))
            if default:
                try:
                    arches.append(toolchain.GetArchForTarget(default[0]))
                except cros_build_lib.RunCommandError as e:
                    logging.warning(
                        'Unable to retrieve arch for board %s default toolchain %s: %s',
                        board, default, e)
        tags = {
            'arches': arches,
            'status': final_status,
        }
        results = self._run.attrs.metadata.GetValue('results')
        for stage in results:
            tags['stage_status:%s' % stage['name']] = stage['status']
            tags['stage_summary:%s' % stage['name']] = stage['summary']
        self._run.attrs.metadata.UpdateKeyDictWithDict(constants.METADATA_TAGS,
                                                       tags)

        # Some operations can only be performed if a valid version is available.
        try:
            self._run.GetVersionInfo()
            self.ArchiveResults(final_status)
            metadata_url = os.path.join(self.upload_url,
                                        constants.METADATA_JSON)
        except cbuildbot_run.VersionNotSetError:
            logging.error('A valid version was never set for this run. '
                          'Can not archive results.')
            metadata_url = ''

        results_lib.Results.Report(sys.stdout,
                                   current_version=(self._run.attrs.release_tag
                                                    or ''))

        # Upload goma log if used for BuildPackage and TestSimpleChrome.
        _UploadAndLinkGomaLogIfNecessary(
            'BuildPackages', self._run.config.name, self._run.options.goma_dir,
            self._run.options.goma_client_json,
            self._run.attrs.metadata.GetValueWithDefault('goma_tmp_dir'))
        _UploadAndLinkGomaLogIfNecessary(
            'TestSimpleChromeWorkflow', self._run.config.name,
            self._run.options.goma_dir, self._run.options.goma_client_json,
            self._run.attrs.metadata.GetValueWithDefault(
                'goma_tmp_dir_for_simple_chrome'))

        if self.buildstore.AreClientsReady():
            status_for_db = final_status

            # TODO(pprabhu): After BuildData and CBuildbotMetadata are merged, remove
            # this extra temporary object creation.
            # XXX:HACK We're creating a BuildData with an empty URL. Don't try to
            # MarkGathered this object.
            build_data = metadata_lib.BuildData(
                '', self._run.attrs.metadata.GetDict())
            # TODO(akeshet): Find a clearer way to get the "primary upload url" for
            # the metadata.json file. One alternative is _GetUploadUrls(...)[0].
            # Today it seems that element 0 of its return list is the primary upload
            # url, but there is no guarantee or unit test coverage of that.
            self.buildstore.FinishBuild(build_id,
                                        status=status_for_db,
                                        summary=build_data.failure_message,
                                        metadata_url=metadata_url)

            duration = self._GetBuildDuration()

            mon_fields = {
                'status': status_for_db,
                'build_config': self._run.config.name,
                'important': self._run.config.important
            }
            metrics.Counter(
                constants.MON_BUILD_COMP_COUNT).increment(fields=mon_fields)
            metrics.CumulativeSecondsDistribution(
                constants.MON_BUILD_DURATION).add(duration, fields=mon_fields)

            if self._run.options.sanity_check_build:
                metrics.Counter(
                    constants.MON_BUILD_SANITY_COMP_COUNT).increment(
                        fields=mon_fields)
                metrics.Gauge(
                    constants.MON_BUILD_SANITY_ID,
                    description=
                    'The build number of the latest sanity build. Used '
                    'for recovering the link to the latest failing build '
                    'in the alert when a sanity build fails.',
                    field_spec=[
                        ts_mon.StringField('status'),
                        ts_mon.StringField('build_config'),
                        ts_mon.StringField('builder_name'),
                        ts_mon.BooleanField('important')
                    ]).set(self._run.buildnumber,
                           fields=dict(
                               mon_fields,
                               builder_name=self._run.GetBuilderName()))

            if config_lib.IsMasterCQ(self._run.config):
                self_destructed = self._run.attrs.metadata.GetValueWithDefault(
                    constants.SELF_DESTRUCTED_BUILD, False)
                mon_fields = {
                    'status': status_for_db,
                    'self_destructed': self_destructed
                }
                metrics.CumulativeSecondsDistribution(
                    constants.MON_CQ_BUILD_DURATION).add(duration,
                                                         fields=mon_fields)
                annotator_link = uri_lib.ConstructAnnotatorUri(build_id)
                logging.PrintBuildbotLink('Build annotator', annotator_link)

            # From this point forward, treat all exceptions as warnings.
            self._post_completion = True

            # Dump report about things we retry.
            retry_stats.ReportStats(sys.stdout)
Example #23
def MarkAndroidEBuildAsStable(stable_candidate, unstable_ebuild,
                              android_package, android_version, package_dir,
                              build_branch, arc_bucket_url, build_targets):
    r"""Uprevs the Android ebuild.

  This is the main function that uprevs from a stable candidate
  to its new version.

  Args:
    stable_candidate: ebuild that corresponds to the stable ebuild we are
      revving from.  If None, builds a new ebuild given the version
      with revision set to 1.
    unstable_ebuild: ebuild corresponding to the unstable ebuild for Android.
    android_package: android package name.
    android_version: The \d+ build id of Android.
    package_dir: Path to the android-container package dir.
    build_branch: branch of Android builds.
    arc_bucket_url: URL of the target ARC build gs bucket.
    build_targets: build targets for this particular Android branch.

  Returns:
    Full portage version atom (including rc's, etc) that was revved.
  """
    def IsTheNewEBuildRedundant(new_ebuild, stable_ebuild):
        """Returns True if the new ebuild is redundant.

    This is True if the current stable ebuild is an exact copy of the new one.
    """
        if not stable_ebuild:
            return False

        if stable_candidate.version_no_rev == new_ebuild.version_no_rev:
            return filecmp.cmp(new_ebuild.ebuild_path,
                               stable_ebuild.ebuild_path,
                               shallow=False)

    # Case where the last stable candidate has the same version; just rev it.
    if stable_candidate and stable_candidate.version_no_rev == android_version:
        new_ebuild_path = '%s-r%d.ebuild' % (
            stable_candidate.ebuild_path_no_revision,
            stable_candidate.current_revision + 1)
    else:
        pf = '%s-%s-r1' % (android_package, android_version)
        new_ebuild_path = os.path.join(package_dir, '%s.ebuild' % pf)

    variables = {'BASE_URL': arc_bucket_url}
    for build, (target, _) in build_targets.iteritems():
        variables[build + '_TARGET'] = '%s-%s' % (build_branch, target)

    portage_util.EBuild.MarkAsStable(unstable_ebuild.ebuild_path,
                                     new_ebuild_path,
                                     variables,
                                     make_stable=True)
    new_ebuild = portage_util.EBuild(new_ebuild_path)

    # Determine whether this ebuild is redundant.
    if IsTheNewEBuildRedundant(new_ebuild, stable_candidate):
        msg = 'Previous ebuild with same version found and ebuild is redundant.'
        logging.info(msg)
        os.unlink(new_ebuild_path)
        return None

    if stable_candidate:
        logging.PrintBuildbotLink(
            'Android revisions',
            GetAndroidRevisionListLink(build_branch, stable_candidate,
                                       new_ebuild))

    git.RunGit(package_dir, ['add', new_ebuild_path])
    if stable_candidate and not stable_candidate.IsSticky():
        git.RunGit(package_dir, ['rm', stable_candidate.ebuild_path])

    # Update ebuild manifest and git add it.
    gen_manifest_cmd = ['ebuild', new_ebuild_path, 'manifest', '--force']
    cros_build_lib.RunCommand(gen_manifest_cmd, extra_env=None, print_cmd=True)
    git.RunGit(package_dir, ['add', 'Manifest'])

    portage_util.EBuild.CommitChange(
        _GIT_COMMIT_MESSAGE % {
            'android_package': android_package,
            'android_version': android_version
        }, package_dir)

    return '%s-%s' % (new_ebuild.package, new_ebuild.version)
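
A self-contained sketch of the per-target ebuild variables built above from build_targets. The branch, bucket, and target names are hypothetical, and .items() is used in place of the Python 2 .iteritems() in the original.

build_branch = 'git_pi-arc'                        # hypothetical branch
arc_bucket_url = 'gs://example-arc-images/builds'  # hypothetical bucket
build_targets = {
    'ARM': ('cheets_arm-user', None),              # hypothetical targets
    'X86': ('cheets_x86-user', None),
}

variables = {'BASE_URL': arc_bucket_url}
for build, (target, _) in build_targets.items():
    variables[build + '_TARGET'] = '%s-%s' % (build_branch, target)

print(variables)
# {'BASE_URL': 'gs://example-arc-images/builds',
#  'ARM_TARGET': 'git_pi-arc-cheets_arm-user',
#  'X86_TARGET': 'git_pi-arc-cheets_x86-user'}
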
Example #24
def GenerateBlameList(source_repo, lkgm_path, only_print_chumps=False):
    """Generate the blamelist since the specified manifest.

  Args:
    source_repo: Repository object for the source code.
    lkgm_path: Path to LKGM manifest.
    only_print_chumps: If True, only print changes that were chumped.
  """
    handler = git.Manifest(lkgm_path)
    reviewed_on_re = re.compile(r'\s*Reviewed-on:\s*(\S+)')
    author_re = re.compile(r'\s*Author:.*<(\S+)@\S+>\s*')
    committer_re = re.compile(r'\s*Commit:.*<(\S+)@\S+>\s*')
    for rel_src_path, checkout in handler.checkouts_by_path.iteritems():
        project = checkout['name']

        # Handle the case where the repo has been removed from the manifest.
        src_path = source_repo.GetRelativePath(rel_src_path)
        if not os.path.exists(src_path):
            logging.info('Detected repo removed from manifest %s' % project)
            continue

        revision = checkout['revision']
        cmd = ['log', '--pretty=full', '%s..HEAD' % revision]
        try:
            result = git.RunGit(src_path, cmd)
        except cros_build_lib.RunCommandError as ex:
            # Git returns 128 when the revision does not exist.
            if ex.result.returncode != 128:
                raise
            logging.warning('Detected branch removed from local checkout.')
            logging.PrintBuildbotStepWarnings()
            return
        current_author = None
        current_committer = None
        for line in unicode(result.output, 'ascii', 'ignore').splitlines():
            author_match = author_re.match(line)
            if author_match:
                current_author = author_match.group(1)

            committer_match = committer_re.match(line)
            if committer_match:
                current_committer = committer_match.group(1)

            review_match = reviewed_on_re.match(line)
            if review_match:
                review = review_match.group(1)
                _, _, change_number = review.rpartition('/')
                if not current_author:
                    logging.notice(
                        'Failed to locate author before the line of review: '
                        '%s. Author name is set to <Unknown>', line)
                    current_author = '<Unknown>'
                items = [
                    os.path.basename(project),
                    current_author,
                    change_number,
                ]
                # TODO(phobbs) verify the domain of the email address as well.
                if current_committer not in ('chrome-bot',
                                             'chrome-internal-fetch',
                                             'chromeos-commit-bot',
                                             '3su6n15k.default'):
                    items.insert(0, 'CHUMP')
                elif only_print_chumps:
                    continue
                logging.PrintBuildbotLink(' | '.join(items), review)
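
A self-contained sketch of the log parsing done by GenerateBlameList, run against a hypothetical snippet of 'git log --pretty=full' output. It reproduces the author/committer/Reviewed-on matching and the CHUMP classification from the loop above.

import re

reviewed_on_re = re.compile(r'\s*Reviewed-on:\s*(\S+)')
author_re = re.compile(r'\s*Author:.*<(\S+)@\S+>\s*')
committer_re = re.compile(r'\s*Commit:.*<(\S+)@\S+>\s*')

# Hypothetical log output for a single commit.
sample_log = '''\
commit 1234abcd
Author: Alice Example <alice@chromium.org>
Commit: chrome-bot <chrome-bot@chromium.org>

    Fix the widget.

    Reviewed-on: https://chromium-review.googlesource.com/c/123456
'''

current_author = None
current_committer = None
for line in sample_log.splitlines():
    author_match = author_re.match(line)
    if author_match:
        current_author = author_match.group(1)
    committer_match = committer_re.match(line)
    if committer_match:
        current_committer = committer_match.group(1)
    review_match = reviewed_on_re.match(line)
    if review_match:
        review = review_match.group(1)
        change_number = review.rpartition('/')[2]
        items = ['sample-project', current_author or '<Unknown>', change_number]
        if current_committer not in ('chrome-bot', 'chrome-internal-fetch',
                                     'chromeos-commit-bot', '3su6n15k.default'):
            items.insert(0, 'CHUMP')
        # The real code calls logging.PrintBuildbotLink(' | '.join(items), review).
        print('%s -> %s' % (' | '.join(items), review))
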
Example #25
    def PerformStage(self):
        if self._run.config['doc']:
            logging.PrintBuildbotLink('Builder documentation',
                                      self._run.config['doc'])

        WriteBasicMetadata(self._run)
        WriteTagMetadata(self._run)

        # This is a heuristic value for |important|, since patches that get applied
        # later in the build might change the config. We write it now anyway,
        # because in case the build fails before Sync, it is better to have this
        # heuristic value than None. In BuildReexecutionFinishedStage, we re-write
        # the definitive value.
        self._run.attrs.metadata.UpdateWithDict(
            {'important': self._run.config['important']})

        d = self._run.attrs.metadata.GetDict()

        # BuildStartStage should only run once per build. But just in case it
        # is somehow running a second time, we do not want to insert an additional
        # database entry. Detect if a database entry has been inserted already
        # and if so quit the stage.
        if 'build_id' in d:
            logging.info('Already have build_id %s, not inserting an entry.',
                         d['build_id'])
            return

        graphite.StatsFactory.GetInstance().Counter('build_started').increment(
            self._run.config['name'] or 'NO_CONFIG')

        # Note: In other build stages we use self._run.GetCIDBHandle to fetch
        # a cidb handle. However, since we don't yet have a build_id, we can't
        # do that here.
        if cidb.CIDBConnectionFactory.IsCIDBSetup():
            db_type = cidb.CIDBConnectionFactory.GetCIDBConnectionType()
            db = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
            if db:
                waterfall = d['buildbot-master-name']
                assert waterfall in constants.CIDB_KNOWN_WATERFALLS
                build_id = db.InsertBuild(
                    builder_name=d['builder-name'],
                    waterfall=waterfall,
                    build_number=d['build-number'],
                    build_config=d['bot-config'],
                    bot_hostname=d['bot-hostname'],
                    master_build_id=d['master_build_id'],
                    timeout_seconds=self._GetBuildTimeoutSeconds(),
                    important=d['important'],
                    buildbucket_id=self._run.options.buildbucket_id)
                self._run.attrs.metadata.UpdateWithDict({
                    'build_id': build_id,
                    'db_type': db_type
                })
                logging.info(
                    'Inserted build_id %s into cidb database type %s.',
                    build_id, db_type)
                logging.PrintBuildbotStepText('database: %s, build_id: %s' %
                                              (db_type, build_id))

                master_build_id = d['master_build_id']
                if master_build_id is not None:
                    master_build_status = db.GetBuildStatus(master_build_id)
                    master_waterfall_url = constants.WATERFALL_TO_DASHBOARD[
                        master_build_status['waterfall']]

                    master_url = tree_status.ConstructDashboardURL(
                        master_waterfall_url,
                        master_build_status['builder_name'],
                        master_build_status['build_number'])
                    logging.PrintBuildbotLink('Link to master build',
                                              master_url)
Example #26
    def Report(self, out, archive_urls=None, current_version=None):
        """Generate a user friendly text display of the results data.

    Args:
      out: Output stream to write to (e.g. sys.stdout).
      archive_urls: Dict where values are archive URLs and keys are names
        to associate with those URLs (typically board name).  If None then
        omit the name when logging the URL.
      current_version: Chrome OS version associated with this report.
    """
        results = self._results_log

        line = '*' * 60 + '\n'
        edge = '*' * 2

        if current_version:
            out.write(line)
            out.write(edge + ' RELEASE VERSION: ' + current_version + '\n')

        out.write(line)
        out.write(edge + ' Stage Results\n')
        warnings = False

        for entry in results:
            name, result, run_time = (entry.name, entry.result, entry.time)
            timestr = datetime.timedelta(seconds=math.ceil(run_time))

            # Don't print data on skipped stages.
            if result == self.SKIPPED:
                continue

            out.write(line)
            details = ''
            if result == self.SUCCESS:
                status = 'PASS'
            elif result == self.FORGIVEN:
                status = 'FAILED BUT FORGIVEN'
                warnings = True
            else:
                status = 'FAIL'
                if isinstance(result, cros_build_lib.RunCommandError):
                    # If there was a RunCommand error, give just the command that
                    # failed, not its full argument list, since those are usually
                    # too long.
                    details = ' in %s' % result.result.cmd[0]
                elif isinstance(result, failures_lib.BuildScriptFailure):
                    # BuildScriptFailure errors publish a 'short' name of the
                    # command that failed.
                    details = ' in %s' % result.shortname
                else:
                    # There was a normal error. Give the type of exception.
                    details = ' with %s' % type(result).__name__

            out.write('%s %s %s (%s)%s\n' %
                      (edge, status, name, timestr, details))

        out.write(line)

        if archive_urls:
            out.write('%s BUILD ARTIFACTS FOR THIS BUILD CAN BE FOUND AT:\n' %
                      edge)
            for name, url in sorted(archive_urls.iteritems()):
                named_url = url
                link_name = 'Artifacts'
                if name:
                    named_url = '%s: %s' % (name, url)
                    link_name = 'Artifacts[%s]' % name

                # Output the bot-id/version used in the archive url.
                link_name = '%s: %s' % (link_name, '/'.join(
                    url.split('/')[-3:-1]))
                out.write('%s  %s' % (edge, named_url))
                logging.PrintBuildbotLink(link_name, url, handle=out)
            out.write(line)

        for x in self.GetTracebacks():
            if x.failed_stage and x.traceback:
                out.write('\nFailed in stage %s:\n\n' % x.failed_stage)
                out.write(x.traceback)
                out.write('\n')

        if warnings:
            logging.PrintBuildbotStepWarnings(out)
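
A self-contained sketch of two small formatting details in Report above: the per-stage run time string and the "bot-id/version" suffix pulled out of an archive URL. The duration and URL are hypothetical.

import datetime
import math

run_time = 123.4  # hypothetical stage duration, in seconds
timestr = datetime.timedelta(seconds=math.ceil(run_time))
print(timestr)  # 0:02:04

# Hypothetical archive URL.
url = 'gs://chromeos-image-archive/amd64-generic-full/R80-12345.0.0/'
link_name = 'Artifacts[%s]: %s' % ('amd64-generic',
                                   '/'.join(url.split('/')[-3:-1]))
print(link_name)  # Artifacts[amd64-generic]: amd64-generic-full/R80-12345.0.0
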
Example #27
    def _LinkArtifacts(self, builder_run):
        """Upload an HTML index for the artifacts at remote archive location.

    If there are no artifacts in the archive then do nothing.

    Args:
      builder_run: BuilderRun object for this run.
    """
        archive = builder_run.GetArchive()
        archive_path = archive.archive_path

        boards = builder_run.config.boards
        if boards:
            board_names = ' '.join(boards)
        else:
            boards = [None]
            board_names = '<no board>'

        # See if there are any artifacts found for this run.
        uploaded = os.path.join(archive_path, commands.UPLOADED_LIST_FILENAME)
        if not os.path.exists(uploaded):
            # UPLOADED doesn't exist, which is normal if the Archive stage
            # never ran. Regardless, no archive index is needed.
            logging.info('No archived artifacts found for %s run (%s)',
                         builder_run.config.name, board_names)
            return

        if builder_run.config.internal:
            # Internal builds simply link to pantheon directories, which require
            # authenticated access that most Googlers should have.
            artifacts_url = archive.download_url

        else:
            # External builds must allow unauthenticated access to build artifacts.
            # GS doesn't let unauthenticated users browse selected locations without
            # being able to browse everything (which would expose secret stuff).
            # So, we upload an index.html file and link to it instead of the
            # directory.
            title = 'Artifacts Index: %(board)s / %(version)s (%(config)s config)' % {
                'board': board_names,
                'config': builder_run.config.name,
                'version': builder_run.GetVersion(),
            }

            files = osutils.ReadFile(uploaded).splitlines() + [
                '.|Google Storage Index',
                '..|',
            ]

            index = os.path.join(archive_path, 'index.html')

            # TODO (sbasi) crbug.com/362776: Rework the way we do uploading to
            # multiple buckets. Currently this can only be done in the Archive Stage
            # therefore index.html will only end up in the normal Chrome OS bucket.
            commands.GenerateHtmlIndex(index, files, title=title)
            commands.UploadArchivedFile(archive_path, [archive.upload_url],
                                        os.path.basename(index),
                                        debug=self._run.debug,
                                        acl=self.acl)

            artifacts_url = os.path.join(archive.download_url_file,
                                         'index.html')

        links_build_description = '%s/%s' % (builder_run.config.name,
                                             archive.version)
        logging.PrintBuildbotLink('Artifacts[%s]' % links_build_description,
                                  artifacts_url)
Example #28
    def ArchiveResults(self, final_status, build_id, db):
        """Archive our build results.

    Args:
      final_status: constants.FINAL_STATUS_PASSED or
                    constants.FINAL_STATUS_FAILED
      build_id: CIDB id for the current build.
      db: CIDBConnection instance.
    """
        # Make sure local archive directory is prepared, if it was not already.
        if not os.path.exists(self.archive_path):
            self.archive.SetupArchivePath()

        # Upload metadata, and update the pass/fail streak counter for the main
        # run only. These aren't needed for the child builder runs.
        self.UploadMetadata()
        self._UpdateRunStreak(self._run, final_status)

        # Alert if the Pre-CQ has infra failures.
        if final_status == constants.FINAL_STATUS_FAILED:
            self._SendPreCQInfraAlertMessageIfNeeded()

        # Iterate through each builder run, whether there is just the main one
        # or multiple child builder runs.
        for builder_run in self._run.GetUngroupedBuilderRuns():
            if db is not None:
                timeline = self._UploadBuildStagesTimeline(
                    builder_run, build_id, db)
                logging.PrintBuildbotLink('Build stages timeline', timeline)

                timeline = self._UploadSlavesTimeline(builder_run, build_id,
                                                      db)
                if timeline is not None:
                    logging.PrintBuildbotLink('Slaves timeline', timeline)

            if build_id is not None:
                details_link = self._MakeViceroyBuildDetailsLink(build_id)
                logging.PrintBuildbotLink('Build details', details_link)

            # Generate links to archived artifacts if there are any.  All the
            # archived artifacts for one run/config are in one location, so the link
            # is only specific to each run/config.  In theory multiple boards could
            # share that archive, but in practice it is usually one board.  A
            # run/config without a board will also usually not have artifacts to
            # archive, but that restriction is not assumed here.
            self._LinkArtifacts(builder_run)

            # Check if the builder_run is tied to any boards and if so get all
            # upload urls.
            if final_status == constants.FINAL_STATUS_PASSED:
                # Update the LATEST files if the build passed.
                try:
                    upload_urls = self._GetUploadUrls('LATEST-*',
                                                      builder_run=builder_run)
                except portage_util.MissingOverlayException as e:
                    # If the build failed prematurely, some overlays might be
                    # missing. Ignore them in this stage.
                    logging.warning(e)
                else:
                    if upload_urls:
                        archive = builder_run.GetArchive()
                        archive.UpdateLatestMarkers(
                            builder_run.manifest_branch,
                            builder_run.debug,
                            upload_urls=upload_urls)