示例#1
0
def ShowConfig(name):
  """Show the toolchain tuples used by |name|

  Args:
    name: The board name to query.
  """
  toolchains = toolchain.GetToolchainsForBoard(name)
  # Make sure we display the default toolchain first.
  # Convert to lists explicitly: on Python 3, dict.keys() returns a view
  # that does not support concatenation with +.
  print(','.join(
      list(toolchain.FilterToolchains(toolchains, 'default', True)) +
      list(toolchain.FilterToolchains(toolchains, 'default', False))))
示例#2
0
def ShowBoardConfig(board):
    """Show the toolchain tuples used by |board|

    Args:
      board: The board to query.
    """
    toolchains = toolchain.GetToolchainsForBoard(board)
    # Make sure we display the default toolchain first.
    # Use print() (the file already uses it elsewhere) and list() so this
    # also runs on Python 3, where dict views cannot be concatenated with +.
    print(','.join(
        list(toolchain.FilterToolchains(toolchains, 'default', True)) +
        list(toolchain.FilterToolchains(toolchains, 'default', False))))
def ShowConfig(name):
    """Show the toolchain tuples used by |name|

    Args:
      name: The board name to query.
    """
    toolchains = toolchain.GetToolchainsForBoard(name)
    # Make sure we display the default toolchain first.
    # Note: Do not use logging here as this is meant to be used by other tools.
    # Convert to lists explicitly: on Python 3, dict.keys() returns a view
    # that does not support concatenation with +.
    print(','.join(
        list(toolchain.FilterToolchains(toolchains, 'default', True)) +
        list(toolchain.FilterToolchains(toolchains, 'default', False))))
示例#4
0
def ExpandTargets(targets_wanted):
  """Expand any possible toolchain aliases into full targets

  This will expand 'all' and 'sdk' into the respective toolchain tuples.

  Args:
    targets_wanted: The targets specified by the user.

  Returns:
    Dictionary of concrete targets and their toolchain tuples.

  Raises:
    ValueError: If a requested target does not exist.
  """
  targets_wanted = set(targets_wanted)
  if targets_wanted == set(['boards']):
    # Only pull targets from the included boards.
    return {}

  all_targets = toolchain.GetAllTargets()
  if targets_wanted == set(['all']):
    return all_targets
  if targets_wanted == set(['sdk']):
    # Filter out all the non-sdk toolchains as we don't want to mess
    # with those in all of our builds.
    return toolchain.FilterToolchains(all_targets, 'sdk', True)

  # Verify user input.
  nonexistent = targets_wanted.difference(all_targets)
  if nonexistent:
    # %-format the message eagerly: unlike logging, ValueError does not
    # interpolate extra arguments, so passing them as a tuple produces a
    # garbled error message.
    raise ValueError('Invalid targets: %s' % ','.join(nonexistent))
  return {t: all_targets[t] for t in targets_wanted}
示例#5
0
  def _GenerateConfig(self, toolchains, board_overlays, portdir_overlays,
                      header, **kwargs):
    """Create common config settings for boards and bricks.

    Args:
      toolchains: ToolchainList object to use.
      board_overlays: List of board overlays.
      portdir_overlays: List of portage overlays.
      header: Header comment string; must start with #.
      kwargs: Additional configuration values to set.

    Returns:
      Configuration string.

    Raises:
      ConfigurationError: Could not generate a valid configuration.
    """
    config = {}

    default_toolchains = toolchain.FilterToolchains(toolchains, 'default', True)
    if not default_toolchains:
      raise ConfigurationError('No default toolchain could be found.')
    # Take the first default toolchain. next(iter(...)) works on both
    # Python 2 and 3; .keys()[0] fails on Python 3 where keys() returns a
    # non-subscriptable view.
    config['CHOST'] = next(iter(default_toolchains))
    config['ARCH'] = toolchain.GetArchForTarget(config['CHOST'])

    config['BOARD_OVERLAY'] = '\n'.join(board_overlays)
    config['PORTDIR_OVERLAY'] = '\n'.join(portdir_overlays)

    config['MAKEOPTS'] = '-j%s' % str(multiprocessing.cpu_count())
    config['ROOT'] = self.path + '/'
    config['PKG_CONFIG'] = self._WrapperPath('pkg-config')

    config.update(kwargs)

    return '\n'.join((header, _DictToKeyValue(config)))
示例#6
0
def ExpandTargets(targets_wanted):
    """Expand any possible toolchain aliases into full targets

    This will expand 'all' and 'sdk' into the respective toolchain tuples.

    Args:
      targets_wanted: The targets specified by the user.

    Returns:
      Full list of tuples with pseudo targets removed.

    Raises:
      ValueError: If a requested target does not exist.
    """
    alltargets = toolchain.GetAllTargets()
    targets_wanted = set(targets_wanted)
    if targets_wanted == set(['all']):
        targets = alltargets
    elif targets_wanted == set(['sdk']):
        # Filter out all the non-sdk toolchains as we don't want to mess
        # with those in all of our builds.
        targets = toolchain.FilterToolchains(alltargets, 'sdk', True)
    else:
        # Verify user input.
        nonexistent = targets_wanted.difference(alltargets)
        if nonexistent:
            # %-format the message eagerly: ValueError does not interpolate
            # extra arguments the way logging calls do.
            raise ValueError('Invalid targets: %s' % ','.join(nonexistent))
        targets = dict((t, alltargets[t]) for t in targets_wanted)
    return targets
示例#7
0
 def GetCrossGdb(self):
     """Find the appropriate cross-version of gdb for the board.

     Returns:
       Name of the cross-gdb executable (e.g. '<tuple>-gdb').

     Raises:
       GdbMissingDebuggerError: If the cross-gdb binary is not in PATH.
     """
     toolchains = toolchain.GetToolchainsForBoard(self.board)
     # Take the first default toolchain. next(iter(...)) works on both
     # Python 2 and 3; .keys() followed by [0] fails on Python 3 where
     # keys() returns a non-subscriptable view.
     tc = next(iter(toolchain.FilterToolchains(toolchains, 'default', True)))
     cross_gdb = tc + '-gdb'
     if not osutils.Which(cross_gdb):
         raise GdbMissingDebuggerError('Cannot find %s; do you need to run '
                                       'setup_board?' % cross_gdb)
     return cross_gdb
示例#8
0
    def PerformStage(self):
        """Collect and persist build metadata after re-executions finish.

        Gathers config/version/toolchain information into the metadata
        dict, uploads partial metadata, writes per-board and per-child
        rows to CIDB, verifies master/slave version consistency, and
        aborts any previous HW test suites.
        """
        config = self._run.config
        build_root = self._build_root

        logging.info('Build re-executions have finished. Chromite source '
                     'will not be modified for remainder of run.')
        logging.info("config['important']=%s", config['important'])
        logging.PrintBuildbotStepText("config['important']=%s" %
                                      config['important'])

        # Flat list of all child config boards. Since child configs
        # are not allowed to have children, it is not necessary to search
        # deeper than one generation.
        child_configs = GetChildConfigListMetadata(
            child_configs=config['child_configs'], config_status_map=None)

        sdk_verinfo = cros_build_lib.LoadKeyValueFile(os.path.join(
            build_root, constants.SDK_VERSION_FILE),
                                                      ignore_missing=True)

        verinfo = self._run.GetVersionInfo()
        platform_tag = getattr(self._run.attrs, 'release_tag')
        if not platform_tag:
            platform_tag = verinfo.VersionString()

        version = {
            'full': self._run.GetVersion(),
            'milestone': verinfo.chrome_branch,
            'platform': platform_tag,
        }

        metadata = {
            # Version of the metadata format.
            'metadata-version': '2',
            'boards': config['boards'],
            'child-configs': child_configs,
            'build_type': config['build_type'],
            'important': config['important'],

            # Data for the toolchain used.
            'sdk-version': sdk_verinfo.get('SDK_LATEST_VERSION', '<unknown>'),
            'toolchain-url': sdk_verinfo.get('TC_PATH', '<unknown>'),
        }

        if len(config['boards']) == 1:
            toolchains = toolchain.GetToolchainsForBoard(config['boards'][0],
                                                         buildroot=build_root)
            # Convert to lists explicitly: on Python 3, dict.keys() returns
            # a view that does not support concatenation with +.
            metadata['toolchain-tuple'] = (
                list(toolchain.FilterToolchains(toolchains, 'default', True))
                + list(toolchain.FilterToolchains(toolchains, 'default',
                                                  False)))

        logging.info('Metadata being written: %s', metadata)
        self._run.attrs.metadata.UpdateWithDict(metadata)
        # Update 'version' separately to avoid overwriting the existing
        # entries in it (e.g. PFQ builders may have written the Chrome
        # version to uprev).
        logging.info("Metadata 'version' being written: %s", version)
        self._run.attrs.metadata.UpdateKeyDictWithDict('version', version)

        # Ensure that all boards and child config boards have a per-board
        # metadata subdict.
        for b in config['boards']:
            self._run.attrs.metadata.UpdateBoardDictWithDict(b, {})

        for cc in child_configs:
            for b in cc['boards']:
                self._run.attrs.metadata.UpdateBoardDictWithDict(b, {})

        # Upload build metadata (and write it to database if necessary)
        self.UploadMetadata(filename=constants.PARTIAL_METADATA_JSON)

        # Write child-per-build and board-per-build rows to database
        build_id, db = self._run.GetCIDBHandle()
        if db:
            # TODO(akeshet): replace this with a GetValue call once crbug.com/406522
            # is resolved
            per_board_dict = self._run.attrs.metadata.GetDict(
            )['board-metadata']
            for board, board_metadata in per_board_dict.items():
                db.InsertBoardPerBuild(build_id, board)
                if board_metadata:
                    db.UpdateBoardPerBuildMetadata(build_id, board,
                                                   board_metadata)
            for child_config in self._run.attrs.metadata.GetValue(
                    'child-configs'):
                db.InsertChildConfigPerBuild(build_id, child_config['name'])

            # If this build has a master build, ensure that the master full_version
            # is the same as this build's full_version. This is a sanity check to
            # avoid bugs in master-slave logic.
            master_id = self._run.attrs.metadata.GetDict().get(
                'master_build_id')
            if master_id is not None:
                master_full_version = db.GetBuildStatus(
                    master_id)['full_version']
                my_full_version = self._run.attrs.metadata.GetValue(
                    'version').get('full')
                if master_full_version != my_full_version:
                    raise failures_lib.MasterSlaveVersionMismatchFailure(
                        'Master build id %s has full_version %s, while slave version is '
                        '%s.' %
                        (master_id, master_full_version, my_full_version))

        # Abort previous hw test suites. This happens after reexecution as it
        # requires chromite/third_party/swarming.client, which is not available
        # until after reexecution.
        self._AbortPreviousHWTestSuites(version['milestone'])
示例#9
0
    def PerformStage(self):
        """Perform the actual work for this stage.

        This includes final metadata archival, updating CIDB with our
        final status, and producing a logged build result summary.
        """
        build_identifier, _ = self._run.GetCIDBHandle()
        build_id = build_identifier.cidb_id
        buildbucket_id = build_identifier.buildbucket_id
        # Determine the final status from the accumulated stage results.
        if results_lib.Results.BuildSucceededSoFar(self.buildstore,
                                                   buildbucket_id, self.name):
            final_status = constants.BUILDER_STATUS_PASSED
        else:
            final_status = constants.BUILDER_STATUS_FAILED

        if not hasattr(self._run.attrs, 'release_tag'):
            # If, for some reason, sync stage was not completed and
            # release_tag was not set, set it to None here because
            # ArchiveResults() depends on the existence of this attr.
            self._run.attrs.release_tag = None

        # Set up our report metadata.
        self._run.attrs.metadata.UpdateWithDict(
            self.GetReportMetadata(
                final_status=final_status,
                completion_instance=self._completion_instance))

        src_root = self._build_root
        # Workspace builders use a different buildroot for overlays.
        if self._run.config.workspace_branch and self._run.options.workspace:
            src_root = self._run.options.workspace

        # Add tags for the arches and statuses of the build.
        # arches requires crossdev which isn't available at the early part of
        # the build, which is why it is collected here rather than earlier.
        arches = []
        for board in self._run.config['boards']:
            toolchains = toolchain.GetToolchainsForBoard(board,
                                                         buildroot=src_root)
            default = list(
                toolchain.FilterToolchains(toolchains, 'default', True))
            if default:
                try:
                    arches.append(toolchain.GetArchForTarget(default[0]))
                except cros_build_lib.RunCommandError as e:
                    # Best-effort: a missing arch should not fail the stage.
                    logging.warning(
                        'Unable to retrieve arch for board %s default toolchain %s: %s',
                        board, default, e)
        tags = {
            'arches': arches,
            'status': final_status,
        }
        results = self._run.attrs.metadata.GetValue('results')
        for stage in results:
            tags['stage_status:%s' % stage['name']] = stage['status']
            tags['stage_summary:%s' % stage['name']] = stage['summary']
        self._run.attrs.metadata.UpdateKeyDictWithDict(constants.METADATA_TAGS,
                                                       tags)

        # Some operations can only be performed if a valid version is available.
        try:
            self._run.GetVersionInfo()
            self.ArchiveResults(final_status)
            metadata_url = os.path.join(self.upload_url,
                                        constants.METADATA_JSON)
        except cbuildbot_run.VersionNotSetError:
            logging.error('A valid version was never set for this run. '
                          'Can not archive results.')
            metadata_url = ''

        results_lib.Results.Report(sys.stdout,
                                   current_version=(self._run.attrs.release_tag
                                                    or ''))

        # Upload goma log if used for BuildPackage and TestSimpleChrome.
        _UploadAndLinkGomaLogIfNecessary(
            'BuildPackages', self._run.config.name, self._run.options.goma_dir,
            self._run.options.goma_client_json,
            self._run.attrs.metadata.GetValueWithDefault('goma_tmp_dir'))
        _UploadAndLinkGomaLogIfNecessary(
            'TestSimpleChromeWorkflow', self._run.config.name,
            self._run.options.goma_dir, self._run.options.goma_client_json,
            self._run.attrs.metadata.GetValueWithDefault(
                'goma_tmp_dir_for_simple_chrome'))

        if self.buildstore.AreClientsReady():
            status_for_db = final_status

            # TODO(pprabhu): After BuildData and CBuildbotMetdata are merged, remove
            # this extra temporary object creation.
            # XXX:HACK We're creating a BuildData with an empty URL. Don't try to
            # MarkGathered this object.
            build_data = metadata_lib.BuildData(
                '', self._run.attrs.metadata.GetDict())
            # TODO(akeshet): Find a clearer way to get the "primary upload url" for
            # the metadata.json file. One alternative is _GetUploadUrls(...)[0].
            # Today it seems that element 0 of its return list is the primary upload
            # url, but there is no guarantee or unit test coverage of that.
            self.buildstore.FinishBuild(build_id,
                                        status=status_for_db,
                                        summary=build_data.failure_message,
                                        metadata_url=metadata_url)

            duration = self._GetBuildDuration()

            # Fields shared by the monitoring metrics emitted below.
            mon_fields = {
                'status': status_for_db,
                'build_config': self._run.config.name,
                'important': self._run.config.important
            }
            metrics.Counter(
                constants.MON_BUILD_COMP_COUNT).increment(fields=mon_fields)
            metrics.CumulativeSecondsDistribution(
                constants.MON_BUILD_DURATION).add(duration, fields=mon_fields)

            if self._run.options.sanity_check_build:
                metrics.Counter(
                    constants.MON_BUILD_SANITY_COMP_COUNT).increment(
                        fields=mon_fields)
                metrics.Gauge(
                    constants.MON_BUILD_SANITY_ID,
                    description=
                    'The build number of the latest sanity build. Used '
                    'for recovering the link to the latest failing build '
                    'in the alert when a sanity build fails.',
                    field_spec=[
                        ts_mon.StringField('status'),
                        ts_mon.StringField('build_config'),
                        ts_mon.StringField('builder_name'),
                        ts_mon.BooleanField('important')
                    ]).set(self._run.buildnumber,
                           fields=dict(
                               mon_fields,
                               builder_name=self._run.GetBuilderName()))

            if config_lib.IsMasterCQ(self._run.config):
                self_destructed = self._run.attrs.metadata.GetValueWithDefault(
                    constants.SELF_DESTRUCTED_BUILD, False)
                mon_fields = {
                    'status': status_for_db,
                    'self_destructed': self_destructed
                }
                metrics.CumulativeSecondsDistribution(
                    constants.MON_CQ_BUILD_DURATION).add(duration,
                                                         fields=mon_fields)
                annotator_link = uri_lib.ConstructAnnotatorUri(build_id)
                logging.PrintBuildbotLink('Build annotator', annotator_link)

            # From this point forward, treat all exceptions as warnings.
            self._post_completion = True

            # Dump report about things we retry.
            retry_stats.ReportStats(sys.stdout)