Example 1
  def _GenerateConfig(self, toolchains, board_overlays, portdir_overlays,
                      header, **kwargs):
    """Create common config settings for boards and bricks.

    Args:
      toolchains: ToolchainList object to use.
      board_overlays: List of board overlays.
      portdir_overlays: List of portage overlays.
      header: Header comment string; must start with #.
      kwargs: Additional configuration values to set.

    Returns:
      Configuration string.

    Raises:
      ConfigurationError: Could not generate a valid configuration.
    """
    config = {}

    default_toolchains = toolchain.FilterToolchains(toolchains, 'default', True)
    if not default_toolchains:
      raise ConfigurationError('No default toolchain could be found.')
    config['CHOST'] = list(default_toolchains)[0]
    config['ARCH'] = toolchain.GetArchForTarget(config['CHOST'])

    config['BOARD_OVERLAY'] = '\n'.join(board_overlays)
    config['PORTDIR_OVERLAY'] = '\n'.join(portdir_overlays)

    config['MAKEOPTS'] = '-j%s' % multiprocessing.cpu_count()
    config['ROOT'] = self.path + '/'
    config['PKG_CONFIG'] = self._WrapperPath('pkg-config')

    config.update(kwargs)

    return '\n'.join((header, _DictToKeyValue(config)))
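The method above delegates the final formatting to a _DictToKeyValue helper that isn't shown here. A minimal sketch of what such a helper could look like, assuming it emits sorted KEY="value" lines (the real module's quoting and ordering may differ):

def _DictToKeyValue(dictionary):
  """Formats a dict as sorted KEY="value" lines (hypothetical sketch)."""
  return '\n'.join('%s="%s"' % (key, dictionary[key])
                   for key in sorted(dictionary))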
Example 2
  def testArchForToolchain(self):
    """Tests that we correctly parse crossdev's output."""
    rc_mock = cros_test_lib.RunCommandMock()

    noarch = """target=foo
category=bla
"""
    rc_mock.SetDefaultCmdResult(output=noarch)
    with rc_mock:
      self.assertEqual(None, toolchain.GetArchForTarget('fake_target'))

    amd64arch = """arch=amd64
target=foo
"""
    rc_mock.SetDefaultCmdResult(output=amd64arch)
    with rc_mock:
      self.assertEqual('amd64', toolchain.GetArchForTarget('fake_target'))
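The mock above feeds crossdev-style key=value text into the code under test. A standalone sketch of the parsing that GetArchForTarget appears to perform, using subprocess rather than chromite's cros_build_lib helpers; the crossdev arguments shown are an assumption, not the verified invocation:

import subprocess

def get_arch_for_target(target):
  """Returns the arch reported by crossdev for |target|, or None.

  Hypothetical standalone sketch; chromite's GetArchForTarget goes through
  cros_build_lib, and the exact crossdev flags may differ.
  """
  # Assumption: crossdev can print a target's config as key=value lines.
  output = subprocess.run(['crossdev', '--show-target-cfg', target],
                          capture_output=True, text=True, check=True).stdout
  for line in output.splitlines():
    key, _, value = line.partition('=')
    if key == 'arch':
      return value or None
  return None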
Example 3
    def PerformStage(self):
        """Perform the actual work for this stage.

    This includes final metadata archival, and update CIDB with our final status
    as well as producting a logged build result summary.
    """
        build_identifier, _ = self._run.GetCIDBHandle()
        build_id = build_identifier.cidb_id
        buildbucket_id = build_identifier.buildbucket_id
        if results_lib.Results.BuildSucceededSoFar(self.buildstore,
                                                   buildbucket_id, self.name):
            final_status = constants.BUILDER_STATUS_PASSED
        else:
            final_status = constants.BUILDER_STATUS_FAILED

        if not hasattr(self._run.attrs, 'release_tag'):
            # If, for some reason, the sync stage was not completed and
            # release_tag was not set, set it to None here because
            # ArchiveResults() depends on the existence of this attr.
            self._run.attrs.release_tag = None

        # Set up our report metadata.
        self._run.attrs.metadata.UpdateWithDict(
            self.GetReportMetadata(
                final_status=final_status,
                completion_instance=self._completion_instance))

        src_root = self._build_root
        # Workspace builders use a different buildroot for overlays.
        if self._run.config.workspace_branch and self._run.options.workspace:
            src_root = self._run.options.workspace

        # Add tags for the arches and statuses of the build. Computing the
        # arches requires crossdev, which isn't available during the early
        # part of the build.
        arches = []
        for board in self._run.config['boards']:
            toolchains = toolchain.GetToolchainsForBoard(board,
                                                         buildroot=src_root)
            default = list(
                toolchain.FilterToolchains(toolchains, 'default', True))
            if default:
                try:
                    arches.append(toolchain.GetArchForTarget(default[0]))
                except cros_build_lib.RunCommandError as e:
                    logging.warning(
                        'Unable to retrieve arch for board %s default toolchain %s: %s',
                        board, default, e)
        tags = {
            'arches': arches,
            'status': final_status,
        }
        results = self._run.attrs.metadata.GetValue('results')
        for stage in results:
            tags['stage_status:%s' % stage['name']] = stage['status']
            tags['stage_summary:%s' % stage['name']] = stage['summary']
        self._run.attrs.metadata.UpdateKeyDictWithDict(constants.METADATA_TAGS,
                                                       tags)

        # Some operations can only be performed if a valid version is available.
        try:
            self._run.GetVersionInfo()
            self.ArchiveResults(final_status)
            metadata_url = os.path.join(self.upload_url,
                                        constants.METADATA_JSON)
        except cbuildbot_run.VersionNotSetError:
            logging.error('A valid version was never set for this run. '
                          'Can not archive results.')
            metadata_url = ''

        results_lib.Results.Report(sys.stdout,
                                   current_version=(self._run.attrs.release_tag
                                                    or ''))

        # Upload goma log if used for BuildPackage and TestSimpleChrome.
        _UploadAndLinkGomaLogIfNecessary(
            'BuildPackages', self._run.config.name, self._run.options.goma_dir,
            self._run.options.goma_client_json,
            self._run.attrs.metadata.GetValueWithDefault('goma_tmp_dir'))
        _UploadAndLinkGomaLogIfNecessary(
            'TestSimpleChromeWorkflow', self._run.config.name,
            self._run.options.goma_dir, self._run.options.goma_client_json,
            self._run.attrs.metadata.GetValueWithDefault(
                'goma_tmp_dir_for_simple_chrome'))

        if self.buildstore.AreClientsReady():
            status_for_db = final_status

            # TODO(pprabhu): After BuildData and CBuildbotMetadata are merged,
            # remove this extra temporary object creation.
            # XXX:HACK We're creating a BuildData with an empty URL. Don't try to
            # MarkGathered this object.
            build_data = metadata_lib.BuildData(
                '', self._run.attrs.metadata.GetDict())
            # TODO(akeshet): Find a clearer way to get the "primary upload url" for
            # the metadata.json file. One alternative is _GetUploadUrls(...)[0].
            # Today it seems that element 0 of its return list is the primary upload
            # url, but there is no guarantee or unit test coverage of that.
            self.buildstore.FinishBuild(build_id,
                                        status=status_for_db,
                                        summary=build_data.failure_message,
                                        metadata_url=metadata_url)

            duration = self._GetBuildDuration()

            mon_fields = {
                'status': status_for_db,
                'build_config': self._run.config.name,
                'important': self._run.config.important
            }
            metrics.Counter(
                constants.MON_BUILD_COMP_COUNT).increment(fields=mon_fields)
            metrics.CumulativeSecondsDistribution(
                constants.MON_BUILD_DURATION).add(duration, fields=mon_fields)

            if self._run.options.sanity_check_build:
                metrics.Counter(
                    constants.MON_BUILD_SANITY_COMP_COUNT).increment(
                        fields=mon_fields)
                metrics.Gauge(
                    constants.MON_BUILD_SANITY_ID,
                    description=
                    'The build number of the latest sanity build. Used '
                    'for recovering the link to the latest failing build '
                    'in the alert when a sanity build fails.',
                    field_spec=[
                        ts_mon.StringField('status'),
                        ts_mon.StringField('build_config'),
                        ts_mon.StringField('builder_name'),
                        ts_mon.BooleanField('important')
                    ]).set(self._run.buildnumber,
                           fields=dict(
                               mon_fields,
                               builder_name=self._run.GetBuilderName()))

            if config_lib.IsMasterCQ(self._run.config):
                self_destructed = self._run.attrs.metadata.GetValueWithDefault(
                    constants.SELF_DESTRUCTED_BUILD, False)
                mon_fields = {
                    'status': status_for_db,
                    'self_destructed': self_destructed
                }
                metrics.CumulativeSecondsDistribution(
                    constants.MON_CQ_BUILD_DURATION).add(duration,
                                                         fields=mon_fields)
                annotator_link = uri_lib.ConstructAnnotatorUri(build_id)
                logging.PrintBuildbotLink('Build annotator', annotator_link)

            # From this point forward, treat all exceptions as warnings.
            self._post_completion = True

            # Dump report about things we retry.
            retry_stats.ReportStats(sys.stdout)