Example No. 1
  def GenerateVerity(self):
    """Generate verity parameters and hashes for the image."""
    logging.info('Generating DLC image verity.')
    with osutils.TempDir(prefix='dlc_') as temp_dir:
      hash_tree = os.path.join(temp_dir, 'hash_tree')
      # Get the number of blocks in the image.
      blocks = math.ceil(os.path.getsize(self.dest_image) / self._BLOCK_SIZE)
      result = cros_build_lib.run(
          [
              'verity', 'mode=create', 'alg=sha256',
              'payload=' + self.dest_image,
              'payload_blocks=' + str(blocks),
              'hashtree=' + hash_tree,
              'salt=random',
          ],
          capture_output=True)
      table = result.output

      # Append the merkle tree to the image.
      osutils.WriteFile(
          self.dest_image, osutils.ReadFile(hash_tree, mode='rb'), mode='a+b')

      # Write verity parameter to table file.
      osutils.WriteFile(self.dest_table, table, mode='wb')

      # Compute image hash.
      image_hash = HashFile(self.dest_image)
      table_hash = HashFile(self.dest_table)
      # The image grew when the hash tree was appended, so recompute blocks.
      blocks = math.ceil(os.path.getsize(self.dest_image) / self._BLOCK_SIZE)
      # Write the hashes to the imageloader.json file.
      imageloader_json_content = self.GetImageloaderJsonContent(
          image_hash, table_hash, int(blocks))
      pformat.json(imageloader_json_content, fp=self.dest_imageloader_json)
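Every example on this page funnels output through pformat.json. A minimal sketch of the two calling modes used throughout, assuming chromite's pformat.json mirrors json.dumps/json.dump as these call sites imply: with no fp it returns the formatted string, and with fp set to a path or open file it writes there instead.

# A minimal sketch, assuming chromite.utils.pformat.json behaves as the
# examples use it: fp=None returns the JSON string, while fp may be a file
# path or an open file object to write to.
from chromite.utils import pformat

state = {'image_hash': 'deadbeef', 'blocks': 42}
as_string = pformat.json(state, compact=True)      # returns a str
pformat.json(state, fp='/tmp/imageloader.json')    # writes to the path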
Example No. 2
    def _CollectLLVMMetadata(self):
        def check_chroot_output(command):
            cmd = cros_build_lib.run(command,
                                     enter_chroot=True,
                                     stdout=True,
                                     encoding='utf-8')
            return cmd.output

        # The baked-in clang should be the one we're looking for. If not, yell.
        llvm_uses = check_chroot_output(
            ['equery', '-C', '-N', 'uses', 'sys-devel/llvm'])
        use_vars = self._ParseUseFlagState(llvm_uses)
        if '+llvm_pgo_generate' not in use_vars:
            raise ValueError(
                "The pgo_generate flag isn't enabled; USE flags: %r" %
                sorted(use_vars))

        clang_version_str = check_chroot_output(['clang', '--version'])
        head_sha = self._ParseLLVMHeadSHA(clang_version_str)
        metadata_output_path = os.path.join(self.archive_path,
                                            self.LLVM_METADATA)
        pformat.json({'head_sha': head_sha},
                     fp=metadata_output_path,
                     compact=True)
        # This is a tiny JSON file, so it doesn't need to be tarred/compressed.
        self._upload_queue.put([metadata_output_path])
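The helper _ParseUseFlagState is not shown here. A hypothetical stand-in for the parsing it would need, given that equery prints USE flags as +flag/-flag tokens:

# Hypothetical stand-in for _ParseUseFlagState: equery prints USE flags as
# whitespace-separated +flag/-flag tokens, and the check above only needs
# the set of those tokens.
def parse_use_flag_state(equery_output: str) -> set:
    return set(equery_output.split())

assert '+llvm_pgo_generate' in parse_use_flag_state('+llvm_pgo_generate -debug')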
Example No. 3
  def _Persist(self):
    """Dump our config to disk for later MoblabVm objects."""
    # We do not want to modify the config dict held by this object.
    config = dict(self._config)
    self._ConfigAbsolutePathsToRelative(config,
                                        os.path.dirname(self._config_path))
    pformat.json(config, fp=self._config_path)
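_ConfigAbsolutePathsToRelative is also not shown; a hypothetical sketch of what it is assumed to do, so the persisted config stays relocatable:

import os

# Hypothetical sketch of _ConfigAbsolutePathsToRelative: rewrite absolute
# path values so the persisted config is relative to the directory that
# holds the config file.
def config_abs_paths_to_relative(config, base_dir):
    for key, value in config.items():
        if isinstance(value, str) and os.path.isabs(value):
            config[key] = os.path.relpath(value, base_dir)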
Example No. 4
def Replicate(replication_config):
    """Run the replication described in replication_config.

  Args:
    replication_config: (ReplicationConfig) Describes the replication to run.
  """
    # Validate all rules before any of them are run, to decrease chance of ending
    # with a partial replication.
    for rule in replication_config.file_replication_rules:
        _ValidateFileReplicationRule(rule)

    for rule in replication_config.file_replication_rules:
        logging.info('Processing FileReplicationRule: %s', rule)

        src = os.path.join(constants.SOURCE_ROOT, rule.source_path)
        dst = os.path.join(constants.SOURCE_ROOT, rule.destination_path)

        osutils.SafeMakedirs(os.path.dirname(dst))

        if rule.file_type == replication_config_pb2.FILE_TYPE_JSON:
            assert (rule.replication_type ==
                    replication_config_pb2.REPLICATION_TYPE_FILTER)
            assert rule.destination_fields.paths

            with open(src, 'r') as f:
                source_json = json.load(f)

            try:
                source_device_configs = source_json['chromeos']['configs']
            except KeyError:
                raise NotImplementedError((
                    'Currently only ChromeOS Configs are supported (expected file %s '
                    'to have a list at "$.chromeos.configs")') % src)

            destination_device_configs = []
            for source_device_config in source_device_configs:
                destination_device_configs.append(
                    field_mask_util.CreateFilteredDict(rule.destination_fields,
                                                       source_device_config))

            destination_json = {
                'chromeos': {
                    'configs': destination_device_configs
                }
            }

            logging.info('Writing filtered JSON source to %s', dst)
            pformat.json(destination_json, fp=dst)
        else:
            assert rule.file_type == replication_config_pb2.FILE_TYPE_OTHER
            assert (rule.replication_type ==
                    replication_config_pb2.REPLICATION_TYPE_COPY)
            assert not rule.destination_fields.paths

            logging.info('Copying full file from %s to %s', src, dst)
            shutil.copy2(src, dst)

        if rule.string_replacement_rules:
            _ApplyStringReplacementRules(dst, rule.string_replacement_rules)
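The filtering step above relies on field_mask_util.CreateFilteredDict. A simplified sketch of the idea, restricted to top-level paths (the real helper also understands nested, dotted paths):

# Simplified sketch of the field-mask filtering idea behind
# field_mask_util.CreateFilteredDict; top-level paths only.
def create_filtered_dict(paths, source):
    return {key: value for key, value in source.items() if key in paths}

device_config = {'name': 'atlas', 'audio': {'card': 'a'}, 'internal': True}
print(create_filtered_dict(['name', 'audio'], device_config))
# {'name': 'atlas', 'audio': {'card': 'a'}}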
Example No. 5
def main(argv):
    opts = ParseArgs(argv)

    sysroot = opts.sysroot or cros_build_lib.GetSysroot(opts.board)
    deps_list, _ = ExtractDeps(sysroot, opts.pkgs, opts.format)

    pformat.json(deps_list,
                 fp=opts.output_path if opts.output_path else sys.stdout)
Example No. 6
def write_sizes(sizes: dict, required_paths: list, human_readable: bool,
                output_format: str,
                output_path: typing.Union[str, typing.TextIO]):
  """Writes the sizes in CSV or JSON format.

  Args:
    sizes: A dictionary of path -> size.
    required_paths: List of paths to order the results by.
    human_readable: True to emit sizes in human-readable form.
    output_format: Output format ('json' or 'csv').
    output_path: Path or file object to write the output to.
  """

  def size_string(sz):
    if human_readable:
      return pformat.size(sz)
    return sz

  output = []

  # If required_paths were passed in, emit output in that same order.
  if required_paths:
    for path in required_paths:
      if path not in sizes:
        size = -1
      else:
        size = size_string(sizes[path])
      output.append({'path': path, 'size': size})
  else:
    for path, size in sorted(sizes.items()):
      output.append({'path': path, 'size': size_string(size)})

  with cros_build_lib.Open(output_path, mode='w') as f:
    if output_format == 'csv':
      writer = csv.DictWriter(f, ['path', 'size'])
      writer.writeheader()
      for row in output:
        writer.writerow(row)
    elif output_format == 'json':
      pformat.json(output, f)
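A hypothetical invocation, assuming cros_build_lib.Open accepts either a path or an already-open file object, as the call above implies:

import sys

# Hypothetical usage; output_path may be a file path or a file object such
# as sys.stdout, per the cros_build_lib.Open call above.
write_sizes(
    sizes={'/usr/bin/foo': 4096, '/usr/bin/bar': 123},
    required_paths=[],
    human_readable=True,
    output_format='json',
    output_path=sys.stdout)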
Example No. 7
    def _WaitForSigningResults(self,
                               instruction_urls_per_channel,
                               channel_notifier=None):
        """Do the work of waiting for signer results and logging them.

        Args:
          instruction_urls_per_channel: push_image data (see _WaitForPushImage).
          channel_notifier: Method to call with the channel name when ready, or
            None.

        Raises:
          ValueError: If the signer result isn't valid JSON.
          RunCommandError: If we are unable to download signer results.
        """
        gs_ctx = gs.GSContext(dry_run=self._run.options.debug)

        try:
            logging.info('Waiting for signer results.')
            timeout_util.WaitForReturnTrue(
                self._CheckForResults,
                func_args=(gs_ctx, instruction_urls_per_channel,
                           channel_notifier),
                timeout=self.SIGNING_TIMEOUT,
                period=self.SIGNING_PERIOD)
        except timeout_util.TimeoutError:
            msg = 'Image signing timed out.'
            logging.error(msg)
            logging.PrintBuildbotStepText(msg)
            raise SignerResultsTimeout(msg)

        # Log all signer results, then handle any signing failures.
        failures = []
        for url_results in self.signing_results.values():
            for url, signer_result in url_results.items():
                result_description = os.path.basename(url)
                logging.PrintBuildbotStepText(result_description)
                logging.info('Received results for: %s', result_description)
                logging.info(pformat.json(signer_result))

                status = self._SigningStatusFromJson(signer_result)
                if status != constants.SIGNER_STATUS_PASSED:
                    failures.append(result_description)
                    logging.error('Signing failed for: %s', result_description)
                    details = signer_result.get('status', {}).get('details')
                    if details:
                        logging.info('Details:\n%s', details)

        if failures:
            logging.error('Failure summary:')
            for failure in failures:
                logging.error('  %s', failure)
            raise SignerFailure(', '.join([str(f) for f in failures]))
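timeout_util.WaitForReturnTrue drives the wait above. A minimal sketch of the polling pattern it is used for here, with illustrative names:

import time

# Minimal sketch of the polling pattern behind timeout_util.WaitForReturnTrue:
# call func(*func_args) every `period` seconds until it returns True, raising
# once `timeout` seconds have elapsed. Names are illustrative only.
def wait_for_return_true(func, func_args=(), timeout=60, period=5):
    deadline = time.time() + timeout
    while time.time() < deadline:
        if func(*func_args):
            return
        time.sleep(period)
    raise TimeoutError('condition not met within %d seconds' % timeout)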
Example No. 8
def main(argv):
  parser = GetParser()
  options = parser.parse_args(argv)

  site_config = chromeos_config.GetConfig()

  filehandle = sys.stdout

  if options.update_config:
    filehandle = open(constants.CHROMEOS_CONFIG_FILE, 'w')

  if options.builder:
    if options.builder not in site_config:
      raise Exception('%s: Not a valid build config.' % options.builder)
    pformat.json(site_config[options.builder], fp=filehandle)
  elif options.full:
    filehandle.write(site_config.DumpExpandedConfigToString())
  elif options.csv:
    filehandle.write(site_config.DumpConfigCsv())
  else:
    filehandle.write(site_config.SaveConfigToString())

  if options.update_config:
    filehandle.close()
Example No. 9
def SwarmingRetriableErrorCheck(exception):
    """Check if a swarming error is retriable.

  Args:
    exception: A cros_build_lib.RunCommandError exception.

  Returns:
    True if retriable, otherwise False.
  """
    if not isinstance(exception, cros_build_lib.RunCommandError):
        logging.warning('Exception is not retriable: %s', str(exception))
        return False
    result = exception.result
    if not isinstance(result, SwarmingCommandResult):
        logging.warning(
            'Exception is not retriable as the result '
            'is not a SwarmingCommandResult: %s', str(result))
        return False
    if result.task_summary_json:
        try:
            internal_failure = result.GetValue('internal_failure')
            state = result.GetValue('state')
            if internal_failure and state in RETRIABLE_INTERNAL_FAILURE_STATES:
                logging.warning(
                    'Encountered retriable swarming internal failure: %s',
                    pformat.json(result.task_summary_json))
                return True
        except (IndexError, KeyError) as e:
            logging.warning(
                'Could not determine if exception is retriable. Exception: %s. '
                'Error: %s. Swarming summary json: %s', str(exception), str(e),
                pformat.json(result.task_summary_json))
            return False

    logging.warning('Exception is not retriable: %s', str(exception))
    return False
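SwarmingCommandResult.GetValue is not shown. A hypothetical sketch of what it is assumed to do, which would explain why IndexError and KeyError are caught above; the 'shards' layout follows swarming's summary format and should be treated as an assumption:

# Hypothetical sketch of SwarmingCommandResult.GetValue: read a field from
# the first shard of the swarming task summary JSON.
def get_value(task_summary_json, field):
    return task_summary_json['shards'][0][field]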
Example No. 10
  def to_json(self):
    """Serialize this object to JSON.

    Attributes that have an empty/zero value are omitted from the output.  The
    output of this function can be passed to from_json() to get back another
    BuildSummary with the same values.

    Returns:
      A string containing a JSON-encoded representation of this object.
    """
    state = {}
    for a in self._PERSIST_ATTRIBUTES:
      val = getattr(self, a)
      if val:
        state[a] = val
    return pformat.json(state, compact=True)
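The docstring promises a from_json() counterpart; a hypothetical sketch of that round trip, restoring the persisted attributes onto an existing BuildSummary:

import json

# Hypothetical counterpart implied by the docstring above: restore the
# persisted attributes onto an existing BuildSummary instance.
def from_json(self, raw):
    state = json.loads(raw)
    for attr in self._PERSIST_ATTRIBUTES:
        if attr in state:
            setattr(self, attr, state[attr])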
Example No. 11
    def PerformStage(self):
        """Generate and upload build configs.

        The build config includes config.yaml (for unibuild) and USE flags.
        """
        board = self._current_board
        config_useflags = self._run.config.useflags

        logging.info('Generating build configs.')
        results = commands.GenerateBuildConfigs(board, config_useflags)

        results_str = pformat.json(results)
        logging.info('Results:\n%s', results_str)

        logging.info('Writing build configs to files for archive.')
        results_filename = os.path.join(
            self.archive_path, 'chromeos-build-configs-%s.json' % board)

        osutils.WriteFile(results_filename, results_str)

        logging.info('Uploading build config files.')
        self.UploadArtifact(os.path.basename(results_filename), archive=False)
Example No. 12
    def CreateDummyMetadataJson(self):
        """Create/publish the firmware build artifact for the current board."""
        workspace_version_info = self.GetWorkspaceVersionInfo()

        # Use the metadata for the main build, with selected fields modified.
        board_metadata = self._run.attrs.metadata.GetDict()
        board_metadata['boards'] = [self._current_board]
        board_metadata['branch'] = self._run.config.workspace_branch
        board_metadata['version_full'] = self.dummy_version
        board_metadata['version_milestone'] = (
            workspace_version_info.chrome_branch)
        board_metadata['version_platform'] = (
            workspace_version_info.VersionString())
        board_metadata['version'] = {
            'platform': workspace_version_info.VersionString(),
            'full': self.dummy_version,
            'milestone': workspace_version_info.chrome_branch,
        }

        current_time = datetime.datetime.now()
        current_time_stamp = cros_build_lib.UserDateTimeFormat(
            timeval=current_time)

        # We report the build as passing, since we can't get here if it isn't.
        board_metadata['status'] = {
            'status': 'pass',
            'summary': '',
            'current-time': current_time_stamp,
        }

        with osutils.TempDir(prefix='metadata') as tempdir:
            metadata_path = os.path.join(tempdir, constants.METADATA_JSON)
            logging.info('Writing metadata to %s.', metadata_path)
            osutils.WriteFile(metadata_path,
                              pformat.json(board_metadata),
                              atomic=True)

            self.UploadDummyArtifact(metadata_path)
Example No. 13
    def _GetRequestBody(self):
        """Generate the request body for a swarming buildbucket request.

    Returns:
      buildbucket request properties as a python dict.
    """
        tags = {
            # buildset identifies a group of related builders.
            'buildset': SlaveBuildSet(self.master_buildbucket_id),
            'cbb_display_label': self.display_label,
            'cbb_branch': self.branch,
            'cbb_config': self.build_config,
            'cbb_email': self.user_email,
            'cbb_master_build_id': self.master_cidb_id,
            'cbb_master_buildbucket_id': self.master_buildbucket_id,
            'cbb_workspace_branch': self.workspace_branch,
            'cbb_goma_client_type': self.goma_client_type,
        }

        if self.master_cidb_id or self.master_buildbucket_id:
            # Used by Legoland as part of grouping slave builds. Set to False for
            # slave builds, not set otherwise.
            tags['master'] = 'False'

        # Include the extra_properties we might have passed into the tags.
        tags.update(self.extra_properties)

        # Don't include tags with no value, there is no point.
        # Convert tag values to strings.
        #
        # Note that cbb_master_build_id must be a string (not a number) in
        # properties because JSON does not distinguish integers and floats, so
        # nothing guarantees that 0 won't turn into 0.0.
        # The recipe expects it to be a string anyway.
        tags = {k: str(v) for k, v in tags.items() if v}

        # All tags should also be listed as properties.
        properties = tags.copy()
        properties['cbb_extra_args'] = self.extra_args

        parameters = {
            'builder_name': self.luci_builder,
            'properties': properties,
        }

        if self.user_email:
            parameters['email_notify'] = [{
                'email': self.user_email,
                'template': self.email_template,
            }]

        # If a specific bot was requested, pass along the request with a
        # 240 second (4 minute) timeout. If the bot isn't available, we
        # will fall back to the general builder restrictions (probably
        # based on role).
        if self.requested_bot:
            parameters['swarming'] = {
                'override_builder_cfg': {
                    'dimensions': [
                        '240:id:%s' % self.requested_bot,
                    ]
                }
            }

        return {
            'bucket': self.bucket,
            'parameters_json': pformat.json(parameters, compact=True),
            # These tags are indexed and searchable in buildbucket.
            'tags': ['%s:%s' % (k, tags[k]) for k in sorted(tags.keys())],
        }
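A small illustration of the tag flattening at the end of _GetRequestBody: empty values are dropped, the rest stringified, then emitted as sorted 'key:value' strings that buildbucket indexes.

tags = {'cbb_config': 'amd64-generic-full', 'cbb_branch': 'main',
        'cbb_email': ''}
tags = {k: str(v) for k, v in tags.items() if v}
print(['%s:%s' % (k, tags[k]) for k in sorted(tags)])
# ['cbb_branch:main', 'cbb_config:amd64-generic-full']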
Example No. 14
def CreateCacheTarball(extensions, outputdir, identifier, tarball):
    """Cache |extensions| in |outputdir| and pack them in |tarball|."""

    crxdir = os.path.join(outputdir, 'crx')
    jsondir = os.path.join(outputdir, 'json', 'extensions')
    validationdir = os.path.join(outputdir, 'validation')

    osutils.SafeMakedirs(os.path.join(crxdir, 'extensions'))
    osutils.SafeMakedirs(jsondir)
    was_errors = False
    for ext in extensions:
        extension = extensions[ext]
        # managed_users should no longer be in use at this point.
        if 'managed_users' in extension:
            cros_build_lib.Die(
                'managed_users is deprecated and not supported. '
                'Please use user_type.')
        # If we are working with old-style JSON, fall back to a default
        # 'user_type'.
        # TODO: Update all external_extensions.json files and deprecate this.
        if 'user_type' not in extension:
            user_type = ['unmanaged']
            if extension.get('child_users', 'no') == 'yes':
                user_type.append('child')
            logging.warning(
                'user_type filter has to be set explicitly for %s, using '
                '%s by default.', ext, user_type)
            extension['user_type'] = user_type
        else:
            if 'child_users' in extension:
                cros_build_lib.Die(
                    'child_users is not supported when user_type is '
                    'set.')

        # Verify user type is well-formed.
        allowed_user_types = {
            'unmanaged', 'managed', 'child', 'supervised', 'guest'
        }
        if not extension['user_type']:
            cros_build_lib.Die('user_type is not set')
        ext_keys = set(extension['user_type'])
        unknown_keys = ext_keys - allowed_user_types
        if unknown_keys:
            cros_build_lib.Die('user_type %s is not allowed', unknown_keys)

        cache_crx = extension.get('cache_crx', 'yes')

        # Remove fields that shouldn't be in the output file.
        for key in ('cache_crx', 'child_users'):
            extension.pop(key, None)

        if cache_crx == 'yes':
            if not DownloadCrx(ext, extension, crxdir):
                was_errors = True
        elif cache_crx == 'no':
            pass
        else:
            cros_build_lib.Die('Unknown value for "cache_crx" %s for %s',
                               cache_crx, ext)

        json_file = os.path.join(jsondir, '%s.json' % ext)
        pformat.json(extension, fp=json_file)

    if was_errors:
        cros_build_lib.Die('Failed to download some extensions')

    CreateValidationFiles(validationdir, crxdir, identifier)
    cros_build_lib.CreateTarball(tarball, outputdir)
    logging.info('Tarball created: %s', tarball)
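A hypothetical input for CreateCacheTarball, inferred from the keys the function reads; the extension ID and URL are illustrative only:

# Hypothetical extensions dict; keys mirror those read above
# ('user_type', 'cache_crx'), ID and URL are made up for illustration.
extensions = {
    'aaaabbbbccccddddeeeeffffgggghhhh': {
        'external_update_url': 'https://clients2.google.com/service/update2/crx',
        'user_type': ['unmanaged', 'child'],
        'cache_crx': 'yes',
    },
}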