Code example #1
File: pushimage.py Project: qlb7707/webrtc_src
  def OutputInsns(self, image_type, output_file, sect_insns, sect_general):
    """Generate the output instruction file for sending to the signer.

    Note: The format of the instruction file pushimage outputs (and the signer
    reads) is not exactly the same as the instruction file pushimage reads.

    Args:
      image_type: The type of image we will be signing (see --sign-types).
      output_file: The file to write the new instruction file to.
      sect_insns: Items to set/override in the [insns] section.
      sect_general: Items to set/override in the [general] section.
    """
    config = ConfigParser.ConfigParser()
    config.readfp(open(self.GetInsnFile(image_type)))

    # Clear channel entry in instructions file, ensuring we only get
    # one channel for the signer to look at.  Then provide all the
    # other details for this signing request to avoid any ambiguity
    # and to avoid relying on encoding data into filenames.
    for sect, fields in zip(('insns', 'general'), (sect_insns, sect_general)):
      if not config.has_section(sect):
        config.add_section(sect)
      for k, v in fields.iteritems():
        config.set(sect, k, v)

    output = cStringIO.StringIO()
    config.write(output)
    data = output.getvalue()
    osutils.WriteFile(output_file, data)
    logging.debug('generated insns file for %s:\n%s', image_type, data)
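Note: the loop above is plain ConfigParser usage. A minimal standalone sketch of the same set/override pattern (Python 2 stdlib, matching the example; the section names and field values here are hypothetical):

import ConfigParser
import cStringIO

config = ConfigParser.ConfigParser()
sect_insns = {'channel': 'dev-channel'}
sect_general = {'board': 'some-board'}
for sect, fields in zip(('insns', 'general'), (sect_insns, sect_general)):
    # Create the section on demand, then set/override each field.
    if not config.has_section(sect):
        config.add_section(sect)
    for k, v in fields.iteritems():
        config.set(sect, k, v)

output = cStringIO.StringIO()
config.write(output)
print(output.getvalue())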
Code example #2
File: gconv_strip.py Project: sjg20/chromite
    def Load(self):
        """Load the charsets from gconv-modules."""
        for line in open(self._filename):
            line = line.split('#', 1)[0].strip()
            if not line:  # Blank line or comment-only line.
                continue

            lst = line.split()
            if lst[0] == 'module':
                _, fromset, toset, filename = lst[:4]
                for charset in (fromset, toset):
                    charset = charset.rstrip('/')
                    mods = self._modules.get(charset, set())
                    mods.add(filename)
                    self._modules[charset] = mods
            elif lst[0] == 'alias':
                _, fromset, toset = lst
                fromset = fromset.rstrip('/')
                toset = toset.rstrip('/')
                # Warn if the same charset is defined as two different aliases.
                if self._alias.get(fromset, toset) != toset:
                    logging.error('charset "%s" already defined as "%s".',
                                  fromset, self._alias[fromset])
                self._alias[fromset] = toset
            else:
                cros_build_lib.Die('Unknown line: %s', line)

        logging.debug('Found %d modules and %d aliases in %s',
                      len(self._modules), len(self._alias), self._filename)
        charsets = sorted(self._alias.keys() + self._modules.keys())
        # Remove the 'INTERNAL' charset from the list, since it is not a charset
        # but an internal representation used to convert to and from other charsets.
        if 'INTERNAL' in charsets:
            charsets.remove('INTERNAL')
        return charsets
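Note: the input parsed here follows glibc's gconv-modules syntax. A hedged, self-contained sketch of the line shapes Load() accepts, run through the same splitting logic (the sample entries are illustrative; real files vary by libc version, and module lines carry a trailing cost field that the unpacking above drops via lst[:4]):

sample = '''
# comment line
alias  ISO-IR-100//  ISO-8859-1//
module ISO-8859-1//  INTERNAL       ISO8859-1  1
module INTERNAL      ISO-8859-1//   ISO8859-1  1
'''
for line in sample.splitlines():
    line = line.split('#', 1)[0].strip()
    if not line:
        continue
    print(line.split())
# ['alias', 'ISO-IR-100//', 'ISO-8859-1//']
# ['module', 'ISO-8859-1//', 'INTERNAL', 'ISO8859-1', '1']
# ['module', 'INTERNAL', 'ISO-8859-1//', 'ISO8859-1', '1']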
Code example #3
    def GetPackageInfo(self, packages, use_all=False, use_workon_only=False):
        """Get information about packages.

    Args:
      packages: list of package name fragments.  These will be mapped to
          canonical portage atoms via the same process as
          StartWorkingOnPackages().
      use_all: True iff instead of the provided package list, we should just
          stop working on all currently worked on atoms for the system in
          question.
      use_workon_only: True iff instead of the provided package list, we should
          stop working on all currently worked on atoms that define only a
          -9999 ebuild.

    Returns:
      Returns a list of PackageInfo tuples.
    """
        if use_all or use_workon_only:
            # You can't use info to find the source code from Chrome, since that
            # workflow is different.
            ebuilds = self._GetWorkonEbuilds(filter_workon=use_workon_only,
                                             include_chrome=False)
        else:
            atoms = self._GetCanonicalAtoms(packages)
            ebuilds = [self._FindEbuildForPackage(atom) for atom in atoms]

        ebuild_to_repos = {}
        for ebuild in ebuilds:
            workon_vars = portage_util.EBuild.GetCrosWorkonVars(
                ebuild, portage_util.EbuildToCP(ebuild))
            projects = workon_vars.project if workon_vars else []
            ebuild_to_repos[ebuild] = projects

        repository_to_source_path = {}
        repo_list_result = cros_build_lib.RunCommand('repo list',
                                                     shell=True,
                                                     enter_chroot=True,
                                                     capture_output=True,
                                                     print_cmd=False)

        for line in repo_list_result.output.splitlines():
            pieces = line.split(' : ')
            if len(pieces) != 2:
                logging.debug('Ignoring malformed repo list output line: "%s"',
                              line)
                continue

            source_path, repository = pieces
            repository_to_source_path[repository] = source_path

        result = []
        for ebuild in ebuilds:
            package = portage_util.EbuildToCP(ebuild)
            repos = ebuild_to_repos.get(ebuild, [])
            src_paths = [repository_to_source_path.get(repo) for repo in repos]
            src_paths = [path for path in src_paths if path]
            result.append(PackageInfo(package, repos, src_paths))

        result.sort()
        return result
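Note: the only thing the middle loop relies on is that `repo list` prints one '<source path> : <project name>' pair per line. A hedged sketch of that parsing step with a made-up sample line:

repo_list_output = 'src/platform2 : chromiumos/platform2\nmalformed line'
repository_to_source_path = {}
for line in repo_list_output.splitlines():
    pieces = line.split(' : ')
    if len(pieces) != 2:
        continue  # skip anything that is not a 'path : project' pair
    source_path, repository = pieces
    repository_to_source_path[repository] = source_path
print(repository_to_source_path)
# {'chromiumos/platform2': 'src/platform2'}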
Code example #4
def main(argv):
    """Standard main() for command line processing.

    @param argv Command line arguments (normally sys.argv).
    """

    parser = GetParser()
    options = parser.parse_args(argv[1:])

    with ts_mon_config.SetupTsMonGlobalState('dump_suite_report'):

        afe = frontend_wrappers.RetryingAFE(timeout_min=5,
                                            delay_sec=10,
                                            server=options.afe)
        tko = frontend_wrappers.RetryingTKO(timeout_min=5, delay_sec=10)

        # Look up and generate entries for all jobs.
        entries = []
        for suite_job_id in options.job_ids:
            logging.debug('Suite job %s:' % suite_job_id)
            suite_entries = suite_report.generate_suite_report(suite_job_id,
                                                               afe=afe,
                                                               tko=tko)
            logging.debug('... generated %d entries' % len(suite_entries))
            entries.extend(suite_entries)

        # Write all entries as JSON.
        if options.output:
            with open(options.output, 'w') as f:
                suite_report.dump_entries_as_json(entries, f)
        else:
            suite_report.dump_entries_as_json(entries, sys.stdout)
Code example #5
  def __init__(self, options):
    super(BuildCommand, self).__init__(options)
    self.chroot_update = options.chroot_update and options.deps
    if options.chroot_update and not options.deps:
      logging.debug('Skipping chroot update due to --nodeps')
    self.build_pkgs = options.packages
    self.host = False
    self.board = None
    self.brick = None
    self.blueprint = None

    if self.options.host:
      self.host = True
    elif self.options.board:
      self.board = self.options.board
    elif self.options.blueprint:
      self.blueprint = blueprint_lib.Blueprint(self.options.blueprint)

      if not self.build_pkgs:
        self.build_pkgs = self.blueprint.GetPackages()
    elif self.options.brick or self.curr_brick_locator:
      self.brick = brick_lib.Brick(self.options.brick
                                   or self.curr_brick_locator)
      self.board = self.brick.FriendlyName()
      if not self.build_pkgs:
        self.build_pkgs = self.brick.MainPackages()
    else:
      # If nothing is explicitly set, use the default board.
      self.board = cros_build_lib.GetDefaultBoard()

    # Set sysroot and friendly name. The latter is None if building for host.
    self.sysroot = cros_build_lib.GetSysroot(self.blueprint.FriendlyName()
                                             if self.blueprint else self.board)
Code example #6
def GetXbuddyPath(path):
    """A helper function to parse an xbuddy path.

    Args:
      path: Either an xbuddy path, gs path, or a path with no scheme.

    Returns:
      path/for/xbuddy if |path| is xbuddy://path/for/xbuddy;
      path/for/gs if |path| is gs://chromeos-image-archive/path/for/gs/;
      otherwise, |path|.

    Raises:
      ValueError if |path| is an unrecognized scheme, or is a gs path with
      an unrecognized bucket.
    """
    parsed = urllib.parse.urlparse(path)

    if parsed.scheme == 'xbuddy':
        return '%s%s' % (parsed.netloc, parsed.path)
    elif parsed.scheme == '':
        logging.debug('Assuming %s is an xbuddy path.', path)
        return path
    elif parsed.scheme == 'gs':
        if parsed.netloc != devserver_constants.GS_IMAGE_BUCKET:
            raise ValueError(
                'Do not support bucket %s. Only bucket %s is supported.' %
                (parsed.netloc, devserver_constants.GS_IMAGE_BUCKET))
        return '%s%s' % (xbuddy.REMOTE, parsed.path)
    else:
        raise ValueError('Do not support scheme %s.' % (parsed.scheme, ))
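Note: a hedged usage sketch of the mapping described in the docstring, assuming (per the docstring) that devserver_constants.GS_IMAGE_BUCKET is 'chromeos-image-archive'; board and version names are made up:

GetXbuddyPath('xbuddy://remote/eve/latest')  # -> 'remote/eve/latest'
GetXbuddyPath('remote/eve/latest')           # no scheme -> returned as-is
GetXbuddyPath('gs://chromeos-image-archive/eve-release/R99-1.0.0')
    # -> xbuddy.REMOTE + '/eve-release/R99-1.0.0'
GetXbuddyPath('ftp://host/image')            # -> raises ValueError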
Code example #7
    def PublishManifest(self, manifest, version, build_id=None):
        """Publishes the manifest as the manifest for the version to others.

        Args:
          manifest: Path to manifest file to publish.
          version: Manifest version string, e.g. 6102.0.0-rc4
          build_id: Optional integer giving build_id of the build that is
                    publishing this manifest. If specified and non-negative,
                    build_id will be included in the commit message.
        """
        # Note: This commit message is used by master.cfg for figuring out when to
        #       trigger slave builders.
        commit_message = 'Automatic: Start %s %s %s' % (self.build_names[0],
                                                        self.branch, version)
        if build_id is not None and build_id >= 0:
            commit_message += '\nCrOS-Build-Id: %s' % build_id

        logging.info('Publishing build spec for: %s', version)
        logging.info('Publishing with commit message: %s', commit_message)
        logging.debug('Manifest contents below.\n%s',
                      osutils.ReadFile(manifest))

        # Copy the manifest into the manifest repository.
        spec_file = '%s.xml' % os.path.join(self.all_specs_dir, version)
        osutils.SafeMakedirs(os.path.dirname(spec_file))

        shutil.copyfile(manifest, spec_file)

        # Actually push the manifest.
        self.PushSpecChanges(commit_message)
Code example #8
def _PostParseCheck(options):
  """Perform some usage validation (after we've parsed the arguments).

  Args:
    options: The options object returned by optparse.
  """
  if options.local_pkg_path and not os.path.isfile(options.local_pkg_path):
    cros_build_lib.Die('%s is not a file.', options.local_pkg_path)

  if not options.gyp_defines:
    gyp_env = os.getenv('GYP_DEFINES')
    if gyp_env is not None:
      options.gyp_defines = chrome_util.ProcessGypDefines(gyp_env)
      logging.debug('GYP_DEFINES taken from environment: %s',
                    options.gyp_defines)

  if options.strict and not options.gyp_defines:
    cros_build_lib.Die('When --strict is set, the GYP_DEFINES environment '
                       'variable must be set.')

  if not options.staging_flags:
    use_env = os.getenv('USE')
    if use_env is not None:
      options.staging_flags = ' '.join(set(use_env.split()).intersection(
          chrome_util.STAGING_FLAGS))
      logging.info('Staging flags taken from USE in environment: %s',
                   options.staging_flags)
Code example #9
def CheckAndGetCIDBCreds(force_update=False, folder=None):
  """Check if CIDB creds exist, download creds if necessary."""
  cache_dir = path_util.GetCacheDir()
  dir_name = folder if folder is not None else 'cidb_creds'
  cidb_dir = os.path.join(cache_dir, dir_name)
  cidb_dir_lock = cidb_dir + '.lock'

  with locking.FileLock(cidb_dir_lock).write_lock():
    if os.path.exists(cidb_dir):
      if force_update:
        shutil.rmtree(cidb_dir, ignore_errors=True)
        logging.debug('Force updating CIDB creds. Deleted %s.', cidb_dir)
      else:
        logging.debug('Using cached credentials %s', cidb_dir)
        return cidb_dir

    os.mkdir(cidb_dir)

    try:
      GetCIDBCreds(cidb_dir)
      return cidb_dir
    except Exception as e:
      if isinstance(e, gs.GSCommandError):
        logging.warning('Please check if the GS credentials are configured '
                        'correctly. Please note the permissions to fetch '
                        'these credentials are for Googlers only.')

      logging.error('Failed to get CIDB credentials. Deleting %s', cidb_dir)
      shutil.rmtree(cidb_dir, ignore_errors=True)
      raise
Code example #10
File: remote_access.py Project: sjg20/chromite
    def lsb_release(self):
        """The /etc/lsb-release content on the device.

        Returns a dict of entries in the /etc/lsb-release file. If multiple
        entries have the same key, only the first entry is recorded. Returns
        an empty dict if the reading command failed or the file is corrupted
        (i.e., does not have the format of <key>=<value> for every line).
        """
        if not self._lsb_release:
            try:
                content = self.CatFile(constants.LSB_RELEASE_PATH,
                                       max_size=None)
            except CatFileError as e:
                logging.debug('Failed to read "%s" on the device: %s',
                              constants.LSB_RELEASE_PATH, e)
            else:
                try:
                    self._lsb_release = dict(
                        e.split('=', 1)
                        for e in reversed(content.splitlines()))
                except ValueError:
                    logging.error('File "%s" on the device is mal-formatted.',
                                  constants.LSB_RELEASE_PATH)

        return self._lsb_release
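Note: the reversed() call is what makes the first occurrence of a duplicated key win; dict() keeps the last pair it sees, so feeding the lines in reverse order leaves the earliest entry in place. A standalone sketch:

content = 'KEY=first\nOTHER=x\nKEY=second'
print(dict(e.split('=', 1) for e in reversed(content.splitlines())))
# {'OTHER': 'x', 'KEY': 'first'}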
Code example #11
def get_directory_commits(directory, start_date=None, end_date=None):
    """Get all commits in the given directory.

    Args:
      directory (str): The directory in question. Must be in a git project.
      start_date (datetime.datetime): The earliest datetime to consider, if
          any.
      end_date (datetime.datetime): The latest datetime to consider, if any.

    Returns:
      list[Commits]: The commits relating to that directory.
    """
    # TODO(evanhernandez): I am not sure how --after/--until consider timezones.
    # For a script like this, the differences are probably negligible, but I
    # would be happier if it guaranteed correctness.
    start_date = start_date and start_date.strftime(DATE_FORMAT)
    end_date = end_date and end_date.strftime(DATE_FORMAT)

    output = git.Log(directory,
                     format='format:"%h|%cd|%s"',
                     after=start_date,
                     until=end_date,
                     reverse=True,
                     date='unix',
                     paths=[directory])
    if not output:
        return []
    logging.debug(output)

    commit_lines = [l.strip() for l in output.splitlines() if l.strip()]
    return [Commit(*cl.split('|', 2)) for cl in commit_lines]
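Note: a hedged sketch of the final parsing step, assuming Commit is a three-field namedtuple and git.Log returned lines in the requested '%h|%cd|%s' shape (the sample commit is made up). The split('|', 2) caps the split at three fields, so '|' characters inside the subject survive:

import collections

Commit = collections.namedtuple('Commit', ['sha', 'commit_date', 'subject'])

output = 'abc1234|1577836800|devserver: fix a | in the subject'
commit_lines = [l.strip() for l in output.splitlines() if l.strip()]
print([Commit(*cl.split('|', 2)) for cl in commit_lines])
# [Commit(sha='abc1234', commit_date='1577836800',
#         subject='devserver: fix a | in the subject')]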
Code example #12
    def publish_notifications(self, topic, messages=None):
        """Publishes a test result notification to a given pubsub topic.

        @param topic: The Cloud pubsub topic.
        @param messages: A list of notification messages.

        @returns A list of pubsub message ids; empty or None on failure, or
                 None if there are no messages to publish.

        @raises PubSubException if failed to publish the notification.
        """
        if not messages:
            return None

        pubsub = self._get_pubsub_service()
        try:
            body = {'messages': messages}
            resp = pubsub.projects().topics().publish(
                topic=topic, body=body).execute(
                    num_retries=DEFAULT_PUBSUB_NUM_RETRIES)
            msgIds = []
            if resp:
                msgIds = resp.get('messageIds')
                if msgIds:
                    logging.debug('Published notification message')
                else:
                    logging.error('Failed to publish notification message')
            return msgIds
        except errors.Error as e:
            logging.exception('Failed to publish test result notification:%s',
                    e)
            raise PubSubException('Failed to publish the notification')
Code example #13
def poll_rpc_servers(servers, servers_lock, shards=None, period=60,
                     stop_event=None):
    """Blocking function that polls all servers and shards

    @param servers: list of servers to poll
    @param servers_lock: lock to be used when accessing servers or shards
    @param shards: list of shards to poll
    @param period: time between polls
    @param stop_event: Event that can be set to stop polling
    """
    pool = multiprocessing.Pool(processes=multiprocessing.cpu_count() * 4)

    while not stop_event or not stop_event.is_set():
        start_time = time.time()
        with servers_lock:
            all_servers = set(servers).union(shards)

        logging.debug('Starting Server Polling: %s', ', '.join(all_servers))
        pool.map(afe_rpc_call, all_servers)

        logging.debug('Finished Server Polling')

        metrics.Counter(METRIC_TICK).increment()

        wait_time = (start_time + period) - time.time()
        if wait_time > 0:
            time.sleep(wait_time)
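Note: the closing arithmetic keeps the loop on a fixed cadence; only the unused remainder of the period is slept, so a slow polling round does not stretch the interval (and a round longer than the period skips the sleep entirely). A standalone sketch:

import time

period = 60
start_time = time.time()
# ... one polling round runs here and takes some time ...
wait_time = (start_time + period) - time.time()
if wait_time > 0:
    time.sleep(wait_time)  # sleep only the unused part of the period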
Code example #14
  def HandleSkip(self):
    """Set run.attrs.chrome_version to chrome version in buildroot now."""
    self._run.attrs.chrome_version = self._run.DetermineChromeVersion()
    logging.debug('Existing chrome version is %s.',
                  self._run.attrs.chrome_version)
    self._WriteChromeVersionToMetadata()
    super(SyncChromeStage, self).HandleSkip()
Code example #15
File: run_tests.py Project: msisov/chromium68
def BuildTestSets(tests, chroot_available, network, jobs=1):
    """Build the tests to execute.

    Take care of special test handling like whether it needs to be inside or
    outside of the sdk, whether the test should be skipped, etc...

    Args:
      tests: List of tests to execute.
      chroot_available: Whether we can execute tests inside the sdk.
      network: Whether to execute network tests.
      jobs: How many jobs will we run in parallel.

    Returns:
      List of tests to execute and their full command line.
    """
    testsets = []
    for test in SortTests(tests, jobs=jobs):
        cmd = [test]

        # See if this test requires special consideration.
        status = SPECIAL_TESTS.get(test)
        if status is SKIP:
            logging.info('Skipping %s', test)
            continue
        elif status is INSIDE:
            if not cros_build_lib.IsInsideChroot():
                if not chroot_available:
                    logging.info('Skipping %s: chroot not available', test)
                    continue
                cmd = [
                    'cros_sdk', '--',
                    os.path.join('..', '..', 'chromite', test)
                ]
        elif status is OUTSIDE:
            if cros_build_lib.IsInsideChroot():
                logging.info('Skipping %s: must be outside the chroot', test)
                continue
        else:
            mode = os.stat(test).st_mode
            if stat.S_ISREG(mode):
                if not mode & 0o111:
                    logging.debug('Skipping %s: not executable', test)
                    continue
            else:
                logging.debug('Skipping %s: not a regular file', test)
                continue

        # Build up the final test command.
        cmd.append('--verbose')
        if network:
            cmd.append('--network')
        cmd = [
            'timeout', '--preserve-status', '-k',
            '%sm' % TEST_SIG_TIMEOUT,
            '%sm' % TEST_TIMEOUT
        ] + cmd

        testsets.append((test, cmd, tempfile.TemporaryFile()))

    return testsets
Code example #16
File: gs.py Project: qlb7707/webrtc_src
  def DoCommand(self, gsutil_cmd, headers=(), retries=None, version=None,
                parallel=False, **kwargs):
    """Run a gsutil command, suppressing output, and setting retry/sleep.

    Args:
      gsutil_cmd: The (mostly) constructed gsutil subcommand to run.
      headers: A list of raw headers to pass down.
      parallel: Whether gsutil should enable parallel copy/update of multiple
        files. NOTE: This option causes gsutil to use significantly more
        memory, even if gsutil is only uploading one file.
      retries: How many times to retry this command (defaults to setting given
        at object creation).
      version: If given, the generation; essentially the timestamp of the last
        update.  Note this is not the same as sequence-number; it's
        monotonically increasing bucket wide rather than reset per file.
        The usage of this is if we intend to replace/update only if the version
        is what we expect.  This is useful for distributed reasons- for example,
        to ensure you don't overwrite someone else's creation, a version of
        0 states "only update if no version exists".

    Returns:
      A RunCommandResult object.
    """
    kwargs = kwargs.copy()
    kwargs.setdefault('redirect_stderr', True)

    cmd = [self.gsutil_bin]
    cmd += self.gsutil_flags
    for header in headers:
      cmd += ['-h', header]
    if version is not None:
      cmd += ['-h', 'x-goog-if-generation-match:%d' % int(version)]

    # Enable parallel copy/update of multiple files if stdin is not to
    # be piped to the command. This does not split a single file into
    # smaller components for upload.
    if parallel and kwargs.get('input') is None:
      cmd += ['-m']

    cmd.extend(gsutil_cmd)

    if retries is None:
      retries = self.retries

    extra_env = kwargs.pop('extra_env', {})
    if self.boto_file:
      extra_env.setdefault('BOTO_CONFIG', self.boto_file)

    if self.dry_run:
      logging.debug("%s: would've run: %s", self.__class__.__name__,
                    cros_build_lib.CmdToStr(cmd))
    else:
      try:
        return retry_stats.RetryWithStats(retry_stats.GSUTIL,
                                          self._RetryFilter,
                                          retries, cros_build_lib.RunCommand,
                                          cmd, sleep=self._sleep_time,
                                          extra_env=extra_env, **kwargs)
      except cros_build_lib.RunCommandError as e:
        raise GSCommandError(e.msg, e.result, e.exception)
Code example #17
def ConfigureCacheDir(cache_dir):
  if cache_dir is None:
    os.environ.pop(constants.SHARED_CACHE_ENVVAR, None)
    logging.debug('Removed cache_dir setting')
  else:
    os.environ[constants.SHARED_CACHE_ENVVAR] = cache_dir
    logging.debug('Configured cache_dir to %r', cache_dir)
Code example #18
def _BreakoutDataByLinter(map_to_return, path):
    """Maps a linter method to the content of the |path|."""
    # Detect by content of the file itself.
    try:
        with open(path, 'rb') as fp:
            # We read 128 bytes because that's the Linux kernel's current limit.
            # Look for BINPRM_BUF_SIZE in fs/binfmt_script.c.
            data = fp.read(128)

            if not data.startswith(b'#!'):
                # If the file doesn't have a shebang, nothing to do.
                return

            m = SHEBANG_RE.match(data)
            if m:
                prog = m.group(1)
                if prog == b'/usr/bin/env':
                    prog = m.group(3)
                basename = os.path.basename(prog)
                if basename.startswith(b'python3'):
                    pylint_list = map_to_return.setdefault(_Pylint3File, [])
                    pylint_list.append(path)
                elif basename.startswith(b'python2'):
                    pylint_list = map_to_return.setdefault(_Pylint2File, [])
                    pylint_list.append(path)
                elif basename.startswith(b'python'):
                    pylint_list = map_to_return.setdefault(_Pylint2File, [])
                    pylint_list.append(path)
                    pylint_list = map_to_return.setdefault(_Pylint3File, [])
                    pylint_list.append(path)
                elif basename in (b'sh', b'dash', b'bash'):
                    shlint_list = map_to_return.setdefault(_ShellLintFile, [])
                    shlint_list.append(path)
    except IOError as e:
        logging.debug('%s: reading initial data failed: %s', path, e)
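Note: SHEBANG_RE itself is not part of this excerpt. A hypothetical reconstruction with the shape the code assumes (group 1 is the interpreter, group 3 the first argument, i.e. the real program when the interpreter is /usr/bin/env):

import re

# Hypothetical regex; the real SHEBANG_RE is defined elsewhere in the module.
SHEBANG_RE = re.compile(br'^#!\s*([^\s]+)(\s+([^\s]+))?')

m = SHEBANG_RE.match(b'#!/usr/bin/env python3\n')
prog = m.group(1)
if prog == b'/usr/bin/env':
    prog = m.group(3)
print(prog)  # b'python3'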
Code example #19
File: deploy.py Project: zhangjiayun/chromium.bb
def _DeployDLCImage(device, pkg_path):
    """Deploy (install and mount) a DLC image."""
    dlc_id, dlc_package = _GetDLCInfo(device, pkg_path, from_dut=False)
    if dlc_id and dlc_package:
        logging.notice('Deploy a DLC image for %s', dlc_id)

        dlc_path_src = os.path.join('/build/rootfs/dlc', dlc_id, dlc_package,
                                    'dlc.img')
        dlc_path = os.path.join(_DLC_INSTALL_ROOT, dlc_id, dlc_package)
        dlc_path_a = os.path.join(dlc_path, 'dlc_a')
        dlc_path_b = os.path.join(dlc_path, 'dlc_b')
        # Create folders for DLC images.
        device.RunCommand(['mkdir', '-p', dlc_path_a, dlc_path_b])
        # Copy images to the destination folders.
        device.RunCommand(
            ['cp', dlc_path_src,
             os.path.join(dlc_path_a, 'dlc.img')])
        device.RunCommand(
            ['cp', dlc_path_src,
             os.path.join(dlc_path_b, 'dlc.img')])

        # Set the proper perms and ownership so dlcservice can access the image.
        device.RunCommand(['chmod', '-R', '0755', _DLC_INSTALL_ROOT])
        device.RunCommand(
            ['chown', '-R', 'dlcservice:dlcservice', _DLC_INSTALL_ROOT])
        return True
    else:
        logging.debug('DLC_ID not found in package')
        return False
Code example #20
def main(argv):
  options = ParseArgs(argv)

  if not cros_build_lib.IsInsideChroot():
    raise commandline.ChrootRequiredError()

  if os.geteuid() != 0:
    cros_build_lib.SudoRunCommand(sys.argv)
    return

  # sysroot must have a trailing / as the tree dictionary produced by
  # create_trees is indexed with a trailing /.
  sysroot = cros_build_lib.GetSysroot(options.board) + '/'
  trees = create_trees(target_root=sysroot, config_root=sysroot)

  vartree = trees[sysroot]['vartree']

  cache_dir = os.path.join(path_util.FindCacheDir(),
                           'cros_install_debug_syms-v' + CACHE_VERSION)

  if options.clearcache:
    osutils.RmDir(cache_dir, ignore_missing=True)

  binhost_cache = None
  if options.cachebinhost:
    binhost_cache = cache.DiskCache(cache_dir)

  boto_file = vartree.settings['BOTO_CONFIG']
  if boto_file:
    os.environ['BOTO_CONFIG'] = boto_file

  gs_context = gs.GSContext()
  symbols_mapping = RemoteSymbols(vartree, binhost_cache)

  if options.all:
    to_install = vartree.dbapi.cpv_all()
  else:
    to_install = [GetMatchingCPV(p, vartree.dbapi) for p in options.packages]

  to_install = [p for p in to_install
                if ShouldGetSymbols(p, vartree.dbapi, symbols_mapping)]

  if not to_install:
    logging.info('nothing to do, exit')
    return

  with DebugSymbolsInstaller(vartree, gs_context, sysroot,
                             not options.debug) as installer:
    args = [(p, symbols_mapping[p]) for p in to_install]
    parallel.RunTasksInProcessPool(installer.Install, args,
                                   processes=options.jobs)

  logging.debug('installation done, updating packages index file')
  packages_dir = os.path.join(sysroot, 'packages')
  packages_file = os.path.join(packages_dir, 'Packages')
  # binpkg will set DEBUG_SYMBOLS automatically if it detects the debug symbols
  # in the packages dir.
  pkgindex = binpkg.GrabLocalPackageIndex(packages_dir)
  with open(packages_file, 'w') as p:
    pkgindex.Write(p)
Code example #21
def enumerate_package_ebuilds():
    """Determines package -> ebuild mappings for all packages.

    Yields a series of (package_path, package_name, [path_to_ebuilds]). This
    may yield the same package name multiple times if it's available in
    multiple overlays.
    """
    for overlay in portage_util.FindOverlays(overlay_type='both'):
        logging.debug('Found overlay %s', overlay)

        # Note that portage_util.GetOverlayEBuilds can't be used here, since that
        # specifically only searches for cros_workon candidates. We care about
        # everything we can possibly build.
        for dir_path, dir_names, file_names in os.walk(overlay):
            ebuilds = [x for x in file_names if x.endswith('.ebuild')]
            if not ebuilds:
                continue

            # os.walk directly uses `dir_names` to figure out what to walk next. If
            # there are ebuilds here, walking any lower is a waste, so don't do it.
            del dir_names[:]

            ebuild_dir = os.path.basename(dir_path)
            ebuild_parent_dir = os.path.basename(os.path.dirname(dir_path))
            package_name = '%s/%s' % (ebuild_parent_dir, ebuild_dir)
            yield dir_path, package_name, ebuilds
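Note: `del dir_names[:]` works because os.walk re-reads the list it just yielded to decide where to descend; clearing it in place prunes everything below the current directory, while rebinding the name to a new list would change nothing. A self-contained sketch:

import os

for dir_path, dir_names, file_names in os.walk('.'):
    if any(f.endswith('.ebuild') for f in file_names):
        del dir_names[:]  # mutate, don't rebind: os.walk reuses this list
    print(dir_path)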
Code example #22
File: gconv_strip.py Project: qlb7707/webrtc_src
  def Load(self):
    """Load the charsets from gconv-modules."""
    for line in open(self._filename):
      line = line.split('#', 1)[0].strip()
      if not line:  # Blank line or comment-only line.
        continue

      lst = line.split()
      if lst[0] == 'module':
        _, fromset, toset, filename = lst[:4]
        for charset in (fromset, toset):
          charset = charset.rstrip('/')
          mods = self._modules.get(charset, set())
          mods.add(filename)
          self._modules[charset] = mods
      elif lst[0] == 'alias':
        _, fromset, toset = lst
        fromset = fromset.rstrip('/')
        toset = toset.rstrip('/')
        # Warn if the same charset is defined as two different aliases.
        if self._alias.get(fromset, toset) != toset:
          logging.error('charset "%s" already defined as "%s".', fromset,
                        self._alias[fromset])
        self._alias[fromset] = toset
      else:
        cros_build_lib.Die('Unknown line: %s', line)

    logging.debug('Found %d modules and %d aliases in %s', len(self._modules),
                  len(self._alias), self._filename)
    charsets = sorted(self._alias.keys() + self._modules.keys())
    # Remove the 'INTERNAL' charset from the list, since it is not a charset
    # but an internal representation used to convert to and from other charsets.
    if 'INTERNAL' in charsets:
      charsets.remove('INTERNAL')
    return charsets
Code example #23
def GetUpdatePayloads(path, payload_dir, board=None,
                      src_image_to_delta=None, timeout=60 * 15,
                      static_dir=DEFAULT_STATIC_DIR):
  """Launch devserver to get the update payloads.

  Args:
    path: The xbuddy path.
    payload_dir: The directory to store the payloads. On failure, the devserver
                 log will be copied to |payload_dir|.
    board: The default board to use when |path| is None.
    src_image_to_delta: Image used as the base to generate the delta payloads.
    timeout: Timeout for launching devserver (seconds).
    static_dir: Devserver static dir to use.
  """
  ds = DevServerWrapper(static_dir=static_dir, src_image=src_image_to_delta,
                        board=board)
  req = GenerateXbuddyRequest(path, 'update')
  logging.info('Starting local devserver to generate/serve payloads...')
  try:
    ds.Start()
    url = ds.OpenURL(ds.GetURL(sub_dir=req), timeout=timeout)
    ds.DownloadFile(os.path.join(url, ROOTFS_FILENAME), payload_dir)
    ds.DownloadFile(os.path.join(url, STATEFUL_FILENAME), payload_dir)
  except DevServerException:
    logging.warning(ds.TailLog() or 'No devserver log is available.')
    raise
  else:
    logging.debug(ds.TailLog() or 'No devserver log is available.')
  finally:
    ds.Stop()
    if os.path.exists(ds.log_file):
      shutil.copyfile(ds.log_file,
                      os.path.join(payload_dir, 'local_devserver.log'))
    else:
      logging.warning('Could not find %s', ds.log_file)
Code example #24
  def simulate_slave(slave_metadata):
    build_id = _SimulateBuildStart(db, slave_metadata,
                                   master_build_id)
    _SimulateCQBuildFinish(db, slave_metadata, build_id)
    logging.debug('Simulated slave build %s on pid %s', build_id,
                  os.getpid())
    return build_id
Code example #25
  def _FindEbuildPath(self):
    """Discover the path to a package's associated ebuild.

    This method is not valid during the emerge hook process.

    Returns:
      full path file name of the ebuild file for this package.

    Raises:
      AssertionError if it can't be discovered for some reason.
    """
    equery_cmd = cros_build_lib.GetSysrootToolPath(
        cros_build_lib.GetSysroot(self.board), 'equery')
    args = [equery_cmd, '-q', '-C', 'which', self.fullnamerev]
    try:
      path = cros_build_lib.RunCommand(args, print_cmd=True,
                                       redirect_stdout=True).output.strip()
    except cros_build_lib.RunCommandError:
      path = None

    # Path can be false because of an exception, or a command result.
    if not path:
      raise AssertionError('_FindEbuildPath for %s failed.\n'
                           'Is your tree clean? Try a rebuild?' %
                           self.fullnamerev)

    logging.debug('%s -> %s', ' '.join(args), path)

    if not os.access(path, os.F_OK):
      raise AssertionError('Can\'t access %s', path)

    return path
Code example #26
  def _SwitchToLocalManifest(self, local_manifest):
    """Reinitializes the repository if the manifest has changed."""
    logging.debug('Moving to manifest defined by %s', local_manifest)
    # TODO: use upstream repo's manifest logic when we bump repo version.
    manifest_path = self.GetRelativePath('.repo/manifest.xml')
    os.unlink(manifest_path)
    shutil.copyfile(local_manifest, manifest_path)
Code example #27
  def CleanNoBindMount(path):
    # This test is a convenience for developers that bind mount these dirs.
    if not os.path.ismount(path):
      Clean(path)
    else:
      logging.debug('Ignoring bind mounted dir: %s', self.options.path)
Code example #28
  def PublishManifest(self, manifest, version, build_id=None):
    """Publishes the manifest as the manifest for the version to others.

    Args:
      manifest: Path to manifest file to publish.
      version: Manifest version string, e.g. 6102.0.0-rc4
      build_id: Optional integer giving build_id of the build that is
                publishing this manifest. If specified and non-negative,
                build_id will be included in the commit message.
    """
    # Note: This commit message is used by master.cfg for figuring out when to
    #       trigger slave builders.
    commit_message = 'Automatic: Start %s %s %s' % (self.build_names[0],
                                                    self.branch, version)
    if build_id is not None and build_id >= 0:
      commit_message += '\nCrOS-Build-Id: %s' % build_id

    logging.info('Publishing build spec for: %s', version)
    logging.info('Publishing with commit message: %s', commit_message)
    logging.debug('Manifest contents below.\n%s', osutils.ReadFile(manifest))

    # Copy the manifest into the manifest repository.
    spec_file = '%s.xml' % os.path.join(self.all_specs_dir, version)
    osutils.SafeMakedirs(os.path.dirname(spec_file))

    shutil.copyfile(manifest, spec_file)

    # Actually push the manifest.
    self.PushSpecChanges(commit_message)
Code example #29
def CheckAndGetCIDBCreds(force_update=False, folder=None):
    """Check if CIDB creds exist, download creds if necessary."""
    cache_dir = path_util.GetCacheDir()
    dir_name = folder if folder is not None else 'cidb_creds'
    cidb_dir = os.path.join(cache_dir, dir_name)
    cidb_dir_lock = cidb_dir + '.lock'

    with locking.FileLock(cidb_dir_lock).write_lock():
        if os.path.exists(cidb_dir):
            if force_update:
                shutil.rmtree(cidb_dir, ignore_errors=True)
                logging.debug('Force updating CIDB creds. Deleted %s.',
                              cidb_dir)
            else:
                logging.debug('Using cached credentials %s', cidb_dir)
                return cidb_dir

        os.mkdir(cidb_dir)

        try:
            GetCIDBCreds(cidb_dir)
            return cidb_dir
        except Exception as e:
            if isinstance(e, gs.GSCommandError):
                logging.warning(
                    'Please check if the GS credentials are configured '
                    'correctly. Please note the permissions to fetch '
                    'these credentials are for Googlers only.')

            logging.error('Failed to get CIDB credentials. Deleting %s',
                          cidb_dir)
            shutil.rmtree(cidb_dir, ignore_errors=True)
            raise
Code example #30
    def _ReadMetadataURL(url):
      # Read the metadata.json URL and parse json into a dict.
      metadata_dict = json.loads(gs_ctx.Cat(url, print_cmd=False))

      # Read the file next to url which indicates whether the metadata has
      # been gathered before, and with what stats version.
      if get_sheets_version:
        gathered_dict = {}
        gathered_url = url + '.gathered'
        if gs_ctx.Exists(gathered_url, print_cmd=False):
          gathered_dict = json.loads(gs_ctx.Cat(gathered_url,
                                                print_cmd=False))

        sheets_version = gathered_dict.get(BuildData.SHEETS_VER_KEY)
      else:
        sheets_version = None

      bd = BuildData(url, metadata_dict, sheets_version=sheets_version)

      if bd.build_number is None:
        logging.warning('Metadata at %s was missing build number.', url)
        m = re.match(r'.*-b([0-9]*)/.*', url)
        if m:
          inferred_number = int(m.groups()[0])
          logging.warning('Inferred build number %d from metadata url.',
                          inferred_number)
          bd.metadata_dict['build-number'] = inferred_number
      if sheets_version is not None:
        logging.debug('Read %s:\n  build_number=%d, sheets v%d', url,
                      bd.build_number, sheets_version)
      else:
        logging.debug('Read %s:\n  build_number=%d, ungathered', url,
                      bd.build_number)

      build_data_per_url[url] = bd
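Note: a hedged sketch of the build-number inference above, with a made-up metadata URL of the '...-b<number>/...' shape the regex expects:

import re

url = 'gs://some-bucket/some-builder/R50-1.2.3-b1234/metadata.json'
m = re.match(r'.*-b([0-9]*)/.*', url)
if m:
    print(int(m.groups()[0]))  # 1234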
Code example #31
def FilteredQuery(opts, query, helper=None):
    """Query gerrit and filter/clean up the results"""
    ret = []

    logging.debug('Running query: %s', query)
    for cl in _Query(opts, query, raw=True, helper=helper):
        # Gerrit likes to return a stats record too.
        if not 'project' in cl:
            continue

        # Strip off common leading names since the result is still
        # unique over the whole tree.
        if not opts.verbose:
            for pfx in ('aosp', 'chromeos', 'chromiumos', 'external',
                        'overlays', 'platform', 'third_party'):
                if cl['project'].startswith('%s/' % pfx):
                    cl['project'] = cl['project'][len(pfx) + 1:]

            cl['url'] = uri_lib.ShortenUri(cl['url'])

        ret.append(cl)

    if opts.sort == 'unsorted':
        return ret
    if opts.sort == 'number':
        key = lambda x: int(x[opts.sort])
    else:
        key = lambda x: x[opts.sort]
    return sorted(ret, key=key)
Code example #32
    def _FixUpManifests(self, repo_manifest):
        """Points the checkouts at the new branch in the manifests.

        Within the branch, make sure all manifests with projects that are
        "branchable" are checked out to "refs/heads/<new_branch>".  Do this
        by updating all manifests in the known manifest projects.
        """
        assert not self._run.options.delete_branch, 'Cannot fix a deleted branch.'

        # Use local branch ref.
        branch_ref = git.NormalizeRef(self.branch_name)

        logging.debug('Fixing manifest projects for new branch.')
        for project in site_config.params.MANIFEST_PROJECTS:
            manifest_checkout = repo_manifest.FindCheckout(project)
            manifest_dir = manifest_checkout['local_path']
            push_remote = manifest_checkout['push_remote']

            # Checkout revision can be either a sha1 or a branch ref.
            src_ref = manifest_checkout['revision']
            if not git.IsSHA1(src_ref):
                src_ref = git.NormalizeRemoteRef(push_remote, src_ref)

            git.CreateBranch(manifest_dir, manifest_version.PUSH_BRANCH,
                             src_ref)

            # We want to process default.xml and official.xml + their imports.
            pending_manifests = [
                constants.DEFAULT_MANIFEST, constants.OFFICIAL_MANIFEST
            ]
            processed_manifests = []

            while pending_manifests:
                # Canonicalize the manifest name (resolve dir and symlinks).
                manifest_path = os.path.join(manifest_dir,
                                             pending_manifests.pop())
                manifest_path = os.path.realpath(manifest_path)

                # Don't process a manifest more than once.
                if manifest_path in processed_manifests:
                    continue

                processed_manifests.append(manifest_path)

                if not os.path.exists(manifest_path):
                    logging.info('Manifest not found: %s', manifest_path)
                    continue

                logging.debug('Fixing manifest at %s.', manifest_path)
                included_manifests = self._UpdateManifest(manifest_path)
                pending_manifests += included_manifests

            git.RunGit(manifest_dir, ['add', '-A'], print_cmd=True)
            message = 'Fix up manifest after branching %s.' % branch_ref
            git.RunGit(manifest_dir, ['commit', '-m', message], print_cmd=True)
            push_to = git.RemoteRef(push_remote, branch_ref)
            git.GitPush(manifest_dir,
                        manifest_version.PUSH_BRANCH,
                        push_to,
                        skip=self.skip_remote_push)
Code example #33
    def run_cmd(self, cmd, expected=None):
        """Runs rpc command and log metrics

        @param cmd: string of rpc command to send
        @param expected: expected result of rpc
        """
        metric_fields = self._metric_fields.copy()
        metric_fields['command'] = cmd
        metric_fields['success'] = True
        metric_fields['failure_reason'] = ''

        with metrics.SecondsTimer(METRIC_RPC_CALL_DURATIONS,
                fields=dict(metric_fields)) as f:

            msg_str = "%s:%s" % (self._hostname, cmd)

            try:
                result = self._afe.run(cmd)
                logging.debug("%s result = %s", msg_str, result)
                if expected is not None and expected != result:
                    _failed(f, msg_str, 'IncorrectResponse')

            except urllib2.HTTPError as e:
                _failed(f, msg_str, 'HTTPError:%d' % e.code)

            except Exception as e:
                _failed(f, msg_str, FAILURE_REASONS.get(type(e), 'Unknown'),
                        err=e)

                if type(e) not in FAILURE_REASONS:
                    raise

            if f['success']:
                logging.info("%s success", msg_str)
Code example #34
  def PerformStage(self):
    """Performs the stage."""
    if self._android_rev and self._run.config.master:
      self._android_version = self.GetLatestAndroidVersion()
      logging.info('Latest Android version is: %s', self._android_version)

    if (self._chrome_rev == constants.CHROME_REV_LATEST and
        self._run.config.master):
      # PFQ master needs to determine what version of Chrome to build
      # for all slaves.
      logging.info('I am a master running with CHROME_REV_LATEST, '
                   'therefore getting latest chrome version.')
      self._chrome_version = self.GetLatestChromeVersion()
      logging.info('Latest chrome version is: %s', self._chrome_version)

    ManifestVersionedSyncStage.PerformStage(self)

    # Generate blamelist
    cros_version = self.GetLastChromeOSVersion()
    if cros_version:
      old_filename = self.manifest_manager.GetBuildSpecFilePath(
          cros_version.milestone, cros_version.platform)
      if not os.path.exists(old_filename):
        logging.error(
            'Could not generate blamelist, '
            'manifest file does not exist: %s', old_filename)
      else:
        logging.debug('Generate blamelist against: %s', old_filename)
        lkgm_manager.GenerateBlameList(self.repo, old_filename)
Code example #35
File: commandline.py Project: qlb7707/webrtc_src
def ConfigureCacheDir(cache_dir):
  if cache_dir is None:
    os.environ.pop(constants.SHARED_CACHE_ENVVAR, None)
    logging.debug('Removed cache_dir setting')
  else:
    os.environ[constants.SHARED_CACHE_ENVVAR] = cache_dir
    logging.debug('Configured cache_dir to %r', cache_dir)
Code example #36
File: gob_util.py Project: mariospr/chromium-browser
  def _FetchUrlHelper():
    err_prefix = 'A transient error occurred while querying %s:\n' % (host,)
    try:
      conn = CreateHttpConn(host, path, reqtype=reqtype, headers=headers,
                            body=body)
      response = conn.getresponse()
    except socket.error as ex:
      logging.warning('%s%s', err_prefix, str(ex))
      raise

    # Normal/good responses.
    response_body = response.read()
    if response.status == 204 and ignore_204:
      # This exception is used to confirm expected response status.
      raise GOBError(response.status, response.reason)
    if response.status == 404 and ignore_404:
      return StringIO()
    elif response.status == 200:
      return StringIO(response_body)

    # Bad responses.
    logging.debug('response msg:\n%s', response.msg)
    http_version = 'HTTP/%s' % ('1.1' if response.version == 11 else '1.0')
    msg = ('%s %s %s\n%s %d %s\nResponse body: %r' %
           (reqtype, conn.req_params['url'], http_version,
            http_version, response.status, response.reason,
            response_body))

    # Ones we can retry.
    if response.status >= 500:
      # A status >=500 is assumed to be a possible transient error; retry.
      logging.warning('%s%s', err_prefix, msg)
      raise InternalGOBError(response.status, response.reason)

    # Ones we cannot retry.
    home = os.environ.get('HOME', '~')
    url = 'https://%s/new-password' % host
    if response.status in (302, 303, 307):
      err_prefix = ('Redirect found; missing/bad %s/.netrc credentials or '
                    'permissions (0600)?\n See %s' % (home, url))
    elif response.status in (400,):
      err_prefix = 'Permission error; talk to the admins of the GoB instance'
    elif response.status in (401,):
      err_prefix = ('Authorization error; missing/bad %s/.netrc credentials or '
                    'permissions (0600)?\n See %s' % (home, url))
    elif response.status in (422,):
      err_prefix = ('Bad request body?')

    if response.status >= 400:
      # The 'X-ErrorId' header is set only on >= 400 response code.
      logging.warning('%s\n%s\nX-ErrorId: %s', err_prefix, msg,
                      response.getheader('X-ErrorId'))
    else:
      logging.warning('%s\n%s', err_prefix, msg)

    try:
      logging.warning('conn.sock.getpeername(): %s', conn.sock.getpeername())
    except AttributeError:
      logging.warning('peer name unavailable')
    raise GOBError(response.status, response.reason)
Code example #37
def uploadFiles(dir_entry, build, apfe_id, job_id, package, uploader, *args,
                **kwargs):
    """Upload CTS/GTS test results to gs buckets.

    Args:
      dir_entry: path to the test folder.
      build: build name such as samus-release, or kevin-release.
      apfe_id: id number used for apfe upload, typically we use autotest
               parent job id.
      job_id: id number, such as autotest_job_id or builder_id.
      package: CTS package name.
      uploader: upload function to upload to gs
    """
    xml_files = getXMLGZFiles(package, dir_entry)
    logging.info('Uploading CTS/GTS xml files: %s', xml_files)
    for xml in xml_files:
        timestamp = os.path.basename(os.path.dirname(xml))
        gs_url = os.path.join(constants.DEFAULT_CTS_RESULTS_GSURI, package,
                              build + '-' + job_id + '_' + timestamp)
        uploader(gs_url, xml, *args, **kwargs)

    # Results produced by CTS test list collector are dummy results.
    # They don't need to be copied to APFE bucket which is mainly being used for
    # CTS APFE submission.
    if not _is_test_collector(package):
        apfe_files = getApfeFiles(package, dir_entry)
        logging.info('Uploading CTS/GTS apfe files: %s', apfe_files)
        for apfe in apfe_files:
            timestamp = os.path.splitext(os.path.basename(apfe))[0]
            gs_url = os.path.join(constants.DEFAULT_CTS_APFE_GSURI, build,
                                  apfe_id, package, job_id + '_' + timestamp)
            uploader(gs_url, apfe, *args, **kwargs)
    else:
        logging.debug('%s is a CTS Test collector Autotest test run.', package)
        logging.debug('Skipping CTS results upload to APFE gs:// bucket.')
Code example #38
File: cros_vm.py Project: metux/chromium-suckless
    def Start(self):
        """Start the VM."""

        self.Stop()

        if not self.kvm_path:
            self.kvm_path = self._FindKVMBinary()
        logging.debug('kvm path=%s', self.kvm_path)

        if not self.image_path:
            self.image_path = os.environ.get('VM_IMAGE_PATH', '')
        logging.debug('vm image path=%s', self.image_path)
        if not self.image_path or not os.path.exists(self.image_path):
            raise VMError('VM image path %s does not exist.' % self.image_path)

        self._CleanupFiles(recreate=True)
        open(self.kvm_serial, 'w')
        for pipe in [self.kvm_pipe_in, self.kvm_pipe_out]:
            os.mkfifo(pipe, 0600)

        args = [
            self.kvm_path, '-m', '2G', '-smp', '4', '-vga', 'cirrus',
            '-pidfile', self.pidfile, '-chardev',
            'pipe,id=control_pipe,path=%s' % self.kvm_monitor, '-serial',
            'file:%s' % self.kvm_serial, '-mon', 'chardev=control_pipe',
            '-daemonize', '-net', 'nic,model=virtio', '-net',
            'user,hostfwd=tcp::%d-:22' % self.ssh_port, '-drive',
            'file=%s,index=0,media=disk,cache=unsafe' % self.image_path
        ]
        logging.info(' '.join(args))
        logging.info('Pid file: %s', self.pidfile)
        if not self.dry_run:
            cros_build_lib.SudoRunCommand(args)
Code example #39
  def _SwitchToLocalManifest(self, local_manifest):
    """Reinitializes the repository if the manifest has changed."""
    logging.debug('Moving to manifest defined by %s', local_manifest)
    # TODO: use upstream repo's manifest logic when we bump repo version.
    manifest_path = self.GetRelativePath('.repo/manifest.xml')
    os.unlink(manifest_path)
    shutil.copyfile(local_manifest, manifest_path)
Code example #40
    def PostRebootUpdateCheckForAUTest(self):
        """Do another update check after reboot to get the post update hostlog.

        This is only done with autoupdate_EndToEndTest.
        """
        logging.debug(
            'Doing one final update check to get post update hostlog.')
        nebraska_bin = os.path.join(self.device_dev_dir,
                                    self.REMOTE_NEBRASKA_FILENAME)
        nebraska = nebraska_wrapper.RemoteNebraskaWrapper(
            self.device,
            nebraska_bin=nebraska_bin,
            update_metadata_dir=self.device.work_dir)

        try:
            nebraska.Start()

            nebraska_url = nebraska.GetURL(critical_update=True,
                                           no_update=True)
            cmd = [
                self.REMOTE_UPDATE_ENGINE_BIN_FILENAME, '--check_for_update',
                '--omaha_url="%s"' % nebraska_url
            ]
            self.device.run(cmd, **self._cmd_kwargs)
            op = self.GetUpdateStatus(self.device)[0]
            logging.info('Post update check status: %s', op)
        except Exception as err:
            logging.error('Post reboot update check failed: %s', str(err))
            logging.warning(nebraska.PrintLog()
                            or 'No nebraska log is available.')
        finally:
            nebraska.Stop()
Code example #41
def FetchPinnedGuestImages(chroot, sysroot):
    """Fetch the file names and uris of Guest VM and Container images for testing.

    Args:
      chroot (chroot_lib.Chroot): Chroot where the sysroot lives.
      sysroot (sysroot_lib.Sysroot): Sysroot whose images are being fetched.

    Returns:
      list[PinnedGuestImage] - The pinned guest image uris.
    """
    pins_root = os.path.abspath(
        os.path.join(chroot.path, sysroot.path.lstrip(os.sep),
                     constants.GUEST_IMAGES_PINS_PATH))

    pins = []
    for pin_file in sorted(glob.iglob(os.path.join(pins_root, '*.json'))):
        with open(pin_file) as f:
            pin = json.load(f)

            filename = pin.get(constants.PIN_KEY_FILENAME)
            uri = pin.get(constants.PIN_KEY_GSURI)
            if not filename or not uri:
                logging.warning("Skipping invalid pin file: '%s'.", pin_file)
                logging.debug("'%s' data: filename='%s' uri='%s'", pin_file,
                              filename, uri)
                continue

            pins.append(PinnedGuestImage(filename=filename, uri=uri))

    return pins
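Note: a hedged example of a pin file this loop would accept. The key strings are assumptions standing in for constants.PIN_KEY_FILENAME and constants.PIN_KEY_GSURI, and the values are made up:

import json

pin = json.loads('''{
  "filename": "guest-vm-image.tar.xz",
  "gsuri": "gs://some-bucket/guest-images/guest-vm-image.tar.xz"
}''')
print(pin.get('filename'))  # guest-vm-image.tar.xz
print(pin.get('gsuri'))  # gs://some-bucket/guest-images/guest-vm-image.tar.xz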
Code example #42
  def ResetStatefulPartition(self):
    """Clear any pending stateful update request."""
    logging.debug('Resetting stateful partition...')
    try:
      stateful_updater.StatefulUpdater(self.device).Reset()
    except stateful_updater.Error as e:
      raise StatefulUpdateError(e)
Code example #43
File: branch_stages.py Project: qlb7707/webrtc_src
    def PerformStage(self):
        """Run the branch operation."""
        # Setup and initialize the repo.
        super(BranchUtilStage, self).PerformStage()

        repo_manifest = git.ManifestCheckout.Cached(self._build_root)
        checkouts = repo_manifest.ListCheckouts()

        logging.debug("Processing %d checkouts from manifest in parallel.", len(checkouts))
        args = [[repo_manifest, x] for x in checkouts]
        parallel.RunTasksInProcessPool(self._ProcessCheckout, args, processes=16)

        if not self._run.options.delete_branch:
            self._FixUpManifests(repo_manifest)

        # Increment versions for a new branch.
        if not (self._run.options.delete_branch or self.rename_to):
            overlay_name = "chromiumos/overlays/chromiumos-overlay"
            overlay_checkout = repo_manifest.FindCheckout(overlay_name)
            overlay_dir = overlay_checkout["local_path"]
            push_remote = overlay_checkout["push_remote"]
            self._IncrementVersionOnDiskForNewBranch(push_remote)

            source_branch = repo_manifest.default["revision"]
            self._IncrementVersionOnDiskForSourceBranch(overlay_dir, push_remote, source_branch)
Code example #44
File: cros.py Project: qlb7707/webrtc_src
def main(argv):
  try:
    parser = GetOptions(command.ListCommands())
    # Cros currently does nothing without a subcmd. Print help if no args are
    # specified.
    if not argv:
      parser.print_help()
      return 1

    namespace = parser.parse_args(argv)
    subcommand = namespace.command_class(namespace)
    with stats.UploadContext() as queue:
      if subcommand.upload_stats:
        cmd_base = subcommand.options.command_class.command_name
        cmd_stats = stats.Stats.SafeInit(cmd_line=sys.argv, cmd_base=cmd_base)
        if cmd_stats:
          queue.put([cmd_stats, stats.StatsUploader.URL,
                     subcommand.upload_stats_timeout])
      # TODO: to make command completion faster, send an interrupt signal to the
      # stats uploader task after the subcommand completes.
      code = _RunSubCommand(subcommand)
      if code is not None:
        return code

    return 0
  except KeyboardInterrupt:
    logging.debug('Aborted due to keyboard interrupt.')
    return 1
Code example #45
  def ProcessPackageLicenses(self):
    """Iterate through all packages provided and gather their licenses.

    GetLicenses will scrape licenses from the code and/or gather stock license
    names. We gather the list of stock and custom ones for later processing.

    Do not call this after adding virtual packages with AddExtraPkg.
    """
    for package_name in self.packages:
      pkg = self.packages[package_name]

      if pkg.skip:
        logging.debug('Package %s is in skip list', package_name)
        continue

      # Other skipped packages get dumped with incomplete info and the skip flag
      if not os.path.exists(pkg.license_dump_path):
        if not self.gen_licenses:
          raise PackageLicenseError('License for %s is missing' % package_name)

        logging.error('>>> License for %s is missing, creating now <<<',
                      package_name)
        build_info_path = os.path.join(
            cros_build_lib.GetSysroot(pkg.board),
            PER_PKG_LICENSE_DIR, pkg.fullnamerev)
        pkg.GetLicenses(build_info_path, None)

        # We dump packages where licensing failed too.
        pkg.SaveLicenseDump(pkg.license_dump_path)

      # Load the pre-cached version, if the in-memory version is incomplete.
      if not pkg.license_names:
        logging.debug('loading dump for %s', pkg.fullnamerev)
        self._LoadLicenseDump(pkg)
Code example #46
    def _ApplyChanges(self, _inducing_change, changes):
        """Apply a given ordered sequence of changes.

        Args:
          _inducing_change: The core GitRepoPatch instance that led to this
            sequence of changes; basically what this transaction was computed
            from.  Needs to be passed in so that the exception wrapping
            machinery can convert any failures, assigning blame appropriately.
          changes: An ordered sequence of GitRepoPatch instances to apply.
        """
        # Bail immediately if we know one of the requisite patches won't apply.
        for change in changes:
            failure = self.failed_tot.get(change.id)
            if failure is not None:
                raise failure

        applied = []
        for change in changes:
            if change in self._committed_cache:
                continue

            try:
                self.ApplyChange(change)
            except cros_patch.PatchException as e:
                if not e.inflight:
                    self.failed_tot[change.id] = e
                raise
            applied.append(change)

        logging.debug('Done investigating changes.  Applied %s',
                      ' '.join([c.id for c in applied]))
Code example #47
    def _FindEbuildPath(self):
        """Discover the path to a package's associated ebuild.

        This method is not valid during the emerge hook process.

        Returns:
          Full path to the ebuild file for this package.

        Raises:
          AssertionError if it can't be discovered for some reason.
        """
        equery_cmd = cros_build_lib.GetSysrootToolPath(
            cros_build_lib.GetSysroot(self.board), 'equery')
        args = [equery_cmd, '-q', '-C', 'which', self.fullnamerev]
        try:
            path = cros_build_lib.RunCommand(
                args, print_cmd=True, redirect_stdout=True).output.strip()
        except cros_build_lib.RunCommandError:
            path = None

        # Path can be false because of an exception, or a command result.
        if not path:
            raise AssertionError('_FindEbuildPath for %s failed.\n'
                                 'Is your tree clean? Try a rebuild?' %
                                 self.fullnamerev)

        logging.debug('%s -> %s', ' '.join(args), path)

        if not os.access(path, os.F_OK):
            raise AssertionError("Can't access %s" % path)

        return path
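The helper simply shells out to equery, so the same lookup can be reproduced by hand. A rough sketch, assuming the board-suffixed equery wrapper that the chroot typically provides (the board name and package atom are examples):

import subprocess

# Hand-rolled version of the lookup above; 'equery-betty' and the package
# atom are illustrative, not taken from the original code.
path = subprocess.check_output(
    ['equery-betty', '-q', '-C', 'which',
     'chromeos-base/power_manager-0.0.1-r1']).strip()
print(path)  # .../power_manager/power_manager-0.0.1-r1.ebuild on success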
Code example #49
  def _LoadFromFile(self):
    """Read the version file and set the version components"""
    with open(self.version_file, 'r') as version_fh:
      for line in version_fh:
        if not line.strip():
          continue

        match = self.FindValue('CHROME_BRANCH', line)
        if match:
          self.chrome_branch = match
          logging.debug('Set the Chrome branch number to: %s',
                        self.chrome_branch)
          continue

        match = self.FindValue('CHROMEOS_BUILD', line)
        if match:
          self.build_number = match
          logging.debug('Set the build version to: %s', self.build_number)
          continue

        match = self.FindValue('CHROMEOS_BRANCH', line)
        if match:
          self.branch_build_number = match
          logging.debug('Set the branch version to: %s',
                        self.branch_build_number)
          continue

        match = self.FindValue('CHROMEOS_PATCH', line)
        if match:
          self.patch_number = match
          logging.debug('Set the patch version to: %s', self.patch_number)
          continue

    logging.debug(self.VersionString())
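The parser walks a key=value version file; a plausible input, sketched under the assumption that FindValue matches KEY=value lines (its regex is not shown in this excerpt):

import tempfile

# Illustrative version file contents (format assumed from the keys above).
sample = ('CHROME_BRANCH=126\n'
          'CHROMEOS_BUILD=15886\n'
          'CHROMEOS_BRANCH=0\n'
          'CHROMEOS_PATCH=0\n')
with tempfile.NamedTemporaryFile('w', suffix='.sh', delete=False) as f:
    f.write(sample)
# Pointing version_file at f.name and calling _LoadFromFile would then set
# chrome_branch='126', build_number='15886', and so on.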
Code example #50
  def Run(self):
    """Run cros debug."""
    commandline.RunInsideChroot(self)
    self.options.Freeze()
    self._ReadOptions()
    with remote_access.ChromiumOSDeviceHandler(
        self.ssh_hostname, port=self.ssh_port, username=self.ssh_username,
        private_key=self.ssh_private_key) as device:
      self.board = cros_build_lib.GetBoard(device_board=device.board,
                                           override_board=self.options.board)
      logging.info('Board is %s', self.board)

      self.gdb_cmd = [
          'gdb_remote', '--ssh',
          '--board', self.board,
          '--remote', self.ssh_hostname,
      ]
      if self.ssh_port:
        self.gdb_cmd.extend(['--ssh_port', str(self.ssh_port)])

      if not (self.pid or self.exe):
        cros_build_lib.Die(
            'Must use --exe or --pid to specify the process to debug.')

      if self.pid:
        if self.list or self.exe:
          cros_build_lib.Die(
              '--list and --exe are disallowed when --pid is used.')
        self._DebugRunningProcess(self.pid)
        return

      if not self.exe.startswith('/'):
        cros_build_lib.Die('--exe must have a full pathname.')
      logging.debug('Executable path is %s', self.exe)
      if not device.IsFileExecutable(self.exe):
        cros_build_lib.Die(
            'File path "%s" does not exist or is not executable on device %s',
            self.exe, self.ssh_hostname)

      pids = device.GetRunningPids(self.exe)
      self._ListProcesses(device, pids)

      if self.list:
        # If '--list' flag is on, do not launch GDB.
        return

      if pids:
        choices = ['Start a new process under GDB']
        choices.extend(pids)
        idx = cros_build_lib.GetChoice(
            'Please select the process pid to debug (select [0] to start a '
            'new process):', choices)
        if idx == 0:
          self._DebugNewProcess()
        else:
          self._DebugRunningProcess(pids[idx - 1])
      else:
        self._DebugNewProcess()
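For a concrete picture of what this builds, the gdb_remote argv for a hypothetical board/host/port combination looks like the following (values illustrative; flag names copied from gdb_cmd above):

# --board=betty, host 192.168.1.2, ssh port 2222:
gdb_cmd = [
    'gdb_remote', '--ssh',
    '--board', 'betty',
    '--remote', '192.168.1.2',
    '--ssh_port', '2222',
]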
Code example #51
  def GetPackageInfo(self, packages, use_all=False, use_workon_only=False):
    """Get information about packages.

    Args:
      packages: list of package name fragments.  These will be mapped to
          canonical portage atoms via the same process as
          StartWorkingOnPackages().
      use_all: True iff instead of the provided package list, we should just
          stop working on all currently worked on atoms for the system in
          question.
      use_workon_only: True iff instead of the provided package list, we should
          stop working on all currently worked on atoms that define only a
          -9999 ebuild.

    Returns:
      Returns a list of PackageInfo tuples.
    """
    if use_all or use_workon_only:
      # You can't use info to find the source code from Chrome, since that
      # workflow is different.
      ebuilds = self._GetWorkonEbuilds(filter_workon=use_workon_only,
                                       include_chrome=False)
    else:
      atoms = self._GetCanonicalAtoms(packages)
      ebuilds = [self._FindEbuildForPackage(atom) for atom in atoms]

    ebuild_to_repos = {}
    for ebuild in ebuilds:
      workon_vars = portage_util.EBuild.GetCrosWorkonVars(
          ebuild, portage_util.EbuildToCP(ebuild))
      projects = workon_vars.project if workon_vars else []
      ebuild_to_repos[ebuild] = projects

    repository_to_source_path = {}
    repo_list_result = cros_build_lib.RunCommand(
        'repo list', shell=True, enter_chroot=True, capture_output=True,
        print_cmd=False)

    for line in repo_list_result.output.splitlines():
      pieces = line.split(' : ')
      if len(pieces) != 2:
        logging.debug('Ignoring malformed repo list output line: "%s"', line)
        continue

      source_path, repository = pieces
      repository_to_source_path[repository] = source_path

    result = []
    for ebuild in ebuilds:
      package = portage_util.EbuildToCP(ebuild)
      repos = ebuild_to_repos.get(ebuild, [])
      src_paths = [repository_to_source_path.get(repo) for repo in repos]
      src_paths = [path for path in src_paths if path]
      result.append(PackageInfo(package, repos, src_paths))

    result.sort()
    return result
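The loop above depends on `repo list` printing one '<source path> : <project>' pair per line; a minimal demo of that parsing contract:

# Illustrative 'repo list' line and the split the code above performs:
line = 'src/platform2 : chromiumos/platform2'
source_path, repository = line.split(' : ')
assert (source_path, repository) == ('src/platform2', 'chromiumos/platform2')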
Code example #52
  def _FixUpManifests(self, repo_manifest):
    """Points the checkouts at the new branch in the manifests.

    Within the branch, make sure all manifests with projects that are
    "branchable" are checked out to "refs/heads/<new_branch>".  Do this
    by updating all manifests in the known manifest projects.
    """
    assert not self._run.options.delete_branch, 'Cannot fix a deleted branch.'

    # Use local branch ref.
    branch_ref = git.NormalizeRef(self.branch_name)

    logging.debug('Fixing manifest projects for new branch.')
    for project in site_config.params.MANIFEST_PROJECTS:
      manifest_checkout = repo_manifest.FindCheckout(project)
      manifest_dir = manifest_checkout['local_path']
      push_remote = manifest_checkout['push_remote']

      # Checkout revision can be either a sha1 or a branch ref.
      src_ref = manifest_checkout['revision']
      if not git.IsSHA1(src_ref):
        src_ref = git.NormalizeRemoteRef(push_remote, src_ref)

      git.CreateBranch(
          manifest_dir, manifest_version.PUSH_BRANCH, src_ref)

      # We want to process default.xml and official.xml + their imports.
      pending_manifests = [constants.DEFAULT_MANIFEST,
                           constants.OFFICIAL_MANIFEST]
      processed_manifests = []

      while pending_manifests:
        # Canonicalize the manifest name (resolve dir and symlinks).
        manifest_path = os.path.join(manifest_dir, pending_manifests.pop())
        manifest_path = os.path.realpath(manifest_path)

        # Don't process a manifest more than once.
        if manifest_path in processed_manifests:
          continue

        processed_manifests.append(manifest_path)

        if not os.path.exists(manifest_path):
          logging.info('Manifest not found: %s', manifest_path)
          continue

        logging.debug('Fixing manifest at %s.', manifest_path)
        included_manifests = self._UpdateManifest(manifest_path)
        pending_manifests += included_manifests

      git.RunGit(manifest_dir, ['add', '-A'], print_cmd=True)
      message = 'Fix up manifest after branching %s.' % branch_ref
      git.RunGit(manifest_dir, ['commit', '-m', message], print_cmd=True)
      push_to = git.RemoteRef(push_remote, branch_ref)
      git.GitPush(manifest_dir, manifest_version.PUSH_BRANCH, push_to,
                  skip=self.skip_remote_push)
Code example #53
  @classmethod
  def GetRootDev(cls, device):
    """Get the current root device on |device|.

    Args:
      device: The ChromiumOSDevice object whose root device we want to
        fetch.
    """
    rootdev = device.RunCommand(
        ['rootdev', '-s'], capture_output=True).output.strip()
    logging.debug('Current root device is %s', rootdev)
    return rootdev
Code example #54
def BuildTestSets(tests, chroot_available, network, jobs=1):
  """Build the tests to execute.

  Take care of special test handling like whether it needs to be inside or
  outside of the sdk, whether the test should be skipped, etc...

  Args:
    tests: List of tests to execute.
    chroot_available: Whether we can execute tests inside the sdk.
    network: Whether to execute network tests.
    jobs: How many jobs will we run in parallel.

  Returns:
    List of tests to execute and their full command line.
  """
  testsets = []
  for test in SortTests(tests, jobs=jobs):
    cmd = [test]

    # See if this test requires special consideration.
    status = SPECIAL_TESTS.get(test)
    if status is SKIP:
      logging.info('Skipping %s', test)
      continue
    elif status is INSIDE:
      if not cros_build_lib.IsInsideChroot():
        if not chroot_available:
          logging.info('Skipping %s: chroot not available', test)
          continue
        cmd = ['cros_sdk', '--', os.path.join('..', '..', 'chromite', test)]
    elif status is OUTSIDE:
      if cros_build_lib.IsInsideChroot():
        logging.info('Skipping %s: must be outside the chroot', test)
        continue
    else:
      mode = os.stat(test).st_mode
      if stat.S_ISREG(mode):
        if not mode & 0o111:
          logging.debug('Skipping %s: not executable', test)
          continue
      else:
        logging.debug('Skipping %s: not a regular file', test)
        continue

    # Build up the final test command.
    cmd.append('--verbose')
    if network:
      cmd.append('--network')
    cmd = ['timeout', '--preserve-status', '-k', '%sm' % TEST_SIG_TIMEOUT,
           '%sm' % TEST_TIMEOUT] + cmd

    testsets.append((test, cmd, tempfile.TemporaryFile()))

  return testsets
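BuildTestSets leans on module-level tables (SPECIAL_TESTS, the SKIP/INSIDE/OUTSIDE sentinels, and the timeout constants) that this excerpt omits; a plausible sketch of their shape, with illustrative values only:

# Sentinels and tables assumed by the function above (entries are examples):
SKIP, INSIDE, OUTSIDE = 'skip', 'inside', 'outside'
SPECIAL_TESTS = {
    'cbuildbot/run_tests': SKIP,                     # avoid recursing
    'scripts/cros_generate_update_payload': INSIDE,  # needs the sdk
}
TEST_TIMEOUT = 20      # minutes until 'timeout' delivers SIGTERM
TEST_SIG_TIMEOUT = 5   # extra minutes before the '-k' SIGKILL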
Code example #55
  def simulate_builds(self, db, metadatas):
    """Simulate a series of Commit Queue master and slave builds.

    This method uses the metadata objects in |metadatas| to simulate those
    builds' insertions and updates to the cidb. All metadatas encountered
    after a particular master build will be assumed to be slaves of that build,
    until a new master build is encountered. Slave builds for a particular
    master will be simulated in parallel.

    The first element in |metadatas| must be a CQ master build.

    Args:
      db: A CIDBConnection instance.
      metadatas: A list of CBuildbotMetadata instances, sorted by start time.
    """
    m_iter = iter(metadatas)

    def is_master(m):
      return m.GetDict()['bot-config'] == 'master-paladin'

    next_master = m_iter.next()

    while next_master:
      master = next_master
      next_master = None
      assert is_master(master)
      master_build_id = _SimulateBuildStart(db, master)

      def simulate_slave(slave_metadata):
        build_id = _SimulateBuildStart(db, slave_metadata,
                                       master_build_id,
                                       important=True)
        _SimulateCQBuildFinish(db, slave_metadata, build_id)
        logging.debug('Simulated slave build %s on pid %s', build_id,
                      os.getpid())
        return build_id

      slave_metadatas = []
      for slave in m_iter:
        if is_master(slave):
          next_master = slave
          break
        slave_metadatas.append(slave)

      with parallel.BackgroundTaskRunner(simulate_slave, processes=15) as queue:
        for slave in slave_metadatas:
          queue.put([slave])

      # Yes, this introduces delay in the test. But this lets us do some basic
      # sanity tests on the |last_update| column later.
      time.sleep(1)
      _SimulateCQBuildFinish(db, master, master_build_id)
      logging.debug('Simulated master build %s', master_build_id)
Code example #56
File: fwgdb.py Project: qlb7707/webrtc_src
def ReadAll(fd, wait=0.03):
  """Read from |fd| until no more data has come for at least |wait| seconds."""
  data = ''
  try:
    while True:
      time.sleep(wait)
      data += os.read(fd, 4096)
  except OSError as e:
    if e.errno == errno.EAGAIN:
      logging.debug(data)
      return data
    raise
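ReadAll only returns because a non-blocking descriptor makes os.read raise EAGAIN once the device goes quiet; a sketch of preparing such a descriptor (the device path is an example):

import fcntl
import os

fd = os.open('/dev/ttyUSB0', os.O_RDWR)  # example serial device
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
data = ReadAll(fd)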
Code example #57
  def SaveLicenseDump(self, save_file):
    """Save PackageInfo contents to a YAML file.

    This is used to cache license results between the emerge hook phase and
    credits page generation.

    Args:
      save_file: File to save the yaml contents into.
    """
    logging.debug('Saving license to %s', save_file)
    yaml_dump = self.__dict__.items()
    osutils.WriteFile(save_file, yaml.dump(yaml_dump), makedirs=True)
Code example #58
def _GetPortList(desired_protocol, appc_port_list):
  """Get the list of ports opened for |desired_protocol| from |appc_port_list|.

  Args:
    desired_protocol: one of VALID_PROTOCOLS.
    appc_port_list: list of port specifications from an appc pod manifest.

  Returns:
    Instance of PortSpec.
  """
  # The port specification is optional.
  if appc_port_list is None:
    return PortSpec(False, [])

  json_lib.AssertIsInstance(appc_port_list, list, 'port specification list')

  allow_all = False
  port_list = []
  for port_dict in appc_port_list:
    json_lib.AssertIsInstance(port_dict, dict, 'port specification')

    # We don't actually use the port name, but it's handy for documentation,
    # and we enforce its existence to adhere to the appc standard.
    port_name = json_lib.GetValueOfType(
        port_dict, PORT_SPEC_NAME, unicode, 'port name')
    logging.debug('Validating appc specification of "%s"', port_name)
    port = json_lib.GetValueOfType(port_dict, PORT_SPEC_PORT, int, 'port')
    protocol = json_lib.GetValueOfType(
        port_dict, PORT_SPEC_PROTOCOL, unicode, 'protocol')

    # Validate everything before acting on it.
    if protocol not in VALID_PROTOCOLS:
      raise ValueError('Port protocol must be in %r, not "%s"' %
                       (VALID_PROTOCOLS, protocol))
    if protocol != desired_protocol:
      continue

    if port == -1:
      # Remember that we're going to return that all ports are opened, but
      # continue validating all the remaining specifications.
      allow_all = True
      continue

    # Now we know it's not the wildcard port, and that we've never declared
    # a wildcard for this protocol.
    if port >= 65536 or port <= 0:
      raise ValueError(
          'Port numbers must fit in 16 bits (invalid port=%d).' % port)

    port_list.append(port)

  return PortSpec(allow_all, port_list)
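An illustrative input that exercises both the normal and wildcard branches, assuming the PORT_SPEC_* constants map to the appc field names 'name', 'port', and 'protocol' (unicode literals because the validators above demand them):

example_ports = [
    {u'name': u'http', u'port': 80, u'protocol': u'tcp'},
    {u'name': u'any-udp', u'port': -1, u'protocol': u'udp'},  # wildcard
]
# _GetPortList(u'tcp', example_ports) -> PortSpec(False, [80])
# _GetPortList(u'udp', example_ports) -> PortSpec(True, [])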
Code example #59
  def GetSandboxSpec(self, appc_contents, sandbox_spec_name):
    """Create a SandboxSpec encoding the information in an appc pod manifest.

    Args:
      appc_contents: string contents of an appc pod manifest
      sandbox_spec_name: string unique name of this sandbox.

    Returns:
      an instance of SandboxSpec.
    """
    wrapper = SandboxSpecWrapper()
    overlay_name = None

    app_list = json_lib.GetValueOfType(
        appc_contents, KEY_APPS_LIST, list, 'app list')
    for app in app_list:
      json_lib.AssertIsInstance(app, dict, 'app')

      # Aid debugging of problems in specific apps.
      app_name = json_lib.GetValueOfType(
          app, KEY_APP_NAME, unicode, 'app name')
      if not IsValidAcName(app_name):
        raise ValueError('Application name "%s" contains illegal characters.' %
                         app_name)
      logging.debug('Processing application "%s".', app_name)

      # Get the name of the image, and check that it's consistent with
      # other image names.
      image = json_lib.GetValueOfType(
          app, KEY_APP_IMAGE, dict, 'image specification for app')
      image_name = json_lib.GetValueOfType(
          image, KEY_APP_IMAGE_NAME, unicode, 'image name')
      if not IsValidAcName(image_name):
        raise ValueError('Image name "%s" contains illegal characters.' %
                         image_name)

      if overlay_name and overlay_name != image_name:
        raise ValueError(
            'All elements of "apps" must have the same image.name.')
      overlay_name = image_name

      # Add the executable corresponding to this app to our SandboxSpec.
      self._FillInExecutableFromApp(wrapper, app)

    if not overlay_name:
      raise ValueError('Overlays must declare at least one app')

    annotation_list = json_lib.GetValueOfType(
        appc_contents, KEY_ANNOTATIONS_LIST, list, 'list of all annotations')
    self._FillInEndpointNamesFromAnnotations(wrapper, annotation_list)

    wrapper.SetName(sandbox_spec_name)
    return wrapper.sandbox_spec
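Finally, a skeleton pod manifest that would satisfy this walker, under the assumption that KEY_APPS_LIST, KEY_APP_NAME, KEY_APP_IMAGE, KEY_APP_IMAGE_NAME, and KEY_ANNOTATIONS_LIST map to the standard appc field names; the executable fields read by _FillInExecutableFromApp are omitted:

appc_contents = {
    u'apps': [{
        u'name': u'shill',
        u'image': {u'name': u'shill-overlay'},
        # _FillInExecutableFromApp reads further fields from this app dict.
    }],
    u'annotations': [],
}
# GetSandboxSpec(appc_contents, u'shill-sandbox') would then name the spec
# 'shill-sandbox' and record 'shill-overlay' as the single overlay image.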