Example #1
 def testBackgroundTaskRunner(self):
     """Make sure BackgroundTaskRunner is mocked out."""
     with ParallelMock():
         parallel.RunTasksInProcessPool(self._Callback, [])
         self.assertEqual(0, self._calls)
         parallel.RunTasksInProcessPool(self._Callback, [[]])
         self.assertEqual(1, self._calls)
         parallel.RunTasksInProcessPool(self._Callback, [],
                                        processes=9,
                                        onexit=self._Callback)
         self.assertEqual(10, self._calls)
Example #2
 def testBackgroundTaskRunner(self):
   """Make sure BackgroundTaskRunner is mocked out."""
   with ParallelMock():
     parallel.RunTasksInProcessPool(self._Callback, [])
     self.assertEqual(0, self._calls)
     result = parallel.RunTasksInProcessPool(self._Callback, [[]])
     self.assertEqual(1, self._calls)
     self.assertEqual([1], result)
     result = parallel.RunTasksInProcessPool(self._Callback, [], processes=9,
                                             onexit=self._Callback)
     self.assertEqual(10, self._calls)
     self.assertEqual([], result)
     result = parallel.RunTasksInProcessPool(self._Callback, [[]] * 10)
     self.assertEqual(list(range(11, 21)), result)
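The two variants above pin down the calling convention: the second argument is a list of per-task argument lists, and the return value is the list of task results in input order. A minimal sketch of that contract (not taken from any example above; the chromite-style import path and the _square task are assumptions):

from chromite.lib import parallel

def _square(n):
    # Each entry in the task list is the argument list for one call.
    return n * n

# Three tasks spread across two worker processes.
results = parallel.RunTasksInProcessPool(_square, [[1], [2], [3]], processes=2)
assert results == [1, 4, 9]  # Results come back in task order.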
Example #3
 def UpdateLatestFiles(self):
   """Update the LATEST files since LKGM, in Google Storage."""
   ext_cfgs, int_cfgs = self.site_config.FindFullConfigsForBoard(board=None)
   versions = self._GetLatestCanaryVersions() + [self._old_lkgm]
   tasks = [[cfg, versions] for cfg in ext_cfgs + int_cfgs]
   parallel.RunTasksInProcessPool(self.UpdateLatestFilesForBot, tasks,
                                  processes=100)
Example #4
def main(argv):
    """Standard main routine.

    @param argv  Command line arguments including `sys.argv[0]`.

    """
    def balancer(i, board):
        """Balance the specified board.

        @param i The index of the board.
        @param board The board name.
        """
        if i > 0:
            _log_message('')
        _balance_board(arguments, afe, board, start_time, end_time)

    arguments = _parse_command(argv)
    end_time = time.time()
    start_time = end_time - 24 * 60 * 60
    afe = frontend.AFE(server=None)
    boards = arguments.boards
    if arguments.all_boards:
        boards = host_label_utils.get_all_boards(
            labels=[_POOL_PREFIX + arguments.pool])
    board_args = list(enumerate(boards))
    try:
        parallel.RunTasksInProcessPool(balancer, board_args, processes=8)
    except KeyboardInterrupt:
        pass
Example #5
def _GetOverlayToEbuildsMap(options, overlays, package_list):
    """Get ebuilds for overlays.

    Args:
      options: The options object returned by the argument parser.
      overlays: A list of overlays to work on.
      package_list: A list of packages passed from commandline to work on.

    Returns:
      A dict mapping each overlay to a list of ebuilds belonging to it.
    """
    root_version = manifest_version.VersionInfo.from_repo(options.buildroot)
    subdir_removal = manifest_version.VersionInfo('10363.0.0')
    require_subdir_support = root_version < subdir_removal

    overlay_ebuilds = {}
    inputs = [[
        overlay, options.all, package_list, options.force,
        require_subdir_support
    ] for overlay in overlays]
    result = parallel.RunTasksInProcessPool(portage_util.GetOverlayEBuilds,
                                            inputs)
    for idx, ebuilds in enumerate(result):
        overlay_ebuilds[overlays[idx]] = ebuilds

    return overlay_ebuilds
Example #6
  def _CopyToDeviceInParallel(self, src, dest):
    """Chop source file in chunks, send them to destination in parallel.

    Transfer chunks of file in parallel and assemble in destination if the
    file size is larger than chunk size. Fall back to scp mode otherwise.

    Args:
      src: Local path as a string.
      dest: rsync/scp path of the form <host>:/<path> as a string.
    """
    src_filename = os.path.basename(src)
    chunk_prefix = src_filename + '_'
    with osutils.TempDir() as tempdir:
      chunk_path = os.path.join(tempdir, chunk_prefix)
      try:
        cmd = ['split', '-b', str(CHUNK_SIZE), src, chunk_path]
        cros_build_lib.run(cmd)
        input_list = [[chunk_file, dest, 'scp']
                      for chunk_file in glob.glob(chunk_path + '*')]
        parallel.RunTasksInProcessPool(self.CopyToDevice,
                                       input_list,
                                       processes=DEGREE_OF_PARALLELISM)
        logging.info('Assembling the chunks now...')
        chunks = '%s/%s*' % (dest, chunk_prefix)
        final_dest = '%s/%s' % (dest, src_filename)
        assemble_cmd = ['cat', chunks, '>', final_dest]
        self.run(assemble_cmd)
        cleanup_cmd = ['rm', '-f', chunks]
        self.run(cleanup_cmd)
      except IOError:
        logging.error('Could not complete the payload transfer...')
        raise
    logging.info('Successfully copied %s to %s in chunks in parallel.', src, dest)
Example #7
def main(argv):
  options = ParseArgs(argv)

  if not cros_build_lib.IsInsideChroot():
    raise commandline.ChrootRequiredError()

  if os.geteuid() != 0:
    cros_build_lib.SudoRunCommand(sys.argv)
    return

  # sysroot must have a trailing / as the tree dictionary produced by
  # create_trees is indexed with a trailing /.
  sysroot = cros_build_lib.GetSysroot(options.board) + '/'
  trees = create_trees(target_root=sysroot, config_root=sysroot)

  vartree = trees[sysroot]['vartree']

  cache_dir = os.path.join(path_util.FindCacheDir(),
                           'cros_install_debug_syms-v' + CACHE_VERSION)

  if options.clearcache:
    osutils.RmDir(cache_dir, ignore_missing=True)

  binhost_cache = None
  if options.cachebinhost:
    binhost_cache = cache.DiskCache(cache_dir)

  boto_file = vartree.settings['BOTO_CONFIG']
  if boto_file:
    os.environ['BOTO_CONFIG'] = boto_file

  gs_context = gs.GSContext()
  symbols_mapping = RemoteSymbols(vartree, binhost_cache)

  if options.all:
    to_install = vartree.dbapi.cpv_all()
  else:
    to_install = [GetMatchingCPV(p, vartree.dbapi) for p in options.packages]

  to_install = [p for p in to_install
                if ShouldGetSymbols(p, vartree.dbapi, symbols_mapping)]

  if not to_install:
    logging.info('nothing to do, exit')
    return

  with DebugSymbolsInstaller(vartree, gs_context, sysroot,
                             not options.debug) as installer:
    args = [(p, symbols_mapping[p]) for p in to_install]
    parallel.RunTasksInProcessPool(installer.Install, args,
                                   processes=options.jobs)

  logging.debug('installation done, updating packages index file')
  packages_dir = os.path.join(sysroot, 'packages')
  packages_file = os.path.join(packages_dir, 'Packages')
  # binpkg will set DEBUG_SYMBOLS automatically if it detects the debug symbols
  # in the packages dir.
  pkgindex = binpkg.GrabLocalPackageIndex(packages_dir)
  with open(packages_file, 'w') as p:
    pkgindex.Write(p)
Example #8
    def PerformStage(self):
        """Run the branch operation."""
        # Setup and initialize the repo.
        super(BranchUtilStage, self).PerformStage()

        repo_manifest = git.ManifestCheckout.Cached(self._build_root)
        checkouts = repo_manifest.ListCheckouts()

        logging.debug('Processing %d checkouts from manifest in parallel.',
                      len(checkouts))
        args = [[repo_manifest, x] for x in checkouts]
        parallel.RunTasksInProcessPool(self._ProcessCheckout,
                                       args,
                                       processes=16)

        if not self._run.options.delete_branch:
            self._FixUpManifests(repo_manifest)

        # Increment versions for a new branch.
        if not (self._run.options.delete_branch or self.rename_to):
            overlay_name = 'chromiumos/overlays/chromiumos-overlay'
            overlay_checkout = repo_manifest.FindCheckout(overlay_name)
            overlay_dir = overlay_checkout['local_path']
            push_remote = overlay_checkout['push_remote']
            self._IncrementVersionOnDiskForNewBranch(push_remote)

            source_branch = repo_manifest.default['revision']
            self._IncrementVersionOnDiskForSourceBranch(
                overlay_dir, push_remote, source_branch)
Example #9
    def ResetCheckouts(self, branch, fetch=False):
        """Updates |branch| in all Git checkouts in the manifest to their remotes.

        Args:
          branch: The branch to update.
          fetch: Indicates whether to sync the remotes before resetting.
        """
        if not self.manifest:
            logging.info('No manifest, skipping reset.')
            return

        def _Reset(checkout):
            path = checkout.GetPath()

            # There is no need to reset the branch if it doesn't exist.
            if not git.DoesCommitExistInRepo(path, branch):
                return

            if fetch:
                git.RunGit(path, ['fetch', '--all'])

            def _LogBranch():
                branches = git.RunGit(path,
                                      ['branch', '-vv']).output.splitlines()
                branch_line = [b for b in branches if branch in b]
                logging.info(branch_line)

            _LogBranch()
            git.RunGit(path, ['checkout', '-f', branch])
            logging.info('Resetting to %s', checkout['tracking_branch'])
            git.RunGit(path, ['reset', checkout['tracking_branch'], '--hard'])
            _LogBranch()

        parallel.RunTasksInProcessPool(
            _Reset, [[c] for c in self.manifest.ListCheckouts()])
Example #10
def _WorkOnCommit(options, overlays, overlay_tracking_branch,
                  git_project_overlays, manifest, package_list):
    """Commit uprevs of overlays belonging to different git projects in parallel.

    Args:
      options: The options object returned by the argument parser.
      overlays: A list of overlays to work on.
      overlay_tracking_branch: A dict mapping from each overlay to its tracking
        branch.
      git_project_overlays: A dict mapping from each git repository to a list
        of its overlays.
      manifest: The manifest of the given source root.
      package_list: A list of packages passed from commandline to work on.
    """
    overlay_ebuilds = _GetOverlayToEbuildsMap(options, overlays, package_list)

    with parallel.Manager() as manager:
        # Contains the array of packages we actually revved.
        revved_packages = manager.list()
        new_package_atoms = manager.list()

        inputs = [[
            options, manifest, overlays_per_project, overlay_tracking_branch,
            overlay_ebuilds, revved_packages, new_package_atoms
        ] for overlays_per_project in git_project_overlays.values()]
        parallel.RunTasksInProcessPool(_CommitOverlays, inputs)

        chroot_path = os.path.join(options.srcroot,
                                   constants.DEFAULT_CHROOT_DIR)
        if os.path.exists(chroot_path):
            CleanStalePackages(options.srcroot, options.boards.split(':'),
                               new_package_atoms)
        if options.drop_file:
            osutils.WriteFile(options.drop_file, ' '.join(revved_packages))
Example #11
    def _GeneratePayloads(self, payloads):
        """Generate the payloads called for by a list of payload definitions.

        It will keep going, even if there is a failure.

        Args:
          payloads: gspath.Payload objects defining all of the payloads to
            generate.

        Raises:
          Any arbitrary exception raised by CreateAndUploadPayload.
        """
        payloads_args = [(payload, self._ShouldSign(payload.tgt_image), True)
                         for payload in payloads]

        # Most of the operations in paygen for a single payload are single threaded
        # and mostly IO bound (downloading images, extracting partitions, waiting
        # for signers, signing payload, etc). The only part that requires special
        # attention is generating an unsigned payload which internally has a
        # massively parallel implementation. So, here we allow multiple processes to
        # run simultaneously and we restrict the number of processes that do the
        # unsigned payload generation by looking at the available memory and seeing
        # if additional runs would exceed allowed memory use thresholds (look at
        # the MemoryConsumptionSemaphore in utils.py).
        parallel.RunTasksInProcessPool(
            paygen_payload_lib.CreateAndUploadPayload, payloads_args)
Example #12
def CleanStalePackages(boards, package_atoms):
  """Cleans up stale package info from a previous build.
  Args:
    boards: Boards to clean the packages from.
    package_atoms: The actual package atom to unmerge.
  """
  if package_atoms:
    cros_build_lib.Info('Cleaning up stale packages %s.' % package_atoms)

  # First unmerge all the packages for a board, then eclean it.
  # We need these two steps to run in order (unmerge/eclean),
  # but we can let all the boards run in parallel.
  def _CleanStalePackages(board):
    if board:
      suffix = '-' + board
      runcmd = cros_build_lib.RunCommand
    else:
      suffix = ''
      runcmd = cros_build_lib.SudoRunCommand

    if package_atoms:
      runcmd(['emerge' + suffix, '-q', '--unmerge'] + package_atoms,
             extra_env={'CLEAN_DELAY': '0'})
    runcmd(['eclean' + suffix, '-d', 'packages'],
           redirect_stdout=True, redirect_stderr=True)

  tasks = []
  for board in boards:
    tasks.append([board])
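  # The trailing [None] task runs the same cleanup once for the host (no board suffix).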
  tasks.append([None])

  parallel.RunTasksInProcessPool(_CleanStalePackages, tasks)
Example #13
def _UprevOverlays(manifest, overlays, overlay_ebuilds, revved_packages,
                   new_package_atoms, new_ebuild_files, removed_ebuild_files):
    """Execute uprevs for overlays in sequence.

    Args:
      manifest: The manifest of the given source root.
      overlays: A list of overlays to commit.
      overlay_ebuilds: A dict mapping overlays to their ebuilds.
      revved_packages: A shared list of revved packages.
      new_package_atoms: A shared list of new package atoms.
      new_ebuild_files: New stable ebuild paths.
      removed_ebuild_files: Old ebuild paths that were removed.
    """
    for overlay in overlays:
        if not os.path.isdir(overlay):
            logging.warning('Skipping %s, which is not a directory.', overlay)
            continue

        ebuilds = overlay_ebuilds.get(overlay, [])
        if not ebuilds:
            continue

        with parallel.Manager() as manager:
            # Contains the messages for the packages we revved.
            messages = manager.list()

            inputs = [[
                overlay, ebuild, manifest, new_ebuild_files,
                removed_ebuild_files, messages, revved_packages,
                new_package_atoms
            ] for ebuild in ebuilds]
            parallel.RunTasksInProcessPool(_WorkOnEbuild, inputs)
Example #14
def _GetOverlayToEbuildsMap(overlays, package_list, force):
    """Get ebuilds for overlays.

    Args:
      overlays: A list of overlays to work on.
      package_list: A list of packages passed from commandline to work on.
      force (bool): Whether to use packages even if in blacklist.

    Returns:
      A dict mapping each overlay to a list of ebuilds belonging to it.
    """
    root_version = manifest_version.VersionInfo.from_repo(
        constants.SOURCE_ROOT)
    subdir_removal = manifest_version.VersionInfo('10363.0.0')
    require_subdir_support = root_version < subdir_removal
    use_all = not package_list

    overlay_ebuilds = {}
    inputs = [[overlay, use_all, package_list, force, require_subdir_support]
              for overlay in overlays]
    result = parallel.RunTasksInProcessPool(portage_util.GetOverlayEBuilds,
                                            inputs)
    for idx, ebuilds in enumerate(result):
        overlay_ebuilds[overlays[idx]] = ebuilds

    return overlay_ebuilds
Example #15
def _WorkOnCommit(options, overlays, manifest, package_list):
  """Commit uprevs of overlays belonging to different git projects in parallel.

  Args:
    options: The options object returned by the argument parser.
    overlays: A list of overlays to work on.
    manifest: The manifest of the given source root.
    package_list: A list of packages passed from commandline to work on.
  """
  overlay_ebuilds = _GetOverlayToEbuildsMap(overlays, package_list,
                                            options.force)

  with parallel.Manager() as manager:
    # Contains the array of packages we actually revved.
    revved_packages = manager.list()
    new_package_atoms = manager.list()
    new_ebuild_files = manager.list()
    removed_ebuild_files = manager.list()

    inputs = [[manifest, [overlay], overlay_ebuilds, revved_packages,
               new_package_atoms, new_ebuild_files, removed_ebuild_files]
              for overlay in overlays]
    parallel.RunTasksInProcessPool(_UprevOverlays, inputs)

    if options.chroot and os.path.exists(options.chroot):
      CleanStalePackages(options.boards or [], new_package_atoms,
                         options.chroot)

    if options.dump_files:
      osutils.WriteFile('/tmp/revved_packages', '\n'.join(revved_packages))
      osutils.WriteFile('/tmp/new_ebuild_files', '\n'.join(new_ebuild_files))
      osutils.WriteFile('/tmp/removed_ebuild_files',
                        '\n'.join(removed_ebuild_files))
Example #16
 def setUpClass(cls):
     assert cros_build_lib.IsInsideChroot()
     logging.info('Generating board configs.')
     board_keys = binhost.GetAllImportantBoardKeys(cls.site_config)
     boards = set(key.board for key in board_keys)
     inputs = [[board, not cls.CACHING, False] for board in boards]
     parallel.RunTasksInProcessPool(binhost.GenConfigsForBoard, inputs)
     fetcher = binhost.CompatIdFetcher(caching=cls.CACHING)
     cls.COMPAT_IDS = fetcher.FetchCompatIds(list(board_keys))
Example #17
 def GetModificationTimes(self):
   """Get the latest modification time for each of the queued keys."""
   parallel.RunTasksInProcessPool(self._EnqueueModificationTime, self._tasks)
   mtimes = {}
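   # Drain the result queue; get_nowait() raises Queue.Empty once all results are consumed.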
   try:
     while True:
       key, mtime = self._result_queue.get_nowait()
       mtimes[key] = max((mtimes.get(key, 0), mtime))
   except Queue.Empty:
     return mtimes
Example #18
def main(argv):
    parser = GetParser()
    options = parser.parse_args(argv)
    options.Freeze()

    snapshot_ref = options.snapshot_ref
    if snapshot_ref and not snapshot_ref.startswith('refs/'):
        snapshot_ref = BRANCH_REF_PREFIX + snapshot_ref

    repo = repo_util.Repository.Find(options.repo_path)
    if repo is None:
        cros_build_lib.Die('No repo found in --repo_path %r.',
                           options.repo_path)

    manifest = repo.Manifest(revision_locked=True)
    projects = list(manifest.Projects())

    # Check if projects need snapshots (in parallel).
    needs_snapshot_results = parallel.RunTasksInProcessPool(
        _NeedsSnapshot, [(repo.root, x) for x in projects])

    # Group snapshot-needing projects by project name.
    snapshot_projects = {}
    for project, needs_snapshot in zip(projects, needs_snapshot_results):
        if needs_snapshot:
            snapshot_projects.setdefault(project.name, []).append(project)

    if snapshot_projects and not snapshot_ref:
        cros_build_lib.Die('Some project(s) need snapshot refs but no '
                           '--snapshot-ref specified.')

    # Push snapshot refs (in parallel).
    with parallel.BackgroundTaskRunner(_GitPushProjectUpstream,
                                       repo.root,
                                       dry_run=options.dry_run,
                                       processes=options.jobs) as queue:
        for projects in snapshot_projects.values():
            # Since some projects (e.g. chromiumos/third_party/kernel) are checked out
            # multiple places, we may need to push each checkout to a unique ref.
            need_unique_refs = len(projects) > 1
            used_refs = set()
            for project in projects:
                if need_unique_refs:
                    ref = _MakeUniqueRef(project, snapshot_ref, used_refs)
                else:
                    ref = snapshot_ref
                # Update the upstream ref both for the push and the output XML.
                project.upstream = ref
                queue.put([project])

    dest = options.output_file
    if dest is None or dest == '-':
        dest = sys.stdout

    manifest.Write(dest)
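Example #18 also shows the companion producer/consumer API. Judging by the call above, parallel.BackgroundTaskRunner(fn, *args, ...) binds the leading arguments once (repo.root here), yields a queue whose put() entries supply each task's remaining arguments, and joins all outstanding tasks when the with block exits. A small sketch under those assumptions (the _upload task and bucket name are invented for illustration):

from chromite.lib import parallel

def _upload(bucket, name, dry_run=False):
    # Hypothetical task: bucket is bound by the runner, name comes from the queue.
    print('uploading %s to %s (dry_run=%s)' % (name, bucket, dry_run))

with parallel.BackgroundTaskRunner(_upload, 'gs://example-bucket',
                                   dry_run=True, processes=4) as queue:
    for name in ('a', 'b', 'c'):
        queue.put([name])  # -> _upload('gs://example-bucket', name, dry_run=True)
# All queued tasks have completed by this point.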
Example #19
    def FetchCompatIds(self, board_keys):
        """Generate a dict mapping BoardKeys to their associated CompatId.

        Args:
          board_keys: A list of BoardKey objects to fetch.
        """
        # pylint: disable=method-hidden
        logging.info('Fetching CompatId objects...')
        with parallel.Manager() as manager:
            self.compat_ids = manager.dict()
            parallel.RunTasksInProcessPool(self._FetchCompatId, board_keys)
            return dict(self.compat_ids)
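Worker processes do not share memory with the parent, so results written to a plain dict inside self._FetchCompatId would be lost. Examples #19, #24, and #25 instead allocate shared containers from parallel.Manager(), whose proxies forward writes back to the parent. A compact sketch of the pattern, with the import path again assumed and a toy _fill task:

import functools

from chromite.lib import parallel

def _fill(results, key):
    # Writes go through the manager proxy and are visible to the parent.
    results[key] = key * key

with parallel.Manager() as manager:
    results = manager.dict()
    parallel.RunTasksInProcessPool(functools.partial(_fill, results),
                                   [[1], [2], [3]])
    # Copy out of the proxy before the manager shuts down.
    print(dict(results))  # {1: 1, 2: 4, 3: 9}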
Example #20
  def GetMultipleChangeDetail(self, changes):
    """Query the gerrit server for multiple changes using GetChangeDetail.

    Args:
      changes: A sequence of gerrit change numbers.

    Returns:
      A list of the raw output of GetChangeDetail.
    """
    inputs = [[change] for change in changes]
    return parallel.RunTasksInProcessPool(self.GetChangeDetail, inputs,
                                          processes=self._NUM_PROCESSES)
Example #21
def _WorkOnPush(options, overlay_tracking_branch, git_project_overlays):
    """Push uprevs of overlays belonging to differet git projects in parallel.

  Args:
    options: The options object returned by the argument parser.
    overlay_tracking_branch: A dict mapping from each overlay to its tracking
      branch.
    git_project_overlays: A dict mapping from each git repository to a list of
      its overlays.
  """
    inputs = [[options, overlays_per_project, overlay_tracking_branch]
              for overlays_per_project in git_project_overlays.values()]
    parallel.RunTasksInProcessPool(_PushOverlays, inputs)
Example #22
def clean_stale_packages(new_package_atoms, build_targets, chroot=None):
    """Cleans up stale package info from a previous build."""
    if new_package_atoms:
        logging.info('Cleaning up stale packages %s.', new_package_atoms)

    chroot = chroot or Chroot()

    if cros_build_lib.IsOutsideChroot() and not chroot.exists():
        logging.warning('Unable to clean packages. No chroot to enter.')
        return

    # First unmerge all the packages for a board, then eclean it.
    # We need these two steps to run in order (unmerge/eclean),
    # but we can let all the boards run in parallel.
    def _do_clean_stale_packages(board):
        if board:
            suffix = '-' + board
            runcmd = cros_build_lib.run
        else:
            suffix = ''
            runcmd = cros_build_lib.sudo_run

        if cros_build_lib.IsOutsideChroot():
            # Setup runcmd with the chroot arguments once.
            runcmd = functools.partial(runcmd,
                                       enter_chroot=True,
                                       chroot_args=chroot.get_enter_args())

        emerge, eclean = 'emerge' + suffix, 'eclean' + suffix
        if not osutils.FindMissingBinaries([emerge, eclean]):
            if new_package_atoms:
                # If nothing was found to be unmerged, emerge will exit(1).
                result = runcmd([emerge, '-q', '--unmerge'] +
                                new_package_atoms,
                                extra_env={'CLEAN_DELAY': '0'},
                                check=False,
                                cwd=constants.SOURCE_ROOT)
                if result.returncode not in (0, 1):
                    raise cros_build_lib.RunCommandError(
                        'unexpected error', result)

            runcmd([eclean, '-d', 'packages'],
                   cwd=constants.SOURCE_ROOT,
                   capture_output=True)

    tasks = []
    for build_target in build_targets:
        tasks.append([build_target.name])
    tasks.append([None])

    parallel.RunTasksInProcessPool(_do_clean_stale_packages, tasks)
Example #23
 def testExceptionRaising(self):
     self.StartPatcher(BackgroundTaskVerifier())
     for fn in (self._SystemExit, self._KeyboardInterrupt):
         for task in (lambda: parallel.RunTasksInProcessPool(fn, [[]]),
                      lambda: parallel.RunParallelSteps([fn])):
             output_str = ex_str = None
             with self.OutputCapturer() as capture:
                 try:
                     task()
                 except parallel.BackgroundFailure as ex:
                     output_str = capture.GetStdout()
                     ex_str = str(ex)
             self.assertTrue('Traceback' in ex_str)
             self.assertEqual(output_str, _GREETING)
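Examples #23 and #27 document the error contract: an exception raised inside a worker (even SystemExit or KeyboardInterrupt) is not re-raised as-is; the pool wraps it, traceback text included, in parallel.BackgroundFailure. A brief sketch of handling that, under the same import assumption:

from chromite.lib import parallel

def _explode(n):
    # A deliberately failing task.
    raise ValueError('bad input: %r' % n)

try:
    parallel.RunTasksInProcessPool(_explode, [[1]])
except parallel.BackgroundFailure as e:
    # Per the assertions above, str(e) includes the worker's traceback.
    print('A background task failed:\n%s' % e)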
Example #24
    def uprev(self, package_list=None, force=False):
        """Uprev ebuilds.

        Uprev ebuilds for the packages in package_list. If package_list is not
        specified, uprevs all ebuilds for overlays in self.overlays.

        Args:
          package_list (list[str]): A list of packages to uprev.
          force: Boolean indicating whether or not to consider blacklisted
            ebuilds.
        """
        # Use all found packages if an explicit package_list is not given.
        use_all = not bool(package_list)
        self._populate_overlay_ebuilds(use_all=use_all,
                                       package_list=package_list,
                                       force=force)

        with parallel.Manager() as manager:
            # Contains the list of packages we actually revved.
            self._revved_packages = manager.list()
            # The new package atoms for cleanup.
            self._new_package_atoms = manager.list()
            # The list of added ebuild files.
            self._new_ebuild_files = manager.list()
            # The list of removed ebuild files.
            self._removed_ebuild_files = manager.list()

            inputs = [[overlay] for overlay in self.overlays]
            parallel.RunTasksInProcessPool(self._uprev_overlay, inputs)

            self._revved_packages = list(self._revved_packages)
            self._new_package_atoms = list(self._new_package_atoms)
            self._new_ebuild_files = list(self._new_ebuild_files)
            self._removed_ebuild_files = list(self._removed_ebuild_files)

        self._clean_stale_packages()

        if self.output_dir and os.path.exists(self.output_dir):
            # Write out dumps of the results. This is largely meant for sanity
            # checking results.
            osutils.WriteFile(os.path.join(self.output_dir, 'revved_packages'),
                              '\n'.join(self._revved_packages))
            osutils.WriteFile(
                os.path.join(self.output_dir, 'new_package_atoms'),
                '\n'.join(self._new_package_atoms))
            osutils.WriteFile(
                os.path.join(self.output_dir, 'new_ebuild_files'),
                '\n'.join(self._new_ebuild_files))
            osutils.WriteFile(
                os.path.join(self.output_dir, 'removed_ebuild_files'),
                '\n'.join(self._removed_ebuild_files))
Example #25
    def FetchChanges(self, changes, manifest=None):
        """Fetch the specified changes, if needed.

        If we're an external builder, internal changes are filtered out.

        Args:
          changes: A list of changes to fetch.
          manifest: A ManifestCheckout instance representing what we're
            working on.

        Returns:
          A list of the filtered changes and a list of
          cros_patch.ChangeNotInManifest instances for changes not in
          manifest.
        """
        by_repo = {}
        changes_to_fetch = []
        not_in_manifest = []
        for change in changes:
            try:
                self._helper_pool.ForChange(change)
            except GerritHelperNotAvailable:
                # Internal patches are irrelevant to external builders.
                logging.info('Skipping internal patch: %s', change)
                continue

            repo = None
            try:
                repo = self.GetGitRepoForChange(change,
                                                strict=True,
                                                manifest=manifest)
            except cros_patch.ChangeNotInManifest as e:
                logging.info("Skipping patch %s as it's not in manifest.",
                             change)
                not_in_manifest.append(e)
                continue

            by_repo.setdefault(repo, []).append(change)
            changes_to_fetch.append(change)

        # Fetch changes in parallel. The change.Fetch() method modifies the
        # 'change' object, so make sure we grab all of that information.
        with parallel.Manager() as manager:
            fetched_changes = manager.dict()

            fetch_repo = functools.partial(_FetchChangesForRepo,
                                           fetched_changes, by_repo)
            parallel.RunTasksInProcessPool(fetch_repo,
                                           [[repo] for repo in by_repo])

            return [fetched_changes[c.id]
                    for c in changes_to_fetch], not_in_manifest
Example #26
  def GetMultipleChangeDetail(self, changes, verbose=False):
    """Query the gerrit server for multiple changes using GetChangeDetail.

    Args:
      changes: A sequence of gerrit change numbers.
      verbose: (optional) Whether to return more properties of the change.

    Returns:
      A list of the raw output of GetChangeDetail.
    """
    inputs = [[change] for change in changes]
    return parallel.RunTasksInProcessPool(
        lambda c: self.GetChangeDetail(c, verbose=verbose),
        inputs, processes=self._NUM_PROCESSES)
Example #27
  def _VerifyExceptionRaised(self, fn, exc_type):
    """A helper function to verify the correct |exc_type| is raised."""
    for task in (lambda: parallel.RunTasksInProcessPool(fn, [[]]),
                 lambda: parallel.RunParallelSteps([fn])):
      output_str = ex_str = ex = None
      with self.OutputCapturer() as capture:
        with self.assertRaises(parallel.BackgroundFailure) as ex:
          task()
        output_str = capture.GetStdout()
        ex_str = str(ex.exception)

      self.assertTrue(exc_type in [x.type for x in ex.exception.exc_infos])
      self.assertEqual(output_str, _GREETING)
      self.assertTrue(str(exc_type) in ex_str)
Example #28
def CleanStalePackages(boards, package_atoms, chroot):
    """Cleans up stale package info from a previous build.

    Args:
      boards: Boards to clean the packages from.
      package_atoms: A list of package atoms to unmerge.
      chroot (str): The chroot path.
    """
    if package_atoms:
        logging.info('Cleaning up stale packages %s.', package_atoms)

    # First unmerge all the packages for a board, then eclean it.
    # We need these two steps to run in order (unmerge/eclean),
    # but we can let all the boards run in parallel.
    def _CleanStalePackages(board):
        if board:
            suffix = '-' + board
            runcmd = cros_build_lib.run
        else:
            suffix = ''
            runcmd = cros_build_lib.sudo_run

        chroot_args = ['--chroot', chroot] if chroot else None
        emerge, eclean = 'emerge' + suffix, 'eclean' + suffix
        if not osutils.FindMissingBinaries([emerge, eclean]):
            if package_atoms:
                # If nothing was found to be unmerged, emerge will exit(1).
                result = runcmd([emerge, '-q', '--unmerge'] +
                                list(package_atoms),
                                enter_chroot=True,
                                chroot_args=chroot_args,
                                extra_env={'CLEAN_DELAY': '0'},
                                error_code_ok=True,
                                cwd=constants.SOURCE_ROOT)
                if result.returncode not in (0, 1):
                    raise cros_build_lib.RunCommandError(
                        'unexpected error', result)
            runcmd([eclean, '-d', 'packages'],
                   cwd=constants.SOURCE_ROOT,
                   enter_chroot=True,
                   chroot_args=chroot_args,
                   redirect_stdout=True,
                   redirect_stderr=True)

    tasks = []
    for board in boards:
        tasks.append([board])
    tasks.append([None])

    parallel.RunTasksInProcessPool(_CleanStalePackages, tasks)
Example #29
def main(argv):
    """Standard main routine.

    @param argv  Command line arguments including `sys.argv[0]`.

    """
    def balancer(i, board, pool):
        """Balance the specified board.

        @param i The index of the board.
        @param board The board name.
        @param pool The pool to rebalance for the board.
        """
        if i > 0:
            _log_message('')
        _balance_board(arguments, afe, board, pool, start_time, end_time)

    arguments = _parse_command(argv)
    end_time = time.time()
    start_time = end_time - 24 * 60 * 60
    afe = frontend.AFE(server=None)
    boards = arguments.boards
    pools = (lab_inventory.CRITICAL_POOLS
             if arguments.pool == _ALL_CRITICAL_POOLS
             else [arguments.pool])
    board_info = []
    if arguments.all_boards:
        inventory = lab_inventory.get_inventory(afe)
        for pool in pools:
            if _too_many_broken_boards(inventory, pool, arguments):
                _log_error('Refusing to balance all boards for %s pool, '
                           'too many boards with at least 1 broken DUT '
                           'detected.', pool)
            else:
                boards_in_pool = inventory.get_managed_boards(pool=pool)
                current_len_board_info = len(board_info)
                board_info.extend([(i + current_len_board_info, board, pool)
                                   for i, board in enumerate(boards_in_pool)])
    else:
        # We have specified boards with a specified pool, setup the args to the
        # balancer properly.
        for pool in pools:
            current_len_board_info = len(board_info)
            board_info.extend([(i + current_len_board_info, board, pool)
                               for i, board in enumerate(boards)])
    try:
        parallel.RunTasksInProcessPool(balancer, board_info, processes=8)
    except KeyboardInterrupt:
        pass
Example #30
    def _uprev_overlay(self, overlay):
        """Execute uprevs for an overlay.

        Args:
          overlay: The overlay to uprev.
        """
        if not os.path.isdir(overlay):
            logging.warning('Skipping %s, which is not a directory.', overlay)
            return

        ebuilds = self._overlay_ebuilds.get(overlay, [])
        if not ebuilds:
            return

        inputs = [[overlay, ebuild] for ebuild in ebuilds]
        parallel.RunTasksInProcessPool(self._uprev_ebuild, inputs)