Example #1
def _UprevOverlays(manifest, overlays, overlay_ebuilds, revved_packages,
                   new_package_atoms, new_ebuild_files, removed_ebuild_files):
    """Execute uprevs for overlays in sequence.

    Args:
      manifest: The manifest of the given source root.
      overlays: A list of overlays to commit.
      overlay_ebuilds: A dict mapping overlays to their ebuilds.
      revved_packages: A shared list of revved packages.
      new_package_atoms: A shared list of new package atoms.
      new_ebuild_files: New stable ebuild paths.
      removed_ebuild_files: Old ebuild paths that were removed.
    """
    for overlay in overlays:
        if not os.path.isdir(overlay):
            logging.warning('Skipping %s, which is not a directory.', overlay)
            continue

        ebuilds = overlay_ebuilds.get(overlay, [])
        if not ebuilds:
            continue

        with parallel.Manager() as manager:
            # Contains the commit messages for the packages we revved.
            messages = manager.list()

            inputs = [[
                overlay, ebuild, manifest, new_ebuild_files,
                removed_ebuild_files, messages, revved_packages,
                new_package_atoms
            ] for ebuild in ebuilds]
            parallel.RunTasksInProcessPool(_WorkOnEbuild, inputs)
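The helpers above are chromite-specific, but a minimal, self-contained sketch of the same fan-out pattern can be written with only the standard library, assuming parallel.Manager() and parallel.RunTasksInProcessPool behave like multiprocessing's Manager and a process pool (the _work_on_item name is illustrative, not chromite's):

import multiprocessing


def _work_on_item(item, messages):
    # Hypothetical stand-in for _WorkOnEbuild: append one message per item.
    messages.append('processed %s' % item)


def main():
    with multiprocessing.Manager() as manager:
        # A proxy list that child processes can safely append to.
        messages = manager.list()
        with multiprocessing.Pool() as pool:
            pool.starmap(_work_on_item,
                         [(item, messages) for item in ('a', 'b', 'c')])
        print(list(messages))


if __name__ == '__main__':
    main()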
Example #2
    def testSigterm(self):
        """Tests that parallel.Manager() ignores SIGTERM."""
        with parallel.Manager() as manager:
            queue = manager.Queue()
            os.kill(manager._process.pid, signal.SIGTERM)
            with self.assertRaises(Queue.Empty):
                queue.get(block=False)
Example #3
    def _ParallelHelloWorld(self):
        """Write 'hello world' to stdout using multiple processes."""
        with parallel.Manager() as manager:
            queue = manager.Queue()
            with parallel.BackgroundTaskRunner(self._HelloWorld, queue=queue):
                queue.put([])
                self.printed_hello.wait()
Example #4
    def testWorkOnEbuildWithNewPackage(self):
        """Test _WorkOnEbuild with new packages."""
        overlay = self._overlays[0]
        ebuild = EbuildMock('ebuild')

        with parallel.Manager() as manager:
            revved_packages = manager.list()
            new_package_atoms = manager.list()

            messages = manager.list()
            ebuild_paths_to_add = manager.list()
            ebuild_paths_to_remove = manager.list()

            cros_mark_as_stable._WorkOnEbuild(overlay, ebuild, self._manifest,
                                              self._commit_options,
                                              ebuild_paths_to_add,
                                              ebuild_paths_to_remove, messages,
                                              revved_packages,
                                              new_package_atoms)
            self.assertCountEqual(ebuild_paths_to_add, ['ebuild_new_ebuild'])
            self.assertCountEqual(ebuild_paths_to_remove,
                                  ['ebuild_old_ebuild'])
            self.assertCountEqual(
                messages,
                [cros_mark_as_stable._GIT_COMMIT_MESSAGE % 'ebuild_package'])
            self.assertCountEqual(revved_packages, ['ebuild_package'])
            self.assertCountEqual(new_package_atoms, ['=ebuild_new_package'])
Example #5
    def testChildExits(self):
        """Create a child and a grandchild; grandchild dies with the child."""
        def GrandChild():
            parallel.ExitWithParent()
            time.sleep(9)

        def Child(queue):
            grand_child = multiprocessing.Process(target=GrandChild)
            grand_child.start()
            queue.put(grand_child.pid)
            time.sleep(9)

        with parallel.Manager() as manager:
            q = manager.Queue()
            child = multiprocessing.Process(target=lambda: Child(q))
            child.start()
            grand_child_pid = q.get(timeout=1)

        # Before we kill the child, the grandchild should be running:
        self.assertTrue(os.path.isdir('/proc/%d' % grand_child_pid))
        os.kill(child.pid, signal.SIGKILL)

        # (shortly) after we kill the child, the grandchild should kill itself.
        # We can't use os.waitpid because the grandchild process is not a child
        # process of ours. Just wait 20 seconds - this should be enough even if the
        # machine is under load.
        timeout_util.WaitForReturnTrue(
            lambda: not os.path.isdir('/proc/%d' % grand_child_pid),
            20,
            period=0.05)
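parallel.ExitWithParent is the piece that ties the grandchild's lifetime to its parent here. On Linux this kind of behavior is typically implemented with prctl(PR_SET_PDEATHSIG); a minimal sketch of that mechanism, offered as an assumption rather than a reading of chromite's source:

import ctypes
import signal

PR_SET_PDEATHSIG = 1  # constant from <linux/prctl.h>


def exit_with_parent():
    # Ask the kernel to deliver SIGHUP to this process when its parent dies.
    libc = ctypes.CDLL('libc.so.6', use_errno=True)
    libc.prctl(PR_SET_PDEATHSIG, signal.SIGHUP)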
Example #6
def _CreateTsMonFlushingProcess(options):
    """Creates a separate process to flush ts_mon metrics.

    Useful for multiprocessing scenarios where we don't want multiple ts-mon
    threads sending contradictory metrics. Instead, functions in
    chromite.lib.metrics will send their calls to a Queue, which is consumed
    by a dedicated flushing process.

    Args:
      options: An argparse options object to configure ts-mon with.

    Side effects:
      Sets chromite.lib.metrics.MESSAGE_QUEUE, which causes the metric
      functions to send their calls to the Queue instead of creating the
      metrics.
    """
    # If this is nested, we don't need to create another queue and another
    # message consumer; do nothing and keep using the existing queue.
    if metrics.MESSAGE_QUEUE or metrics.FLUSHING_PROCESS:
        return

    with parallel.Manager() as manager:
        message_q = manager.Queue()

        metrics.FLUSHING_PROCESS = multiprocessing.Process(
            target=lambda: _SetupAndConsumeMessages(message_q, options))
        metrics.FLUSHING_PROCESS.start()

        # this makes the chromite.lib.metric functions use the queue.
        # note - we have to do this *after* forking the ConsumeMessages process.
        metrics.MESSAGE_QUEUE = message_q

        try:
            yield message_q
        finally:
            _CleanupMetricsFlushingProcess()
Example #7
    def setUp(self):
        # List of all stages that would have been called as part of this run.
        self.called_stages = []

        # Simple new function that redirects RunStage to record all stages to be
        # run rather than mock them completely. These can be used in a test to
        # assert something has been called.
        def run_stage(_class_instance, stage_name, *_args, **_kwargs):
            self.called_stages.append(stage_name)

        # Parallel version.
        def run_parallel_stages(_class_instance, stages_list):
            for stage in stages_list:
                self.called_stages.append(type(stage))

        self.buildroot = os.path.join(self.tempdir, 'buildroot')
        chroot_path = os.path.join(self.buildroot,
                                   constants.DEFAULT_CHROOT_DIR)
        osutils.SafeMakedirs(os.path.join(chroot_path, 'tmp'))

        # Parallel mock is used since some work in simple builders gets done
        # in a separate process.
        self.StartPatcher(parallel_unittest.ParallelMock())
        self.PatchObject(generic_builders.Builder, '_RunStage', new=run_stage)
        self.PatchObject(simple_builders.SimpleBuilder,
                         '_RunParallelStages',
                         new=run_parallel_stages)
        self.PatchObject(cbuildbot_run._BuilderRunBase,
                         'GetVersion',
                         return_value='R32-1234.0.0')

        self._manager = parallel.Manager()
        self._manager.__enter__()
Example #8
def _WorkOnCommit(options, overlays, manifest, package_list):
  """Commit uprevs of overlays belonging to different git projects in parallel.

  Args:
    options: The options object returned by the argument parser.
    overlays: A list of overlays to work on.
    manifest: The manifest of the given source root.
    package_list: A list of packages passed from commandline to work on.
  """
  overlay_ebuilds = _GetOverlayToEbuildsMap(overlays, package_list,
                                            options.force)

  with parallel.Manager() as manager:
    # Contains the array of packages we actually revved.
    revved_packages = manager.list()
    new_package_atoms = manager.list()
    new_ebuild_files = manager.list()
    removed_ebuild_files = manager.list()

    inputs = [[manifest, [overlay], overlay_ebuilds, revved_packages,
               new_package_atoms, new_ebuild_files, removed_ebuild_files]
              for overlay in overlays]
    parallel.RunTasksInProcessPool(_UprevOverlays, inputs)

    if options.chroot and os.path.exists(options.chroot):
      CleanStalePackages(options.boards or [], new_package_atoms,
                         options.chroot)

    if options.dump_files:
      osutils.WriteFile('/tmp/revved_packages', '\n'.join(revved_packages))
      osutils.WriteFile('/tmp/new_ebuild_files', '\n'.join(new_ebuild_files))
      osutils.WriteFile('/tmp/removed_ebuild_files',
                        '\n'.join(removed_ebuild_files))
Example #9
def _WorkOnCommit(options, overlays, overlay_tracking_branch,
                  git_project_overlays, manifest, package_list):
    """Commit uprevs of overlays belonging to different git projects in parallel.

    Args:
      options: The options object returned by the argument parser.
      overlays: A list of overlays to work on.
      overlay_tracking_branch: A dict mapping from each overlay to its
        tracking branch.
      git_project_overlays: A dict mapping from each git repository to a
        list of its overlays.
      manifest: The manifest of the given source root.
      package_list: A list of packages passed from commandline to work on.
    """
    overlay_ebuilds = _GetOverlayToEbuildsMap(options, overlays, package_list)

    with parallel.Manager() as manager:
        # Contains the array of packages we actually revved.
        revved_packages = manager.list()
        new_package_atoms = manager.list()

        inputs = [[
            options, manifest, overlays_per_project, overlay_tracking_branch,
            overlay_ebuilds, revved_packages, new_package_atoms
        ] for overlays_per_project in git_project_overlays.itervalues()]
        parallel.RunTasksInProcessPool(_CommitOverlays, inputs)

        chroot_path = os.path.join(options.srcroot,
                                   constants.DEFAULT_CHROOT_DIR)
        if os.path.exists(chroot_path):
            CleanStalePackages(options.srcroot, options.boards.split(':'),
                               new_package_atoms)
        if options.drop_file:
            osutils.WriteFile(options.drop_file, ' '.join(revved_packages))
Example #10
    def setUp(self):
        self.buildroot = os.path.join(self.tempdir, 'buildroot')
        osutils.SafeMakedirs(self.buildroot)
        # Always stub RunCommand out as we use it in every method.
        self.site_config = config_lib_unittest.MockSiteConfig()
        self.build_config = config_lib_unittest.MockBuildConfig()
        self.bot_id = self.build_config.name
        self.build_config['master'] = False
        self.build_config['important'] = False

        # Use the cbuildbot parser to create properties and populate default values.
        self.parser = cbuildbot._CreateParser()

        argv = ['-r', self.buildroot, '--buildbot', '--debug', self.bot_id]
        self.options, _ = cbuildbot._ParseCommandLine(self.parser, argv)
        self.options.bootstrap = False
        self.options.clean = False
        self.options.resume = False
        self.options.sync = False
        self.options.build = False
        self.options.uprev = False
        self.options.tests = False
        self.options.archive = False
        self.options.remote_test_status = False
        self.options.patches = None
        self.options.prebuilts = False

        self._manager = parallel.Manager()
        self._manager.__enter__()
        self.run = cbuildbot_run.BuilderRun(self.options, self.site_config,
                                            self.build_config, self._manager)

        self.rc.AddCmdResult(
            [constants.PATH_TO_CBUILDBOT, '--reexec-api-version'],
            output=constants.REEXEC_API_VERSION)
Example #11
    def _initConfig(self,
                    bot_id,
                    master=False,
                    extra_argv=None,
                    override_hw_test_config=None,
                    models=None):
        """Return normal options/build_config for |bot_id|"""
        site_config = config_lib.GetConfig()
        build_config = copy.deepcopy(site_config[bot_id])
        build_config['master'] = master
        build_config['important'] = False
        if models:
            build_config['models'] = models

        # Use the cbuildbot parser to create properties and populate default values.
        parser = cbuildbot._CreateParser()
        argv = (
            ['-r', self.buildroot, '--buildbot', '--debug', '--nochromesdk'] +
            (extra_argv if extra_argv else []) + [bot_id])
        options = cbuildbot.ParseCommandLine(parser, argv)

        # Yikes.
        options.managed_chrome = build_config['sync_chrome']

        # Iterate through override and update HWTestConfig attributes.
        if override_hw_test_config:
            for key, val in override_hw_test_config.items():
                for hw_test in build_config.hw_tests:
                    setattr(hw_test, key, val)

        return cbuildbot_run.BuilderRun(options, site_config, build_config,
                                        parallel.Manager())
Example #12
def singleton_manager(monkeypatch):
  """Force tests to use a singleton Manager and automatically clean it up."""
  m = parallel.Manager()

  def our_manager():
    return m

  monkeypatch.setattr(parallel, 'Manager', our_manager)
  yield
  m.shutdown()
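Assuming the function above is decorated with @pytest.fixture (the decorator is not shown in this listing) and lives in a conftest.py, any test that requests it sees every parallel.Manager() call return the same object:

def test_manager_is_a_singleton(singleton_manager):
    # Both calls return the monkeypatched singleton, so state is shared
    # across everything that asks for a manager during this test.
    assert parallel.Manager() is parallel.Manager()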
Example #13
    def FetchCompatIds(self, board_keys):
        """Generate a dict mapping BoardKeys to their associated CompatId.

        Args:
          board_keys: A list of BoardKey objects to fetch.
        """
        # pylint: disable=method-hidden
        logging.info('Fetching CompatId objects...')
        with parallel.Manager() as manager:
            self.compat_ids = manager.dict()
            parallel.RunTasksInProcessPool(self._FetchCompatId, board_keys)
            return dict(self.compat_ids)
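A standard-library sketch of the same fan-out/aggregate shape as FetchCompatIds: each worker writes its result into a manager.dict() keyed by its input, and the parent snapshots the proxy into a plain dict before the manager exits. The payload string is a placeholder, not a real CompatId:

import multiprocessing


def _fetch_one(results, key):
    results[key] = 'compat-id-for-%s' % key  # placeholder payload


def main():
    with multiprocessing.Manager() as manager:
        results = manager.dict()
        with multiprocessing.Pool() as pool:
            pool.starmap(_fetch_one,
                         [(results, k) for k in ('board-a', 'board-b')])
        print(dict(results))  # snapshot while the manager is still alive


if __name__ == '__main__':
    main()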
Example #14
  def setUp(self):
    # Prepare a fake build root in self.tempdir, save at self.build_root.
    self.build_root = os.path.join(self.tempdir, self.BUILDROOT)
    osutils.SafeMakedirs(os.path.join(self.build_root, '.repo'))

    self._manager = parallel.Manager()
    self._manager.__enter__()

    # These are here to make pylint happy.  Values filled in by _Prepare.
    self._bot_id = None
    self._current_board = None
    self._boards = None
    self._run = None
Example #15
    def uprev(self, package_list=None, force=False):
        """Uprev ebuilds.

        Uprev ebuilds for the packages in package_list. If package_list is
        not specified, uprevs all ebuilds for overlays in self.overlays.

        Args:
          package_list (list[str]): A list of packages to uprev.
          force: Boolean indicating whether or not to consider blacklisted
            ebuilds.
        """
        # Use all found packages if an explicit package_list is not given.
        use_all = not bool(package_list)
        self._populate_overlay_ebuilds(use_all=use_all,
                                       package_list=package_list,
                                       force=force)

        with parallel.Manager() as manager:
            # Contains the list of packages we actually revved.
            self._revved_packages = manager.list()
            # The new package atoms for cleanup.
            self._new_package_atoms = manager.list()
            # The list of added ebuild files.
            self._new_ebuild_files = manager.list()
            # The list of removed ebuild files.
            self._removed_ebuild_files = manager.list()

            inputs = [[overlay] for overlay in self.overlays]
            parallel.RunTasksInProcessPool(self._uprev_overlay, inputs)

            self._revved_packages = list(self._revved_packages)
            self._new_package_atoms = list(self._new_package_atoms)
            self._new_ebuild_files = list(self._new_ebuild_files)
            self._removed_ebuild_files = list(self._removed_ebuild_files)

        self._clean_stale_packages()

        if self.output_dir and os.path.exists(self.output_dir):
            # Write out dumps of the results. This is largely meant for sanity
            # checking results.
            osutils.WriteFile(os.path.join(self.output_dir, 'revved_packages'),
                              '\n'.join(self._revved_packages))
            osutils.WriteFile(
                os.path.join(self.output_dir, 'new_package_atoms'),
                '\n'.join(self._new_package_atoms))
            osutils.WriteFile(
                os.path.join(self.output_dir, 'new_ebuild_files'),
                '\n'.join(self._new_ebuild_files))
            osutils.WriteFile(
                os.path.join(self.output_dir, 'removed_ebuild_files'),
                '\n'.join(self._removed_ebuild_files))
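Note why uprev() reassigns each manager.list() to a plain list before the with-block closes: the proxies are served by the manager's process, and dereferencing them after the manager shuts down fails. A minimal demonstration of the hazard:

import multiprocessing

with multiprocessing.Manager() as manager:
    shared = manager.list()
    shared.append('pkg')
    snapshot = list(shared)  # copy while the manager is still alive

print(snapshot)  # safe: a plain Python list
# Calling list(shared) here would raise, since the manager process exited.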
Example #16
def SetupStats():
    """Prepare a given category to collect stats.

    This must be called BEFORE any new processes that might read or write to
    these stat values are created. It is safe to call this more than once,
    but most efficient to only make a single call.
    """
    # Pylint thinks our manager has no members.
    m = parallel.Manager()

    # pylint: disable=global-statement
    # Create a new stats collection structure that is multiprocess usable.
    global _STATS_COLLECTION
    _STATS_COLLECTION = m.list()
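The ordering constraint in the docstring is the whole point: the shared list must exist before any fork so that children inherit a proxy to it. A compact standard-library sketch of the same idea, with illustrative names:

import multiprocessing

_MANAGER = None
_STATS_COLLECTION = None


def setup_stats():
    """Create the shared stats list; call BEFORE spawning any workers."""
    global _MANAGER, _STATS_COLLECTION
    if _STATS_COLLECTION is None:
        _MANAGER = multiprocessing.Manager()
        _STATS_COLLECTION = _MANAGER.list()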
Example #17
    def FetchChanges(self, changes, manifest=None):
        """Fetch the specified changes, if needed.

        If we're an external builder, internal changes are filtered out.

        Args:
          changes: A list of changes to fetch.
          manifest: A ManifestCheckout instance representing what we're
            working on.

        Returns:
          A list of the filtered changes and a list of
          cros_patch.ChangeNotInManifest instances for changes not in
          manifest.
        """
        by_repo = {}
        changes_to_fetch = []
        not_in_manifest = []
        for change in changes:
            try:
                self._helper_pool.ForChange(change)
            except GerritHelperNotAvailable:
                # Internal patches are irrelevant to external builders.
                logging.info('Skipping internal patch: %s', change)
                continue

            repo = None
            try:
                repo = self.GetGitRepoForChange(change,
                                                strict=True,
                                                manifest=manifest)
            except cros_patch.ChangeNotInManifest as e:
                logging.info("Skipping patch %s as it's not in manifest.",
                             change)
                not_in_manifest.append(e)
                continue

            by_repo.setdefault(repo, []).append(change)
            changes_to_fetch.append(change)

        # Fetch changes in parallel. The change.Fetch() method modifies the
        # 'change' object, so make sure we grab all of that information.
        with parallel.Manager() as manager:
            fetched_changes = manager.dict()

            fetch_repo = functools.partial(_FetchChangesForRepo,
                                           fetched_changes, by_repo)
            parallel.RunTasksInProcessPool(fetch_repo,
                                           [[repo] for repo in by_repo])

            return [fetched_changes[c.id]
                    for c in changes_to_fetch], not_in_manifest
Example #18
  def setUp(self):
    self.buildstore = FakeBuildStore()

    self.buildroot = os.path.join(self.tempdir, 'buildroot')
    self.workspace = os.path.join(self.tempdir, 'workspace')
    self.chroot_path = os.path.join(self.tempdir, 'chroot')

    self._manager = parallel.Manager()
    # Pylint-1.9 has a false positive on this for some reason.
    self._manager.__enter__()  # pylint: disable=no-value-for-parameter

    self.site_config = CreateMockSiteConfig()

    self.mock_run_stage = self.PatchObject(
        generic_builders.Builder, '_RunStage')
Example #19
    def setUp(self):
        # Prepare a fake build root in self.tempdir, save at self.build_root.
        self.build_root = os.path.join(self.tempdir, self.BUILDROOT)
        osutils.SafeMakedirs(os.path.join(self.build_root, '.repo'))

        self._manager = parallel.Manager()
        # Pylint-1.9 has a false positive on this for some reason.
        self._manager.__enter__()  # pylint: disable=no-value-for-parameter

        # These are here to make pylint happy.  Values filled in by _Prepare.
        self._bot_id = None
        self._current_board = None
        self._boards = None
        self._run = None
        self._model = None
        self.buildstore = FakeBuildStore()
Example #20
def _CreateTsMonFlushingProcess(setup_args, setup_kwargs):
  """Creates a separate process to flush ts_mon metrics.

  Useful for multiprocessing scenarios where we don't want multiple ts-mon
  threads sending contradictory metrics. Instead, functions in
  chromite.lib.metrics will send their calls to a Queue, which is consumed by a
  dedicated flushing process.

  Args:
    setup_args: Arguments sent to SetupTsMonGlobalState in the child process
    setup_kwargs: Keyword arguments sent to SetupTsMonGlobalState in the child
      process

  Side effects:
    Sets chromite.lib.metrics.MESSAGE_QUEUE, which causes the metric functions
    to send their calls to the Queue instead of creating the metrics.
  """
  # If this is nested, we don't need to create another queue and another
  # message consumer; do nothing and keep using the existing queue.
  if metrics.MESSAGE_QUEUE:
    return

  with parallel.Manager() as manager:
    message_q = manager.Queue()

    p = multiprocessing.Process(
        target=lambda: _ConsumeMessages(message_q, setup_args, setup_kwargs))
    p.start()

    # this makes the chromite.lib.metric functions use the queue.
    # note - we have to do this *after* forking the ConsumeMessages process.
    metrics.MESSAGE_QUEUE = message_q

    try:
      yield message_q
    finally:
      # Now that there is no longer a process to listen to the Queue, re-set it
      # to None so that any future metrics are created within this process.
      metrics.MESSAGE_QUEUE = None
      # Send the sentinel value for "flush one more time and exit".
      message_q.put(None)
      logging.info("Waiting for ts_mon flushing process to finish...")
      p.join(timeout=FLUSH_INTERVAL*2)
      if p.is_alive():
        p.terminate()
      if p.exitcode:
        logging.warning("ts_mon_config flushing process did not exit cleanly.")
Example #21
    def testEnqueue(self):
        """Test that _Indirect enqueues messages correctly."""
        metric = metrics.Boolean

        with parallel.Manager() as manager:
            q = manager.Queue()
            self.PatchObject(metrics, 'MESSAGE_QUEUE', q)

            proxy_metric = metric('foo')
            proxy_metric.example('arg1', 'arg2')

            message = q.get(timeout=10)

        self.assertEqual(
            message,
            metrics.MetricCall(metric.__name__, ('foo', ), {}, 'example',
                               ('arg1', 'arg2'), {}, False))
Example #22
  def setUp(self):
    # List of all stages that would have been called as part of this run.
    self.called_stages = []

    # Map from stage class to exception to be raised when stage is run.
    self.stage_exceptions = {}

    # VM test stages that are run by SimpleBuilder._RunVMTests.
    self.all_vm_test_stages = [vm_test_stages.VMTestStage,
                               tast_test_stages.TastVMTestStage]

    self.buildstore = FakeBuildStore()
    # Simple new function that redirects RunStage to record all stages to be
    # run rather than mock them completely. These can be used in a test to
    # assert something has been called.
    def run_stage(_class_instance, stage_name, *args, **_kwargs):
      # It's more useful to record the actual stage that's wrapped within
      # RepeatStage or RetryStage.
      if stage_name in [generic_stages.RepeatStage, generic_stages.RetryStage]:
        stage_name = args[1]

      self.called_stages.append(stage_name)
      if stage_name in self.stage_exceptions:
        raise self.stage_exceptions[stage_name]

    # Parallel version.
    def run_parallel_stages(_class_instance, *_args):
      # Since parallel stages are forked processes, we can't actually
      # update anything here unless we want to do interprocess comms.
      pass

    self.buildroot = os.path.join(self.tempdir, 'buildroot')
    chroot_path = os.path.join(self.buildroot, constants.DEFAULT_CHROOT_DIR)
    osutils.SafeMakedirs(os.path.join(chroot_path, 'tmp'))

    self.PatchObject(generic_builders.Builder, '_RunStage',
                     new=run_stage)
    self.PatchObject(simple_builders.SimpleBuilder, '_RunParallelStages',
                     new=run_parallel_stages)
    self.PatchObject(cbuildbot_run._BuilderRunBase, 'GetVersion',
                     return_value='R32-1234.0.0')

    self._manager = parallel.Manager()
    # Pylint-1.9 has a false positive on this for some reason.
    self._manager.__enter__()  # pylint: disable=no-value-for-parameter
Example #23
    def setUp(self):
        # Always stub RunCommand out as we use it in every method.
        self._bot_id = 'amd64-generic-paladin'
        self.buildstore = FakeBuildStore()
        site_config = config_lib_unittest.MockSiteConfig()
        build_config = site_config[self._bot_id]
        self.build_root = '/fake_root'
        # This test compares log output from the stages, so turn on buildbot
        # logging.
        logging.EnableBuildbotMarkers()

        self.db = fake_cidb.FakeCIDBConnection()
        cidb.CIDBConnectionFactory.SetupMockCidb(self.db)

        # Create a class to hold option values.
        class Options(object):
            """Dummy class to hold option values."""

        options = Options()
        options.archive_base = 'gs://dontcare'
        options.buildroot = self.build_root
        options.debug = False
        options.prebuilts = False
        options.clobber = False
        options.nosdk = False
        options.remote_trybot = False
        options.latest_toolchain = False
        options.buildnumber = 1234
        options.android_rev = None
        options.chrome_rev = None
        options.branch = 'dontcare'
        options.chrome_root = False
        options.build_config_name = ''

        self._manager = parallel.Manager()
        # Pylint-1.9 has a false positive on this for some reason.
        self._manager.__enter__()  # pylint: disable=no-value-for-parameter

        self._run = cbuildbot_run.BuilderRun(options, site_config,
                                             build_config, self._manager)

        results_lib.Results.Clear()
Example #24
  def testArgs(self):
    """Test that we can pass args down to the task."""
    with parallel.Manager() as manager:
      results = manager.Queue()
      arg2s = set((1, 2, 3))
      with parallel.BackgroundTaskRunner(_BackgroundTaskRunnerArgs, results,
                                         'arg1', kwarg1='kwarg1') as queue:
        for arg2 in arg2s:
          queue.put((arg2,))

      # Since the queue is unordered, need to handle arg2 specially.
      result_arg2s = set()
      for _ in range(3):
        result = results.get()
        self.assertEqual(result[0], 'arg1')
        result_arg2s.add(result[1])
        self.assertEqual(result[2], 'kwarg1')
        self.assertEqual(result[3], None)
      self.assertEqual(arg2s, result_arg2s)
      self.assertEqual(results.empty(), True)
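BackgroundTaskRunner is chromite-specific: it starts background processes that invoke the given task once per item put on the queue, passing the fixed positional and keyword arguments alongside each item, then drains and joins on exit. A rough standard-library analogue with one worker and a sentinel for shutdown (all names here are illustrative):

import multiprocessing


def _task(prefix, arg2, kwarg1=None):
    print(prefix, arg2, kwarg1)


def _worker(queue):
    while True:
        args = queue.get()
        if args is None:
            break  # sentinel: no more work
        _task('arg1', *args, kwarg1='kwarg1')


def main():
    queue = multiprocessing.Queue()
    p = multiprocessing.Process(target=_worker, args=(queue,))
    p.start()
    for arg2 in (1, 2, 3):
        queue.put((arg2,))
    queue.put(None)  # shut the worker down
    p.join()


if __name__ == '__main__':
    main()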
Example #25
def _WorkOnCommit(options, overlays, overlay_tracking_branch,
                  git_project_overlays, manifest, package_list):
    """Commit uprevs of overlays belonging to different git projects in parallel.

    Args:
      options: The options object returned by the argument parser.
      overlays: A list of overlays to work on.
      overlay_tracking_branch: A dict mapping from each overlay to its
        tracking branch.
      git_project_overlays: A dict mapping from each git repository to a
        list of its overlays.
      manifest: The manifest of the given source root.
      package_list: A list of packages passed from commandline to work on.
    """
    # We cleaned up self-referential ebuilds by this version, but don't
    # enforce the check on older ones to avoid breaking factory/firmware
    # branches.
    root_version = manifest_version.VersionInfo.from_repo(options.buildroot)
    no_self_repos_version = manifest_version.VersionInfo('13099.0.0')
    reject_self_repo = root_version >= no_self_repos_version

    overlay_ebuilds = _GetOverlayToEbuildsMap(options, overlays, package_list)

    with parallel.Manager() as manager:
        # Contains the array of packages we actually revved.
        revved_packages = manager.list()
        new_package_atoms = manager.list()

        inputs = [[
            options, manifest, overlays_per_project, overlay_tracking_branch,
            overlay_ebuilds, revved_packages, new_package_atoms,
            reject_self_repo
        ] for overlays_per_project in git_project_overlays.values()]
        parallel.RunTasksInProcessPool(_CommitOverlays, inputs)

        chroot_path = os.path.join(options.buildroot,
                                   constants.DEFAULT_CHROOT_DIR)
        if os.path.exists(chroot_path):
            CleanStalePackages(options.buildroot, options.boards.split(':'),
                               new_package_atoms)
        if options.drop_file:
            osutils.WriteFile(options.drop_file, ' '.join(revved_packages))
Example #26
  def testEnqueue(self):
    """Test that _Indirect enqueues messages correctly."""
    metric = metrics.Boolean

    with parallel.Manager() as manager:
      q = manager.Queue()
      self.PatchObject(metrics, 'MESSAGE_QUEUE', q)

      proxy_metric = metric('foo')
      proxy_metric.example('arg1', {'field_name': 'value'})

      message = q.get(timeout=10)

    expected_metric_kwargs = {
        'field_spec': [ts_mon.StringField('field_name')],
        'description': 'No description.',
    }
    self.assertEqual(
        message,
        metrics.MetricCall(metric.__name__, ('foo',), expected_metric_kwargs,
                           'example', ('arg1', {'field_name': 'value'}), {},
                           False))
Example #27
    def testWorkOnEbuildWithoutNewPackage(self):
        """Test _WorkOnEbuild without new packages."""
        ebuild = EbuildMock('ebuild', new_package=False)
        overlay = self._overlays[0]

        with parallel.Manager() as manager:
            revved_packages = manager.list()
            new_package_atoms = manager.list()

            messages = manager.list()
            ebuild_paths_to_add = manager.list()
            ebuild_paths_to_remove = manager.list()

            cros_mark_as_stable._WorkOnEbuild(overlay, ebuild, self._manifest,
                                              self._commit_options,
                                              ebuild_paths_to_add,
                                              ebuild_paths_to_remove, messages,
                                              revved_packages,
                                              new_package_atoms)
            self.assertEqual(list(ebuild_paths_to_add), [])
            self.assertEqual(list(ebuild_paths_to_remove), [])
            self.assertEqual(list(messages), [])
            self.assertEqual(list(revved_packages), [])
            self.assertEqual(list(new_package_atoms), [])
Example #28
    def setUp(self):
        self._manager = parallel.Manager()

        # Mimic entering a 'with' statement.
        self._manager.__enter__()
Example #29
def _CommitOverlays(options,
                    manifest,
                    overlays,
                    overlay_tracking_branch,
                    overlay_ebuilds,
                    revved_packages,
                    new_package_atoms,
                    reject_self_repo=True):
    """Commit uprevs for overlays in sequence.

    Args:
      options: The options object returned by the argument parser.
      manifest: The manifest of the given source root.
      overlays: A list of overlays to commit.
      overlay_tracking_branch: A dict mapping from each overlay to its
        tracking branch.
      overlay_ebuilds: A dict mapping overlays to their ebuilds.
      revved_packages: A shared list of revved packages.
      new_package_atoms: A shared list of new package atoms.
      reject_self_repo: Whether to abort if the ebuild lives in the same git
        repo as it is tracking for uprevs.
    """
    for overlay in overlays:
        if not os.path.isdir(overlay):
            logging.warning('Skipping %s, which is not a directory.', overlay)
            continue

        # Note we intentionally work from the non-push tracking branch;
        # everything built thus far has been against it (meaning, http
        # mirrors), thus we should honor that. During the actual push, the
        # code switches to the correct urls and does an appropriate rebase.
        tracking_branch = overlay_tracking_branch[overlay]

        existing_commit = git.GetGitRepoRevision(overlay)

        # Make sure we run in the top-level git directory in case we are
        # adding/removing an overlay in existing_commit.
        git_root = git.FindGitTopLevel(overlay)
        if git_root is None:
            cros_build_lib.Die('No git repo at overlay directory %s.', overlay)

        work_branch = GitBranch(constants.STABLE_EBUILD_BRANCH,
                                tracking_branch,
                                cwd=git_root)
        work_branch.CreateBranch()
        if not work_branch.Exists():
            cros_build_lib.Die('Unable to create stabilizing branch in %s' %
                               overlay)

        # In the case of uprevving overlays that have patches applied to them,
        # include the patched changes in the stabilizing branch.
        git.RunGit(git_root, ['rebase', existing_commit])

        ebuilds = overlay_ebuilds.get(overlay, [])
        if ebuilds:
            with parallel.Manager() as manager:
                # Contains the commit messages for the packages we revved.
                messages = manager.list()
                ebuild_paths_to_add = manager.list()
                ebuild_paths_to_remove = manager.list()

                inputs = [[
                    overlay, ebuild, manifest, options, ebuild_paths_to_add,
                    ebuild_paths_to_remove, messages, revved_packages,
                    new_package_atoms, reject_self_repo
                ] for ebuild in ebuilds]
                parallel.RunTasksInProcessPool(_WorkOnEbuild, inputs)

                if ebuild_paths_to_add:
                    logging.info(
                        'Adding new stable ebuild paths %s in overlay %s.',
                        ebuild_paths_to_add, overlay)
                    git.RunGit(overlay, ['add'] + list(ebuild_paths_to_add))

                if ebuild_paths_to_remove:
                    logging.info('Removing old ebuild paths %s in overlay %s.',
                                 ebuild_paths_to_remove, overlay)
                    git.RunGit(overlay,
                               ['rm', '-f'] + list(ebuild_paths_to_remove))

                if messages:
                    portage_util.EBuild.CommitChange('\n\n'.join(messages),
                                                     overlay)
Example #30
    def setUp(self):
        self._manager = parallel.Manager()

        # Mimic entering a 'with' statement.
        # Pylint-1.9 has a false positive on this for some reason.
        self._manager.__enter__()  # pylint: disable=no-value-for-parameter