class LoaderTest(unittest.TestCase):
  def setUp(self):
    self.build_configuration = BuildConfiguration()
    self.working_set = WorkingSet()
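    # Seed our private WorkingSet from the global (module-level) working_set so tests can
    # add distributions to it without mutating the real one.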
    for entry in working_set.entries:
      self.working_set.add_entry(entry)

  def tearDown(self):
    Goal.clear()

  @contextmanager
  def create_register(self, build_file_aliases=None, register_goals=None,
                      global_subsystems=None, module_name='register'):
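    """Synthesizes a fake backend package whose register module exposes the given entry points.

    Yields the name of the synthesized package; the package is removed from sys.modules on exit.
    """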

    package_name = b'__test_package_{0}'.format(uuid.uuid4().hex)
    self.assertNotIn(package_name, sys.modules)

    package_module = types.ModuleType(package_name)
    sys.modules[package_name] = package_module
    try:
      register_module_fqn = b'{0}.{1}'.format(package_name, module_name)
      register_module = types.ModuleType(register_module_fqn)
      setattr(package_module, module_name, register_module)
      sys.modules[register_module_fqn] = register_module

      def register_entrypoint(function_name, function):
        if function:
          setattr(register_module, function_name, function)

      register_entrypoint('build_file_aliases', build_file_aliases)
      register_entrypoint('global_subsystems', global_subsystems)
      register_entrypoint('register_goals', register_goals)

      yield package_name
    finally:
      del sys.modules[package_name]

  def assert_empty_aliases(self):
    registered_aliases = self.build_configuration.registered_aliases()
    self.assertEqual(0, len(registered_aliases.targets))
    self.assertEqual(0, len(registered_aliases.objects))
    self.assertEqual(0, len(registered_aliases.context_aware_object_factories))
    self.assertEqual(self.build_configuration.subsystem_types(), set())

  def test_load_valid_empty(self):
    with self.create_register() as backend_package:
      load_backend(self.build_configuration, backend_package)
      self.assert_empty_aliases()

  def test_load_valid_partial_aliases(self):
    aliases = BuildFileAliases.create(targets={'bob': DummyTarget},
                                      objects={'obj1': DummyObject1,
                                               'obj2': DummyObject2})
    with self.create_register(build_file_aliases=lambda: aliases) as backend_package:
      load_backend(self.build_configuration, backend_package)
      registered_aliases = self.build_configuration.registered_aliases()
      self.assertEqual(DummyTarget, registered_aliases.targets['bob'])
      self.assertEqual(DummyObject1, registered_aliases.objects['obj1'])
      self.assertEqual(DummyObject2, registered_aliases.objects['obj2'])
      self.assertEqual(self.build_configuration.subsystem_types(),
                       set([DummySubsystem1, DummySubsystem2]))

  def test_load_valid_partial_goals(self):
    def register_goals():
      Goal.by_name('jack').install(TaskRegistrar('jill', DummyTask))

    with self.create_register(register_goals=register_goals) as backend_package:
      Goal.clear()
      self.assertEqual(0, len(Goal.all()))

      load_backend(self.build_configuration, backend_package)
      self.assert_empty_aliases()
      self.assertEqual(1, len(Goal.all()))

      task_names = Goal.by_name('jack').ordered_task_names()
      self.assertEqual(1, len(task_names))

      task_name = task_names[0]
      self.assertEqual('jill', task_name)

  def test_load_invalid_entrypoint(self):
    def build_file_aliases(bad_arg):
      return BuildFileAliases.create()

    with self.create_register(build_file_aliases=build_file_aliases) as backend_package:
      with self.assertRaises(BuildConfigurationError):
        load_backend(self.build_configuration, backend_package)

  def test_load_invalid_module(self):
    with self.create_register(module_name='register2') as backend_package:
      with self.assertRaises(BuildConfigurationError):
        load_backend(self.build_configuration, backend_package)

  def test_load_missing_plugin(self):
    with self.assertRaises(PluginNotFound):
      self.load_plugins(['Foobar'])

  def get_mock_plugin(self, name, version, reg=None, alias=None, after=None):
    """Make a fake Distribution (optionally with entry points)

    Note the entry points do not actually point to code in the returned distribution --
    the distribution does not even have a location and does not contain any code, just metadata.

    A module is synthesized on the fly and installed into sys.modules under a random name.
    If optional entry point callables are provided, they are added as attributes of that module,
    and their names ('foo', 'bar', 'baz' in the fake module) are registered as the requested
    entry points in the mocked metadata attached to the returned dist.

    :param str name: project_name for distribution (see pkg_resources)
    :param str version: version for distribution (see pkg_resources)
    :param callable reg: Optional callable for goal registration entry point
    :param callable alias: Optional callable for build_file_aliases entry point
    :param callable after: Optional callable for load_after list entry point
    """

    plugin_pkg = b'demoplugin{0}'.format(uuid.uuid4().hex)
    pkg = types.ModuleType(plugin_pkg)
    sys.modules[plugin_pkg] = pkg
    module_name = b'{0}.{1}'.format(plugin_pkg, 'demo')
    plugin = types.ModuleType(module_name)
    setattr(pkg, 'demo', plugin)
    sys.modules[module_name] = plugin

    metadata = {}
    entry_lines = []

    if reg is not None:
      setattr(plugin, 'foo', reg)
      entry_lines.append('register_goals = {}:foo\n'.format(module_name))

    if alias is not None:
      setattr(plugin, 'bar', alias)
      entry_lines.append('build_file_aliases = {}:bar\n'.format(module_name))

    if after is not None:
      setattr(plugin, 'baz', after)
      entry_lines.append('load_after = {}:baz\n'.format(module_name))

    if entry_lines:
      entry_data = '[pantsbuild.plugin]\n{}\n'.format('\n'.join(entry_lines))
      metadata = {'entry_points.txt': entry_data}
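      # For example, with both `reg` and `alias` supplied, entry_data resembles
      # (the package name is randomized per call):
      #
      #   [pantsbuild.plugin]
      #   register_goals = demoplugin<hex>.demo:foo
      #   build_file_aliases = demoplugin<hex>.demo:bar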

    return Distribution(project_name=name, version=version, metadata=MockMetadata(metadata))

  def load_plugins(self, plugins):
    load_plugins(self.build_configuration, plugins, load_from=self.working_set)

  def test_plugin_load_and_order(self):
    d1 = self.get_mock_plugin('demo1', '0.0.1', after=lambda: ['demo2'])
    d2 = self.get_mock_plugin('demo2', '0.0.3')
    self.working_set.add(d1)

    # Attempting to load 'demo1' then 'demo2' should fail as 'demo1' requires 'after'=['demo2'].
    with self.assertRaises(PluginLoadOrderError):
      self.load_plugins(['demo1', 'demo2'])

    # Attempting to load 'demo2' first should fail as it is not (yet) installed.
    with self.assertRaises(PluginNotFound):
      self.load_plugins(['demo2', 'demo1'])

    # Installing demo2 and then loading in correct order should work though.
    self.working_set.add(d2)
    self.load_plugins(['demo2>=0.0.2', 'demo1'])

    # But asking for a bad (not installed) version fails.
    with self.assertRaises(VersionConflict):
      self.load_plugins(['demo2>=0.0.5'])

  def test_plugin_installs_goal(self):
    def reg_goal():
      Goal.by_name('plugindemo').install(TaskRegistrar('foo', DummyTask))
    self.working_set.add(self.get_mock_plugin('regdemo', '0.0.1', reg=reg_goal))

    # Start without the custom goal.
    self.assertEqual(0, len(Goal.by_name('plugindemo').ordered_task_names()))

    # Load plugin which registers custom goal.
    self.load_plugins(['regdemo'])

    # Now the custom goal exists.
    self.assertEqual(1, len(Goal.by_name('plugindemo').ordered_task_names()))
    self.assertEqual('foo', Goal.by_name('plugindemo').ordered_task_names()[0])

  def test_plugin_installs_alias(self):
    def reg_alias():
      return BuildFileAliases.create(targets={'pluginalias': DummyTarget},
                                     objects={'FROMPLUGIN1': DummyObject1,
                                              'FROMPLUGIN2': DummyObject2})
    self.working_set.add(self.get_mock_plugin('aliasdemo', '0.0.1', alias=reg_alias))

    # Start with no aliases.
    self.assert_empty_aliases()

    # Now load the plugin which defines aliases.
    self.load_plugins(['aliasdemo'])

    # Aliases now exist.
    registered_aliases = self.build_configuration.registered_aliases()
    self.assertEqual(DummyTarget, registered_aliases.targets['pluginalias'])
    self.assertEqual(DummyObject1, registered_aliases.objects['FROMPLUGIN1'])
    self.assertEqual(DummyObject2, registered_aliases.objects['FROMPLUGIN2'])
    self.assertEqual(self.build_configuration.subsystem_types(),
                     {DummySubsystem1, DummySubsystem2})

  def test_subsystems(self):
    def global_subsystems():
      return {DummySubsystem1, DummySubsystem2}
    with self.create_register(global_subsystems=global_subsystems) as backend_package:
      load_backend(self.build_configuration, backend_package)
      self.assertEqual(self.build_configuration.subsystem_types(),
                       {DummySubsystem1, DummySubsystem2})


class BaseTest(unittest.TestCase):
  """A baseclass useful for tests requiring a temporary buildroot."""

  def build_path(self, relpath):
    """Returns the canonical BUILD file path for the given relative build path."""
    if os.path.basename(relpath).startswith('BUILD'):
      return relpath
    else:
      return os.path.join(relpath, 'BUILD')

  def create_dir(self, relpath):
    """Creates a directory under the buildroot.

    relpath: The relative path to the directory from the build root.
    """
    path = os.path.join(self.build_root, relpath)
    safe_mkdir(path)
    return path

  def create_file(self, relpath, contents='', mode='wb'):
    """Writes to a file under the buildroot.

    relpath:  The relative path to the file from the build root.
    contents: A string containing the contents of the file; empty ('') by default.
    mode:     The mode to open the file in; overwrite ('wb') by default.
    """
    path = os.path.join(self.build_root, relpath)
    with safe_open(path, mode=mode) as fp:
      fp.write(contents)
    return path

  def add_to_build_file(self, relpath, target):
    """Adds the given target specification to the BUILD file at relpath.

    relpath: The relative path to the BUILD file from the build root.
    target:  A string containing the target definition as it would appear in a BUILD file.
    """
    self.create_file(self.build_path(relpath), target, mode='a')
    cls = self.address_mapper._build_file_type
    return cls(root_dir=self.build_root, relpath=self.build_path(relpath))

  def make_target(self,
                  spec='',
                  target_type=Target,
                  dependencies=None,
                  resources=None,
                  derived_from=None,
                  **kwargs):
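    """Creates a target of the given type at the given spec and injects it into the build graph.

    Any resources are added to the target's dependencies.
    """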
    address = SyntheticAddress.parse(spec)
    target = target_type(name=address.target_name,
                         address=address,
                         build_graph=self.build_graph,
                         **kwargs)
    # Copy to avoid mutating a caller-supplied dependencies list.
    dependencies = list(dependencies or [])
    dependencies.extend(resources or [])

    self.build_graph.inject_target(target,
                                   dependencies=[dep.address for dep in dependencies],
                                   derived_from=derived_from)
    return target

  @property
  def alias_groups(self):
    return BuildFileAliases.create(targets={'target': Dependencies})

  def setUp(self):
    super(BaseTest, self).setUp()
    Goal.clear()
    Subsystem.reset()

    self.real_build_root = BuildRoot().path

    self.build_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT'))
    self.addCleanup(safe_rmtree, self.build_root)

    self.pants_workdir = os.path.join(self.build_root, '.pants.d')
    safe_mkdir(self.pants_workdir)

    self.options = defaultdict(dict)  # scope -> key-value mapping.
    self.options[''] = {
      'pants_workdir': self.pants_workdir,
      'pants_supportdir': os.path.join(self.build_root, 'build-support'),
      'pants_distdir': os.path.join(self.build_root, 'dist'),
      'pants_configdir': os.path.join(self.build_root, 'config'),
      'cache_key_gen_version': '0-test',
    }

    BuildRoot().path = self.build_root
    self.addCleanup(BuildRoot().reset)

    # We need a pants.ini, even if empty. get_buildroot() uses its presence.
    self.create_file('pants.ini')
    self._build_configuration = BuildConfiguration()
    self._build_configuration.register_aliases(self.alias_groups)
    self.build_file_parser = BuildFileParser(self._build_configuration, self.build_root)
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser, FilesystemBuildFile)
    self.build_graph = BuildGraph(address_mapper=self.address_mapper)

  def reset_build_graph(self):
    """Start over with a fresh build graph with no targets in it."""
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser, FilesystemBuildFile)
    self.build_graph = BuildGraph(address_mapper=self.address_mapper)

  def set_options_for_scope(self, scope, **kwargs):
    self.options[scope].update(kwargs)

  def context(self, for_task_types=None, options=None, target_roots=None,
              console_outstream=None, workspace=None):
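    """Creates a pants execution context for testing.

    Options are registered via a test-only mechanism (see register_func below) and populated
    from the test defaults, the `options` argument and self.options, with inner scopes
    inheriting values from their enclosing scopes.
    """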
    for_task_types = for_task_types or []
    options = options or {}

    option_values = defaultdict(dict)
    registered_global_subsystems = set()
    bootstrap_option_values = None  # We fill these in after registering bootstrap options.

    # We provide our own test-only registration implementation, bypassing argparse.
    # When testing we set option values directly, so we don't care about cmd-line flags, config,
    # env vars etc. In fact, for test isolation we explicitly don't want to look at those.
    # All this does is make the names available in code, with the default values.
    # Individual tests can then override the option values they care about.
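    # For example, register('--some-flag', default='baz') on scope 'compile.java' simply records
    # option_values['compile.java']['some_flag'] = 'baz' (scope and flag names are illustrative).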
    def register_func(on_scope):
      def register(*rargs, **rkwargs):
        scoped_options = option_values[on_scope]
        default = rkwargs.get('default')
        if default is None and rkwargs.get('action') == 'append':
          default = []
        for flag_name in rargs:
          option_name = flag_name.lstrip('-').replace('-', '_')
          scoped_options[option_name] = default
      register.bootstrap = bootstrap_option_values
      register.scope = on_scope
      return register

    # TODO: This somewhat duplicates the real registration sequence.

    # Register bootstrap options and grab their default values for use in subsequent registration.
    register_bootstrap_options(register_func(Options.GLOBAL_SCOPE), self.build_root)
    bootstrap_option_values = create_option_values(copy.copy(option_values[Options.GLOBAL_SCOPE]))

    # Now register the remaining global scope options.
    register_global_options(register_func(Options.GLOBAL_SCOPE))

    # Now register task and subsystem options for relevant tasks.
    for task_type in for_task_types:
      scope = task_type.options_scope
      if scope is None:
        raise TaskError('You must set a scope on your task type before using it in tests.')
      task_type.register_options(register_func(scope))
      for subsystem in (set(task_type.global_subsystems()) |
                        self._build_configuration.subsystem_types()):
        if subsystem not in registered_global_subsystems:
          subsystem.register_options(register_func(subsystem.qualify_scope(Options.GLOBAL_SCOPE)))
          registered_global_subsystems.add(subsystem)
      for subsystem in task_type.task_subsystems():
        subsystem.register_options(register_func(subsystem.qualify_scope(scope)))

    # Now override the default option values with any caller-specified values.
    # TODO(benjy): Get rid of the options arg, and require tests to call set_options.

    for scope, opts in options.items():
      for key, val in opts.items():
        option_values[scope][key] = val

    for scope, opts in self.options.items():
      for key, val in opts.items():
        option_values[scope][key] = val

    # Make inner scopes inherit option values from their enclosing scopes.
    # Iterating in sorted order guarantees that we see outer scopes before inner scopes,
    # and therefore only have to inherit from our immediately enclosing scope.
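    # E.g. a key set only on scope 'compile' is copied into 'compile.java', while a key that
    # 'compile.java' sets itself is left alone (scope names here are illustrative).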
    for scope in sorted(option_values.keys()):
      if scope != Options.GLOBAL_SCOPE:
        enclosing_scope = scope.rpartition('.')[0]
        opts = option_values[scope]
        for key, val in option_values.get(enclosing_scope, {}).items():
          if key not in opts:  # Inner scope values override the inherited ones.
            opts[key] = val

    context = create_context(options=option_values,
                             target_roots=target_roots,
                             build_graph=self.build_graph,
                             build_file_parser=self.build_file_parser,
                             address_mapper=self.address_mapper,
                             console_outstream=console_outstream,
                             workspace=workspace)
    Subsystem._options = context.options
    return context

  def tearDown(self):
    SourceRoot.reset()
    FilesystemBuildFile.clear_cache()

  def target(self, spec):
    """Resolves the given target address to a Target object.

    spec: The BUILD target address to resolve.

    Returns the corresponding Target, or None if the address does not point to a
    defined Target.
    """
    address = SyntheticAddress.parse(spec)
    self.build_graph.inject_address_closure(address)
    return self.build_graph.get_target(address)

  def targets(self, spec):
    """Resolves a target spec to one or more Target objects.

    spec: Either a BUILD target address or a target glob using the siblings (':') or
          descendants ('::') suffixes.

    Returns the set of all Targets found.
    """

    spec_parser = CmdLineSpecParser(self.build_root, self.address_mapper)
    addresses = list(spec_parser.parse_addresses(spec))
    for address in addresses:
      self.build_graph.inject_address_closure(address)
    targets = [self.build_graph.get_target(address) for address in addresses]
    return targets

  def create_files(self, path, files):
    """Writes to a file under the buildroot with contents same as file name.

     path:  The relative path to the file from the build root.
     files: List of file names.
    """
    for f in files:
      self.create_file(os.path.join(path, f), contents=f)

  def create_library(self, path, target_type, name, sources=None, **kwargs):
    """Creates a library target of given type at the BUILD file at path with sources

     path: The relative path to the BUILD file from the build root.
     target_type: valid pants target type.
     name: Name of the library target.
     sources: List of source file at the path relative to path.
     **kwargs: Optional attributes that can be set for any library target.
       Currently it includes support for resources, java_sources, provides
       and dependencies.
    """
    if sources:
      self.create_files(path, sources)
    self.add_to_build_file(path, dedent('''
          %(target_type)s(name='%(name)s',
            %(sources)s
            %(resources)s
            %(java_sources)s
            %(provides)s
            %(dependencies)s
          )
        ''' % dict(target_type=target_type,
                   name=name,
                   sources=('sources=%s,' % repr(sources)
                              if sources else ''),
                   resources=('resources=["%s"],' % kwargs.get('resources')
                              if 'resources' in kwargs else ''),
                   java_sources=('java_sources=[%s],'
                                 % ','.join(map(lambda str_target: '"%s"' % str_target,
                                                kwargs.get('java_sources')))
                                 if 'java_sources' in kwargs else ''),
                   provides=('provides=%s,' % kwargs.get('provides')
                              if 'provides' in kwargs else ''),
                   dependencies=('dependencies=%s,' % kwargs.get('dependencies')
                              if 'dependencies' in kwargs else ''),
                   )))
    return self.target('%s:%s' % (path, name))

  def create_resources(self, path, name, *sources):
    return self.create_library(path, 'resources', name, sources)

  @contextmanager
  def workspace(self, *buildfiles):
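    """Creates a temporary build root containing the given (empty) BUILD files.

    Yields the real path of the temporary root.
    """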
    with temporary_dir() as root_dir:
      with BuildRoot().temporary(root_dir):
        with pushd(root_dir):
          for buildfile in buildfiles:
            touch(os.path.join(root_dir, buildfile))
          yield os.path.realpath(root_dir)

  def populate_compile_classpath(self, context, classpath=None):
    """
    Helps actual test cases to populate the 'compile_classpath' products data mapping
    in the context, which holds the classpath value for targets.

    :param context: The execution context where the products data mapping lives.
    :param classpath: a list of classpath strings. If not specified, [os.path.join(self.buildroot, 'none')] will be used.
    """
    classpath = classpath or [os.path.join(self.build_root, 'none')]
    compile_classpaths = context.products.get_data('compile_classpath', lambda: UnionProducts())
    compile_classpaths.add_for_targets(context.targets(), [('default', entry) for entry in classpath])

  @contextmanager
  def add_data(self, context_products, data_type, target, *products):
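    """Registers data products of `data_type` for `target` in `context_products`.

    Each entry in `products` is written to a file (whose contents are its own name) under a
    temporary directory, and its absolute path is recorded for `target`; the directory lives
    for the duration of the context.
    """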
    make_products = lambda: defaultdict(MultipleRootedProducts)
    data_by_target = context_products.get_data(data_type, make_products)
    with temporary_dir() as outdir:
      def create_product(product):
        abspath = os.path.join(outdir, product)
        with safe_open(abspath, mode='w') as fp:
          fp.write(product)
        return abspath
      data_by_target[target].add_abs_paths(outdir, map(create_product, products))
      yield outdir

  @contextmanager
  def add_products(self, context_products, product_type, target, *products):
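    """Registers products of `product_type` for `target` in `context_products`.

    Each entry in `products` is written to a file under a temporary directory and added to
    the product mapping relative to that directory; the directory lives for the duration of
    the context.
    """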
    product_mapping = context_products.get(product_type)
    with temporary_dir() as outdir:
      def create_product(product):
        with safe_open(os.path.join(outdir, product), mode='w') as fp:
          fp.write(product)
        return product
      product_mapping.add(target, outdir, map(create_product, products))
      yield outdir