def test_remove_from_ws__removes_all_entries():
    """remove_from_ws must drop every trace of a dist, including its entry.

    Two dists of the same project are registered two different ways (one via
    ``add``, one via ``add_entry``); removing the second must leave the
    working set completely empty.
    """
    working_set = WorkingSet([])
    first = create_dist("a", "1.0", location="a10")
    second = create_dist("a", "2.0", location="a20")

    # A freshly-emptied working set contains neither dist.
    assert first not in working_set
    assert second not in working_set

    # Register the first dist as an active distribution.
    working_set.add(first)
    assert first in working_set
    assert first.location in working_set.entries
    assert second not in working_set
    assert second.location not in working_set.entries

    # Register only the *entry* (path) of the second dist; the dist itself
    # is still not considered active.
    working_set.add_entry(second.location)
    assert first in working_set
    assert first.location in working_set.entries
    assert second not in working_set
    assert second.location in working_set.entries

    # Removing the second dist is expected to purge everything.
    dependency.remove_from_ws(working_set, second)
    assert first not in working_set
    assert second not in working_set
    assert list(working_set) == []
def tests_instantiate_integration_registries(self):
    """
    Ensure that the integration registries, specifically the root
    registry, can be instantiated (or re-instantiated) in a way that
    satisfies expectations of integration test creators.
    """
    # Create a dummy package whose entry_points.txt declares two child
    # registries under the calmjs.registry group.
    make_dummy_dist(self, (
        ('entry_points.txt', '\n'.join([
            '[calmjs.registry]',
            'dummy.module = calmjs.module:ModuleRegistry',
            'other.module = calmjs.module:ModuleRegistry',
        ])),
    ), 'somepkg', '1.0')
    working_set = WorkingSet([self._calmjs_testing_tmpdir])
    # First instantiation: no prior root registry (None), requesting only
    # 'dummy.module'.
    registry = utils.instantiate_integration_registries(
        working_set, None,
        'dummy.module',
    )
    dummy_module = registry.get('dummy.module')
    other_module = registry.get('other.module')
    self.assertEqual('dummy.module', dummy_module.registry_name)
    # The tests registry does not exist yet; it is only declared below.
    self.assertIsNone(registry.get('dummy.module.tests'))
    # Declare the tests registry in a second dummy package.
    make_dummy_dist(self, (
        ('entry_points.txt', '\n'.join([
            '[calmjs.registry]',
            'dummy.module.tests = calmjs.module:ModuleRegistry',
        ])),
    ), 'somepkg.testing', '1.0')
    # re-add the tmpdir to reinitialize the working set with the
    # newly added entry points
    working_set.add_entry(self._calmjs_testing_tmpdir)
    reinstantiated_registry = utils.instantiate_integration_registries(
        working_set, registry,
        'dummy.module',
        'dummy.module.tests',
    )
    # ensure that it is the same instance, as this could be used to
    # reinstantiate the registry with the additional entries.
    self.assertIs(registry, reinstantiated_registry)
    # the inner registries should be renewed.
    self.assertIsNot(dummy_module, registry.get('dummy.module'))
    # the not reinstantiated version is not renewed
    self.assertIs(other_module, registry.get('other.module'))
    # the newly added entry points should resolve now.
    self.assertIsNotNone(registry.get('dummy.module.tests'))
class LoaderTest(unittest.TestCase):
    """Tests for backend and plugin loading (``load_backend`` / ``load_plugins``).

    NOTE(review): this file contains several definitions named LoaderTest;
    in one module the later definitions shadow this one -- these look like
    concatenated historical versions; confirm against the original layout.
    """

    def setUp(self):
        self.build_configuration = BuildConfiguration()
        # Seed a private WorkingSet from the module-level `working_set`
        # (presumably the pkg_resources global imported elsewhere -- TODO confirm).
        self.working_set = WorkingSet()
        for entry in working_set.entries:
            self.working_set.add_entry(entry)

    def tearDown(self):
        # Goal registrations are process-global; reset between tests.
        Goal.clear()

    @contextmanager
    def create_register(self, build_file_aliases=None, register_goals=None,
                        global_subsystems=None, module_name='register'):
        """Synthesize a backend package in sys.modules and yield its name.

        Each non-None callable is installed on the register module under the
        entry-point name that load_backend looks up; the package is removed
        from sys.modules on exit.

        NOTE(review): ``b'...'.format(...)`` only works where bytes is str
        (Python 2) -- confirm the intended interpreter for this version.
        """
        package_name = b'__test_package_{0}'.format(uuid.uuid4().hex)
        self.assertFalse(package_name in sys.modules)
        package_module = types.ModuleType(package_name)
        sys.modules[package_name] = package_module
        try:
            register_module_fqn = b'{0}.{1}'.format(package_name, module_name)
            register_module = types.ModuleType(register_module_fqn)
            setattr(package_module, module_name, register_module)
            sys.modules[register_module_fqn] = register_module

            def register_entrypoint(function_name, function):
                # Only install the entry points that were actually supplied.
                if function:
                    setattr(register_module, function_name, function)

            register_entrypoint('build_file_aliases', build_file_aliases)
            register_entrypoint('global_subsystems', global_subsystems)
            register_entrypoint('register_goals', register_goals)
            yield package_name
        finally:
            del sys.modules[package_name]

    def assert_empty_aliases(self):
        """Assert nothing has been registered on the build configuration."""
        registered_aliases = self.build_configuration.registered_aliases()
        self.assertEqual(0, len(registered_aliases.targets))
        self.assertEqual(0, len(registered_aliases.objects))
        self.assertEqual(0, len(registered_aliases.context_aware_object_factories))
        self.assertEqual(self.build_configuration.subsystem_types(), set())

    def test_load_valid_empty(self):
        # A backend exposing no entry points registers nothing.
        with self.create_register() as backend_package:
            load_backend(self.build_configuration, backend_package)
            self.assert_empty_aliases()

    def test_load_valid_partial_aliases(self):
        aliases = BuildFileAliases.create(
            targets={'bob': DummyTarget},
            objects={'obj1': DummyObject1, 'obj2': DummyObject2})
        with self.create_register(build_file_aliases=lambda: aliases) as backend_package:
            load_backend(self.build_configuration, backend_package)
            registered_aliases = self.build_configuration.registered_aliases()
            self.assertEqual(DummyTarget, registered_aliases.targets['bob'])
            self.assertEqual(DummyObject1, registered_aliases.objects['obj1'])
            self.assertEqual(DummyObject2, registered_aliases.objects['obj2'])
            self.assertEqual(self.build_configuration.subsystem_types(),
                             set([DummySubsystem1, DummySubsystem2]))

    def test_load_valid_partial_goals(self):
        def register_goals():
            Goal.by_name('jack').install(TaskRegistrar('jill', DummyTask))

        with self.create_register(register_goals=register_goals) as backend_package:
            Goal.clear()
            self.assertEqual(0, len(Goal.all()))
            load_backend(self.build_configuration, backend_package)
            self.assert_empty_aliases()
            self.assertEqual(1, len(Goal.all()))
            task_names = Goal.by_name('jack').ordered_task_names()
            self.assertEqual(1, len(task_names))
            task_name = task_names[0]
            self.assertEqual('jill', task_name)

    def test_load_invalid_entrypoint(self):
        # Entry-point callables must take no arguments; this one takes one,
        # so load_backend is expected to reject it.
        def build_file_aliases(bad_arg):
            return BuildFileAliases.create()

        with self.create_register(build_file_aliases=build_file_aliases) as backend_package:
            with self.assertRaises(BuildConfigurationError):
                load_backend(self.build_configuration, backend_package)

    def test_load_invalid_module(self):
        # The register module must be named 'register'; 'register2' is invalid.
        with self.create_register(module_name='register2') as backend_package:
            with self.assertRaises(BuildConfigurationError):
                load_backend(self.build_configuration, backend_package)

    def test_load_missing_plugin(self):
        with self.assertRaises(PluginNotFound):
            self.load_plugins(['Foobar'])

    def get_mock_plugin(self, name, version, reg=None, alias=None, after=None):
        """Make a fake Distribution (optionally with entry points)

        Note the entry points do not actually point to code in the returned
        distribution -- the distribution does not even have a location and
        does not contain any code, just metadata.

        A module is synthesized on the fly and installed into sys.modules
        under a random name.  If optional entry point callables are provided,
        those are added as methods to the module and their name (foo/bar/baz
        in fake module) is added as the requested entry point to the mocked
        metadata added to the returned dist.

        :param str name: project_name for distribution (see pkg_resources)
        :param str version: version for distribution (see pkg_resources)
        :param callable reg: Optional callable for goal registration entry point
        :param callable alias: Optional callable for build_file_aliases entry point
        :param callable after: Optional callable for load_after list entry point
        """
        plugin_pkg = b'demoplugin{0}'.format(uuid.uuid4().hex)
        pkg = types.ModuleType(plugin_pkg)
        sys.modules[plugin_pkg] = pkg
        module_name = b'{0}.{1}'.format(plugin_pkg, 'demo')
        plugin = types.ModuleType(module_name)
        setattr(pkg, 'demo', plugin)
        sys.modules[module_name] = plugin
        metadata = {}
        entry_lines = []
        if reg is not None:
            setattr(plugin, 'foo', reg)
            entry_lines.append('register_goals = {}:foo\n'.format(module_name))
        if alias is not None:
            setattr(plugin, 'bar', alias)
            entry_lines.append('build_file_aliases = {}:bar\n'.format(module_name))
        if after is not None:
            setattr(plugin, 'baz', after)
            entry_lines.append('load_after = {}:baz\n'.format(module_name))
        if entry_lines:
            entry_data = '[pantsbuild.plugin]\n{}\n'.format('\n'.join(entry_lines))
            metadata = {'entry_points.txt': entry_data}
        return Distribution(project_name=name, version=version, metadata=MockMetadata(metadata))

    def load_plugins(self, plugins):
        # Delegates to the (presumably imported) module-level load_plugins;
        # the global name is looked up at call time, so the method name does
        # not shadow it here.
        load_plugins(self.build_configuration, plugins, load_from=self.working_set)

    def test_plugin_load_and_order(self):
        d1 = self.get_mock_plugin('demo1', '0.0.1', after=lambda: ['demo2'])
        d2 = self.get_mock_plugin('demo2', '0.0.3')
        self.working_set.add(d1)
        # Attempting to load 'demo1' then 'demo2' should fail as 'demo1' requires 'after'=['demo2'].
        with self.assertRaises(PluginLoadOrderError):
            self.load_plugins(['demo1', 'demo2'])
        # Attempting to load 'demo2' first should fail as it is not (yet) installed.
        with self.assertRaises(PluginNotFound):
            self.load_plugins(['demo2', 'demo1'])
        # Installing demo2 and then loading in correct order should work though.
        self.working_set.add(d2)
        self.load_plugins(['demo2>=0.0.2', 'demo1'])
        # But asking for a bad (not installed) version fails.
        with self.assertRaises(VersionConflict):
            self.load_plugins(['demo2>=0.0.5'])

    def test_plugin_installs_goal(self):
        def reg_goal():
            Goal.by_name('plugindemo').install(TaskRegistrar('foo', DummyTask))

        self.working_set.add(self.get_mock_plugin('regdemo', '0.0.1', reg=reg_goal))
        # Start without the custom goal.
        self.assertEqual(0, len(Goal.by_name('plugindemo').ordered_task_names()))
        # Load plugin which registers custom goal.
        self.load_plugins(['regdemo'])
        # Now the custom goal exists.
        self.assertEqual(1, len(Goal.by_name('plugindemo').ordered_task_names()))
        self.assertEqual('foo', Goal.by_name('plugindemo').ordered_task_names()[0])

    def test_plugin_installs_alias(self):
        def reg_alias():
            return BuildFileAliases.create(
                targets={'pluginalias': DummyTarget},
                objects={'FROMPLUGIN1': DummyObject1, 'FROMPLUGIN2': DummyObject2})

        self.working_set.add(self.get_mock_plugin('aliasdemo', '0.0.1', alias=reg_alias))
        # Start with no aliases.
        self.assert_empty_aliases()
        # Now load the plugin which defines aliases.
        self.load_plugins(['aliasdemo'])
        # Aliases now exist.
        registered_aliases = self.build_configuration.registered_aliases()
        self.assertEqual(DummyTarget, registered_aliases.targets['pluginalias'])
        self.assertEqual(DummyObject1, registered_aliases.objects['FROMPLUGIN1'])
        self.assertEqual(DummyObject2, registered_aliases.objects['FROMPLUGIN2'])
        self.assertEqual(self.build_configuration.subsystem_types(),
                         {DummySubsystem1, DummySubsystem2})

    def test_subsystems(self):
        def global_subsystems():
            return {DummySubsystem1, DummySubsystem2}

        with self.create_register(global_subsystems=global_subsystems) as backend_package:
            load_backend(self.build_configuration, backend_package)
            self.assertEqual(self.build_configuration.subsystem_types(),
                             {DummySubsystem1, DummySubsystem2})
class LoaderTest(unittest.TestCase):
    """Tests for backend and plugin loading (``load_backend`` / ``load_plugins``).

    NOTE(review): this is one of several LoaderTest definitions in this file;
    this version uses the BuildFileAliases constructor and the
    ``subsystems()`` / ``target_types`` accessors.
    """

    def setUp(self):
        self.build_configuration = BuildConfiguration()
        # Seed a private WorkingSet from the module-level `working_set`
        # (presumably the pkg_resources global imported elsewhere -- TODO confirm).
        self.working_set = WorkingSet()
        for entry in working_set.entries:
            self.working_set.add_entry(entry)

    def tearDown(self):
        # Goal registrations are process-global; reset between tests.
        Goal.clear()

    @contextmanager
    def create_register(self, build_file_aliases=None, register_goals=None,
                        global_subsystems=None, module_name='register'):
        """Synthesize a backend package in sys.modules and yield its name.

        Each non-None callable is installed on the register module under the
        entry-point name that load_backend looks up; the package is removed
        from sys.modules on exit.

        NOTE(review): ``b'...'.format(...)`` only works where bytes is str
        (Python 2) -- confirm the intended interpreter for this version.
        """
        package_name = b'__test_package_{0}'.format(uuid.uuid4().hex)
        self.assertFalse(package_name in sys.modules)
        package_module = types.ModuleType(package_name)
        sys.modules[package_name] = package_module
        try:
            register_module_fqn = b'{0}.{1}'.format(package_name, module_name)
            register_module = types.ModuleType(register_module_fqn)
            setattr(package_module, module_name, register_module)
            sys.modules[register_module_fqn] = register_module

            def register_entrypoint(function_name, function):
                # Only install the entry points that were actually supplied.
                if function:
                    setattr(register_module, function_name, function)

            register_entrypoint('build_file_aliases', build_file_aliases)
            register_entrypoint('global_subsystems', global_subsystems)
            register_entrypoint('register_goals', register_goals)
            yield package_name
        finally:
            del sys.modules[package_name]

    def assert_empty_aliases(self):
        """Assert nothing has been registered on the build configuration."""
        registered_aliases = self.build_configuration.registered_aliases()
        self.assertEqual(0, len(registered_aliases.target_types))
        self.assertEqual(0, len(registered_aliases.target_macro_factories))
        self.assertEqual(0, len(registered_aliases.objects))
        self.assertEqual(0, len(registered_aliases.context_aware_object_factories))
        self.assertEqual(self.build_configuration.subsystems(), set())

    def test_load_valid_empty(self):
        # A backend exposing no entry points registers nothing.
        with self.create_register() as backend_package:
            load_backend(self.build_configuration, backend_package)
            self.assert_empty_aliases()

    def test_load_valid_partial_aliases(self):
        aliases = BuildFileAliases(
            targets={'bob': DummyTarget},
            objects={'obj1': DummyObject1, 'obj2': DummyObject2})
        with self.create_register(build_file_aliases=lambda: aliases) as backend_package:
            load_backend(self.build_configuration, backend_package)
            registered_aliases = self.build_configuration.registered_aliases()
            self.assertEqual(DummyTarget, registered_aliases.target_types['bob'])
            self.assertEqual(DummyObject1, registered_aliases.objects['obj1'])
            self.assertEqual(DummyObject2, registered_aliases.objects['obj2'])
            self.assertEqual(self.build_configuration.subsystems(),
                             {DummySubsystem1, DummySubsystem2})

    def test_load_valid_partial_goals(self):
        def register_goals():
            Goal.by_name('jack').install(TaskRegistrar('jill', DummyTask))

        with self.create_register(register_goals=register_goals) as backend_package:
            Goal.clear()
            self.assertEqual(0, len(Goal.all()))
            load_backend(self.build_configuration, backend_package)
            self.assert_empty_aliases()
            self.assertEqual(1, len(Goal.all()))
            task_names = Goal.by_name('jack').ordered_task_names()
            self.assertEqual(1, len(task_names))
            task_name = task_names[0]
            self.assertEqual('jill', task_name)

    def test_load_invalid_entrypoint(self):
        # Entry-point callables must take no arguments; this one takes one,
        # so load_backend is expected to reject it.
        def build_file_aliases(bad_arg):
            return BuildFileAliases()

        with self.create_register(build_file_aliases=build_file_aliases) as backend_package:
            with self.assertRaises(BuildConfigurationError):
                load_backend(self.build_configuration, backend_package)

    def test_load_invalid_module(self):
        # The register module must be named 'register'; 'register2' is invalid.
        with self.create_register(module_name='register2') as backend_package:
            with self.assertRaises(BuildConfigurationError):
                load_backend(self.build_configuration, backend_package)

    def test_load_missing_plugin(self):
        with self.assertRaises(PluginNotFound):
            self.load_plugins(['Foobar'])

    def get_mock_plugin(self, name, version, reg=None, alias=None, after=None):
        """Make a fake Distribution (optionally with entry points)

        Note the entry points do not actually point to code in the returned
        distribution -- the distribution does not even have a location and
        does not contain any code, just metadata.

        A module is synthesized on the fly and installed into sys.modules
        under a random name.  If optional entry point callables are provided,
        those are added as methods to the module and their name (foo/bar/baz
        in fake module) is added as the requested entry point to the mocked
        metadata added to the returned dist.

        :param string name: project_name for distribution (see pkg_resources)
        :param string version: version for distribution (see pkg_resources)
        :param callable reg: Optional callable for goal registration entry point
        :param callable alias: Optional callable for build_file_aliases entry point
        :param callable after: Optional callable for load_after list entry point
        """
        plugin_pkg = b'demoplugin{0}'.format(uuid.uuid4().hex)
        pkg = types.ModuleType(plugin_pkg)
        sys.modules[plugin_pkg] = pkg
        module_name = b'{0}.{1}'.format(plugin_pkg, 'demo')
        plugin = types.ModuleType(module_name)
        setattr(pkg, 'demo', plugin)
        sys.modules[module_name] = plugin
        metadata = {}
        entry_lines = []
        if reg is not None:
            setattr(plugin, 'foo', reg)
            entry_lines.append('register_goals = {}:foo\n'.format(module_name))
        if alias is not None:
            setattr(plugin, 'bar', alias)
            entry_lines.append('build_file_aliases = {}:bar\n'.format(module_name))
        if after is not None:
            setattr(plugin, 'baz', after)
            entry_lines.append('load_after = {}:baz\n'.format(module_name))
        if entry_lines:
            entry_data = '[pantsbuild.plugin]\n{}\n'.format('\n'.join(entry_lines))
            metadata = {'entry_points.txt': entry_data}
        return Distribution(project_name=name, version=version, metadata=MockMetadata(metadata))

    def load_plugins(self, plugins):
        # Delegates to the (presumably imported) module-level load_plugins;
        # the global name is looked up at call time, so the method name does
        # not shadow it here.
        load_plugins(self.build_configuration, plugins, self.working_set)

    def test_plugin_load_and_order(self):
        d1 = self.get_mock_plugin('demo1', '0.0.1', after=lambda: ['demo2'])
        d2 = self.get_mock_plugin('demo2', '0.0.3')
        self.working_set.add(d1)
        # Attempting to load 'demo1' then 'demo2' should fail as 'demo1' requires 'after'=['demo2'].
        with self.assertRaises(PluginLoadOrderError):
            self.load_plugins(['demo1', 'demo2'])
        # Attempting to load 'demo2' first should fail as it is not (yet) installed.
        with self.assertRaises(PluginNotFound):
            self.load_plugins(['demo2', 'demo1'])
        # Installing demo2 and then loading in correct order should work though.
        self.working_set.add(d2)
        self.load_plugins(['demo2>=0.0.2', 'demo1'])
        # But asking for a bad (not installed) version fails.
        with self.assertRaises(VersionConflict):
            self.load_plugins(['demo2>=0.0.5'])

    def test_plugin_installs_goal(self):
        def reg_goal():
            Goal.by_name('plugindemo').install(TaskRegistrar('foo', DummyTask))

        self.working_set.add(self.get_mock_plugin('regdemo', '0.0.1', reg=reg_goal))
        # Start without the custom goal.
        self.assertEqual(0, len(Goal.by_name('plugindemo').ordered_task_names()))
        # Load plugin which registers custom goal.
        self.load_plugins(['regdemo'])
        # Now the custom goal exists.
        self.assertEqual(1, len(Goal.by_name('plugindemo').ordered_task_names()))
        self.assertEqual('foo', Goal.by_name('plugindemo').ordered_task_names()[0])

    def test_plugin_installs_alias(self):
        def reg_alias():
            return BuildFileAliases(
                targets={'pluginalias': DummyTarget},
                objects={'FROMPLUGIN1': DummyObject1, 'FROMPLUGIN2': DummyObject2})

        self.working_set.add(self.get_mock_plugin('aliasdemo', '0.0.1', alias=reg_alias))
        # Start with no aliases.
        self.assert_empty_aliases()
        # Now load the plugin which defines aliases.
        self.load_plugins(['aliasdemo'])
        # Aliases now exist.
        registered_aliases = self.build_configuration.registered_aliases()
        self.assertEqual(DummyTarget, registered_aliases.target_types['pluginalias'])
        self.assertEqual(DummyObject1, registered_aliases.objects['FROMPLUGIN1'])
        self.assertEqual(DummyObject2, registered_aliases.objects['FROMPLUGIN2'])
        self.assertEqual(self.build_configuration.subsystems(),
                         {DummySubsystem1, DummySubsystem2})

    def test_subsystems(self):
        def global_subsystems():
            return {DummySubsystem1, DummySubsystem2}

        with self.create_register(global_subsystems=global_subsystems) as backend_package:
            load_backend(self.build_configuration, backend_package)
            self.assertEqual(self.build_configuration.subsystems(),
                             {DummySubsystem1, DummySubsystem2})
class LoaderTest(unittest.TestCase):
    """Tests for backend and plugin loading (``load_backend`` / ``load_plugins``).

    NOTE(review): this is the Python 3 (f-string) incarnation of LoaderTest
    in this file; it uses a ``BuildConfiguration.Builder`` and adds rules /
    target_types entry points.
    """

    def setUp(self):
        self.bc_builder = BuildConfiguration.Builder()
        # Seed a private WorkingSet from the module-level `working_set`
        # (presumably the pkg_resources global imported elsewhere -- TODO confirm).
        self.working_set = WorkingSet()
        for entry in working_set.entries:
            self.working_set.add_entry(entry)

    @contextmanager
    def create_register(
        self,
        build_file_aliases=None,
        register_goals=None,
        global_subsystems=None,
        rules=None,
        target_types=None,
        module_name="register",
    ):
        """Synthesize a backend package in sys.modules and yield its name.

        Each non-None callable is installed on the register module under the
        entry-point name that load_backend looks up; the package is removed
        from sys.modules on exit.
        """
        package_name = f"__test_package_{uuid.uuid4().hex}"
        self.assertFalse(package_name in sys.modules)
        package_module = types.ModuleType(package_name)
        sys.modules[package_name] = package_module
        try:
            register_module_fqn = f"{package_name}.{module_name}"
            register_module = types.ModuleType(register_module_fqn)
            setattr(package_module, module_name, register_module)
            sys.modules[register_module_fqn] = register_module

            def register_entrypoint(function_name, function):
                # Only install the entry points that were actually supplied.
                if function:
                    setattr(register_module, function_name, function)

            register_entrypoint("build_file_aliases", build_file_aliases)
            register_entrypoint("global_subsystems", global_subsystems)
            register_entrypoint("register_goals", register_goals)
            register_entrypoint("rules", rules)
            register_entrypoint("target_types", target_types)
            yield package_name
        finally:
            del sys.modules[package_name]

    def assert_empty(self):
        """Assert nothing has been registered on a freshly built configuration."""
        build_configuration = self.bc_builder.create()
        registered_aliases = build_configuration.registered_aliases
        self.assertEqual(0, len(registered_aliases.objects))
        self.assertEqual(0, len(registered_aliases.context_aware_object_factories))
        self.assertEqual(build_configuration.optionables, FrozenOrderedSet())
        self.assertEqual(0, len(build_configuration.rules))
        self.assertEqual(0, len(build_configuration.target_types))

    def test_load_valid_empty(self):
        # A backend exposing no entry points registers nothing.
        with self.create_register() as backend_package:
            load_backend(self.bc_builder, backend_package)
            self.assert_empty()

    def test_load_valid_partial_aliases(self):
        aliases = BuildFileAliases(objects={
            "obj1": DummyObject1,
            "obj2": DummyObject2
        })
        with self.create_register(build_file_aliases=lambda: aliases) as backend_package:
            load_backend(self.bc_builder, backend_package)
            build_configuration = self.bc_builder.create()
            registered_aliases = build_configuration.registered_aliases
            self.assertEqual(DummyObject1, registered_aliases.objects["obj1"])
            self.assertEqual(DummyObject2, registered_aliases.objects["obj2"])
            self.assertEqual(build_configuration.optionables,
                             FrozenOrderedSet([DummySubsystem]))

    def test_load_invalid_entrypoint(self):
        # Entry-point callables must take no arguments; this one takes one,
        # so load_backend is expected to reject it.
        def build_file_aliases(bad_arg):
            return BuildFileAliases()

        with self.create_register(build_file_aliases=build_file_aliases) as backend_package:
            with self.assertRaises(BuildConfigurationError):
                load_backend(self.bc_builder, backend_package)

    def test_load_invalid_module(self):
        # The register module must be named 'register'; 'register2' is invalid.
        with self.create_register(module_name="register2") as backend_package:
            with self.assertRaises(BuildConfigurationError):
                load_backend(self.bc_builder, backend_package)

    def test_load_missing_plugin(self):
        with self.assertRaises(PluginNotFound):
            self.load_plugins(["Foobar"])

    @staticmethod
    def get_mock_plugin(name, version, reg=None, alias=None, after=None,
                        rules=None, target_types=None):
        """Make a fake Distribution (optionally with entry points)

        Note the entry points do not actually point to code in the returned
        distribution -- the distribution does not even have a location and
        does not contain any code, just metadata.

        A module is synthesized on the fly and installed into sys.modules
        under a random name.  If optional entry point callables are provided,
        those are added as methods to the module and their name (foo/bar/baz
        in fake module) is added as the requested entry point to the mocked
        metadata added to the returned dist.

        :param string name: project_name for distribution (see pkg_resources)
        :param string version: version for distribution (see pkg_resources)
        :param callable reg: Optional callable for goal registration entry point
        :param callable alias: Optional callable for build_file_aliases entry point
        :param callable after: Optional callable for load_after list entry point
        :param callable rules: Optional callable for rules entry point
        :param callable target_types: Optional callable for target_types entry point
        """
        plugin_pkg = f"demoplugin{uuid.uuid4().hex}"
        pkg = types.ModuleType(plugin_pkg)
        sys.modules[plugin_pkg] = pkg
        module_name = f"{plugin_pkg}.demo"
        plugin = types.ModuleType(module_name)
        setattr(pkg, "demo", plugin)
        sys.modules[module_name] = plugin
        metadata = {}
        entry_lines = []
        if reg is not None:
            setattr(plugin, "foo", reg)
            entry_lines.append(f"register_goals = {module_name}:foo\n")
        if alias is not None:
            setattr(plugin, "bar", alias)
            entry_lines.append(f"build_file_aliases = {module_name}:bar\n")
        if after is not None:
            setattr(plugin, "baz", after)
            entry_lines.append(f"load_after = {module_name}:baz\n")
        if rules is not None:
            setattr(plugin, "qux", rules)
            entry_lines.append(f"rules = {module_name}:qux\n")
        if target_types is not None:
            setattr(plugin, "tofu", target_types)
            entry_lines.append(f"target_types = {module_name}:tofu\n")
        if entry_lines:
            entry_data = "[pantsbuild.plugin]\n{}\n".format("\n".join(entry_lines))
            metadata = {"entry_points.txt": entry_data}
        return Distribution(project_name=name, version=version, metadata=MockMetadata(metadata))

    def load_plugins(self, plugins):
        # Delegates to the (presumably imported) module-level load_plugins;
        # the global name is looked up at call time, so the method name does
        # not shadow it here.
        load_plugins(self.bc_builder, plugins, self.working_set)

    def test_plugin_load_and_order(self):
        d1 = self.get_mock_plugin("demo1", "0.0.1", after=lambda: ["demo2"])
        d2 = self.get_mock_plugin("demo2", "0.0.3")
        self.working_set.add(d1)
        # Attempting to load 'demo1' then 'demo2' should fail as 'demo1' requires 'after'=['demo2'].
        with self.assertRaises(PluginLoadOrderError):
            self.load_plugins(["demo1", "demo2"])
        # Attempting to load 'demo2' first should fail as it is not (yet) installed.
        with self.assertRaises(PluginNotFound):
            self.load_plugins(["demo2", "demo1"])
        # Installing demo2 and then loading in correct order should work though.
        self.working_set.add(d2)
        self.load_plugins(["demo2>=0.0.2", "demo1"])
        # But asking for a bad (not installed) version fails.
        with self.assertRaises(VersionConflict):
            self.load_plugins(["demo2>=0.0.5"])

    def test_plugin_installs_alias(self):
        def reg_alias():
            return BuildFileAliases(objects={
                "FROMPLUGIN1": DummyObject1,
                "FROMPLUGIN2": DummyObject2
            }, )

        self.working_set.add(self.get_mock_plugin("aliasdemo", "0.0.1", alias=reg_alias))
        # Start with no aliases.
        self.assert_empty()
        # Now load the plugin which defines aliases.
        self.load_plugins(["aliasdemo"])
        # Aliases now exist.
        build_configuration = self.bc_builder.create()
        registered_aliases = build_configuration.registered_aliases
        self.assertEqual(DummyObject1, registered_aliases.objects["FROMPLUGIN1"])
        self.assertEqual(DummyObject2, registered_aliases.objects["FROMPLUGIN2"])
        self.assertEqual(build_configuration.optionables, FrozenOrderedSet([DummySubsystem]))

    def test_rules(self):
        def backend_rules():
            return [example_rule]

        # Rules can come from a backend ...
        with self.create_register(rules=backend_rules) as backend_package:
            load_backend(self.bc_builder, backend_package)
            self.assertEqual(self.bc_builder.create().rules,
                             FrozenOrderedSet([example_rule.rule]))

        # ... and additionally from a plugin.
        def plugin_rules():
            return [example_plugin_rule]

        self.working_set.add(self.get_mock_plugin("this-plugin-rules", "0.0.1", rules=plugin_rules))
        self.load_plugins(["this-plugin-rules"])
        self.assertEqual(
            self.bc_builder.create().rules,
            FrozenOrderedSet([example_rule.rule, example_plugin_rule.rule]),
        )

    def test_target_types(self):
        def target_types():
            return [DummyTarget, DummyTarget2]

        # Target types can come from a backend ...
        with self.create_register(target_types=target_types) as backend_package:
            load_backend(self.bc_builder, backend_package)
            assert self.bc_builder.create().target_types == FrozenOrderedSet(
                [DummyTarget, DummyTarget2])

        # ... and additionally from a plugin.
        class PluginTarget(Target):
            alias = "plugin_tgt"
            core_fields = ()

        def plugin_targets():
            return [PluginTarget]

        self.working_set.add(self.get_mock_plugin("new-targets", "0.0.1",
                                                  target_types=plugin_targets))
        self.load_plugins(["new-targets"])
        assert self.bc_builder.create().target_types == FrozenOrderedSet(
            [DummyTarget, DummyTarget2, PluginTarget])

    def test_backend_plugin_ordering(self):
        def reg_alias():
            return BuildFileAliases(objects={"override-alias": DummyObject2})

        self.working_set.add(self.get_mock_plugin("pluginalias", "0.0.1", alias=reg_alias))
        plugins = ["pluginalias==0.0.1"]
        aliases = BuildFileAliases(objects={"override-alias": DummyObject1})
        with self.create_register(build_file_aliases=lambda: aliases) as backend_module:
            backends = [backend_module]
            build_configuration = load_backends_and_plugins(
                plugins, self.working_set, backends, bc_builder=self.bc_builder)
        # The backend should load first, then the plugins, therefore the alias registered in
        # the plugin will override the alias registered by the backend
        registered_aliases = build_configuration.registered_aliases
        self.assertEqual(DummyObject2, registered_aliases.objects["override-alias"])
class SetupEggSubRecipe(BaseDownloadSubRecipe):
    """Buildout sub-recipe that downloads, builds and installs eggs.

    Sources are populated from recipe options ('egg'/'eggs'), resolved
    against the configured index / find-links, optionally built from a
    source directory via a generated setup script, and installed into the
    (possibly shared) working set.

    NOTE(review): several constructs here are Python-2-only
    (``string.letters``, integer ``/=``, writing encoded bytes to a
    text-mode file handle) -- confirm the target interpreter before porting.
    """

    @property
    @reify
    def index_url(self):
        # Recipe-local 'index' option, falling back to buildout's, then the default.
        return self.options.get(
            'index',
            self.recipe.buildout['buildout'].get('index', default_index_url))

    @property
    @reify
    def find_links_urls(self):
        # Whitespace-separated URLs; empty list when neither option is set.
        return self.options.get(
            'find-links',
            self.recipe.buildout['buildout'].get('find-links', '')).split()

    @property
    @reify
    def source_key_processors(self):
        # Map option keys to callables yielding ('egg', requirement-string) pairs.
        return {
            'egg': lambda x: [('egg', x.strip())],
            'eggs': lambda x: [('egg', y.strip()) for y in x.splitlines()],
        }

    @property
    @reify
    def source_option_processors(self):
        # Extend the parent's option converters with egg-specific ones.
        ret = super(SetupEggSubRecipe, self).source_option_processors.copy()
        ret.update({
            'build': string_as_bool,
            'build-dependencies': string_as_bool,
            'extra-paths': lambda x: [x.strip() for x in x.splitlines()],
            'egg-path': lambda x: [x.strip() for x in x.splitlines()],
        })
        return ret

    @property
    @reify
    def allowed_options(self):
        # Parent options plus egg-specific ones, including the per-stage
        # patch options for the 'after-build' stage.
        ret = copy(super(SetupEggSubRecipe, self).allowed_options)
        ret.extend([
            'egg-name', 'find-egg', 'path', 'signature', 'url',
        ])
        for stage in ['after-build']:
            ret.extend([
                self.resolve_stage('patch', stage=stage),
                self.resolve_stage('patch-options', stage=stage),
                self.resolve_stage('patch-binary', stage=stage),
            ])
        return ret

    def initialize(self):
        """Choose the working set (private or shared) and build the index."""
        super(SetupEggSubRecipe, self).initialize()
        if self.recipe.options.get_as_bool('split-working-set', False):
            # Isolated working set for this sub-recipe only.
            self.working_set = WorkingSet([])
        else:
            self.working_set = self.recipe.working_set
        self.index = get_index(self.index_url, self.find_links_urls)

    def default_eggs_directory(self, develop=False):
        """Return the eggs directory, preferring develop-eggs when develop=True."""
        if develop and 'develop-eggs-directory' in self.recipe.buildout['buildout']:
            return self.recipe.buildout['buildout']['develop-eggs-directory']
        elif 'eggs-directory' in self.recipe.buildout['buildout']:
            return self.recipe.buildout['buildout']['eggs-directory']
        else:
            # Fall back to ../eggs relative to the running script.
            return os.path.join(os.path.dirname(sys.argv[0]), '..', 'eggs')

    def populate_source(self, source, dependency=False):
        """Fill in derived keys (requirements, paths, environment, signature).

        :param dict source: mutable source description being populated.
        :param bool dependency: True when this source was synthesized as a
            dependency of another source (options are then not loaded).
        """
        super(SetupEggSubRecipe, self).populate_source(
            source, load_options=not dependency)
        if 'egg' not in source:
            source['egg'] = self.name
        source['requirement'] = Requirement.parse(source['egg'])
        source['egg'] = str(source['requirement'])
        # 'find-egg' may widen the search requirement; default to the same one.
        source['find-requirement'] = Requirement.parse(source['find-egg']) \
            if 'find-egg' in source else source['requirement']
        source['find-egg'] = str(source['find-requirement'])
        source.setdefault('build', True)
        egg_directories = []
        if 'develop-eggs-directory' in self.recipe.buildout['buildout']:
            egg_directories.append(self.recipe.buildout['buildout']['develop-eggs-directory'])
        if 'eggs-directory' in self.recipe.buildout['buildout']:
            egg_directories.append(self.recipe.buildout['buildout']['eggs-directory'])
        # NOTE(review): due to precedence, the conditional expression binds the
        # whole concatenation -- when 'location' is present, extra-paths and
        # egg directories are NOT included.  Looks suspicious; confirm intent.
        source.setdefault('egg-path',
                          [source['location']] if 'location' in source else [] +
                          source.get('extra-paths', []) +
                          egg_directories +
                          buildout_and_setuptools_path)
        source.setdefault('location',
                          self.default_eggs_directory(develop=source.get('develop', False)))
        source['egg-environment'] = Environment(source['egg-path'])
        source['build-options'] = {}
        if not dependency:
            # Copy recognized build_ext options, converting '-' to '_' in keys.
            for src_key, dst_key in [(key, re.sub('-', '_', key))
                                     for key in [option for option in self.options
                                                 if option in BUILD_EXT_OPTIONS]]:
                source['build-options'][dst_key] = self.options[src_key]
        source.setdefault('signature', self.resolve_signature(source))

    def process_source(self, source):
        """Build (if requested), patch and install a source, unless already satisfied."""
        if self.working_set.find(source['requirement']) is not None:
            # Requirement already satisfied in the working set; nothing to do.
            return
        if source['build']:
            self.build_source(source)
            self.patch_source(source, cwdkey='build-directory',
                              stage='after-build')
        self.install_source(source)

    def acquire_source(self, source, destkey='working-directory'):
        """Locate a matching egg locally or via the index, then fetch if needed.

        Also queues build-dependency sources discovered from the acquired
        distribution's requirements.
        """
        candidates = self.requirement_match_list(source['egg-environment'], source['requirement'],
                                                strip_signature=source['signature'])
        if not candidates or self.recipe.newest:
            if 'url' not in source:
                if self.recipe.offline:
                    raise UserError(
                        '''Couldn't download index "{}" in offline mode.'''.format(self.index))
                self.index.find_packages(source['find-requirement'])
                distributions = self.requirement_match_list(
                    self.index, source['find-requirement'],
                    requirement_type=self.requirement_type(source))
                if not distributions:
                    raise UserError('''No distributions available for requirement "{}".'''.format(
                        source['find-egg']))
                # Prefer the index distribution only when strictly newer than
                # any local candidate.
                if not candidates or distributions[0].parsed_version > candidates[0].parsed_version:
                    source['url'] = distributions[0].location
                    source['egg-name'] = distributions[0].egg_name()
                else:
                    source['source-directory'] = candidates[0].location
                    source['build'] = False
                    source['egg-name'] = candidates[0].egg_name()
            if 'source-directory' not in source:
                self.logger.info("Getting distribution for '{}'.".format(
                    source['requirement'].project_name))
                super(SetupEggSubRecipe, self).acquire_source(source, destkey=destkey)
        else:
            # Local candidate wins: no download, no build.
            source['source-directory'] = candidates[0].location
            source['build'] = False
            source['egg-name'] = candidates[0].egg_name()
        if source.get('build-dependencies', True):
            # Insert each dependency as its own source, positioned before the
            # current one so it is processed first.
            sourceenv = Environment([source['source-directory']])
            for key in sourceenv:
                for dist in sourceenv[key]:
                    for dependency_requirement in dist.requires():
                        dependency_source = {'egg': str(dependency_requirement),
                                             'parent-egg': str(source['egg'])}
                        self.sources.insert(self.sources.index(source), dependency_source)
                        self.populate_source(dependency_source, dependency=True)
                        self.prepare_source(dependency_source)

    def build_source(self, source):
        """Build an egg from source['source-directory'] via a generated setup script.

        All filesystem mutations are pushed onto an undo stack and reverted
        in the ``finally`` block, so the source directory is left clean.
        """
        self.logger.info('''Building: {}'''.format(source['egg-name']))
        undo = []
        setup_py = os.path.join(source['source-directory'], 'setup.py')
        try:
            setup_cfg = os.path.join(source['source-directory'], 'setup.cfg')
            if os.path.exists(setup_cfg):
                # Move the real setup.cfg aside; restore it afterwards.
                os.rename(setup_cfg, setup_cfg + '-develop-aside')

                def restore_old_setup():
                    if os.path.exists(setup_cfg):
                        os.remove(setup_cfg)
                    os.rename(setup_cfg + '-develop-aside', setup_cfg)
                undo.append(restore_old_setup)
            else:
                # Create an empty setup.cfg to edit; remove it afterwards.
                open(setup_cfg, 'w').close()
                undo.append(lambda: os.remove(setup_cfg))
            updates = {}
            if source['build-options']:
                updates['build_ext'] = source['build-options']
            if source['signature']:
                # Tag the build so signed rebuilds are distinguishable.
                updates['egg_info'] = {
                    'tag_build': "_{}".format(source['signature']),
                }
            setuptools_edit_config(setup_cfg, updates)
            setup_cmd_fd, setup_cmd = mkstemp(dir=source['source-directory'])
            setup_cmd_fh = os.fdopen(setup_cmd_fd, 'w')
            undo.append(lambda: os.remove(setup_cmd))
            undo.append(setup_cmd_fh.close)
            # NOTE(review): writing .encode() output to a text-mode handle is
            # Python-2-only; confirm the target interpreter.
            setup_cmd_fh.write((setup_template % dict(
                setuptools=setuptools_location,
                setupdir=source['source-directory'],
                setup=setup_py,
                __file__=setup_py,
            )).encode())
            setup_cmd_fh.flush()
            build_directory = mkdtemp('build', dir=source['source-directory'])
            action_args = []
            if source.get('develop', False) is True:
                action = 'develop'
                action_args.append('-Z')
            else:
                action = 'easy_install'
                action_args.append(source['source-directory'])
            args = [source['executable'], setup_cmd, action, '-mxNd', build_directory]
            # Map our log level onto setup-script verbosity flags.
            if self.log_level < logging.INFO:
                args += ['-v']
            elif self.log_level > logging.INFO:
                args += ['-q']
            args += action_args
            self.logger.debug('''Running: {}'''.format(' '.join(args)))
            self.recipe.call(*args, stdout_log_level=logging.DEBUG)
            source['build-directory'] = build_directory
        finally:
            # Unwind the undo stack in reverse order of registration.
            for obj in reversed(undo):
                obj()

    def install_source(self, source, destkey='location'):
        """Copy/move built dists from the build directory into source[destkey]."""
        if 'build-directory' not in source:
            return
        env = Environment([source['build-directory']])
        self.recipe.mkdir(source[destkey])
        for dists in [env[x] for x in env]:
            for src_dist in dists:
                # Destination name depends on dist precedence: .egg vs .egg-link.
                dst_dist = src_dist.clone(
                    location=os.path.join(source[destkey], "{}.{}".format(
                        src_dist.egg_name(), {
                            EGG_DIST: 'egg',
                            DEVELOP_DIST: 'egg-link',
                        }[src_dist.precedence])))
                # Eggs are copied/extracted; develop links are simply renamed.
                {
                    EGG_DIST: lambda src, dst: self.recipe.copy(src, dst)
                    if os.path.isdir(src) else self.recipe.extract_archive(src, dst),
                    DEVELOP_DIST: os.rename,
                }[src_dist.precedence](src_dist.location, dst_dist.location)
                # redo_pyc(newloc)
                self.working_set.add_entry(dst_dist.location)
                self.logger.info('''Got {}.'''.format(str(dst_dist.egg_name())))

    @classmethod
    def requirement_match_list(cls, index, requirement, requirement_type=None,
                               prefer_final=True, strip_signature=''):
        """Return the best-matching candidates for a requirement from an index.

        :param index: mapping of project name to distributions (Environment/index).
        :param requirement: pkg_resources Requirement to satisfy.
        :param requirement_type: optional precedence filter (e.g. source vs binary).
        :param bool prefer_final: prefer non-prerelease versions when available.
        :param str strip_signature: signature suffix to strip before matching.
        """
        def mangle_candidate(dist):
            # Strip our build-signature suffix so signed builds still match.
            if strip_signature:
                dist = dist.clone(version=re.sub(
                    r'_{}$'.format(strip_signature), '', dist.version))
            return dist
        candidates = [candidate for candidate in index[requirement.project_name]
                      if mangle_candidate(candidate) in requirement]
        if not candidates:
            return []
        if requirement_type is not None:
            candidates = [candidate for candidate in candidates
                          if candidate.precedence == requirement_type]
        if prefer_final:
            final_candidates = [candidate for candidate in candidates
                                if not candidate.parsed_version.is_prerelease]
            if final_candidates:
                candidates = final_candidates
        # Keep every candidate tied for the highest version.
        best = []
        bestv = None
        for candidate in candidates:
            candidatev = candidate.parsed_version
            if not bestv or candidatev > bestv:
                best = [candidate]
                bestv = candidatev
            elif candidatev == bestv:
                best.append(candidate)
        best.sort()
        return best

    @classmethod
    def requirement_type(cls, source):
        """Map source['egg-type'] to a dist precedence constant, or None."""
        egg_type = source.get('egg-type', None)
        try:
            return {
                'source': SOURCE_DIST,
                'binary': BINARY_DIST,
                None: None,
            }[egg_type]
        except KeyError:
            # Unrecognized egg-type values fall back to "no filter".
            return None

    @classmethod
    def resolve_signature(cls, source):
        """Derive a short base-62 signature from build/patch options, or None.

        NOTE(review): ``string.letters`` and integer ``/=`` are Python-2-only
        (Python 3 would need ``string.ascii_letters`` and ``//=``); confirm
        the target interpreter.  The value also depends on hash() and is thus
        not stable across interpreters/runs with hash randomization.
        """
        struct = []
        for key, value in source['build-options'].items():
            struct.append((key, value))
        for key in ('patches', 'patch-options', 'patch-binary'):
            if key in source:
                struct.append(source[key])
        struct = tuple(struct)  # pylint: disable=redefined-variable-type
        if not struct:
            return None
        base = string.digits + string.letters
        base_length = len(base)
        ret = ''
        struct_hash = abs(hash(struct))
        while struct_hash > 0:
            ret = base[struct_hash % base_length] + ret
            struct_hash /= base_length
        return "{}_{}".format(SIGNATURE_MARKER, ret)
class PasterFactoryTests(TestCase):
    """Tests for the Paster app/filter factory functions.

    Exercises make_app / make_gzip_filter / make_limit_input_filter both
    directly and as registered ``paste.app_factory`` /
    ``paste.filter_factory`` entry points.
    """

    def setUp(self):
        super(PasterFactoryTests, self).setUp()
        # Probe for paste.deploy *before* creating any temp fixtures:
        # unittest does not call tearDown when setUp raises, so skipping
        # after import_repo_to_dir() would leak the exported temp dirs.
        try:
            from paste.deploy.converters import asbool  # availability probe only
            from pkg_resources import WorkingSet
        except ImportError:
            raise SkipTest("paste.deploy not available")
        self.global_config = {"__file__": "/path/to/paster.ini"}
        self.repo_names = ("server_new.export", "server_old.export")
        self.entry_points = {"main": make_app,
                             "gzip": make_gzip_filter,
                             "limitinput": make_limit_input_filter}
        self.repo_dirs = []
        for rname in self.repo_names:
            rdir = import_repo_to_dir(rname)
            self.repo_dirs.append(rdir)
            # addCleanup runs even if a later setUp statement fails,
            # unlike tearDown — each exported dir is always removed.
            self.addCleanup(shutil.rmtree, rdir)
        self.working_set = WorkingSet()
        self.working_set.add_entry(_BASE_PKG_DIR)

    def tearDown(self):
        super(PasterFactoryTests, self).tearDown()
        # Repo dirs are removed via addCleanup (see setUp); only drop the
        # root log handler that the app may have installed.
        root = getLogger()
        if root.handlers:
            root.removeHandler(root.handlers[0])

    def test_cwd(self):
        """make_app with no repo config serves the current directory."""
        cwd = os.getcwd()
        os.chdir(self.repo_dirs[0])
        try:
            app = make_app(self.global_config)
        finally:
            # Restore the working directory even if make_app raises, so a
            # failure here cannot poison subsequent tests.
            os.chdir(cwd)
        self.assertIn("/", app.backend.repos)

    def test_badrepo(self):
        """A path that is not a git repository is rejected."""
        self.assertRaises(IndexError, make_app, self.global_config, foo="/")

    def test_repo(self):
        """An explicitly configured repo is mounted under its config name."""
        rname = self.repo_names[0]
        local_config = {rname: self.repo_dirs[0]}
        app = make_app(self.global_config, **local_config)
        self.assertIn("/%s" % rname, app.backend.repos)

    def _get_repo_parents(self):
        """Return the parent directory of each exported repo dir."""
        repo_parents = []
        for rdir in self.repo_dirs:
            repo_parents.append(os.path.split(rdir)[0])
        return repo_parents

    def test_append_git(self):
        """append_git=True mounts each served repo with a .git suffix."""
        app = make_app(self.global_config, append_git=True,
                       serve_dirs=self._get_repo_parents())
        for rname in self.repo_names:
            self.assertIn("/%s.git" % rname, app.backend.repos)

    def test_serve_dirs(self):
        """serve_dirs mounts every repo found in the given directories."""
        app = make_app(self.global_config,
                       serve_dirs=self._get_repo_parents())
        for rname in self.repo_names:
            self.assertIn("/%s" % rname, app.backend.repos)

    def _test_wrap(self, factory, wrapper):
        """Assert that *factory* produces middleware of type *wrapper*."""
        app = make_app(self.global_config,
                       serve_dirs=self._get_repo_parents())
        wrapped_app = factory(self.global_config)(app)
        self.assertTrue(isinstance(wrapped_app, wrapper))

    def test_make_gzip_filter(self):
        self._test_wrap(make_gzip_filter, GunzipFilter)

    def test_make_limit_input_filter(self):
        self._test_wrap(make_limit_input_filter, LimitedInputFilter)

    def test_entry_points(self):
        """Each factory is registered under the expected entry point name."""
        test_points = {}
        for group in ("paste.app_factory", "paste.filter_factory"):
            for ep in self.working_set.iter_entry_points(group):
                test_points[ep.name] = ep.load()
        for ep_name, ep in self.entry_points.items():
            self.assertTrue(test_points[ep_name] is ep)