def __call__(self, package_names):
    """
    Generic artifact builder function.

    Arguments:

    package_names
        List of package names to be built

    Returns True if the build is successful without errors, False if
    errors were found or if no artifacts were built.
    """
    result = True
    registry = get(self.registry_name)
    for package_name in package_names:
        # accumulate all artifact entries produced for this package so
        # they can be recorded in one metadata update at the end.
        metadata = {}
        for entry_point, export_target in registry.iter_export_targets_for(
                package_name):
            # only the first builder generated for this entry point /
            # export target pair is used.
            builder = next(registry.generate_builder(
                entry_point, export_target), None)
            if not builder:
                # immediate failure if builder does not exist.
                result = False
                continue
            entries = registry.execute_builder(*builder)
            # whether the builder produced an artifact entry; note the
            # builder is always executed first, so one failure does not
            # skip the remaining builds.
            result = bool(entries) and result
            metadata.update(entries)
        # whether the package as a whole produced artifacts entries.
        result = bool(metadata) and result
        registry.update_artifact_metadata(package_name, metadata)
    return result
def test_karma_test_files_located(self):
    """
    Ensure karma_requirejs splits the test_module_paths_map entries
    into tests (test_* filenames) and plain dependency modules, with
    pre-exported module names listed first among the dependencies.
    """
    karma_config = karma.build_base_config()
    karma_config['files'] = ['example/package/lib.js']
    spec = Spec(
        karma_config=karma_config,
        build_dir=mkdtemp(self),
        rjs_loader_plugin_registry=get(RJS_LOADER_PLUGIN_REGISTRY_NAME),
        export_module_names=['preexported'],
        test_module_paths_map={
            'example/package/tests/test_some_module':
                '/src/example/package/tests/test_some_module.js',
            'example/package/tests/some_test_data':
                '/src/example/package/tests/some_test_data.js',
        },
    )
    with pretty_logging(stream=StringIO()):
        karma_requirejs(spec)
    with open(spec['karma_requirejs_test_script'], encoding='utf-8') as fd:
        script = es5(fd.read())
    # this is the node for the json in the build file
    deps = json.loads(str(script.children()[0].children()[0].initializer))
    tests = json.loads(str(script.children()[1].children()[0].initializer))
    self.assertEqual(['example/package/tests/test_some_module'], tests)
    self.assertEqual(
        ['preexported', 'example/package/tests/some_test_data'], deps)
def karma_verify_package_artifacts(package_names=(), **kwargs):
    """
    Verify the artifacts declared by the given packages by executing
    the test builders registered against them.

    Arguments:

    package_names
        Iterable of package names whose artifacts are to be verified.

    The kwargs are there so that runtime (or other external users) can
    pass in arguments to control certain execution aspects of the
    tests.

    Returns True only if every declared test builder executed
    successfully and every package with declared artifacts also has
    tests declared for them.
    """
    # fix: the default used to be a mutable list ([]), which is shared
    # across calls; an immutable empty tuple avoids that hazard.
    result = True
    # Should the value of the registry be arguments? Not doing that for
    # now to limit the scope of the implementation.
    main_registry = get('calmjs.artifacts')
    test_registry = get('calmjs.artifacts.tests')
    # Note that this set of loops more or less duplicates the helper
    # calmjs.artifact.ArtifactBuilder, but there are differences given
    # that it also assume the production of metadata, while this simply
    # does not do anything of that sort.
    for package in package_names:
        for entry_point, export_target in \
                test_registry.iter_export_targets_for(package):
            builder = next(test_registry.generate_builder(
                entry_point, export_target), None)
            if not builder:
                # immediate failure if builder does not exist.
                result = False
                continue
            # fix: execute the builder unconditionally before folding
            # its outcome into the result; the previous short-circuit
            # form (result and _execute_builder(...)) skipped all
            # remaining verifications after the first failure.
            result = _execute_builder(
                test_registry, builder, kwargs) and result
        # Check also for the artifact registry for any definitions that
        # do not have a corresponding test defined.
        tests_missing = False
        if not any(test_registry.iter_export_targets_for(package)):
            if not any(main_registry.iter_builders_for(package)):
                logger.info(
                    "no artifacts or tests defined for package '%s'",
                    package)
            else:
                tests_missing = True
                logger.error(
                    "no test found for artifacts declared for package '%s'",
                    package
                )
        result = result and not tests_missing
    return result
def has_calmjs_artifact_declarations(cmd, registry_name='calmjs.artifacts'):
    """
    For a distutils command to verify that the artifact build step is
    possible.

    Returns a truthy value when at least one artifact record is
    declared for the command's distribution in the named registry.
    """
    dist_name = cmd.distribution.get_name()
    records = get(registry_name).iter_records_for(dist_name)
    return any(records)
def test_auto_self_reference(self):
    # ensure that the identity is returned
    working_set = mocks.WorkingSet({
        'calmjs.registry': [
            # correct self-referential definition
            'calmjs.registry = calmjs.registry:Registry',
            'calmjsregistry = calmjs.registry:Registry',
        ],
        'calmjsregistry': [
            # unrelated self-referential definition
            'calmjs.registry = calmjs.registry:Registry',
            # incorrect self-referential type
            'calmjsregistry = calmjs.module:ModuleRegistry',
        ],
    })
    # stub out real working sets because usage of standard APIs
    stub_item_attr_value(self, calmjs.registry, 'working_set', working_set)
    stub_item_attr_value(self, calmjs.base, 'working_set', working_set)
    with pretty_logging(stream=mocks.StringIO()) as stream:
        registry = calmjs.registry.Registry('calmjs.registry')
        # the identity registration must not become a record.
        self.assertFalse(registry.records)
        mismatched = registry.get('calmjsregistry')
        # not the same name
        self.assertTrue(isinstance(mismatched, calmjs.registry.Registry))
        self.assertIsNot(mismatched, registry)
        # correct identity
        self.assertIs(registry, registry.get('calmjs.registry'))
        self.assertIn('calmjs.registry', registry.records)
        # unrelated registry also
        unrelated = mismatched.get('calmjs.registry')
        self.assertTrue(isinstance(unrelated, calmjs.registry.Registry))
        self.assertIsNot(unrelated, registry)
        mistyped = mismatched.get('calmjsregistry')
        # not a None
        self.assertTrue(mistyped)
        # also not identity, as they are not the same type.
        self.assertIsNot(mistyped, mismatched)
        self.assertIn(
            "registry 'calmjs.registry' has entry point 'calmjs.registry = "
            "calmjs.registry:Registry' which is the identity registration",
            stream.getvalue(),
        )
def _flatten_extras_json(pkg_names, find_dists, working_set):
    """
    Flatten the extras json declarations for the named packages, using
    the supplied dist-finding callable against the given working set.
    """
    # registry key must be explicit here as it was designed for this.
    keys = set(get(json_key_registry).iter_records())
    return flatten_dist_egginfo_json(
        find_dists(pkg_names, working_set=working_set),
        filename=json_filename,
        dep_keys=keys,
        working_set=working_set,
    )
def test_initialize_integration(self):
    """
    Verify the global registry set has the default text plugin
    registered, and that identity is preserved through lookups.
    """
    plugin_registry = get('calmjs.rjs.loader_plugin')
    text_plugin = plugin_registry.get('text')
    self.assertTrue(isinstance(text_plugin, TextPlugin))
    # the plugin's registry reference and a fresh lookup through it
    # should both return the identical objects.
    self.assertIs(text_plugin.registry, plugin_registry)
    self.assertIs(text_plugin.registry.get('text'), text_plugin)
def test_integrated(self):
    """
    Since this module already immediately declares that, this can
    simply be a quick integration test.
    """
    extras = set(get('calmjs.extras_keys').iter_records())
    self.assertIn('node_modules', extras)
def test_module_registry_through_registry(self):
    """
    Show that the module registry instantiated through the global
    registry's get function will result in the resulting module
    registry being populated properly with the module entries when
    using a constrained set of entry points.
    """
    working_set = mocks.WorkingSet({
        'calmjs.module': [
            'module1 = calmjs.testing.module1',
            'module2 = calmjs.testing.module2',
            'module3 = calmjs.testing.module3',
        ],
        'calmjs.reserved': [
            'calmjs.module = calmjs.testing',
        ],
        __name__: [
            'calmjs.module = calmjs.module:ModuleRegistry',
        ]},
        dist=Distribution(project_name='calmjs.testing', version='0.0')
    )
    utils.stub_mod_working_set(self, [calmjs.base], working_set)
    # Not going to use the global registry, and using our custom
    # reservation entry
    local_root_registry = Registry(
        __name__, 'calmjs.testing', _working_set=working_set)
    with pretty_logging(stream=mocks.StringIO()):
        # silences "distribution 'calmjs.testing 0.0' not found"
        # warnings from stdout produced by the indexer, as the
        # provided working_set is invalid with entry points that do
        # not have a valid distribution.
        global_registry = get('calmjs.module')
        registry = local_root_registry.get_record('calmjs.module')
        self.assertIsNot(registry, global_registry)
    self.assertEqual(
        sorted(k for k, v in registry.iter_records()), [
            'calmjs.testing.module1',
            'calmjs.testing.module2',
            'calmjs.testing.module3',
        ]
    )
    results = registry.get_records_for_package('calmjs.testing')
    self.assertEqual(sorted(results.keys()), [
        'calmjs/testing/module1/hello',
        'calmjs/testing/module2/helper',
        'calmjs/testing/module2/index',
        'calmjs/testing/module3/math',
    ])
    module1 = registry.get_record('calmjs.testing.module1')
    self.assertIn('calmjs/testing/module1/hello', module1)
def prepare_spec_advice_packages(self, spec, **kwargs):
    """
    Apply the toolchain spec advices declared by the packages that
    were named through the kwargs onto the given spec.
    """
    advice_registry = get(CALMJS_TOOLCHAIN_ADVICE)
    packages = kwargs.get(ADVICE_PACKAGES) or []
    # anything other than a list or tuple of package names is ignored.
    if not isinstance(packages, (list, tuple)):
        return
    logger.debug(
        'prepare spec with optional advices from packages %r',
        packages
    )
    for name in packages:
        advice_registry.process_toolchain_spec_package(
            self.toolchain, spec, name)
def test_integration_setup_and_teardown(self): from calmjs.registry import get # See that the standard registry has what we expected std_extra_keys = list(get('calmjs.extras_keys').iter_records()) self.assertNotEqual(std_extra_keys, ['fake_modules']) self.assertIn('node_modules', std_extra_keys) self.assertIn('node_modules', std_extra_keys) TestCase = type('TestCase', (unittest.TestCase,), {}) utils.setup_class_integration_environment(TestCase) self.assertIn(TestCase.dist_dir, self.mock_tempfile.dirs) registry = get('calmjs.registry') self.assertEqual(TestCase.registry_name, 'calmjs.module.simulated') self.assertTrue(registry.get('calmjs.module.simulated')) # works using the global function self.assertTrue(get('calmjs.module.simulated')) # See that the registry fake_modules actually got registered extra_keys = list(get('calmjs.extras_keys').iter_records()) self.assertIn('fake_modules', extra_keys) utils.teardown_class_integration_environment(TestCase) # the mock registry is unchanged self.assertTrue(registry.get('calmjs.module.simulated')) # global changes should no longer be in effect. self.assertIsNone(get('calmjs.module.simulated')) self.assertFalse(exists(TestCase.dist_dir)) # See that the registry fake_modules actually got registered std_extra_keys = list(get('calmjs.extras_keys').iter_records()) self.assertNotEqual(std_extra_keys, ['fake_modules']) self.assertIn('node_modules', std_extra_keys)
def get_extras_json(pkg_names, working_set=None):
    """
    Only extract the extras_json information for the given packages
    'pkg_names'.
    """
    working_set = working_set or default_working_set
    return flatten_dist_egginfo_json(
        pkg_names_to_dists(pkg_names, working_set=working_set),
        filename=json_filename,
        dep_keys=set(get(json_key_registry).iter_records()),
        working_set=working_set,
    )
def flatten_extras_calmjs(pkg_names, working_set=None):
    """
    Traverses through the dependency graph of packages 'pkg_names' and
    flattens all the egg_info calmjs registry information.
    """
    working_set = working_set or default_working_set
    # registry key must be explicit here as it was designed for this.
    extras_keys = set(get('calmjs.extras_keys').iter_records())
    dependents = find_packages_requirements_dists(
        pkg_names, working_set=working_set)
    return flatten_dist_egginfo_json(
        dependents,
        filename=EXTRAS_CALMJS_JSON,
        dep_keys=extras_keys,
        working_set=working_set,
    )
def get_module_registry_dependencies(
        pkg_names, registry_key='calmjs.module', working_set=None):
    """
    For the given packages 'pkg_names' and the registry identified by
    'registry_key', resolve the exported location for just the package.
    """
    working_set = working_set or default_working_set
    registry = get(registry_key)
    records = {}
    # a registry of the wrong type yields an empty mapping.
    if isinstance(registry, BaseModuleRegistry):
        for name in pkg_names:
            records.update(registry.get_records_for_package(name))
    return records
def get_module_registry_dependencies(
        pkg_names, registry_name=registry_name, working_set=None):
    """
    Get dependencies for the given package names from module registry
    identified by registry name.

    For the given packages 'pkg_names' and the registry identified by
    'registry_name', resolve the exported location for just the
    package.
    """
    working_set = working_set or default_working_set
    registry = get(registry_name)
    records = {}
    # a registry of the wrong type yields an empty mapping.
    if isinstance(registry, BaseModuleRegistry):
        for name in pkg_names:
            records.update(registry.get_records_for_package(name))
    return records
def test_module_registry_through_registry(self):
    """
    Show that the module registry instantiated through the global
    registry's get function will result in the resulting module
    registry being populated properly with the module entries when
    using a constrained set of entry points.
    """
    working_set = mocks.WorkingSet(
        {
            'calmjs.module': [
                'module1 = calmjs.testing.module1',
                'module2 = calmjs.testing.module2',
                'module3 = calmjs.testing.module3',
            ],
            __name__: [
                'calmjs.module = calmjs.module:ModuleRegistry',
            ]
        },
        dist=Distribution(project_name='calmjs.testing'))
    utils.stub_mod_working_set(self, [calmjs.base], working_set)
    # Not going to use the global registry
    local_root_registry = Registry(__name__)
    global_registry = get('calmjs.module')
    registry = local_root_registry.get_record('calmjs.module')
    # the locally resolved registry must be a distinct instance.
    self.assertIsNot(registry, global_registry)
    self.assertEqual(sorted(k for k, v in registry.iter_records()), [
        'calmjs.testing.module1',
        'calmjs.testing.module2',
        'calmjs.testing.module3',
    ])
    results = registry.get_records_for_package('calmjs.testing')
    self.assertEqual(sorted(results.keys()), [
        'calmjs/testing/module1/hello',
        'calmjs/testing/module2/helper',
        'calmjs/testing/module2/index',
        'calmjs/testing/module3/math',
    ])
    module1 = registry.get_record('calmjs.testing.module1')
    self.assertIn('calmjs/testing/module1/hello', module1)
def flatten_module_registry_dependencies(
        pkg_names, registry_key='calmjs.module', working_set=None):
    """
    For the given packages 'pkg_names' and the registry identified by
    'registry_key', resolve and flatten all the exported locations.
    """
    working_set = working_set or default_working_set
    registry = get(registry_key)
    flattened = {}
    # a registry of the wrong type yields an empty mapping.
    if not isinstance(registry, BaseModuleRegistry):
        return flattened
    for dist in find_packages_requirements_dists(
            pkg_names, working_set=working_set):
        flattened.update(registry.get_records_for_package(dist.project_name))
    return flattened
def _flatten_module_registry_dependencies(
        pkg_names, registry_name, find_dists, working_set):
    """
    Flatten dependencies for the given package names from module
    registry identified by registry name using the find_dists function
    on the given working_set.

    For the given packages 'pkg_names' and the registry identified by
    'registry_name', resolve and flatten all the exported locations.
    """
    registry = get(registry_name)
    flattened = {}
    # a registry of the wrong type yields an empty mapping.
    if not isinstance(registry, BaseModuleRegistry):
        return flattened
    for dist in find_dists(pkg_names, working_set=working_set):
        flattened.update(registry.get_records_for_package(dist.project_name))
    return flattened
def test_karma_setup_files(self):
    """
    Ensure karma_requirejs rebuilds the karma files listing in load
    order: generated test config, test script, then the original
    configured files marked as not auto-included.
    """
    karma_config = karma.build_base_config()
    karma_config['files'] = ['example/package/lib.js']
    spec = Spec(
        karma_config=karma_config,
        build_dir=mkdtemp(self),
        rjs_loader_plugin_registry=get(RJS_LOADER_PLUGIN_REGISTRY_NAME),
    )
    with pretty_logging(stream=StringIO()) as s:
        karma_requirejs(spec)
    # registry was provided, so no fallback warning is expected.
    self.assertNotIn("no rjs loader plugin registry ", s.getvalue())
    self.assertEqual(spec['karma_config']['files'], [
        spec['karma_requirejs_test_config'],
        spec['karma_requirejs_test_script'],
        {
            'pattern': 'example/package/lib.js',
            'included': False,
        },
    ])
def __init__(self, registry_name, *a, **kw):
    """
    Initialize a child module registry by resolving and binding its
    parent registry before delegating to the superclass constructor.

    Raises ValueError when the resolved parent registry is falsy.
    """
    # TODO whenever there is time to move the BaseRegistry to a
    # bootstrap module of some sort (that will break tests that
    # override calmjs.base.working_set if done naively), and have
    # the calmjs.registry.Registry inherit from that (and this
    # module also to maintain the BaseRegistry import location,
    # this import should be moved to the top
    from calmjs.registry import get
    # resolve parent before the parent class, as the construction
    # should be able to reference this parent.
    parent_name = self.resolve_parent_registry_name(registry_name)
    # _parent kwarg overrides the lookup; NotImplemented is used as the
    # sentinel so that an explicit None can still be passed through.
    _parent = kw.pop('_parent', NotImplemented)
    if _parent is NotImplemented:
        self.parent = get(parent_name)
    else:
        self.parent = _parent
    if not self.parent:
        raise ValueError(
            "could not construct child module registry '%s' as its "
            "parent registry '%s' could not be found" % (
                registry_name, parent_name))
    super(BaseChildModuleRegistry, self).__init__(registry_name, *a, **kw)
def _generate_bundle_maps(package_names, working_dir, method_map, method_key): map_method = acquire_method(method_map, method_key) # the extras keys will be treated as valid Node.js package manager # subdirectories. valid_pkgmgr_dirs = set(get('calmjs.extras_keys').iter_records()) extras_calmjs = map_method(package_names) bundle_sourcepath = {} for mgr in extras_calmjs: if mgr not in valid_pkgmgr_dirs: continue basedir = join(working_dir, mgr) if not isdir(basedir): if extras_calmjs[mgr]: logger.warning( "acquired extras_calmjs needs from '%s', but working " "directory '%s' does not contain it; bundling may fail.", mgr, working_dir) continue # pragma: no cover for k, v in extras_calmjs[mgr].items(): bundle_sourcepath[k] = join(basedir, *(v.split('/'))) return bundle_sourcepath
def __init__(self, registry=JINJA_TEMPLATE_REGISTRY_NAME, env=None):
    """
    By default, the engine can be created without arguments which
    will initialize using the default registry.

    It is possible to initialize using other arguments, but this is
    unsupported by the main system, and only useful for certain
    specialized implementations.
    """
    # accept either a ready registry instance or a name to resolve.
    if isinstance(registry, JinjaTemplateRegistry):
        self.registry = registry
    else:
        self.registry = get(registry)
    if env:
        self.env = env
    else:
        self.env = Environment(
            autoescape=True,
            loader=NunjaLoader(self.registry)
        )
    # this filter is to match with nunjucks version (which calls
    # JSON.stringify in JavaScript); construct a partial which is a
    # callable to json.dumps with default parameters that mimic the
    # JavaScript version of the called function.
    self.env.filters['dump'] = partial(
        json.dumps, sort_keys=True, separators=(',', ':'))
def test_registry_reserved(self):
    # the 'calmjs' distribution reserves r1 for itself and r3 for
    # an.external; both distributions attempt to register all three.
    make_dummy_dist(self, ((
        'entry_points.txt',
        '[calmjs.reserved]\n'
        'calmjs.r1 = calmjs\n'
        'calmjs.r3 = an.external\n'
        '\n'
        '[calmjs.registry]\n'
        'calmjs.r1 = calmjs.module:ModuleRegistry\n'
        'calmjs.r2 = calmjs.module:ModuleRegistry\n'
        'calmjs.r3 = calmjs.module:ModuleRegistry\n'
    ),), 'calmjs', '1.0')
    make_dummy_dist(self, ((
        'requires.txt',
        'calmjs',
    ), (
        'entry_points.txt',
        '[calmjs.reserved]\n'
        'calmjs.r1 = an.external\n'
        'calmjs.r2 = calmjs\n'
        'calmjs.r3 = calmjs\n'
        '\n'
        '[calmjs.registry]\n'
        'calmjs.r1 = calmjs.testing.module3.module:CustomModuleRegistry\n'
        'calmjs.r2 = calmjs.testing.module3.module:CustomModuleRegistry\n'
        'calmjs.r3 = calmjs.testing.module3.module:CustomModuleRegistry\n'
    ),), 'an.external', '2.0')
    working_set = pkg_resources.WorkingSet([self._calmjs_testing_tmpdir])
    with pretty_logging(stream=mocks.StringIO()) as stream:
        registry = calmjs.registry.Registry(
            'calmjs.registry', _working_set=working_set)
        from calmjs.testing.module3.module import CustomModuleRegistry
        from calmjs.module import ModuleRegistry
        r1 = registry.get('calmjs.r1')
        r2 = registry.get('calmjs.r2')
        r3 = registry.get('calmjs.r3')
    # since this one is reserved to calmjs, not registered
    self.assertFalse(isinstance(r1, CustomModuleRegistry))
    self.assertTrue(isinstance(r1, ModuleRegistry))
    # whatever this is.
    self.assertTrue(isinstance(r2, ModuleRegistry))
    # this one is reserved to an.external
    self.assertTrue(isinstance(r3, CustomModuleRegistry))
    log = stream.getvalue()
    self.assertIn(
        "registry 'calmjs.r1' for 'calmjs.registry' is reserved for "
        "package 'calmjs'", log
    )
    self.assertIn(
        "registry 'calmjs.r3' for 'calmjs.registry' is reserved for "
        "package 'an.external'", log
    )
    self.assertIn(
        "registry 'calmjs.r2' for 'calmjs.registry' is already registered",
        log
    )
def generate_bundle_sourcepaths(
        package_names, working_dir=None, method=_default):
    """
    Acquire the bundle source maps through the calmjs registry system.

    Arguments:

    package_names
        The names of the package to acquire the sources for.

    working_dir
        The working directory.  Defaults to current working directory.

    method
        The method to acquire the bundle sources for the given module.
        Choices are between 'all', 'explicit', 'none', or 'empty'.

        'all'
            Traverse the dependency graph for the specified package
            and acquire the declarations. [default]
        'explicit'
            Only acquire the bundle sources declared for the specified
            package.
        'none'
            Produce an empty source map.  For requirejs, this means
            the default fallback behavior of loading from the base_dir
            (i.e. the build_dir) which will result in error on missing
            files.  However this is left here for low level
            manipulation and/or usage.
        'empty'
            Same as all, but all paths will be replaced with 'empty:'.
            This effectively achieves the same effect as 'none',
            however in a way that should not error if the packages at
            hand have declared all the extra sources used in the
            extras_calmjs under the appropriate keys.

        Defaults to 'all'.
    """
    working_dir = working_dir if working_dir else getcwd()
    # the method resolves to an acquisition function and a path joiner.
    methods = acquire_method(extras_calmjs_methods, method)
    acquire_extras_calmjs, joiner = methods
    # the extras keys will be treated as valid Node.js package manager
    # subdirectories.
    valid_pkgmgr_dirs = set(get('calmjs.extras_keys').iter_records())
    extras_calmjs = acquire_extras_calmjs(package_names)
    bundle_sourcepath = {}
    for mgr in extras_calmjs:
        # silently skip package manager keys not declared as valid.
        if mgr not in valid_pkgmgr_dirs:
            continue
        basedir = join(working_dir, mgr)
        if not isdir(basedir):
            if extras_calmjs[mgr]:
                logger.warning(
                    "acquired extras_calmjs needs from '%s', but working "
                    "directory '%s' does not contain it; bundling may fail.",
                    mgr, working_dir
                )
            continue  # pragma: no cover
        for k, v in extras_calmjs[mgr].items():
            # declared paths use web-style '/' separators; rejoin them
            # via the method-specific joiner under the manager dir.
            bundle_sourcepath[k] = joiner(basedir, *(v.split('/')))
    return bundle_sourcepath
def test_integrated_get(self):
    """Verify that the default artifact registry is registered."""
    registry = get('calmjs.artifacts')
    self.assertTrue(isinstance(registry, ArtifactRegistry))
def karma_requirejs(spec):
    """
    An advice for the karma runtime before execution of karma that is
    needed for integrating the requirejs framework for testing into
    karma; needed when RJSToolchain was used for artifact generation.

    This advice should be registered to BEFORE_KARMA by RJSToolchain.

    This will modify the related items in spec for the generation of
    the karma.conf.js to ensure compatibility with requirejs idioms
    for the execution of the tests through karma.
    """
    # Importing this here as these modules may not be available, so to
    # avoid potential issues, import them within the scope of this
    # function; this function should never be called if the calmjs.dev
    # python package is not available for import (and the setup should
    # not add this to a valid advice).
    try:
        from calmjs.dev import karma
    except ImportError:
        logger.error(
            "package 'calmjs.dev' not available; cannot apply requirejs "
            "specific information without karma being available."
        )
        return

    # both the karma configuration and the build dir must be present.
    required_keys = [karma.KARMA_CONFIG, BUILD_DIR]
    for key in required_keys:
        if key not in spec:
            logger.error(
                "'%s' not provided by spec; aborting configuration for karma "
                "test runner", key
            )
            raise ToolchainAbort("spec missing key '%s'" % key)

    config = spec.get(karma.KARMA_CONFIG)
    config_files = config.get('files', [])
    build_dir = spec.get(BUILD_DIR)
    # ensure a loader plugin registry is in the spec, falling back to
    # the default one when absent.
    spec_update_loaderplugin_registry(
        spec, default=get(RJS_LOADER_PLUGIN_REGISTRY_NAME))
    plugin_registry = spec[CALMJS_LOADERPLUGIN_REGISTRY]
    test_module_paths_map = spec.get(TEST_MODULE_PATHS_MAP, {})
    test_conf = plugin_registry.modname_targetpath_mapping_to_config_paths(
        test_module_paths_map)
    # Ensure '/absolute' is prefixed like so to eliminate spurious error
    # messages in the test runner, simply because the requirejs plugin
    # will try to go through this mechanism to find a timestamp and fail
    # to find its expected path, triggering the unwanted messages. This
    # naive prefixing is actually consistent for all platforms including
    # Windows...
    new_paths = {
        # however, the actual path fragments need to be split and joined
        # with the web standard '/' separator.
        k: '/absolute' + '/'.join(v.split(sep))
        for k, v in test_conf['paths'].items()
    }
    test_conf['paths'] = new_paths
    test_config_path = spec['karma_requirejs_test_config'] = join(
        build_dir, 'requirejs_test_config.js')
    with open(test_config_path, 'w') as fd:
        fd.write(UMD_REQUIREJS_JSON_EXPORT_HEADER)
        json_dump(test_conf, fd)
        fd.write(UMD_REQUIREJS_JSON_EXPORT_FOOTER)

    # Export all the module dependencies first so they get pre-loaded
    # and thus be able to be loaded synchronously by test modules.
    deps = sorted(spec.get('export_module_names', []))
    if spec.get(ARTIFACT_PATHS):
        # TODO have a flag of some sort for flagging this as optional.
        deps.extend(process_artifacts(spec.get(ARTIFACT_PATHS)))
    test_prefix = spec.get(TEST_FILENAME_PREFIX, TEST_FILENAME_PREFIX_DEFAULT)
    tests = []
    # Process tests separately; include them iff the filename starts
    # with test, otherwise they are just provided as dependency modules.
    for k, v in test_module_paths_map.items():
        if basename(v).startswith(test_prefix):
            tests.append(k)
        else:
            deps.append(k)
    test_script_path = spec['karma_requirejs_test_script'] = join(
        build_dir, 'karma_test_init.js')
    with open(test_script_path, 'w') as fd:
        fd.write(TEST_SCRIPT_TEMPLATE % (json_dumps(deps), json_dumps(tests)))

    # requirejs must be loaded before the other frameworks.
    frameworks = ['requirejs']
    frameworks.extend(config['frameworks'])
    config['frameworks'] = frameworks

    # rebuild the files listing in specific ordering as the load order
    # matters from within a test browser spawned by karma.
    files = []
    # first include the configuration files
    files.extend(spec.get(CONFIG_JS_FILES, []))
    # then append the test configuration path
    files.append(test_config_path)
    # then the script
    files.append(test_script_path)
    # then extend the configured paths but do not auto-include them.
    files.extend({'pattern': f, 'included': False} for f in config_files)
    # update the file listing with modifications; this will be written
    # out as part of karma.conf.js by the KarmaRuntime.
    config['files'] = files
def karma_requirejs(spec): """ An advice for the karma runtime before execution of karma that is needed for integrating the requirejs framework for testing into karma; needed when RJSToolchain was used for artifact generation. This advice should be registered to BEFORE_KARMA by RJSToolchain. This will modify the related items in spec for the generation of the karma.conf.js to ensure compatibility with requirejs idioms for the execution of the tests through karma. """ # Importing this here as these modules may not be available, so to # avoid potential issues, import them within the scope of this # function; this function should never be called if the calmjs.dev # python package is not available for import (and the setup should # not add this to a valid advice). try: from calmjs.dev import karma except ImportError: logger.error( "package 'calmjs.dev' not available; cannot apply requirejs " "specific information without karma being available.") return required_keys = [karma.KARMA_CONFIG, BUILD_DIR] for key in required_keys: if key not in spec: logger.error( "'%s' not provided by spec; aborting configuration for karma " "test runner", key) raise ToolchainAbort("spec missing key '%s'" % key) config = spec.get(karma.KARMA_CONFIG) config_files = config.get('files', []) build_dir = spec.get(BUILD_DIR) plugin_registry = spec.get(RJS_LOADER_PLUGIN_REGISTRY) if not plugin_registry: logger.warning( 'no rjs loader plugin registry provided in spec; ' "falling back to default registry '%s'", RJS_LOADER_PLUGIN_REGISTRY_NAME) plugin_registry = get(RJS_LOADER_PLUGIN_REGISTRY_NAME) test_module_paths_map = spec.get(TEST_MODULE_PATHS_MAP, {}) test_conf = plugin_registry.modname_target_mapping_to_config_paths( test_module_paths_map) # Ensure '/absolute' is prefixed like so to eliminate spurious error # messages in the test runner, simply because the requirejs plugin # will try to go through this mechanism to find a timestamp and fail # to find its expected path, triggering the unwanted 
messages. This # naive prefixing is actually consistent for all platforms including # Windows... new_paths = { # however, the actual path fragments need to be split and joined # with the web standard '/' separator. k: '/absolute' + '/'.join(v.split(sep)) for k, v in test_conf['paths'].items() } test_conf['paths'] = new_paths test_config_path = spec['karma_requirejs_test_config'] = join( build_dir, 'requirejs_test_config.js') with open(test_config_path, 'w') as fd: fd.write(UMD_REQUIREJS_JSON_EXPORT_HEADER) json_dump(test_conf, fd) fd.write(UMD_REQUIREJS_JSON_EXPORT_FOOTER) # Export all the module dependencies first so they get pre-loaded # and thus be able to be loaded synchronously by test modules. deps = sorted(spec.get('export_module_names', [])) if spec.get(ARTIFACT_PATHS): # TODO have a flag of some sort for flagging this as optional. deps.extend(process_artifacts(spec.get(ARTIFACT_PATHS))) test_prefix = spec.get(TEST_FILENAME_PREFIX, TEST_FILENAME_PREFIX_DEFAULT) tests = [] # Process tests separately; include them iff the filename starts # with test, otherwise they are just provided as dependency modules. for k, v in test_module_paths_map.items(): if basename(v).startswith(test_prefix): tests.append(k) else: deps.append(k) test_script_path = spec['karma_requirejs_test_script'] = join( build_dir, 'karma_test_init.js') with open(test_script_path, 'w') as fd: fd.write(TEST_SCRIPT_TEMPLATE % (json_dumps(deps), json_dumps(tests))) frameworks = ['requirejs'] frameworks.extend(config['frameworks']) config['frameworks'] = frameworks # rebuild the files listing in specific ordering as the load order # matters from within a test browser spawned by karma. files = [] # first include the configuration files files.extend(spec.get(CONFIG_JS_FILES, [])) # then append the test configuration path files.append(test_config_path) # then the script files.append(test_script_path) # then extend the configured paths but do not auto-include them. 
files.extend({'pattern': f, 'included': False} for f in config_files) # update the file listing with modifications; this will be written # out as part of karma.conf.js by the KarmaRuntime. config['files'] = files
def prepare(self, spec):
    """
    Attempts to locate the r.js binary if not already specified.  If
    the binary file was not found, RJSRuntimeError will be raised.

    Also derives the various configuration/manifest paths under the
    build dir and validates the export target before the build starts.
    """
    # resolve the loader plugin registry: the one named by the spec
    # takes precedence, falling back to the instance's own.
    loader_plugin_registry = get(spec.get(RJS_LOADER_PLUGIN_REGISTRY_KEY))
    loader_plugin_registry = spec[RJS_LOADER_PLUGIN_REGISTRY] = (
        loader_plugin_registry or self.loader_plugin_registry)

    if self.rjs_bin_key not in spec:
        which_bin = spec[self.rjs_bin_key] = (
            self.which() or self.which_with_node_modules())
        if which_bin is None:
            raise RJSRuntimeError(
                "unable to locate '%s'" % self.binary)
        logger.debug("using '%s' as '%s'", which_bin, self.binary)
    elif not exists(spec[self.rjs_bin_key]):
        # should we check whether target can be executed?
        # NOTE(review): the second format argument is self.rjs_bin;
        # self.binary is used elsewhere in this method — confirm which
        # attribute is intended here.
        raise RJSRuntimeError(
            "'%s' does not exist; cannot be used as '%s' binary" % (
                spec[self.rjs_bin_key],
                self.rjs_bin
            )
        )

    # with requirejs, it would be nice to also build a simple config
    # that can be used from within node with the stuff in just the
    # build directory - if this wasn't already defined for some
    # reason.
    spec['requirejs_config_js'] = join(
        spec['build_dir'], self.requirejs_config_name)
    spec['node_config_js'] = join(
        spec['build_dir'], self.node_config_name)
    spec['build_manifest_path'] = join(
        spec[BUILD_DIR], self.build_manifest_name)

    if EXPORT_TARGET not in spec:
        raise RJSRuntimeError(
            "'%s' not found in spec" % EXPORT_TARGET)

    # no effect if EXPORT_TARGET already absolute.
    # fix: the original chained the same subscript assignment twice
    # (spec[EXPORT_TARGET] = spec[EXPORT_TARGET] = ...); a single
    # assignment is sufficient and avoids the redundant store.
    spec[EXPORT_TARGET] = self.join_cwd(spec[EXPORT_TARGET])

    # Only providing the standard web one, as the node version is
    # for internal testing
    spec[CONFIG_JS_FILES] = [spec['requirejs_config_js']]

    if not isdir(dirname(spec[EXPORT_TARGET])):
        raise RJSRuntimeError(
            "'%s' will not be writable" % EXPORT_TARGET)
    logger.debug(
        "'%s' declared to be '%s'",
        EXPORT_TARGET, spec[EXPORT_TARGET]
    )

    # the export target must not clobber the generated support files.
    keys = ('requirejs_config_js', 'build_manifest_path')
    matched = [k for k in keys if spec[EXPORT_TARGET] == spec[k]]
    if matched:
        raise RJSRuntimeError(
            "'%s' must not be same as '%s'" % (EXPORT_TARGET, matched[0]))

    plugin_source_map = spec['plugin_source_map'] = {}
    raw_plugins = spec.get(REQUIREJS_PLUGINS, {})
    for key, value in raw_plugins.items():
        handler = loader_plugin_registry.get_record(key)
        if handler:
            # assume handler will do the job.
            plugin_source_map.update(value)
            logger.debug("found handler for '%s' loader plugin", key)
        else:
            logger.warning(
                "handler for '%s' loader plugin not found in registry; "
                "as arguments associated with requirejs loader plugins "
                "are specific, processing is disabled and the following "
                "names will not be compiled into the target: %s",
                key, sorted(value.keys()),
            )

    # setup own advice.
    rjs_advice(spec)
def generate_bundle_source_maps(
        package_names, working_dir=None, method=_default):
    """
    Acquire the bundle source maps through the calmjs registry system.

    Arguments:

    package_names
        The names of the package to acquire the sources for.

    working_dir
        The working directory.  Defaults to current working directory.

    method
        The method to acquire the bundle sources for the given module.
        Choices are between 'all', 'explicit', 'none', or 'empty'.

        'all'
            Traverse the dependency graph for the specified package
            and acquire the declarations. [default]
        'explicit'
            Only acquire the bundle sources declared for the specified
            package.
        'none'
            Produce an empty source map.  For requirejs, this means
            the default fallback behavior of loading from the base_dir
            (i.e. the build_dir) which will result in error on missing
            files.  However this is left here for low level
            manipulation and/or usage.
        'empty'
            Same as all, but all paths will be replaced with 'empty:'.
            This effectively achieves the same effect as 'none',
            however in a way that should not error if the packages at
            hand have declared all the extra sources used in the
            extras_calmjs under the appropriate keys.

        Defaults to 'all'.
    """
    working_dir = working_dir if working_dir else getcwd()
    # the method resolves to an acquisition function and a path joiner.
    methods = acquire_method(extras_calmjs_methods, method)
    acquire_extras_calmjs, joiner = methods
    # the extras keys will be treated as valid Node.js package manager
    # subdirectories.
    valid_pkgmgr_dirs = set(get('calmjs.extras_keys').iter_records())
    extras_calmjs = acquire_extras_calmjs(package_names)
    bundle_source_map = {}
    for mgr in extras_calmjs:
        # silently skip package manager keys not declared as valid.
        if mgr not in valid_pkgmgr_dirs:
            continue
        basedir = join(working_dir, mgr)
        if not isdir(basedir):
            if extras_calmjs[mgr]:
                logger.warning(
                    "acquired extras_calmjs needs from '%s', but working "
                    "directory '%s' does not contain it; bundling may fail.",
                    mgr, working_dir)
            continue  # pragma: no cover
        for k, v in extras_calmjs[mgr].items():
            # declared paths use web-style '/' separators; rejoin them
            # via the method-specific joiner under the manager dir.
            bundle_source_map[k] = joiner(basedir, *(v.split('/')))
    return bundle_source_map