def test_karma_runtime_integration_ignore_error(self):
    """The -I flag continues past karma test failures and skips coverage."""
    stub_stdouts(self)
    target = join(mkdtemp(self), 'target')
    build_dir = mkdtemp(self)
    # register a dummy ToolchainRuntime under the 'null' runtime entry
    stub_item_attr_value(
        self, mocks, 'dummy', ToolchainRuntime(NullToolchain()),
    )
    make_dummy_dist(self, ((
        'entry_points.txt',
        '[calmjs.runtime]\n'
        'null = calmjs.testing.mocks:dummy\n'
    ),), 'example.package', '1.0')
    working_set = WorkingSet([self._calmjs_testing_tmpdir])
    rt = KarmaRuntime(self.driver, working_set=working_set)
    # -I requests that karma test failures do not abort the run
    result = rt([
        '-I', 'null', '--export-target', target, '--build-dir', build_dir,
    ])
    self.assertIn('karma_config_path', result)
    self.assertTrue(exists(result['karma_config_path']))
    self.assertFalse(result.get('karma_abort_on_test_failure'))
    # the failure is reported on stderr but execution continued
    self.assertIn(
        "karma exited with return code 1; continuing as specified",
        sys.stderr.getvalue()
    )
    # ensure coverage isn't run at all.
    coverage_report_dir = join(build_dir, 'coverage')
    self.assertFalse(exists(coverage_report_dir))
def test_find_node_modules_basedir(self):
    """Resolution order of node_modules: working dir first, then NODE_PATH."""
    driver = base.BaseDriver()
    # start from a clean slate: no NODE_PATH, fresh working directory
    driver.node_path = None
    driver.working_dir = mkdtemp(self)
    # nothing configured and nothing on disk: empty result
    self.assertEqual([], driver.find_node_modules_basedir())
    # entries listed in NODE_PATH are reported back in order
    first_path = mkdtemp(self)
    second_path = mkdtemp(self)
    driver.node_path = pathsep.join([first_path, second_path])
    self.assertEqual(
        [first_path, second_path], driver.find_node_modules_basedir())
    # with NODE_PATH unset, a node_modules inside the working dir counts
    driver.node_path = None
    local_node_modules = join(driver.working_dir, 'node_modules')
    os.mkdir(local_node_modules)
    self.assertEqual(
        [local_node_modules], driver.find_node_modules_basedir())
    # both together: the working directory takes precedence over NODE_PATH
    driver.node_path = first_path
    self.assertEqual(
        [local_node_modules, first_path],
        driver.find_node_modules_basedir())
def test_nested(self):
    """TextPlugin copies a nested source into build_dir and maps its names."""
    # the target is 'namespace/text_file.txt'
    build_dir = mkdtemp(self)
    srcdir = join(mkdtemp(self), 'namespace')
    mkdir(srcdir)
    spec = {'build_dir': build_dir}
    source = join(srcdir, 'text_file.txt')
    with open(source, 'w') as fd:
        fd.write('a text file\n')
    toolchain = None  # this one is not necessary for text.
    modname = 'text!namespace/text_file.txt'
    target = 'text!namespace/text_file.txt'
    modpath = 'text!namespace/text_file.txt'
    with pretty_logging(
            'calmjs.rjs.loaderplugin', stream=StringIO()) as stream:
        result = loaderplugin.TextPlugin(None)(
            toolchain, spec, modname, source, target, modpath)
    # a clean run emits no log output
    self.assertEqual(stream.getvalue(), '')
    self.assertTrue(exists(join(build_dir, 'namespace', 'text_file.txt')))
    bundled_modpaths, bundled_targets, module_name = result
    self.assertEqual(bundled_modpaths, {
        'text!namespace/text_file.txt': 'text!namespace/text_file.txt',
    })
    # both the suffix-stripped and the full filename map to the target
    self.assertEqual(bundled_targets, {
        'namespace/text_file': 'namespace/text_file',
        'namespace/text_file.txt': 'namespace/text_file.txt',
    })
    self.assertEqual(module_name, ['text!namespace/text_file.txt'])
def test_karma_setup_not_webpack_artifact(self):
    """karma_webpack logs a warning for a non-calmjs-webpack artifact."""
    karma_config = karma.build_base_config()
    src_dir = mkdtemp(self)
    # a generic UMD-style file that carries no calmjs-related exports
    fake_artifact = join(src_dir, 'fake_artifact.js')
    with open(fake_artifact, 'w') as fd:
        fd.write('(function(root, factory) { factory() })')
        fd.write('(this, function() {});')
    build_dir = mkdtemp(self)
    spec = Spec(
        karma_config=karma_config,
        build_dir=build_dir,
        test_module_paths_map={
            'some/package/tests/test_module':
                '/src/some/package/tests/test_module.js'
        },
        artifact_paths=[fake_artifact],
        toolchain_bin_path=self.setup_fake_webpack(),
    )
    with pretty_logging(stream=StringIO()) as s:
        karma_webpack(spec)
    log = s.getvalue()
    # the offending artifact is named in the warning
    self.assertIn("unable to extract calmjs related exports from", log)
    self.assertIn(fake_artifact, log)
def test_build_bundle_no_indent(self):
    """Bundle built with transpile_no_indent still reports source positions."""
    bundle_dir = utils.mkdtemp(self)
    build_dir = utils.mkdtemp(self)
    transpile_sourcepath = {}
    transpile_sourcepath.update(self._example_package_map)
    bundle_sourcepath = {}
    export_target = join(bundle_dir, 'example.package.js')
    rjs = toolchain.RJSToolchain()
    spec = Spec(
        transpile_sourcepath=transpile_sourcepath,
        bundle_sourcepath=bundle_sourcepath,
        export_target=export_target,
        build_dir=build_dir,
        transpile_no_indent=True,
    )
    rjs(spec)
    self.assertTrue(exists(export_target))
    stdout, stderr = run_node(
        'var requirejs = require("requirejs");\n'
        '%s\n'
        'var main = requirejs("example/package/main");\n'
        'main.main(true);\n',
        spec['node_config_js'],
    )
    bad_file = join('example', 'package', 'bad.js')
    # the reported location may or may not carry a column; accept both;
    # backslashes in Windows paths must be escaped for the regex
    patt = re.compile('%s:%d(:%d)?' % (
        bad_file.replace('\\', '\\\\'),
        self._bad_notdefinedsymbol[0],
        self._bad_notdefinedsymbol[1],
    ))
    self.assertTrue(patt.search(stderr))
    self.assertEqual(stdout, '2\n4\n')
def test_rjs_core_compiled_failure_bad_template(self):
    """A template with bad nunjucks syntax fails precompilation with a log."""
    remember_cwd(self)
    chdir(self._env_root)
    build_dir = mkdtemp(self)
    src_dir = mkdtemp(self)
    src_template = join(src_dir, 'template.nja')
    with open(src_template, 'w') as fd:
        # '{%World%}' is not a valid nunjucks tag
        fd.write('<p>Hello {%World%}</p>')
    spec = Spec(
        build_dir=build_dir,
        plugin_sourcepath={
            'text!mold/dummy/template.nja': src_template,
        },
        bundle_sourcepath={},
    )
    # note: the original rebound build_dir to a fresh mkdtemp here;
    # that directory was never used, so the dead assignment is dropped.
    rjs(spec, ())
    with pretty_logging('nunja', stream=StringIO()) as stream:
        # the precompile advice fires before compilation
        spec.handle(BEFORE_COMPILE)
    err = stream.getvalue()
    self.assertIn('ERROR', err)
    self.assertIn('failed to precompile', err)
    self.assertIn(
        'Template render error: (mold/dummy/template.nja)', err)
def test_compile_plugin_error(self):
    """Compiling an entry with an unregistered loader plugin logs an error."""
    working_dir = utils.mkdtemp(self)
    mock_requirejs_text(working_dir)
    src_dir = utils.mkdtemp(self)
    src = join(src_dir, 'mod.js')
    with open(src, 'w') as fd:
        fd.write('hello world')
    # prepare targets
    target = 'target.txt'
    rjs = toolchain.RJSToolchain()
    spec = Spec(**{
        'build_dir': self.build_dir,
        rjs.rjs_bin_key: join(self.build_dir, 'r.js'),
        'export_target': join(working_dir, 'export.js'),
        'bundle_sourcepath': {},
        LOADERPLUGIN_SOURCEPATH_MAPS: {
            'unregistered/mod': {}
        },
        'working_dir': working_dir,
    })
    with pretty_logging(logger='calmjs', stream=mocks.StringIO()) as s:
        rjs.prepare(spec)
        rjs.compile_loaderplugin_entry(spec, (
            'unregistered/mod!target.txt', src, target, 'target.txt'))
    # NOTE(review): this matches as a substring of what is presumably a
    # "no loaderplugin handler found ..." message -- confirm intent.
    self.assertIn(
        "loaderplugin handler found for plugin entry "
        "'unregistered/mod!target.txt'", s.getvalue())
def test_compile_plugin_base(self):
    """compile_plugin writes each plugin-sourced file to its build target."""
    build_dir = utils.mkdtemp(self)
    src_dir = utils.mkdtemp(self)
    src = join(src_dir, 'mod.js')
    with open(src, 'w') as fd:
        fd.write('hello world')
    # prepare targets at several nesting levels
    target1 = 'mod1.txt'
    target2 = join('namespace', 'mod2.txt')
    target3 = join('nested', 'namespace', 'mod3.txt')
    target4 = 'namespace.mod4.txt'
    rjs = toolchain.RJSToolchain()
    spec = {
        'build_dir': build_dir,
        toolchain.RJS_LOADER_PLUGIN_REGISTRY: rjs.loader_plugin_registry,
    }
    rjs.compile_plugin(spec, [
        ('text!mod1.txt', src, target1, 'mod1'),
        ('text!namespace/mod2.txt', src, target2, 'mod2'),
        ('text!nested/namespace/mod3.txt', src, target3, 'mod3'),
        ('text!namespace.mod4.txt', src, target4, 'mod4'),
    ])
    # every target, including deeply nested ones, lands under build_dir
    self.assertTrue(exists(join(build_dir, target1)))
    self.assertTrue(exists(join(build_dir, target2)))
    self.assertTrue(exists(join(build_dir, target3)))
    self.assertTrue(exists(join(build_dir, target4)))
def test_pkg_manager_init_working_dir(self):
    """pkg_manager_init writes into working_dir, not the process cwd."""
    self.setup_requirements_json()
    remember_cwd(self)
    start_dir = mkdtemp(self)
    os.chdir(start_dir)
    # the driver is pointed at a separate working directory
    project_dir = mkdtemp(self)
    expected_file = join(project_dir, 'requirements.json')
    driver = cli.PackageManagerDriver(
        pkg_manager_bin='mgr',
        pkgdef_filename='requirements.json',
        dep_keys=('require',),
        working_dir=project_dir,
    )
    driver.pkg_manager_init('calmpy.pip', interactive=False)
    # nothing leaked into the cwd; the file landed in working_dir
    self.assertFalse(exists(join(start_dir, 'requirements.json')))
    self.assertTrue(exists(expected_file))
    with open(expected_file) as fd:
        result = json.load(fd)
    self.assertEqual(result, {
        "require": {"setuptools": "25.1.6"},
        "name": "calmpy.pip",
    })
def test_build_bundle_no_indent(self):
    """Bundle built with transpile_no_indent still reports source positions."""
    bundle_dir = utils.mkdtemp(self)
    build_dir = utils.mkdtemp(self)
    transpile_source_map = {}
    transpile_source_map.update(self._example_package_map)
    bundle_source_map = {}
    export_target = join(bundle_dir, 'example.package.js')
    rjs = toolchain.RJSToolchain()
    spec = Spec(
        transpile_source_map=transpile_source_map,
        bundle_source_map=bundle_source_map,
        export_target=export_target,
        build_dir=build_dir,
        transpile_no_indent=True,
    )
    rjs(spec)
    self.assertTrue(exists(export_target))
    stdout, stderr = run_node(
        'var requirejs = require("requirejs");\n'
        '%s\n'
        'var main = requirejs("example/package/main");\n'
        'main.main(true);\n',
        spec['node_config_js'],
    )
    # the traceback must point at the original line:column in bad.js
    self.assertIn(
        join('example', 'package', 'bad.js') + ':%d:%d' % (
            self._bad_notdefinedsymbol
        ),
        stderr
    )
    self.assertEqual(stdout, '2\n4\n')
def test_build_bundle_standard(self):
    """A standard build produces an export that runs correctly under node."""
    out_dir = utils.mkdtemp(self)
    build_dir = utils.mkdtemp(self)
    export_target = join(out_dir, 'example.package.js')
    # copy the example package sources for transpilation; no bundles
    sources = dict(self._example_package_map)
    rjs = toolchain.RJSToolchain()
    rjs(Spec(
        build_dir=build_dir,
        export_target=export_target,
        transpile_source_map=sources,
        bundle_source_map={},
    ))
    self.assertTrue(exists(export_target))
    # verify that the bundle works with node
    stdout, stderr = run_node(
        'var requirejs = require("requirejs");\n'
        'var define = requirejs.define;\n'
        '%s\n'
        'var main = requirejs("example/package/main");\n'
        'main.main();\n',
        export_target,
    )
    self.assertEqual(stderr, '')
    self.assertEqual(stdout, '2\n4\n')
def test_nested(self):
    """TextPlugin copies a nested source into build_dir and maps its names."""
    # the target is 'namespace/text_file.txt'
    build_dir = mkdtemp(self)
    srcdir = join(mkdtemp(self), 'namespace')
    mkdir(srcdir)
    spec = {'build_dir': build_dir}
    source = join(srcdir, 'text_file.txt')
    with open(source, 'w') as fd:
        fd.write('a text file\n')
    toolchain = None  # this one is not necessary for text.
    modname = 'text!namespace/text_file.txt'
    target = 'text!namespace/text_file.txt'
    modpath = 'text!namespace/text_file.txt'
    with pretty_logging('calmjs.rjs.plugin', stream=StringIO()) as stream:
        result = plugin.TextPlugin(None)(
            toolchain, spec, modname, source, target, modpath)
    # a clean run stays silent
    self.assertEqual(stream.getvalue(), '')
    self.assertTrue(exists(join(build_dir, 'namespace', 'text_file.txt')))
    bundled_modpaths, bundled_targets, module_name = result
    self.assertEqual(
        bundled_modpaths, {
            'text!namespace/text_file.txt': 'text!namespace/text_file.txt',
        })
    # both the suffix-stripped and the full filename are mapped
    self.assertEqual(
        bundled_targets, {
            'namespace/text_file': 'namespace/text_file',
            'namespace/text_file.txt': 'namespace/text_file.txt',
        })
    self.assertEqual(module_name, ['text!namespace/text_file.txt'])
def test_prepare_compile_assemble_verify_loaders_not_found(self):
    """verify_imports flags loader imports that resolve to no known module."""
    working_dir = utils.mkdtemp(self)
    mock_text_loader(working_dir)
    src_dir = utils.mkdtemp(self)
    index_file = join(src_dir, 'index.js')
    with open(index_file, 'w') as fd:
        # requires a text-loader module that is mapped nowhere
        fd.write('var hello = require("text!hello/world.txt");\n')
    webpack = toolchain.WebpackToolchain()
    spec = Spec(**{
        'build_dir': self.build_dir,
        'export_target': join(working_dir, 'export.js'),
        webpack.webpack_bin_key: join(self.build_dir, 'webpack'),
        LOADERPLUGIN_SOURCEPATH_MAPS: {},
        'transpile_sourcepath': {
            'index': index_file,
        },
        'working_dir': working_dir,
        'verify_imports': True,
    })
    with pretty_logging(stream=mocks.StringIO()) as s:
        webpack.prepare(spec)
        webpack.compile(spec)
        webpack.assemble(spec)
    # the unresolvable import is called out in the log
    self.assertIn(
        "not in modules: %s" % (['text!hello/world.txt'], ), s.getvalue())
def test_call_dir_nesting(self):
    """WebpackLoaderHandler creates nested target dirs under build_dir."""
    srcfile = join(mkdtemp(self), 'some.file.txt')
    tgtfile = join('dir', 'some.file.txt')
    spec = Spec(build_dir=mkdtemp(self))
    toolchain = Toolchain()
    with open(srcfile, 'w') as fd:
        fd.write('hello world')
    reg = LoaderPluginRegistry('calmjs.webpack.loaders')
    text = loaderplugin.WebpackLoaderHandler(reg, 'text')
    modpaths, targets, export_module_names = text(
        toolchain, spec,
        'text!some.file.txt', srcfile, tgtfile, 'text!some.file.txt')
    # the nested directory was created inside the build dir
    self.assertTrue(
        exists(join(spec['build_dir'], 'dir', 'some.file.txt')))
    self.assertEqual(
        {'text!some.file.txt': 'text!some.file.txt'}, modpaths)
    # both the bare and the relative keys point at the nested target
    self.assertEqual({
        'some.file.txt': tgtfile,
        './some.file.txt': tgtfile,
    }, targets)
    self.assertEqual(['text!some.file.txt'], export_module_names)
def test_webpack_core_compiled_raw(self):
    """Raw mode cannot skip precompilation under the webpack toolchain."""
    remember_cwd(self)
    chdir(self._env_root)
    build_dir = mkdtemp(self)
    src_dir = mkdtemp(self)
    src_template = join(src_dir, 'template.nja')
    with open(src_template, 'w') as fd:
        fd.write('<p>Hello, {name}</p>')
    spec = Spec(
        build_dir=build_dir,
        loaderplugin_sourcepath={
            'text!some/mold/template.nja': src_template,
        },
        bundle_sourcepath={
            'nunjucks': join('node_modules', 'nunjucks', 'nunjucks.js'),
        },
    )
    with pretty_logging('nunja', stream=StringIO()) as stream:
        # NOTE(review): ('raw') is a plain string, not a one-tuple;
        # confirm whether ('raw',) was intended here.
        webpack(spec, ('raw'))
        # now trigger the advice
        spec.handle(BEFORE_COMPILE)
    # template remains in plugins
    self.assertEqual(spec['loaderplugin_sourcepath'], {
        'text!some/mold/template.nja': src_template,
    })
    # will not be applied in raw.
    self.assertIn('__nunja__/some/mold', spec['bundle_sourcepath'])
    self.assertIn(
        'nunja cannot skip precompilation for webpack toolchain',
        stream.getvalue(),
    )
def test_find_node_modules_basedir(self):
    """node_modules lookup: the working directory wins over NODE_PATH."""
    driver = base.BaseDriver()
    driver.node_path = None
    driver.working_dir = mkdtemp(self)
    # nothing configured and nothing on disk yields an empty result
    self.assertEqual([], driver.find_node_modules_basedir())
    # NODE_PATH entries come back in the order given
    node_path_a, node_path_b = mkdtemp(self), mkdtemp(self)
    driver.node_path = pathsep.join([node_path_a, node_path_b])
    self.assertEqual(
        [node_path_a, node_path_b], driver.find_node_modules_basedir())
    # a node_modules under the working dir is picked up on its own
    driver.node_path = None
    wd_node_modules = join(driver.working_dir, 'node_modules')
    os.mkdir(wd_node_modules)
    self.assertEqual(
        [wd_node_modules], driver.find_node_modules_basedir())
    # when both exist the working directory entry is listed first
    driver.node_path = node_path_a
    self.assertEqual(
        [wd_node_modules, node_path_a],
        driver.find_node_modules_basedir())
def test_call_loader_chaining(self):
    """A chained loader string ('text!css!...') is handled end to end."""
    srcfile = join(mkdtemp(self), 'some.css')
    spec = Spec(build_dir=mkdtemp(self))
    toolchain = Toolchain()
    with open(srcfile, 'w') as fd:
        fd.write('body { color: #000; }')
    reg = LoaderPluginRegistry('calmjs.webpack.loaders')
    reg.records['text'] = text = loaderplugin.WebpackLoaderHandler(
        reg, 'text')
    reg.records['css'] = loaderplugin.WebpackLoaderHandler(reg, 'css')
    modpaths, targets, export_module_names = text(
        toolchain, spec,
        'text!css!some.css', srcfile, 'some.css', 'text!css!some.css')
    # the full chained name is preserved as the modpath key
    self.assertEqual({'text!css!some.css': 'text!css!some.css'}, modpaths)
    self.assertEqual({
        'some.css': 'some.css',
        './some.css': 'some.css',
    }, targets)
    self.assertEqual(['text!css!some.css'], export_module_names)
    self.assertTrue(exists(join(spec['build_dir'], 'some.css')))
def test_null_toolchain_transpile_sources(self):
    """The null toolchain records compiled paths and module names in spec."""
    source_dir = mkdtemp(self)
    build_dir = mkdtemp(self)
    source_file = join(source_dir, 'source.js')
    with open(source_file, 'w') as fd:
        fd.write('var dummy = function () {};\n')
    spec = Spec(
        build_dir=build_dir,
        transpile_source_map={
            'namespace.dummy.source': source_file,
        },
    )
    self.toolchain(spec)
    # name, and relative filename to the build_path
    self.assertEqual(spec, {
        'build_dir': build_dir,
        'transpile_source_map': {
            'namespace.dummy.source': source_file,
        },
        'bundled_paths': {},
        'compiled_paths': {
            'namespace.dummy.source': 'namespace.dummy.source',
        },
        'module_names': ['namespace.dummy.source'],
        'prepare': 'prepared',
        'assemble': 'assembled',
        'link': 'linked',
    })
    self.assertTrue(exists(join(build_dir, 'namespace.dummy.source.js')))
def test_modname_loader_map(self):
    """Modnames listed in the loader map are excluded from exported names."""
    srcfile = join(mkdtemp(self), 'some.css')
    spec = Spec(
        build_dir=mkdtemp(self),
        calmjs_webpack_modname_loader_map={'some.css': ['style', 'css']},
    )
    toolchain = Toolchain()
    with open(srcfile, 'w') as fd:
        fd.write('.body {}')
    reg = LoaderPluginRegistry('calmjs.webpack.loaders')
    reg.records['style'] = text = loaderplugin.WebpackLoaderHandler(
        reg, 'style')
    reg.records['css'] = loaderplugin.WebpackLoaderHandler(reg, 'css')
    modpaths, targets, export_module_names = text(
        toolchain, spec,
        'style!css!some.css', srcfile, 'some.css', 'style!css!some.css')
    self.assertEqual(
        {'style!css!some.css': 'style!css!some.css'}, modpaths)
    self.assertEqual({
        'some.css': 'some.css',
        './some.css': 'some.css',
    }, targets)
    # mapped entries do not surface as export module names
    self.assertEqual([], export_module_names)
def create_coverage_report(self, report_type):
    """Run karma with coverage enabled for the given report type.

    Returns a (coverage_dir, artifact_fn) tuple for further assertions.
    """
    stub_stdouts(self)
    self.addCleanup(
        root_registry.records.pop, 'calmjs.dev.module.tests', None)
    build_dir = mkdtemp(self)
    coverage_dir = join(mkdtemp(self), 'coverage')
    # manipulate the registry to remove the fail test
    reg = root_registry.get('calmjs.dev.module.tests')
    reg.records['calmjs.dev.tests'].pop('calmjs/dev/tests/test_fail', '')
    # use the full blown runtime
    rt = KarmaRuntime(self.driver)
    # the artifact in our case is identical to the source file
    artifact_fn = resource_filename('calmjs.dev', 'main.js')
    result = rt([
        '--artifact', artifact_fn, 'run',
        '--build-dir', build_dir,
        '--test-registry', 'calmjs.dev.module.tests',
        '--test-with-package', 'calmjs.dev',
        '--coverage', '--cover-artifact',
        '--cover-report-type', report_type,
        '--cover-report-dir', coverage_dir,
    ])
    self.assertIn('karma_config_path', result)
    self.assertEqual(result['artifact_paths'], [artifact_fn])
    self.assertTrue(exists(result['karma_config_path']))
    # should exit cleanly
    self.assertNotIn(
        "karma exited with return code 1", sys.stderr.getvalue())
    self.assertIn(artifact_fn, result['karma_config']['preprocessors'])
    self.assertTrue(exists(coverage_dir))
    return coverage_dir, artifact_fn
def test_build_bundle_standard(self):
    """A standard RJS build exports a bundle that runs under node."""
    bundle_dir = utils.mkdtemp(self)
    build_dir = utils.mkdtemp(self)
    transpile_sourcepath = {}
    transpile_sourcepath.update(self._example_package_map)
    bundle_sourcepath = {}
    export_target = join(bundle_dir, 'example.package.js')
    rjs = toolchain.RJSToolchain()
    spec = Spec(
        transpile_sourcepath=transpile_sourcepath,
        bundle_sourcepath=bundle_sourcepath,
        export_target=export_target,
        build_dir=build_dir,
    )
    rjs(spec)
    self.assertTrue(exists(export_target))
    # verify that the bundle works with node
    stdout, stderr = run_node(
        'var requirejs = require("requirejs");\n'
        'var define = requirejs.define;\n'
        '%s\n'
        'var main = requirejs("example/package/main");\n'
        'main.main();\n',
        export_target,
    )
    self.assertEqual(stderr, '')
    self.assertEqual(stdout, '2\n4\n')
def test_mkdtemp_clean_ups(self):
    """Temp dirs made via mkdtemp are removed by the registered cleanups."""
    first = mkdtemp(self)
    second = mkdtemp(self)
    # two distinct directories exist before cleanup
    for path in (first, second):
        self.assertTrue(exists(path))
    self.assertNotEqual(first, second)
    self.doCleanups()
    # both are gone afterwards
    for path in (first, second):
        self.assertFalse(exists(path))
    self.assertEqual(self.mock_tempfile.count, 2)
def test_mkdtemp_clean_ups(self):
    """Each mkdtemp call yields a distinct dir that doCleanups removes."""
    created = [mkdtemp(self), mkdtemp(self)]
    for directory in created:
        self.assertTrue(exists(directory))
    self.assertNotEqual(created[0], created[1])
    self.doCleanups()
    for directory in created:
        self.assertFalse(exists(directory))
    # exactly two temp dirs were tracked by the mocked tempfile
    self.assertEqual(self.mock_tempfile.count, 2)
def test_artifact_verify_extra_artifacts_with_build_dir(self):
    """An extra artifact is loaded first and still exercised by its test."""
    # this one is provided only as convenience; this may be useful
    # for builders that construct a partial artifacts but using a
    # test rule that doesn't provide some requirements, or for
    # testing whether inclusion of that other artifact will cause
    # interference with the expected functionality of the artifact
    # to be tested with.
    extra_js = join(mkdtemp(self), 'extra.js')
    extra_test = join(mkdtemp(self), 'test_extra.js')
    with open(extra_js, 'w') as fd:
        fd.write('var extra = {value: "artifact"};')
    with open(extra_test, 'w') as fd:
        fd.write(dedent("""
            'use strict';
            describe('emulated extra test', function() {
                it('extra artifact provided', function() {
                    expect(window.extra.value).to.equal("artifact");
                });
            });
        """.strip()))
    build_dir = mkdtemp(self)
    stub_stdouts(self)
    rt = self.setup_karma_artifact_runtime()
    # remove the fail test.
    reg = root_registry.get('calmjs.dev.module.tests')
    reg.records['calmjs.dev.tests'].pop('calmjs/dev/tests/test_fail', '')
    # inject our extra test to ensure the artifact that got added
    # still gets tested.
    reg.records['calmjs.dev.tests'][
        'calmjs/dev/tests/test_extra'] = extra_test
    self.assertTrue(rt([
        '-vv', '--artifact', extra_js, '--build-dir', build_dir,
        '-u', 'calmjs.dev', 'calmjs.dev',
    ]))
    stderr = sys.stderr.getvalue()
    self.assertIn("specified artifact '%s' found" % extra_js, stderr)
    self.assertIn("artifact.js' found", stderr)
    with codecs.open(
            join(build_dir, 'karma.conf.js'), encoding='utf8') as fd:
        rawconf = es5(fd.read())
    # manually and lazily extract the configuration portion
    config = json.loads(str(
        rawconf.children()[0].expr.right.elements[0].expr.args.items[0]))
    # the extra specified artifact must be before the rest.
    self.assertEqual(config['files'][0], extra_js)
def test_mkdtemp_missing_addcleanup(self):
    """mkdtemp rejects test cases lacking a usable addCleanup."""
    # Quick and dirty subclass for type signature and cleanup
    # availability sanity checks: addCleanup is masked with None.
    class FakeTestCase(unittest.TestCase):
        runTest = None
        addCleanup = None

    with self.assertRaises(TypeError):
        mkdtemp(FakeTestCase())
    # no temp dir may have been created on this failure path
    self.assertEqual(self.mock_tempfile.count, 0)
def test_mkdtemp_missing_addcleanup(self):
    """mkdtemp raises TypeError for test cases without a real addCleanup."""
    # a throwaway subclass whose addCleanup is shadowed by None, so the
    # sanity check inside mkdtemp must trip
    class FakeTestCase(unittest.TestCase):
        runTest = None
        addCleanup = None

    with self.assertRaises(TypeError):
        mkdtemp(FakeTestCase())
    self.assertEqual(self.mock_tempfile.count, 0)
def test_which_with_node_modules(self): driver = base.BaseDriver() # ensure that NODE_PATH is initially None driver.node_path = None driver.working_dir = mkdtemp(self) # initially should be empty, since no node_modules in either # directories that it should check with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) # should not generate extra log messages. self.assertNotIn('will attempt', s.getvalue()) # having the NODE_PATH defined will result in such p1 = mkdtemp(self) p2 = mkdtemp(self) driver.node_path = pathsep.join([p1, p2]) with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) # should not generate extra log messages, binary still not # assigned. self.assertNotIn('will attempt', s.getvalue()) driver.binary = 'dummy' with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) # now the log should show what attempted. log = s.getvalue() self.assertIn( "'BaseDriver' instance will attempt to locate 'dummy' binary from " "its NODE_PATH of", log) self.assertIn(p1, log) self.assertIn(p2, log) self.assertIn("'BaseDriver' instance located 2 possible paths", log) # try again with working directory driver.node_path = None dwd_wd_nm = join(driver.working_dir, 'node_modules') os.mkdir(dwd_wd_nm) with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) log = s.getvalue() # now the log should show what attempted. self.assertIn( "'BaseDriver' instance will attempt to locate 'dummy' binary from", log, ) self.assertIn(dwd_wd_nm, log) self.assertIn("located through the working directory", log) self.assertIn("'BaseDriver' instance located 1 possible paths", log)
def test_which_with_node_modules(self): driver = base.BaseDriver() # ensure that NODE_PATH is initially None driver.node_path = None driver.working_dir = mkdtemp(self) # initially should be empty, since no node_modules in either # directories that it should check with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) # should not generate extra log messages. self.assertNotIn('will attempt', s.getvalue()) # having the NODE_PATH defined will result in such p1 = mkdtemp(self) p2 = mkdtemp(self) driver.node_path = pathsep.join([p1, p2]) with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) # should not generate extra log messages, binary still not # assigned. self.assertNotIn('will attempt', s.getvalue()) driver.binary = 'dummy' with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) # now the log should show what attempted. log = s.getvalue() self.assertIn( "'BaseDriver' instance will attempt to locate 'dummy' binary from " "its NODE_PATH of", log) self.assertIn(p1, log) self.assertIn(p2, log) self.assertIn("'BaseDriver' instance located 2 possible paths", log) # try again with working directory driver.node_path = None dwd_wd_nm = join(driver.working_dir, 'node_modules') os.mkdir(dwd_wd_nm) with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) log = s.getvalue() # now the log should show what attempted. self.assertIn( "'BaseDriver' instance will attempt to locate 'dummy' binary from", log, ) self.assertIn(dwd_wd_nm, log) self.assertIn("located through the working directory", log) self.assertIn("'BaseDriver' instance located 1 possible paths", log)
def test_prepare_rjs_plugin_key(self):
    """prepare() flattens plugin sourcepaths and warns on unknown plugins."""
    tmpdir = utils.mkdtemp(self)
    working_dir = utils.mkdtemp(self)
    rjs = toolchain.RJSToolchain()
    with open(join(tmpdir, 'r.js'), 'w'):
        # mock a r.js file.
        pass
    spec = Spec(
        # this is not written
        export_target=join(tmpdir, 'bundle.js'),
        build_dir=tmpdir,
        bundle_sourcepath={},
        transpiled_modpaths={},
        bundled_modpaths={},
        export_module_names=[],
        working_dir=working_dir,
    )
    spec[rjs.rjs_bin_key] = join(tmpdir, 'r.js')
    spec[LOADERPLUGIN_SOURCEPATH_MAPS] = {
        'text': {
            'text!namespace/module/path.txt': '/namespace/module/path.txt',
        },
        'unsupported/unknown_plugin': {
            'also this is an invalid value': '/some/path',
        },
    }
    with pretty_logging(logger='calmjs', stream=mocks.StringIO()) as s:
        rjs.prepare(spec)
    # only the supported 'text' plugin entries survive flattening
    self.assertEqual(spec['plugin_sourcepath'], {
        'text!namespace/module/path.txt': '/namespace/module/path.txt',
    })
    # due to working dir NOT having the text plugin installed from
    # npm.
    self.assertEqual(spec['bundle_sourcepath'], {})
    logs = s.getvalue()
    self.assertIn("DEBUG", logs)
    self.assertIn("found handler for 'text' loader plugin", logs)
    self.assertIn("WARNING", logs)
    self.assertIn(
        "loaderplugin handler for 'unsupported/unknown_plugin' not found "
        "in loaderplugin registry 'calmjs.rjs.loader_plugin';", logs)
    self.assertIn("also this is an invalid value", logs)
    self.assertIn(
        "could not locate 'package.json' for the npm package "
        "'requirejs-text'", logs)
def test_build_bundle_with_data(self):
    """A custom loader plugin registry handles loader-sourced data modules."""
    bundle_dir = utils.mkdtemp(self)
    build_dir = utils.mkdtemp(self)
    transpile_sourcepath = {}
    transpile_sourcepath.update(self._example_package_map)
    # include custom loader and data
    transpile_sourcepath.update(self._example_package_loader)
    bundle_sourcepath = {}
    export_target = join(bundle_dir, 'example.package.js')
    requirejs_plugins = {
        'example/package/loader': self._example_package_data
    }
    # register the example loader against the demo plugin handler
    custom_registry = LoaderPluginRegistry(
        'custom', _working_set=WorkingSet({
            'custom': [
                'example/package/loader = '
                'calmjs.rjs.testing.plugin:DemoPluginHandler'
            ]
        })
    )
    rjs = toolchain.RJSToolchain(loader_plugin_registry=custom_registry)
    spec = Spec(
        transpile_sourcepath=transpile_sourcepath,
        bundle_sourcepath=bundle_sourcepath,
        loaderplugin_sourcepath_maps=requirejs_plugins,
        export_target=export_target,
        build_dir=build_dir,
    )
    with pretty_logging(stream=StringIO()):
        # to avoid logging the issue of mismatch map to extension
        # to stderr.
        rjs(spec)
    self.assertTrue(exists(export_target))
    # verify that the bundle works with node
    stdout, stderr = run_node(
        'var requirejs = require("requirejs");\n'
        'var define = requirejs.define;\n'
        '%s\n'
        'var result = requirejs(\n'
        ' "example/package/loader!example/package/data");\n'
        'process.stdout.write("" + result.results.item_count);\n',
        export_target,
    )
    self.assertEqual(stderr, '')
    self.assertEqual(stdout, '0')
def test_karma_runtime_integration_coverage(self):
    """Coverage flags produce a coverage report through the karma runtime."""

    class DummyToolchain(NullToolchain):
        """
        Need this step to prepare some actual sources from this
        project, and we are cheating a bit due to the lack of actual
        registry setup.
        """

        def prepare(self, spec):
            # manually set up the source and the tests.
            main = resource_filename(
                'calmjs.dev', 'main.js')
            test_main = resource_filename(
                'calmjs.dev.tests', 'test_main.js')
            spec.update(dict(
                transpile_sourcepath={
                    'calmjs/dev/main': main,
                },
                test_module_paths_map={
                    'calmjs/test_main': test_main,
                },
            ))

    stub_stdouts(self)
    target = join(mkdtemp(self), 'target')
    build_dir = mkdtemp(self)
    coverage_report_dir = join(build_dir, 'coverage')
    # ensure this does not already exist
    self.assertFalse(exists(coverage_report_dir))
    stub_item_attr_value(
        self, mocks, 'dummy', ToolchainRuntime(DummyToolchain()),
    )
    make_dummy_dist(self, ((
        'entry_points.txt',
        '[calmjs.runtime]\n'
        'null = calmjs.testing.mocks:dummy\n'
    ),), 'example.package', '1.0')
    working_set = WorkingSet([self._calmjs_testing_tmpdir])
    rt = KarmaRuntime(self.driver, working_set=working_set)
    result = rt([
        '--coverage', '--cover-report-dir', coverage_report_dir,
        'null', '--export-target', target, '--build-dir', build_dir,
    ])
    # ensure coverage report created
    self.assertTrue(result['coverage_enable'])
    self.assertTrue(exists(coverage_report_dir))
def test_predefined_path(self):
    """env_path and working_dir are forwarded to the subprocess call."""
    # ensure that the various paths are passed to env or cwd.
    stub_mod_call(self, cli)
    stub_base_which(self)
    env_dir = mkdtemp(self)
    work_dir = mkdtemp(self)
    driver = cli.PackageManagerDriver(
        pkg_manager_bin='mgr', env_path=env_dir, working_dir=work_dir)
    with pretty_logging(stream=mocks.StringIO()):
        driver.pkg_manager_install()
    args, kwargs = self.call_args
    # env_path is prepended to PATH; working_dir becomes the cwd
    path_entries = kwargs['env']['PATH'].split(pathsep)
    self.assertEqual(path_entries[0], env_dir)
    self.assertEqual(kwargs['cwd'], work_dir)
def test_build_bundle_with_data(self):
    """A custom registry maps the example loader to TextPlugin for data."""
    bundle_dir = utils.mkdtemp(self)
    build_dir = utils.mkdtemp(self)
    transpile_source_map = {}
    transpile_source_map.update(self._example_package_map)
    # include custom loader and data
    transpile_source_map.update(self._example_package_loader)
    bundle_source_map = {}
    # NOTE(review): export target lacks a '.js' suffix unlike the
    # sibling tests -- confirm this is intentional.
    export_target = join(bundle_dir, 'example.package')
    requirejs_plugins = {
        'example/package/loader': self._example_package_data
    }
    custom_registry = LoaderPluginRegistry(
        'custom', _working_set=WorkingSet({
            'custom': [
                'example/package/loader = calmjs.rjs.plugin:TextPlugin']})
    )
    rjs = toolchain.RJSToolchain()
    rjs.loader_plugin_registry = custom_registry
    spec = Spec(
        transpile_source_map=transpile_source_map,
        bundle_source_map=bundle_source_map,
        requirejs_plugins=requirejs_plugins,
        export_target=export_target,
        build_dir=build_dir,
    )
    with pretty_logging(stream=StringIO()):
        # to avoid logging the issue of mismatch map to extension
        # to stderr.
        rjs(spec)
    self.assertTrue(exists(export_target))
    # verify that the bundle works with node
    stdout, stderr = run_node(
        'var requirejs = require("requirejs");\n'
        'var define = requirejs.define;\n'
        '%s\n'
        'var result = requirejs(\n'
        ' "example/package/loader!example/package/data");\n'
        'process.stdout.write("" + result.results.item_count);\n',
        export_target,
    )
    self.assertEqual(stderr, '')
    self.assertEqual(stdout, '0')
def test_update_artifact_metadata(self):
    """update_artifact_metadata writes metadata only for declaring packages."""
    # inject dummy module and add cleanup
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod
    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (
        ('requires.txt', '\n'.join([
            'calmjs',
        ])),
        ('entry_points.txt', '\n'.join([
            '[calmjs.artifacts]',
            'artifact.js = calmjs_testing_dummy:complete',
        ])),
    ), 'app', '1.0', working_dir=working_dir)
    # mock a version of calmjs within that environment too
    utils.make_dummy_dist(
        self, (('entry_points.txt', ''), ), 'calmjs', '1.0',
        working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    registry.update_artifact_metadata('app', {})
    self.assertTrue(exists(registry.metadata.get('app')))
    # calmjs declares no artifacts, so only a log entry results
    with pretty_logging(stream=mocks.StringIO()) as s:
        registry.update_artifact_metadata('calmjs', {})
    self.assertIn(
        "package 'calmjs' has not declare any artifacts", s.getvalue())
def test_relocated_distribution(self):
    """resource_filename_mod_entry_point copes with a relocated namespace."""
    root = mkdtemp(self)
    dummyns_path = join(root, 'dummyns')
    make_dummy_dist(self, ((
        'namespace_packages.txt',
        'dummyns\n',
    ), (
        'entry_points.txt',
        '[dummyns]\n'
        'dummyns = dummyns:attr\n',
    ),), 'dummyns', '1.0', working_dir=root)
    working_set = pkg_resources.WorkingSet([
        root,
        self.ds_egg_root,
    ])
    # activate this as the working set
    stub_item_attr_value(self, pkg_resources, 'working_set', working_set)
    dummyns_ep = next(working_set.iter_entry_points('dummyns'))
    with pretty_logging(stream=StringIO()) as fd:
        p = indexer.resource_filename_mod_entry_point(
            'dummyns', dummyns_ep)
        # since the actual location is not created
        self.assertIsNone(p)
    self.assertIn("does not exist", fd.getvalue())
    # retry with the module directory created at the expected location
    os.mkdir(dummyns_path)
    with pretty_logging(stream=StringIO()) as fd:
        p = indexer.resource_filename_mod_entry_point(
            'dummyns', dummyns_ep)
        self.assertEqual(normcase(p), normcase(dummyns_path))
    self.assertEqual('', fd.getvalue())
def test_pkg_manager_init_merge(self):
    """pkg_manager_init with merge+overwrite against an existing file."""
    self.setup_requirements_json()
    cwd = mkdtemp(self)
    driver = cli.PackageManagerDriver(
        pkg_manager_bin='mgr', pkgdef_filename='requirements.json',
        dep_keys=('require', ),
        working_dir=cwd,
    )
    target = join(cwd, 'requirements.json')
    with open(target, 'w') as fd:
        # NOTE(review): json.dump returns None, so ``result`` is bound
        # to None here — the assertNotEqual below is therefore trivially
        # true and does not actually inspect the merged file contents.
        # Confirm intent before tightening this assertion.
        result = json.dump({"require": {"calmpy": "1.0.0"}}, fd)
    driver.pkg_manager_init(
        'calmpy.pip', interactive=False, merge=True, overwrite=True)
    self.assertNotEqual(result, {
        "require": {
            "calmpy": "1.0.0",
            "setuptools": "25.1.6",
        },
        "name": "calmpy.pip",
    })
def fake_mgr_bin(self):
    """Build a scratch tree holding a fake package-manager executable.

    Returns a ``(root, bin_dir)`` tuple where ``bin_dir`` is the
    conventional ``node_modules/.bin`` location under ``root``.
    """
    root = mkdtemp(self)
    # fake an executable in node_modules
    executable_dir = join(root, 'node_modules', '.bin')
    os.makedirs(executable_dir)
    self.create_fake_mgr_bin(executable_dir)
    return root, executable_dir
def test_iter_builders_verify_export_target(self):
    """Builders whose export target fails verification are skipped.

    A registry subclass rejects targets containing 'invalid.js'; only
    the remaining builder should be yielded, with a rejection logged.
    """
    # inject dummy module so the declared builder entry points resolve
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod
    working_dir = utils.mkdtemp(self)
    # two declared artifacts; one will be rejected by the subclass below
    utils.make_dummy_dist(self, (('entry_points.txt', '\n'.join([
        '[calmjs.artifacts]',
        'artifact.js = calmjs_testing_dummy:complete',
        'invalid.js = calmjs_testing_dummy:complete',
    ])), ), 'app', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])

    class FakeArtifactRegistry(ArtifactRegistry):
        # reject any export target whose path mentions 'invalid.js'
        def verify_export_target(self, export_target):
            return 'invalid.js' not in export_target

    registry = FakeArtifactRegistry(
        'calmjs.artifacts', _working_set=mock_ws)
    # the invalid.js should be filtered out
    with pretty_logging(stream=mocks.StringIO()) as stream:
        self.assertEqual(1, len(list(registry.iter_builders_for('app'))))
    self.assertIn("invalid.js' has been rejected", stream.getvalue())
def test_yarn_init_merge_no_overwrite_if_semantically_identical(self): tmpdir = mkdtemp(self) # Write an initial thing with open(join(tmpdir, 'package.json'), 'w') as fd: json.dump({'dependencies': { 'jquery': '~1.11.0', 'underscore': '~1.8.0', }, 'devDependencies': { 'sinon': '~1.17.0' }, 'name': 'foo'}, fd, indent=None) os.chdir(tmpdir) self.assertTrue(yarn.yarn_init('foo', merge=True)) with open(join(tmpdir, 'package.json')) as fd: # Notes that we initial wrote a file within a line with # explicitly no indent, so this should parse everything to # show that the indented serializer did not trigger. result = json.loads(fd.readline()) # Merge results shouldn't have written self.assertEqual(result, { 'dependencies': { 'jquery': '~1.11.0', 'underscore': '~1.8.0', }, 'devDependencies': { 'sinon': '~1.17.0' }, 'name': 'foo', })
def test_init_overwrite(self):
    """--init --overwrite replaces an existing package.json outright."""
    tmpdir = mkdtemp(self)
    # pre-existing file with empty dependency maps
    with open(os.path.join(tmpdir, 'package.json'), 'w') as fd:
        json.dump({'dependencies': {}, 'devDependencies': {}}, fd)
    os.chdir(tmpdir)
    # drive through the distutils/setuptools command machinery
    dist = Distribution(dict(
        script_name='setup.py',
        script_args=['yarn', '--init', '--overwrite'],
        name='foo',
    ))
    dist.parse_command_line()
    dist.run_commands()
    with open(os.path.join(tmpdir, 'package.json')) as fd:
        result = json.load(fd)
    # gets overwritten anyway.
    self.assertEqual(result, {
        'dependencies': {'jquery': '~1.11.0'},
        'devDependencies': {},
        'name': 'foo',
    })
    # the command reports the file it wrote on stdout
    stdout = sys.stdout.getvalue()
    self.assertIn("wrote '%s'\n" % join(tmpdir, 'package.json'), stdout)
def test_yarn_no_path(self):
    """get_yarn_version returns None and logs when yarn is unreachable."""
    scratch = mkdtemp(self)
    os.chdir(scratch)
    # empty PATH guarantees the yarn binary cannot be located
    os.environ['PATH'] = ''
    with pretty_logging(stream=StringIO()) as stderr:
        version = yarn.get_yarn_version()
        self.assertIsNone(version)
        self.assertIn("failed to execute 'yarn'", stderr.getvalue())
def test_yarn_init_write_name_merge(self):
    """Merge with user consent rewrites the file and derives the name.

    Initial file is one line (indent=0 header then flat); after the
    merge the serializer writes indented multi-line JSON, so a single
    readline no longer parses — proving a rewrite happened.
    """
    stub_stdouts(self)
    # simulate the user answering yes to the overwrite prompt
    stub_stdin(self, 'Y')
    tmpdir = mkdtemp(self)
    # Write an initial thing
    with open(join(tmpdir, 'package.json'), 'w') as fd:
        json.dump({'dependencies': {
            'jquery': '~1.8.9',
            'underscore': '~1.8.0',
        }, 'devDependencies': {
            'sinon': '~1.17.0'
        }, 'name': 'something_else'}, fd, indent=0)
    os.chdir(tmpdir)
    self.assertTrue(yarn.yarn_init('named', merge=True))
    with open(join(tmpdir, 'package.json')) as fd:
        # first line alone is no longer valid JSON: file was rewritten
        with self.assertRaises(ValueError):
            json.loads(fd.readline())
        fd.seek(0)
        result = json.load(fd)
    # Merge results should be written when user agrees.
    self.assertEqual(result, {
        'dependencies': {
            'jquery': '~3.0.0',
            'underscore': '~1.8.0',
        }, 'devDependencies': {
            'sinon': '~1.17.0'
        },
        # name derived from the package_json field.
        'name': 'named-js',
    })
def test_yarn_init_existing_interactive_merge_no(self):
    """Declining the interactive overwrite prompt leaves the file alone.

    The original is written as a single line; since the user answers
    'N', the indented serializer never runs and the one-line parse in
    the assertRaises check confirms no rewrite occurred.
    """
    stub_stdouts(self)
    # simulate the user answering no to the overwrite prompt
    stub_stdin(self, 'N')
    tmpdir = mkdtemp(self)
    # Write an initial thing
    with open(join(tmpdir, 'package.json'), 'w') as fd:
        json.dump({'dependencies': {
            'jquery': '~3.0.0',
            'underscore': '~1.8.0',
        }, 'devDependencies': {
            'sinon': '~1.17.0'
        }, 'name': 'dummy'}, fd, indent=0)
    os.chdir(tmpdir)
    self.assertFalse(yarn.yarn_init(
        'foo', merge=True, callback=prompt_overwrite_json))
    with open(join(tmpdir, 'package.json')) as fd:
        # indent=0 still emits newlines, so a single readline cannot
        # parse; this only verifies the file layout is unchanged
        with self.assertRaises(ValueError):
            json.loads(fd.readline())
        fd.seek(0)
        result = json.load(fd)
    # Should not have written anything if user said no.
    self.assertEqual(result, {
        'dependencies': {
            'jquery': '~3.0.0',
            'underscore': '~1.8.0',
        }, 'devDependencies': {
            'sinon': '~1.17.0'
        },
        'name': 'dummy',
    })
def test_yarn_init_existing_merge_interactive_yes(self):
    """Accepting the merge prompt writes the merged dependencies."""
    stub_stdouts(self)
    # simulate the user answering yes to the overwrite prompt
    stub_stdin(self, 'Y')
    tmpdir = mkdtemp(self)
    # Write an initial thing
    with open(join(tmpdir, 'package.json'), 'w') as fd:
        json.dump({'dependencies': {
            'jquery': '~3.0.0',
            'underscore': '~1.8.0',
        }, 'devDependencies': {
            'sinon': '~1.17.0'
        }, 'name': 'dummy'}, fd, indent=0)
    os.chdir(tmpdir)
    self.assertTrue(yarn.yarn_init('foo', merge=True))
    with open(join(tmpdir, 'package.json')) as fd:
        # first line alone no longer parses: the file was rewritten by
        # the indented serializer
        with self.assertRaises(ValueError):
            json.loads(fd.readline())
        fd.seek(0)
        result = json.load(fd)
    # Merge results should be written when user agrees.
    self.assertEqual(result, {
        'dependencies': {
            'jquery': '~1.11.0',
            'underscore': '~1.8.0',
        }, 'devDependencies': {
            'sinon': '~1.17.0'
        },
        'name': 'foo',
    })
def test_standard(self):
    """Entry-point resolution succeeds for a fully materialized package."""
    d_egg_root = join(mkdtemp(self), 'dummyns')
    make_dummy_dist(self, ((
        'namespace_packages.txt',
        'dummyns\n',
    ), (
        'entry_points.txt',
        '[dummyns]\n'
        'dummyns = dummyns:attr\n',
    ),), 'dummyns', '1.0', working_dir=d_egg_root)
    working_set = pkg_resources.WorkingSet([
        d_egg_root,
        self.ds_egg_root,
    ])
    # ensure the working_set is providing the distributions being
    # mocked here so that resource_filename will resolve correctly
    stub_item_attr_value(self, pkg_resources, 'working_set', working_set)
    # unlike the relocated case, actually create the module directory
    moddir = join(d_egg_root, 'dummyns')
    os.makedirs(moddir)
    # make this also a proper thing
    with open(join(moddir, '__init__.py'), 'w') as fd:
        fd.write('')
    dummyns_ep = next(working_set.iter_entry_points('dummyns'))
    p = indexer.resource_filename_mod_entry_point('dummyns', dummyns_ep)
    # finally, this should work.
    self.assertEqual(normcase(p), normcase(moddir))
def test_iter_builders_side_effect(self):
    """iter_builders_for must not create the artifact dir eagerly.

    The calmjs_artifacts directory should only appear once the yielded
    toolchain is actually executed with its spec.
    """
    # inject dummy module and add cleanup
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod
    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (('entry_points.txt', '\n'.join([
        '[calmjs.artifacts]',
        'artifact.js = calmjs_testing_dummy:complete',
    ])), ), 'app', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    registry.update_artifact_metadata('app', {})
    root = join(working_dir, 'app-1.0.egg-info', 'calmjs_artifacts')
    self.assertFalse(exists(root))
    # obtaining the builder must still not touch the filesystem
    ep, toolchain, spec = next(registry.iter_builders_for('app'))
    self.assertFalse(exists(root))
    # directory only created after the toolchain is executed
    toolchain(spec)
    self.assertTrue(exists(root))
def test_init_merge(self):
    """--init --merge without --interactive merges and writes directly."""
    # --merge without --interactive implies overwrite
    tmpdir = mkdtemp(self)
    # pre-existing file lacking jquery and the name field
    with open(os.path.join(tmpdir, 'package.json'), 'w') as fd:
        json.dump({'dependencies': {
            'underscore': '~1.8.0',
        }, 'devDependencies': {
            'sinon': '~1.17.0',
        }}, fd)
    os.chdir(tmpdir)
    dist = Distribution(dict(
        script_name='setup.py',
        script_args=['yarn', '--init', '--merge'],
        name='foo',
    ))
    dist.parse_command_line()
    dist.run_commands()
    with open(os.path.join(tmpdir, 'package.json')) as fd:
        result = json.load(fd)
    # gets overwritten as we explicitly asked
    self.assertEqual(result, {
        'dependencies': {'jquery': '~1.11.0', 'underscore': '~1.8.0'},
        'devDependencies': {'sinon': '~1.17.0'},
        'name': 'foo',
    })
def test_iter_builders_side_effect_build_issue(self):
    """A clogged build location aborts the toolchain before prepare.

    A plain file is placed where the calmjs_artifacts directory should
    be created, so the build cannot proceed; the toolchain must raise
    ToolchainAbort and never reach the after_prepare stage.
    """
    # inject dummy module so the builder entry point resolves
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod
    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (('entry_points.txt', '\n'.join([
        '[calmjs.artifacts]',
        'artifact.js = calmjs_testing_dummy:complete',
    ])), ), 'app', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    registry.update_artifact_metadata('app', {})
    root = join(working_dir, 'app-1.0.egg-info', 'calmjs_artifacts')
    # clog the build directory so build cannot happen
    # (fix: single-argument join(root) was a no-op; open root directly)
    with open(root, 'w'):
        pass
    ep, toolchain, spec = next(registry.iter_builders_for('app'))
    # sentinel advice: would be appended only if after_prepare is reached
    check = []
    spec.advise('after_prepare', check.append, True)
    with pretty_logging(stream=mocks.StringIO()) as stream:
        with self.assertRaises(ToolchainAbort):
            toolchain(spec)
    self.assertIn(
        "an advice in group 'before_prepare' triggered an abort",
        stream.getvalue())
    # should have stopped at before_prepare
    self.assertFalse(check)
def test_init_merge_interactive_default(self):
    """Interactive merge with default (empty) input changes nothing.

    The diff is still shown on stdout, but the default answer declines
    the write, so the original file must remain untouched.
    """
    tmpdir = mkdtemp(self)
    # empty stdin: the user just hits enter at the prompt
    stub_stdin(self, u'')
    with open(os.path.join(tmpdir, 'package.json'), 'w') as fd:
        json.dump({'dependencies': {
            'underscore': '~1.8.0',
        }, 'devDependencies': {
            'sinon': '~1.17.0',
        }}, fd)
    os.chdir(tmpdir)
    dist = Distribution(dict(
        script_name='setup.py',
        script_args=['yarn', '--init', '--merge', '--interactive'],
        name='foo',
    ))
    dist.parse_command_line()
    dist.run_commands()
    # the proposed addition is presented as a diff line
    stdout = sys.stdout.getvalue()
    self.assertIn('+ "jquery": "~1.11.0",', stdout)
    with open(os.path.join(tmpdir, 'package.json')) as fd:
        result = json.load(fd)
    # Nothing happened.
    self.assertEqual(result, {
        'dependencies': {'underscore': '~1.8.0'},
        'devDependencies': {'sinon': '~1.17.0'},
    })
def test_set_node_path(self):
    """NODE_PATH from the driver instance is forced into the call env."""
    stub_mod_call(self, cli)
    stub_base_which(self)
    node_path = mkdtemp(self)
    driver = cli.PackageManagerDriver(
        node_path=node_path, pkg_manager_bin='mgr')
    # ensure env is passed into the call.
    with pretty_logging(stream=mocks.StringIO()):
        driver.pkg_manager_install()
    self.assertEqual(self.call_args, ((['mgr', 'install'], ), {
        'env': finalize_env({'NODE_PATH': node_path}),
    }))
    # will be overridden by instance settings.
    with pretty_logging(stream=mocks.StringIO()):
        driver.pkg_manager_install(env={
            'PATH': '.',
            'MGR_ENV': 'dev',
            'NODE_PATH': '/tmp/somewhere/else/node_mods',
        })
    # caller-supplied NODE_PATH is replaced; other keys pass through
    self.assertEqual(self.call_args, ((['mgr', 'install'], ), {
        'env': finalize_env({
            'NODE_PATH': node_path,
            'MGR_ENV': 'dev',
            'PATH': '.'
        }),
    }))
def test_install_no_init_nodevnoprod(self):
    """--install without --init still writes package.json first."""
    # install implies init
    stub_mod_call(self, cli)
    stub_base_which(self, which_yarn)
    tmpdir = mkdtemp(self)
    os.chdir(tmpdir)
    dist = Distribution(dict(
        script_name='setup.py',
        script_args=['yarn', '--install'],
        name='foo',
    ))
    dist.parse_command_line()
    dist.run_commands()
    with open(os.path.join(tmpdir, 'package.json')) as fd:
        result = json.load(fd)
    # The cli will still automatically write to that, as install
    # implies init.
    self.assertEqual(result, {
        'dependencies': {'jquery': '~1.11.0'},
        'devDependencies': {},
        'name': 'foo',
    })
    # and the actual install invocation was made
    self.assertEqual(self.call_args[0], ([which_yarn, 'install'],))
def test_set_env_path_with_node_modules_fail(self):
    """Path setup fails cleanly when no node_modules is present."""
    stub_os_environ(self)
    scratch = mkdtemp(self)
    driver = cli.PackageManagerDriver(
        pkg_manager_bin='mgr', working_dir=scratch)
    # nothing installed under scratch, so the lookup cannot succeed
    outcome = driver._set_env_path_with_node_modules()
    self.assertFalse(outcome)
    self.assertIsNone(driver.env_path)
def test_install_no_init_has_package_json_interactive_default_input(self):
    """Interactive install with default input keeps file and skips call."""
    # empty stdin: user accepts the default (decline) at the prompt
    stub_stdin(self, u'')
    stub_mod_call(self, cli)
    tmpdir = mkdtemp(self)
    with open(os.path.join(tmpdir, 'package.json'), 'w') as fd:
        json.dump({
            'dependencies': {'jquery': '~3.0.0'},
            'devDependencies': {}
        }, fd)
    os.chdir(tmpdir)
    dist = Distribution(dict(
        script_name='setup.py',
        script_args=['yarn', '--install', '--interactive'],
        name='foo',
    ))
    dist.parse_command_line()
    dist.run_commands()
    with open(os.path.join(tmpdir, 'package.json')) as fd:
        result = json.load(fd)
    # Existing package.json will not be overwritten.
    self.assertEqual(result, {
        'dependencies': {'jquery': '~3.0.0'},
        'devDependencies': {},
    })
    # Ensure that install is NOT called.
    self.assertIsNone(self.call_args)
def test_pkg_manager_init(self):
    """Init with no working_dir on the driver writes to the process cwd."""
    # we still need a temporary directory; the difference from the
    # other init tests is that the driver instance does not carry a
    # working_dir, so output lands in the current directory instead
    self.setup_requirements_json()
    remember_cwd(self)
    cwd = mkdtemp(self)
    os.chdir(cwd)
    driver = cli.PackageManagerDriver(
        pkg_manager_bin='mgr', pkgdef_filename='requirements.json',
        dep_keys=('require', ),
    )
    driver.pkg_manager_init('calmpy.pip', interactive=False)
    # file must appear in the cwd, not some driver-held directory
    target = join(cwd, 'requirements.json')
    self.assertTrue(exists(target))
    with open(target) as fd:
        result = json.load(fd)
    self.assertEqual(result, {
        "require": {
            "setuptools": "25.1.6"
        },
        "name": "calmpy.pip",
    })
def test_karma_test_files_located(self):
    """Generated test script separates deps from actual test modules.

    Test data files go into the dependency list alongside preexported
    names, while only real test modules end up in the tests list.
    """
    karma_config = karma.build_base_config()
    karma_config['files'] = ['example/package/lib.js']
    spec = Spec(
        karma_config=karma_config,
        build_dir=mkdtemp(self),
        rjs_loader_plugin_registry=get(RJS_LOADER_PLUGIN_REGISTRY_NAME),
        export_module_names=['preexported'],
        test_module_paths_map={
            'example/package/tests/test_some_module':
                '/src/example/package/tests/test_some_module.js',
            'example/package/tests/some_test_data':
                '/src/example/package/tests/some_test_data.js',
        },
    )
    with pretty_logging(stream=StringIO()):
        karma_requirejs(spec)
    with open(spec['karma_requirejs_test_script'], encoding='utf-8') as fd:
        script = es5(fd.read())
    # this is the node for the json in the build file; the first two
    # declarations in the generated script hold the deps and tests lists
    deps = json.loads(str(script.children()[0].children()[0].initializer))
    tests = json.loads(str(script.children()[1].children()[0].initializer))
    self.assertEqual(['example/package/tests/test_some_module'], tests)
    self.assertEqual(
        ['preexported', 'example/package/tests/some_test_data'], deps)
def test_pkg_manager_init_exists_and_overwrite(self):
    """Existing file is preserved unless overwrite is requested.

    First call with overwrite=False must warn and leave the stub file
    untouched; the second call with overwrite=True replaces it.
    """
    self.setup_requirements_json()
    cwd = mkdtemp(self)
    driver = cli.PackageManagerDriver(
        pkg_manager_bin='mgr', pkgdef_filename='requirements.json',
        dep_keys=('require', ),
        working_dir=cwd,
    )
    target = join(cwd, 'requirements.json')
    with open(target, 'w') as fd:
        # fix: json.dump returns None, so the previous `result =`
        # binding was dead code; write the stub file without it
        json.dump({"require": {}}, fd)
    with pretty_logging(stream=mocks.StringIO()) as err:
        driver.pkg_manager_init(
            'calmpy.pip', interactive=False, overwrite=False)
    # refusal is logged, naming the file that was left alone
    self.assertIn('not overwriting existing ', err.getvalue())
    self.assertIn('requirements.json', err.getvalue())
    with open(target) as fd:
        result = json.load(fd)
    # still the stub contents, not the generated ones
    self.assertNotEqual(result, {"require": {"setuptools": "25.1.6"}})
    driver.pkg_manager_init(
        'calmpy.pip', interactive=False, overwrite=True)
    with open(target) as fd:
        result = json.load(fd)
    self.assertEqual(result, {
        "require": {
            "setuptools": "25.1.6"
        },
        "name": "calmpy.pip",
    })
def test_process_path_error(self):
    """A file with a syntax error is skipped and logged, others parsed."""
    build_dir = mkdtemp(self)
    source1 = join(build_dir, 'source1.js')
    source2 = join(build_dir, 'source2.js')
    source3 = join(build_dir, 'source3.js')
    with open(source1, 'w') as fd:
        fd.write(
            "define('source1/mod1', ['require','exports','module'],"
            "function (require, exports, module) {});\n"
            "define('source1/mod2', ['require','exports','module'],"
            "function (require, exports, module) {});\n"
        )
    with open(source2, 'w') as fd:
        # NOTE: the missing comma before 'function' here is deliberate;
        # this file is the intentionally-broken input under test
        fd.write(
            "define('source2/mod1', ['require','exports','module']"
            "function (require, exports, module) {});\n"
        )
    with open(source3, 'w') as fd:
        fd.write(
            "define('source3/mod1', ['require','exports','module'],"
            "function (require, exports, module) {});\n"
        )
    with pretty_logging(stream=StringIO()) as s:
        result = process_artifacts([source1, source2, source3])
    # modules from the valid files only; source2 contributes nothing
    self.assertEqual(sorted(result), [
        'source1/mod1', 'source1/mod2', 'source3/mod1'])
    # the failure is reported with the offending filename
    self.assertIn('syntax error in', s.getvalue())
    self.assertIn(source2, s.getvalue())
def test_plugin_package_chained_loaders_initial_simple(self):
    """A bare handler resolves nothing alone but chains to 'extra'.

    The plain 'simple' handler yields no sourcepaths for its own
    prefix, yet delegates the nested 'extra!' prefix to the registered
    extra plugin, which contributes its own module file.
    """
    working_dir = mkdtemp(self)
    reg, base, extra, base_dir, extra_dir = self.create_base_extra_plugins(
        working_dir)
    # register a plain handler with no sourcepath of its own
    simple = reg.records['simple'] = LoaderPluginHandler(reg, 'simple')
    toolchain = NullToolchain()
    spec = Spec(working_dir=working_dir)
    with pretty_logging(stream=StringIO()) as stream:
        self.assertEqual(
            {}, simple.generate_handler_sourcepath(toolchain, spec, {
                'simple!fun.file': 'fun.file',
            }),
        )
    # chained case: 'simple!extra!...' hands off to the extra plugin
    with pretty_logging(stream=StringIO()) as stream:
        self.assertEqual({
            'extra': join(extra_dir, 'extra.js'),
        }, simple.generate_handler_sourcepath(toolchain, spec, {
            'simple!extra!fun.file': 'fun.file',
        }),
        )
    self.assertIn("for loader plugin 'extra'", stream.getvalue())
def test_generate_bundle_sourcepaths_bad_dir(self):
    """An empty module dir yields no mapping and logs a hint."""
    empty_dir = utils.mkdtemp(self)
    with pretty_logging(stream=StringIO()) as log:
        mapping = dist.generate_bundle_sourcepaths(
            ['service'], empty_dir)
    # nothing resolvable from a directory with no modules in it
    self.assertEqual([], sorted(mapping.keys()))
    self.assertIn('fake_modules', log.getvalue())