    def test_plugin_package_chained_loaders_initial_simple(self):
        working_dir = mkdtemp(self)
        reg, base, extra, base_dir, extra_dir = self.create_base_extra_plugins(
            working_dir)
        simple = reg.records['simple'] = LoaderPluginHandler(reg, 'simple')

        toolchain = NullToolchain()
        spec = Spec(working_dir=working_dir)

        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                {},
                simple.generate_handler_sourcepath(toolchain, spec, {
                    'simple!fun.file': 'fun.file',
                }),
            )

        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual({
                'extra': join(extra_dir, 'extra.js'),
            }, simple.generate_handler_sourcepath(toolchain, spec, {
                    'simple!extra!fun.file': 'fun.file',
                }),
            )
        self.assertIn("for loader plugin 'extra'", stream.getvalue())
    def test_plugin_loaders_modname_source_to_target(self):
        class InterceptHandler(LoaderPluginHandler):
            def modname_source_to_target(self, *a, **kw):
                # deliberately do not call the parent implementation
                return 'intercepted'

        reg = LoaderPluginRegistry('simloaders', _working_set=WorkingSet({}))
        base = reg.records['base'] = LoaderPluginHandler(reg, 'base')
        extra = reg.records['extra'] = LoaderPluginHandler(reg, 'extra')
        reg.records['intercept'] = InterceptHandler(reg, 'intercept')
        toolchain = NullToolchain()
        spec = Spec()
        self.assertEqual('fun.file', base.modname_source_to_target(
            toolchain, spec, 'base!fun.file', '/some/path/fun.file'))
        self.assertEqual('fun.file', extra.modname_source_to_target(
            toolchain, spec, 'extra!fun.file', '/some/path/fun.file'))
        self.assertEqual('fun.file', base.modname_source_to_target(
            toolchain, spec, 'extra!base!fun.file', '/some/path/fun.file'))
        # no plugin was found, so no modification
        self.assertEqual('noplugin!fun.file', base.modname_source_to_target(
            toolchain, spec, 'extra!noplugin!fun.file', '/some/path/fun.file'))
        # chained plugins of the same type
        self.assertEqual('fun.file', base.modname_source_to_target(
            toolchain, spec, 'base!base!base!fun.file',
            '/some/path/fun.file'))
        # chained, with the overriding handler in the middle
        self.assertEqual('intercepted', base.modname_source_to_target(
            toolchain, spec, 'base!intercept!base!fun.file',
            '/some/path/fun.file'))
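
Note: several of the tests on this page call a create_base_extra_plugins helper that is not reproduced here. The following is a hedged sketch of roughly what such a helper might do, inferred from the names the tests assert against ('simloaders', base.js, extra.js); the import locations and dummy npm package names are assumptions, not the actual calmjs test code.

# Hedged sketch only: an assumed stand-in for the create_base_extra_plugins
# helper used by the chained-loader tests; names below are illustrative.
from os import makedirs
from os.path import join

from calmjs.loaderplugin import LoaderPluginRegistry, NPMLoaderPluginHandler
from calmjs.testing.mocks import WorkingSet  # assumed location of the mock


def create_base_extra_plugins(self, working_dir):
    # intended to live on the test case (it is called as a method above)
    reg = LoaderPluginRegistry('simloaders', _working_set=WorkingSet({}))
    base = reg.records['base'] = NPMLoaderPluginHandler(reg, 'base')
    base.node_module_pkg_name = 'dummy_base_pkg'    # illustrative package
    extra = reg.records['extra'] = NPMLoaderPluginHandler(reg, 'extra')
    extra.node_module_pkg_name = 'dummy_extra_pkg'  # illustrative package
    base_dir = join(working_dir, 'node_modules', 'dummy_base_pkg')
    extra_dir = join(working_dir, 'node_modules', 'dummy_extra_pkg')
    # each dummy npm package declares a main script so that
    # generate_handler_sourcepath can resolve base.js and extra.js
    for pkg_dir, main in ((base_dir, 'base.js'), (extra_dir, 'extra.js')):
        makedirs(pkg_dir)
        with open(join(pkg_dir, 'package.json'), 'w') as fd:
            fd.write('{"main": "%s"}' % main)
    return reg, base, extra, base_dir, extra_dir
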
Example #3
    def test_extract_builder_result(self):
        self.assertEqual(
            2, len(extract_builder_result((
                NullToolchain(),
                Spec(),
            ))))
        self.assertEqual((None, None),
                         extract_builder_result((
                             Spec(),
                             NullToolchain(),
                         )))
        self.assertEqual((None, None),
                         extract_builder_result((
                             NullToolchain(),
                             None,
                         )))
        self.assertEqual((None, None), extract_builder_result(None))

    def test_base_plugin_generate_handler_sourcepath(self):
        base = BaseLoaderPluginHandler(None, 'base')
        toolchain = NullToolchain()
        spec = Spec(working_dir=mkdtemp(self))
        self.assertEqual(
            base.generate_handler_sourcepath(toolchain, spec, {
                'base!bad': 'base!bad',
            }), {})

    def test_plugin_generate_handler_sourcepath_default_registry(self):
        base = LoaderPluginHandler(None, 'base')
        toolchain = NullToolchain()
        spec = Spec(working_dir=mkdtemp(self))
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                base.generate_handler_sourcepath(toolchain, spec, {
                    'base!bad': 'base!bad',
                }), {})
        self.assertIn("using default loaderplugin registry", stream.getvalue())

    def test_initialize_standard(self):
        # ensure that we have a proper working registry
        working_set = WorkingSet({'calmjs.loader_plugin': [
            'example = calmjs.loaderplugin:LoaderPluginHandler',
        ]})
        registry = LoaderPluginRegistry(
            'calmjs.loader_plugin', _working_set=working_set)
        plugin = registry.get('example')
        self.assertTrue(isinstance(plugin, LoaderPluginHandler))
        self.assertEqual({}, plugin.generate_handler_sourcepath(
            NullToolchain(), Spec(), {}))
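
# The WorkingSet mock above stands in for what a real package would declare
# through setuptools entry points.  A hedged sketch of such a declaration
# follows: the distribution name is illustrative, while the entry point
# string is taken directly from the test above.
from setuptools import setup

setup(
    name='example.package',
    entry_points={
        'calmjs.loader_plugin': [
            'example = calmjs.loaderplugin:LoaderPluginHandler',
        ],
    },
)
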
    def test_plugin_package_base(self):
        base = NPMLoaderPluginHandler(None, 'base')
        toolchain = NullToolchain()
        spec = Spec(working_dir=mkdtemp(self))
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                base.generate_handler_sourcepath(toolchain, spec, {}), {})
        self.assertIn(
            "no npm package name specified or could be resolved for "
            "loaderplugin 'base' of registry '<invalid_registry/handler>'; "
            "please subclass calmjs.loaderplugin:NPMLoaderPluginHandler such "
            "that the npm package name become specified", stream.getvalue(),
        )
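
The log message asserted above points at the usual remedy: supply the npm package name on a subclass. A minimal sketch, assuming a hypothetical text-loader npm package, might look like this (not part of the original test module):

# Hedged sketch only; 'text-loader' is an illustrative npm package name.
from calmjs.loaderplugin import NPMLoaderPluginHandler


class TextLoaderHandler(NPMLoaderPluginHandler):
    # the npm package that ships this loader plugin's JavaScript sources
    node_module_pkg_name = 'text-loader'
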
Example #8
    def test_trace_toolchain(self):
        version = find_pkg_dist('calmjs').version
        results = trace_toolchain(NullToolchain())
        self.assertEqual(results, [{
            'calmjs.toolchain:NullToolchain': {
                'project_name': 'calmjs',
                'version': version,
            },
        }, {
            'calmjs.toolchain:Toolchain': {
                'project_name': 'calmjs',
                'version': version,
            }
        }])

    def test_plugin_generate_handler_sourcepath_resolved_registry(self):
        base = LoaderPluginHandler(None, 'base')
        reg = LoaderPluginRegistry('loaders', _working_set=WorkingSet({}))
        toolchain = NullToolchain()
        spec = Spec(
            working_dir=mkdtemp(self), calmjs_loaderplugin_registry=reg)
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                base.generate_handler_sourcepath(toolchain, spec, {
                    'base!bad': 'base!bad',
                }), {})
        self.assertIn(
            "loaderplugin registry 'loaders' already assigned to spec",
            stream.getvalue())

    def test_plugin_loaders_modname_source_to_target_identity(self):
        # manually create a registry
        reg = LoaderPluginRegistry('simloaders', _working_set=WorkingSet({}))
        base = reg.records['local/dev'] = LoaderPluginHandler(reg, 'local/dev')
        toolchain = NullToolchain()
        spec = Spec()

        self.assertEqual('fun.file', base.modname_source_to_target(
            toolchain, spec, 'local/dev!fun.file',
            '/some/path/fun.file'))
        # redundant usage: a bare plugin name passes through unchanged
        self.assertEqual('local/dev', base.modname_source_to_target(
            toolchain, spec, 'local/dev',
            '/some/path/to/the/plugin'))

    def test_plugin_package_missing_dir(self):
        base = NPMLoaderPluginHandler(None, 'base')
        base.node_module_pkg_name = 'dummy_pkg'
        toolchain = NullToolchain()
        spec = Spec(working_dir=mkdtemp(self))
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                base.generate_handler_sourcepath(toolchain, spec, {}), {})
        self.assertIn(
            "could not locate 'package.json' for the npm package 'dummy_pkg' "
            "which was specified to contain the loader plugin 'base' in the "
            "current working directory '%s'" % spec['working_dir'],
            stream.getvalue(),
        )

    def test_plugin_package_success_main(self):
        base = NPMLoaderPluginHandler(None, 'base')
        base.node_module_pkg_name = 'dummy_pkg'
        toolchain = NullToolchain()
        spec = Spec(working_dir=mkdtemp(self))
        pkg_dir = join(spec['working_dir'], 'node_modules', 'dummy_pkg')
        makedirs(pkg_dir)
        with open(join(pkg_dir, 'package.json'), 'w') as fd:
            fd.write('{"main": "base.js"}')

        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                join(pkg_dir, 'base.js'),
                base.generate_handler_sourcepath(toolchain, spec, {})['base'],
            )
        self.assertIn("for loader plugin 'base'", stream.getvalue())

    def test_plugin_loaders_modname_source_to_target(self):
        working_dir = mkdtemp(self)
        reg, base, extra, base_dir, extra_dir = self.create_base_extra_plugins(
            working_dir)
        toolchain = NullToolchain()
        spec = Spec(working_dir=working_dir)
        self.assertEqual('fun.file', base.modname_source_to_target(
            toolchain, spec, 'base!fun.file', '/some/path/fun.file'))
        self.assertEqual('fun.file', base.modname_source_to_target(
            toolchain, spec, 'extra!base!fun.file', '/some/path/fun.file'))
        # no plugin was found, so no modification
        self.assertEqual('noplugin!fun.file', base.modname_source_to_target(
            toolchain, spec, 'extra!noplugin!fun.file', '/some/path/fun.file'))
        # chained plugins of the same type
        self.assertEqual('fun.file', base.modname_source_to_target(
            toolchain, spec, 'base!base!base!fun.file',
            '/some/path/fun.file'))

    def test_plugin_package_strip_broken_recursion_stop(self):
        class BadPluginHandler(LoaderPluginHandler):
            def unwrap(self, value):
                # return the identity
                return value

        base = BadPluginHandler(None, 'base')
        toolchain = NullToolchain()
        spec = Spec(working_dir=mkdtemp(self))
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                base.generate_handler_sourcepath(toolchain, spec, {
                    'base!bad': 'base!bad',
                }), {})

        self.assertIn(
            "loaderplugin 'base' extracted same sourcepath of",
            stream.getvalue())
    def test_plugin_package_success_package_spec_missing_working_dir(self):
        remember_cwd(self)
        cwd = mkdtemp(self)
        chdir(cwd)

        base = NPMLoaderPluginHandler(None, 'base')
        base.node_module_pkg_name = 'dummy_pkg'
        toolchain = NullToolchain()
        spec = Spec()
        pkg_dir = join(cwd, 'node_modules', 'dummy_pkg')
        makedirs(pkg_dir)
        with open(join(pkg_dir, 'package.json'), 'w') as fd:
            fd.write('{"browser": "browser/base.js"}')

        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                join(pkg_dir, 'browser', 'base.js'),
                base.generate_handler_sourcepath(toolchain, spec, {})['base'],
            )
        self.assertIn("for loader plugin 'base'", stream.getvalue())
        self.assertIn("missing working_dir", stream.getvalue())
    def test_plugin_package_missing_main(self):
        base = NPMLoaderPluginHandler(None, 'base')
        base.node_module_pkg_name = 'dummy_pkg'
        toolchain = NullToolchain()
        spec = Spec(working_dir=mkdtemp(self))
        pkg_dir = join(spec['working_dir'], 'node_modules', 'dummy_pkg')
        makedirs(pkg_dir)
        with open(join(pkg_dir, 'package.json'), 'w') as fd:
            fd.write('{}')

        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                base.generate_handler_sourcepath(toolchain, spec, {}), {})

        self.assertIn(
            "calmjs.loaderplugin 'package.json' for the npm package "
            "'dummy_pkg' does not contain a valid entry point: sources "
            "required for loader plugin 'base' cannot be included "
            "automatically; the build process may fail",
            stream.getvalue(),
        )
    def test_plugin_package_dynamic_selection(self):

        class CustomHandler(NPMLoaderPluginHandler):
            def find_node_module_pkg_name(self, toolchain, spec):
                return spec.get('loaderplugin')

        reg = LoaderPluginRegistry('lp.reg', _working_set=WorkingSet({}))
        base = CustomHandler(reg, 'base')
        toolchain = NullToolchain()
        spec = Spec(working_dir=mkdtemp(self))
        pkg_dir = join(spec['working_dir'], 'node_modules', 'dummy_pkg')
        makedirs(pkg_dir)
        with open(join(pkg_dir, 'package.json'), 'w') as fd:
            fd.write('{"main": "base.js"}')

        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                {}, base.generate_handler_sourcepath(toolchain, spec, {}))
        self.assertIn(
            "no npm package name specified or could be resolved for "
            "loaderplugin 'base' of registry 'lp.reg'",
            stream.getvalue()
        )
        self.assertIn(
            "test_loaderplugin:CustomHandler may be at fault",
            stream.getvalue()
        )
        self.assertNotIn("for loader plugin 'base'", stream.getvalue())

        # plug the value into the spec to satisfy the condition for this
        # particular loader

        spec['loaderplugin'] = 'dummy_pkg'
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                join(pkg_dir, 'base.js'),
                base.generate_handler_sourcepath(toolchain, spec, {})['base'],
            )
        self.assertIn("base.js' for loader plugin 'base'", stream.getvalue())
    def test_plugin_package_chained_loaders(self):
        working_dir = mkdtemp(self)
        reg, base, extra, base_dir, extra_dir = self.create_base_extra_plugins(
            working_dir)
        # standard case
        toolchain = NullToolchain()
        spec = Spec(working_dir=working_dir)
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual(
                {'base': join(base_dir, 'base.js')},
                base.generate_handler_sourcepath(toolchain, spec, {
                    'base!fun.file': 'base!fun.file',
                }),
            )
        self.assertIn("for loader plugin 'base'", stream.getvalue())

        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual({
                'base': join(base_dir, 'base.js'),
                'extra': join(extra_dir, 'extra.js'),
            }, base.generate_handler_sourcepath(toolchain, spec, {
                    'base!fun.file': 'fun.file',
                    'base!extra!fun.file': 'fun.file',
                    'base!missing!fun.file': 'fun.file',
                    'base!extra!missing!fun.file': 'fun.file',
                }),
            )
        self.assertIn("for loader plugin 'base'", stream.getvalue())
        self.assertIn("for loader plugin 'extra'", stream.getvalue())
        self.assertNotIn("for loader plugin 'missing'", stream.getvalue())

        # for the outer one
        self.assertIn(
            "loaderplugin 'base' from registry 'simloaders' cannot find "
            "sibling loaderplugin handler for 'missing'; processing may fail "
            "for the following nested/chained sources: "
            "{'missing!fun.file': 'fun.file'}", stream.getvalue()
        )
        # for the inner one
        self.assertIn(
            "loaderplugin 'extra' from registry 'simloaders' cannot find "
            "sibling loaderplugin handler for 'missing'; processing may fail "
            "for the following nested/chained sources: "
            "{'missing!fun.file': 'fun.file'}", stream.getvalue()
        )

        # for repeat loaders
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual({
                'base': join(base_dir, 'base.js'),
                'extra': join(extra_dir, 'extra.js'),
            }, base.generate_handler_sourcepath(toolchain, spec, {
                    'base!extra!base!extra!fun.file': 'fun.file',
                }),
            )
        self.assertIn("for loader plugin 'base'", stream.getvalue())
        self.assertIn("for loader plugin 'extra'", stream.getvalue())

        # for repeat loaders
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual({
                'base': join(base_dir, 'base.js'),
            }, base.generate_handler_sourcepath(toolchain, spec, {
                    'base!base!base!fun.file': 'fun.file',
                }),
            )
        self.assertIn("for loader plugin 'base'", stream.getvalue())

        # for argument loaders
        with pretty_logging(stream=StringIO()) as stream:
            self.assertEqual({
                'base': join(base_dir, 'base.js'),
                'extra': join(extra_dir, 'extra.js'),
            }, base.generate_handler_sourcepath(toolchain, spec, {
                    'base?argument!extra?argument!fun.file': 'fun.file',
                }),
            )
        self.assertIn("for loader plugin 'base'", stream.getvalue())
        self.assertIn("for loader plugin 'extra'", stream.getvalue())
Example #19
def blank_spec(package_names, export_target):
    "does not produce an artifact"
    return NullToolchain(), Spec()
Example #20
    def setUp(self):
        self.toolchain = NullToolchain()
Example #21
def malformed_builder(package_names, export_target):
    "does not produce an artifact"
    return NullToolchain()
Example #22
class NullToolchainTestCase(unittest.TestCase):
    """
    A null toolchain class test case.
    """
    def setUp(self):
        self.toolchain = NullToolchain()

    def tearDown(self):
        pass

    def test_null_transpiler(self):
        # a kind of silly test but shows concept
        tmpdir = mkdtemp(self)
        js_code = 'var dummy = function () {};\n'
        source = join(tmpdir, 'source.js')
        target = join(tmpdir, 'target.js')

        with open(source, 'w') as fd:
            fd.write(js_code)

        spec = Spec()
        self.toolchain.compile(spec, source, target)

        with open(target) as fd:
            result = fd.read()

        self.assertEqual(js_code, result)

    def test_null_toolchain_transpile_sources(self):
        source_dir = mkdtemp(self)
        build_dir = mkdtemp(self)
        source_file = join(source_dir, 'source.js')

        with open(source_file, 'w') as fd:
            fd.write('var dummy = function () {};\n')

        spec = Spec(
            build_dir=build_dir,
            transpile_source_map={
                'namespace.dummy.source': source_file,
            },
        )
        self.toolchain(spec)

        # name, and relative filename to the build_path
        self.assertEqual(
            spec, {
                'build_dir': build_dir,
                'transpile_source_map': {
                    'namespace.dummy.source': source_file,
                },
                'bundled_paths': {},
                'compiled_paths': {
                    'namespace.dummy.source': 'namespace.dummy.source',
                },
                'module_names': ['namespace.dummy.source'],
                'prepare': 'prepared',
                'assemble': 'assembled',
                'link': 'linked',
            })
        self.assertTrue(exists(join(build_dir, 'namespace.dummy.source.js')))

    def test_null_toolchain_bundled_sources(self):
        source_dir = mkdtemp(self)
        bundled_dir = mkdtemp(self)
        build_dir = mkdtemp(self)

        source_file = join(source_dir, 'source.js')

        with open(source_file, 'w') as fd:
            fd.write('var dummy = function () {};\n')

        with open(join(bundled_dir, 'bundled.js'), 'w') as fd:
            fd.write('var dummy = function () {};\n')

        spec = Spec(
            build_dir=build_dir,
            bundled_source_map={
                'bundle1': source_file,
                'bundle2': bundled_dir,
            },
        )
        self.toolchain(spec)

        # name, and relative filename to the build_path
        self.assertEqual(
            spec, {
                'build_dir': build_dir,
                'bundled_source_map': {
                    'bundle1': source_file,
                    'bundle2': bundled_dir,
                },
                'bundled_paths': {
                    'bundle1': 'bundle1',
                    'bundle2': 'bundle2',
                },
                'compiled_paths': {},
                'module_names': ['bundle1'],
                'prepare': 'prepared',
                'assemble': 'assembled',
                'link': 'linked',
            })
        self.assertTrue(exists(join(build_dir, 'bundle1.js')))
        self.assertTrue(exists(join(build_dir, 'bundle2', 'bundled.js')))

    def test_null_toolchain_transpile_js_ns_directory_sources(self):
        """
        Ensure that directory structures are copied, if needed, because
        JavaScript uses directories for namespaces, too, however the
        names are verbatim from directories and `.`s are valid module
        names which can result in some really hilarious side effects
        when combined with its completely transparent model on top of
        the filesystem (think ``..``), but that's for another time.
        """

        source_dir = mkdtemp(self)
        build_dir = mkdtemp(self)

        namespace_root = join(source_dir, 'namespace', 'dummy')
        makedirs(namespace_root)
        source_file = join(namespace_root, 'source.js')
        with open(source_file, 'w') as fd:
            fd.write('var dummy = function () {};\n')

        spec = Spec(
            build_dir=build_dir,
            transpile_source_map={
                'namespace/dummy/source': source_file,
            },
        )
        self.toolchain(spec)

        # name, and relative filename to the build_path
        self.assertEqual(
            spec, {
                'build_dir': build_dir,
                'transpile_source_map': {
                    'namespace/dummy/source': source_file,
                },
                'bundled_paths': {},
                'compiled_paths': {
                    'namespace/dummy/source': 'namespace/dummy/source',
                },
                'module_names': ['namespace/dummy/source'],
                'prepare': 'prepared',
                'assemble': 'assembled',
                'link': 'linked',
            })
        self.assertTrue(
            exists(join(build_dir, 'namespace', 'dummy', 'source.js')))
Example #23
def nothing_builder(package_names, export_target):
    "does not produce an artifact"
    return NullToolchain(), Spec(export_target=export_target)
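
The builder callables in the last few examples follow the (package_names, export_target) signature that calmjs artifact builders use, returning a toolchain and spec pair. A hedged sketch of how such a builder might be exposed through the calmjs.artifacts entry point group follows; the distribution, module, and artifact names are illustrative.

# Illustrative setup.py fragment only: exposing nothing_builder as an
# artifact builder so that calmjs's artifact build machinery can find it.
from setuptools import setup

setup(
    name='example.package',
    entry_points={
        'calmjs.artifacts': [
            'example.bundle.js = example.package.builders:nothing_builder',
        ],
    },
)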