    def test_token_handler_default(self):
        stream = setup_logger(self, logger)
        definitions = {}
        unparser = BaseUnparser(definitions)
        token_handler, layout_handlers, deferrable_handlers, prewalk_hooks = (
            unparser.setup())
        self.assertIs(token_handler, token_handler_str_default)
        self.assertIn(
            "DEBUG 'BaseUnparser' instance has no token_handler specified; "
            "default handler 'token_handler_str_default' activate",
            stream.getvalue())
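All of the snippets on this page rely on a common set of imports that the page does not show. A minimal sketch of what they appear to assume; the calmjs.parse module paths are assumptions and may not match the actual package layout:

import base64
import json
from io import StringIO
from os.path import join
from tempfile import mktemp

# Assumed calmjs.parse imports; exact module paths may differ.
from calmjs.parse import io, rules
from calmjs.parse.asttypes import Node
from calmjs.parse.ruletypes import Attr, Text
from calmjs.parse.unparsers.base import BaseUnparser

# Dispatcher, token_handler_str_default, setup_logger and logger are also
# referenced below; their import locations are not shown on this page.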
Example #2
    def test_write_no_sourcemap(self):
        root = mktemp()
        definitions = {
            'Node': (
                Attr(attr='left'),
                Attr(attr='op'),
                Attr(attr='right'),
            )
        }

        # the program node; attributes are assigned to mimic a real one
        program = Node()
        program.left, program.op, program.right = ('foo', '=', 'true')
        program.sourcepath = join(root, 'original.js')
        program._token_map = {
            'foo': [(0, 1, 1)],
            '=': [(4, 1, 5)],
            'true': [(6, 1, 7)],
        }

        output_stream = StringIO()
        output_stream.name = join(root, 'processed.js')

        unparser = BaseUnparser(definitions)
        io.write(unparser, program, output_stream)
        self.assertEqual('foo=true', output_stream.getvalue())
Example #3
    def test_write_wrong_type(self):
        stream = StringIO()
        unparser = BaseUnparser({})
        with self.assertRaises(TypeError):
            io.write(unparser, [], stream)

        with self.assertRaises(TypeError):
            io.write(unparser, '', stream)
Example #4
def convert_dynamic_require_unparser(indent_str='    '):
    """
    The dynamic require unparser.
    """

    return BaseUnparser(
        definitions=definitions,
        rules=(rules.indent(indent_str=indent_str), ),
        prewalk_hooks=(convert_dynamic_require_hook, ),
    )
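The definitions mapping and convert_dynamic_require_hook referenced by the function above are module-level names that this snippet does not include. A hypothetical sketch of the shapes they would need, based on the prewalk hook signature used in the later examples:

# Hypothetical stand-ins; the real module supplies its own versions.
definitions = {'Node': ()}  # node type name -> tuple of rule descriptions

def convert_dynamic_require_hook(dispatcher, node):
    # prewalk hooks receive the dispatcher and the root node, and must
    # return the (possibly transformed) node.
    return node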
Example #5
    def test_write_multiple(self):
        root = mktemp()
        definitions = {
            'Node': (
                Attr(attr='left'),
                Text(value=' '),
                Attr(attr='op'),
                Text(value=' '),
                Attr(attr='right'),
                Text(value=';'),
            )
        }

        # the program node; attributes are assigned to mimic a real one
        program1 = Node()
        program1.left, program1.op, program1.right = ('foo', '=', 'true')
        program1.sourcepath = join(root, 'program1.js')
        program1._token_map = {
            'foo': [(0, 1, 1)],
            '=': [(4, 1, 5)],
            'true': [(6, 1, 7)],
        }
        program2 = Node()
        program2.left, program2.op, program2.right = ('bar', '=', 'false')
        program2.sourcepath = join(root, 'program2.js')
        program2._token_map = {
            'bar': [(0, 1, 1)],
            '=': [(4, 1, 5)],
            'false': [(6, 1, 7)],
        }

        # streams
        output_stream = StringIO()
        output_stream.name = join(root, 'packed.js')
        sourcemap_stream = StringIO()
        sourcemap_stream.name = join(root, 'packed.js.map')

        unparser = BaseUnparser(definitions)
        io.write(unparser, [program1, program2],
                 output_stream,
                 sourcemap_stream,
                 source_mapping_url=None)

        self.assertEqual('foo = true;bar = false;', output_stream.getvalue())

        sourcemap = json.loads(sourcemap_stream.getvalue())
        self.assertEqual(
            {
                "version": 3,
                "sources": ["program1.js", "program2.js"],
                "names": [],
                "mappings": "AAAA,WCAA",
                "file": "packed.js"
            }, sourcemap)
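The expected 'mappings' value is a base64 VLQ string: 'AAAA' maps the start of the output to program1.js, and 'WCAA' opens a second segment 11 columns later (the length of 'foo = true;') that switches to the next source, program2.js. A small stand-alone sketch, not part of the test suite, that decodes such segments:

B64 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'

def decode_vlq(segment):
    # decode one comma-separated sourcemap segment into a list of integers
    values, shift, current = [], 0, 0
    for char in segment:
        digit = B64.index(char)
        current += (digit & 31) << shift
        if digit & 32:
            # continuation bit set; keep accumulating five-bit groups
            shift += 5
        else:
            # terminal digit; bit 0 of the accumulated value is the sign
            values.append((-1 if current & 1 else 1) * (current >> 1))
            shift, current = 0, 0
    return values

print(decode_vlq('AAAA'))  # [0, 0, 0, 0]
print(decode_vlq('WCAA'))  # [11, 1, 0, 0]: column +11, next source file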
Example #6
    def test_write_same_stream_callable(self):
        # streams
        root = mktemp()
        output_stream = StringIO()
        output_stream.name = join(root, 'packed.js')
        called = []
        closed = []

        def close():
            closed.append(True)

        output_stream.close = close

        def f_output_stream():
            called.append(True)
            return output_stream

        definitions = {
            'Node': (
                Attr(attr='text'),
                Text(value=';'),
            )
        }

        # the program node; attributes are assigned to mimic a real one
        program = Node()
        program.text = 'hello'
        program.sourcepath = join(root, 'program.js')
        program._token_map = {'hello': [(0, 1, 1)]}

        unparser = BaseUnparser(definitions)
        io.write(unparser, [program], f_output_stream, f_output_stream)

        self.assertEqual(1, len(called))
        self.assertEqual(1, len(closed))
        output = output_stream.getvalue()
        self.assertIn('hello', output)
        self.assertNotIn('program.js', output)
        # since output stream is a StringIO, default to utf8 encoding
        self.assertIn('data:application/json;base64;charset=utf8', output)
        # decode the base64 string
        self.assertEqual(
            {
                "version": 3,
                "sources": ["program.js"],
                "names": [],
                "mappings": "AAAA",
                "file": "packed.js"
            },
            json.loads(
                base64.b64decode(output.splitlines()[-1].split(',')[-1].encode(
                    'utf8')).decode('utf8')))
Example #7
    def test_prewalk_hooking(self):
        results = {}

        def prewalk_dummy(dispatcher, node):
            results.update({'dispatcher': dispatcher, 'node': node})
            return node

        definitions = {'Node': ()}
        unparser = BaseUnparser(definitions, prewalk_hooks=[prewalk_dummy])
        self.assertEqual(results, {})
        # invoke complete run to trigger prewalk hook.
        root = Node()
        self.assertEqual([], list(unparser(root)))
        self.assertTrue(isinstance(results['dispatcher'], Dispatcher))
        self.assertIs(results['node'], root)
Example #8
    def test_called_prewalk_multicall(self):
        prewalk = []

        def rule():
            prewalk.append(True)
            return {}

        root = Node()
        definitions = {'Node': ()}
        unparser = BaseUnparser(definitions, rules=(rule, ))
        # invoke complete run to trigger prewalk hook.
        self.assertEqual(len(prewalk), 0)
        self.assertEqual([], list(unparser(root)))
        self.assertEqual(len(prewalk), 1)
        self.assertEqual([], list(unparser(root)))
        self.assertEqual(len(prewalk), 2)
Example #9
    def test_called_prewalk_via_rules(self):
        results = {}

        def prewalk_dummy(dispatcher, node):
            results.update({'dispatcher': dispatcher, 'node': node})
            return node

        def rule():
            return {'prewalk_hooks': (prewalk_dummy, )}

        definitions = {'Node': ()}
        unparser = BaseUnparser(definitions, rules=(rule, ))
        # invoke complete run to trigger prewalk hook.
        root = Node()
        self.assertEqual([], list(unparser(root)))
        self.assertTrue(isinstance(results['dispatcher'], Dispatcher))
        self.assertIs(results['node'], root)
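Rule callables like the one above are plain zero-argument functions returning a dict of setup overrides; a later example on this page overrides token_handler through the same mechanism. A hypothetical rule combining both kinds of override (all names here are illustrative only):

def combined_rule():
    def rule_token_handler(*args, **kw):
        "illustrative token handler"

    def rule_prewalk_hook(dispatcher, node):
        return node

    return {
        'token_handler': rule_token_handler,
        'prewalk_hooks': (rule_prewalk_hook, ),
    }

unparser = BaseUnparser({'Node': ()}, rules=(combined_rule, ))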
Example #10
    def test_write_sourcemap_omitted(self):
        root = mktemp()
        definitions = {
            'Node': (
                Attr(attr='left'),
                Attr(attr='op'),
                Attr(attr='right'),
            )
        }

        # the program node; attributes are assigned to mimic a real one
        program = Node()
        program.left, program.op, program.right = ('foo', '=', 'true')
        program.sourcepath = join(root, 'original.js')
        program._token_map = {
            'foo': [(0, 1, 1)],
            '=': [(4, 1, 5)],
            'true': [(6, 1, 7)],
        }

        # streams
        output_stream = StringIO()
        output_stream.name = join(root, 'processed.js')
        sourcemap_stream = StringIO()
        sourcemap_stream.name = join(root, 'processed.js.map')

        unparser = BaseUnparser(definitions)
        io.write(unparser,
                 program,
                 output_stream,
                 sourcemap_stream,
                 source_mapping_url=None)

        sourcemap = json.loads(sourcemap_stream.getvalue())
        self.assertEqual(
            {
                "version": 3,
                "sources": ["original.js"],
                "names": [],
                "mappings": "AAAA,GAAI,CAAE",
                "file": "processed.js"
            }, sourcemap)
        self.assertEqual('foo=true', output_stream.getvalue())
Example #11
    def test_write_error_handled_callable_closed(self):
        # streams
        root = mktemp()
        output_stream = StringIO()
        output_stream.name = join(root, 'packed.js')
        closed = []

        def close():
            closed.append(True)

        output_stream.close = close

        def f_output_stream():
            return output_stream

        def f_error():
            raise IOError('some error happened')

        definitions = {
            'Node': (
                Attr(attr='text'),
                Text(value=';'),
            )
        }

        # the program node; attributes are assigned to mimic a real one
        program = Node()
        program.text = 'hello'
        program.sourcepath = join(root, 'program.js')
        program._token_map = {'hello': [(0, 1, 1)]}

        unparser = BaseUnparser(definitions)
        with self.assertRaises(IOError):
            io.write(unparser, [program], f_output_stream, f_error)

        self.assertEqual(1, len(closed))
        self.assertEqual('hello;', output_stream.getvalue())
        self.assertNotIn('program.js', output_stream.getvalue())
Example #12
    def test_token_handler_setup_with_rules(self):
        def rule1():
            def handler1():
                "handler1"

            return {'token_handler': handler1}

        def rule2():
            def handler2():
                "handler2"

            return {'token_handler': handler2}

        def custom_handler():
            "custom handler"

        stream = setup_logger(self, logger)
        definitions = {}
        unparser = BaseUnparser(definitions, rules=(rule1, rule2))
        token_handler, layout_handlers, deferrable_handlers, prewalk_hooks = (
            unparser.setup())
        self.assertEqual(token_handler.__name__, 'handler2')

        self.assertIn(
            "DEBUG rule 'rule1' specified a token_handler 'handler1'",
            stream.getvalue())
        self.assertIn(
            "WARNING rule 'rule2' specified a new token_handler 'handler2', "
            "overriding previously assigned token_handler 'handler1'",
            stream.getvalue())

        unparser = BaseUnparser(definitions,
                                token_handler=custom_handler,
                                rules=(rule1, rule2))
        token_handler, layout_handlers, deferrable_handlers, prewalk_hooks = (
            unparser.setup())
        self.assertIs(token_handler, custom_handler)
        self.assertIn(
            "INFO manually specified token_handler 'custom_handler' to the "
            "'BaseUnparser' instance will override rule derived token_handler "
            "'handler2'", stream.getvalue())
Example #13
    def test_minimum_definition(self):
        definitions = {'Node': ()}
        unparser = BaseUnparser(definitions)
        self.assertEqual([], list(unparser(Node())))
Example #14
    def test_prewalk_fail(self):
        definitions = {}
        unparser = BaseUnparser(definitions)
        # can't lookup an empty definition
        with self.assertRaises(KeyError):
            self.assertEqual([], list(unparser(Node())))
Example #15
    def test_write_callables(self):
        closed = []

        class Stream(StringIO):
            # don't actually close the stream so it can be read later by
            # the tests
            def close(self):
                closed.append(self)

        # streams
        root = mktemp()
        output_stream = Stream()
        output_stream.name = join(root, 'packed.js')
        sourcemap_stream = Stream()
        sourcemap_stream.name = join(root, 'packed.js.map')

        def f_output_stream():
            return output_stream

        def f_sourcemap_stream():
            return sourcemap_stream

        definitions = {
            'Node': (
                Attr(attr='left'),
                Text(value=' '),
                Attr(attr='op'),
                Text(value=' '),
                Attr(attr='right'),
                Text(value=';'),
            )
        }

        # the program node; attributes are assigned to mimic a real one
        program1 = Node()
        program1.left, program1.op, program1.right = ('foo', '=', 'true')
        program1.sourcepath = join(root, 'program1.js')
        program1._token_map = {
            'foo': [(0, 1, 1)],
            '=': [(4, 1, 5)],
            'true': [(6, 1, 7)],
        }
        program2 = Node()
        program2.left, program2.op, program2.right = ('bar', '=', 'false')
        program2.sourcepath = join(root, 'program2.js')
        program2._token_map = {
            'bar': [(0, 1, 1)],
            '=': [(4, 1, 5)],
            'false': [(6, 1, 7)],
        }

        unparser = BaseUnparser(definitions)
        io.write(unparser, [program1, program2],
                 f_output_stream,
                 f_sourcemap_stream,
                 source_mapping_url=None)

        self.assertIn(output_stream, closed)
        self.assertIn(sourcemap_stream, closed)
        self.assertEqual('foo = true;bar = false;', output_stream.getvalue())

        sourcemap = json.loads(sourcemap_stream.getvalue())
        self.assertEqual(
            {
                "version": 3,
                "sources": ["program1.js", "program2.js"],
                "names": [],
                "mappings": "AAAA,WCAA",
                "file": "packed.js"
            }, sourcemap)