Ejemplo n.º 1
0
    def __init__(self,
                 log=None,
                 loglevel=None,
                 raiseExceptions=None,
                 fetcher=None):
        """Configure logging and create the tokenizer used for parsing.

        :param log:
            logging object
        :param loglevel:
            logging loglevel
        :param raiseExceptions:
            if log should simply log (default) or raise errors during
            parsing. Later while working with the resulting sheets
            the setting used in cssutils.log.raiseExceptions is used
        :param fetcher:
            see ``setFetcher(fetcher)``
        """
        if log is not None:
            cssutils.log.setLog(log)
        if loglevel is not None:
            cssutils.log.setLevel(loglevel)

        # keep the global raising flag so it can be restored after parsing
        self.__globalRaising = cssutils.log.raiseExceptions
        # while parsing, default to logging only unless explicitly requested
        self.__parseRaising = raiseExceptions if raiseExceptions else False

        self.__tokenizer = tokenize2.Tokenizer()
        self.setFetcher(fetcher)
Ejemplo n.º 2
0
    def __init__(self,
                 log=None,
                 loglevel=None,
                 raiseExceptions=None,
                 fetcher=None,
                 parseComments=True):
        """Configure logging and create the tokenizer used for parsing.

        :param log:
            logging object
        :param loglevel:
            logging loglevel
        :param raiseExceptions:
            if log should simply log (default) or raise errors during
            parsing. Later while working with the resulting sheets
            the setting used in cssutils.log.raiseExceptions is used
        :param fetcher:
            see ``setFetcher(fetcher)``
        :param parseComments:
            if comments should be added to CSS DOM or simply omitted
        """
        if log is not None:
            cssutils.log.setLog(log)
        if loglevel is not None:
            cssutils.log.setLevel(loglevel)

        # keep the global raising flag so it can be restored after parsing
        self.__globalRaising = cssutils.log.raiseExceptions
        # while parsing, default to logging only unless explicitly requested
        self.__parseRaising = raiseExceptions if raiseExceptions else False

        self.__tokenizer = tokenize2.Tokenizer(doComments=parseComments)
        self.setFetcher(fetcher)
Ejemplo n.º 3
0
def repl():
    """Run an interactive read-eval-print loop until the user types ``exit()``."""
    lexer = tokenize2.Tokenizer()
    tree_compiler = virtual_machine.TreeCompiler()
    machine = virtual_machine.VirtualMachine()
    while True:
        line = input('Jen> ')
        if line.strip() == 'exit()':
            break
        try:
            tokens = lexer.get_tokens(line)
            if len(tokens) == 1:
                # a single token is a bare numeric literal
                value = tokens[0].extract_number()
            else:
                tree = expr_tree.build_tree(tokens)
                if has_function_calls(tree):
                    # function calls cannot be executed; show diagnostics instead
                    lexer.print_tokens(tokens)
                    print(tree)
                    continue
                instructions, final_reg = tree_compiler.compile(tree)
                machine.execute(instructions)
                value = machine.get_reg(final_reg.reg_num)
            print(tokenize2.format_number(value))
        except error.JenError as e:
            print('Error: ' + e.message)
Ejemplo n.º 4
0
    def __init__(self, documentHandler=None, errorHandler=None):
        """Create the tokenizer and install the given (or default) handlers."""
        self._tokenizer = tokenize2.Tokenizer()
        # fall back to no-op default handlers when none are supplied
        self.setDocumentHandler(documentHandler or DocumentHandler())
        self.setErrorHandler(errorHandler or ErrorHandler())
Ejemplo n.º 5
0
def test_numeric_token_errors():
    """Tokenize deliberately malformed numeric literals and report each error."""
    lexer = tokenize2.Tokenizer()
    expressions = [
        '2.+3.', '.75+.25', '2eee10 * 3...5', '3...5', '2EEe-3', '5.6e', '7E',
        '1e+2e', '1e*2e', '2e.0', '10.E+5'
    ]
    print(expressions)
    for expr in expressions:
        try:
            # printing the tokens exercises the full tokenization path
            lexer.print_tokens(lexer.get_tokens(expr))
        except tokenize2.TokenizationError as e:
            print("For '{0}', caught tokenization error: {1}".format(
                expr, e.message))
Ejemplo n.º 6
0
def test_expressions(expressions):
    """Tokenize, parse, compile and evaluate each expression, printing every stage."""
    lexer = tokenize2.Tokenizer()
    tree_compiler = virtual_machine.TreeCompiler()
    machine = virtual_machine.VirtualMachine()
    print(expressions)
    for expr in expressions:
        print('--------')
        print(expr)
        tokens = lexer.get_tokens(expr)
        lexer.print_tokens(tokens)
        tree = expr_tree.build_tree(tokens)
        print(tree)
        instructions, final_reg = tree_compiler.compile(tree)
        for inst in instructions:
            print(inst)
        print(final_reg)
        machine.execute(instructions)
        # the compiler reports which register holds the final result
        value = machine.get_reg(final_reg.reg_num)
        print('{0} evaluates to {1}'.format(expr, tokenize2.format_number(value)))
Ejemplo n.º 7
0
def test_tokenizer():
    """Tokenize several newline-separated expressions and print the token stream."""
    lexer = tokenize2.Tokenizer()
    expressions = '2+3\n18 * 24\n5^3 + 21.24e+5\n8 + 16e-4\n123e99 * 135e1000'
    print(expressions)
    lexer.print_tokens(lexer.get_tokens(expressions))