Example #1
File: tokenizer.py Project: alxm/siter
    def __make_block_tokens(self, flat_tokens):
        stack = []
        block_tokens = TokenCollection()

        for token in flat_tokens:
            if token.t_type is TokenType.TagOpen:
                # Subsequent tokens will be added to this new block
                stack.append(BlockToken(self.special_tokens[TokenType.TagOpen],
                                        self.special_tokens[TokenType.TagClose],
                                        None))
            else:
                if token.t_type is TokenType.TagClose:
                    if len(stack) == 0:
                        Util.error("Found extra closing tag")

                    # Got the closing tag, pop the block from the stack
                    token = stack.pop()

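                # Add to the enclosing block if one is open, else to the top level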
                if len(stack) > 0:
                    stack[-1].tokens.add_token(token)
                else:
                    block_tokens.add_token(token)

        if len(stack) > 0:
            Util.error("Missing closing tag")

        return block_tokens
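The method groups a flat token stream into nested blocks with a stack: an opening tag pushes a new block, a closing tag pops it and attaches it to whatever encloses it (or to the top level). Below is a minimal, self-contained sketch of the same idea using plain strings and lists instead of siter's Token classes; every name in it is illustrative, not the project's API.

def group_blocks(flat_tokens, tag_open='{', tag_close='}'):
    # Illustrative stand-in for __make_block_tokens, not siter's API
    stack = []       # open blocks, innermost last
    top_level = []   # tokens that belong to no block

    for token in flat_tokens:
        if token == tag_open:
            # Start a new nested block; subsequent tokens are added to it
            stack.append([])
        elif token == tag_close:
            if not stack:
                raise ValueError('Found extra closing tag')
            # The block just closed becomes a single token of its parent
            block = stack.pop()
            (stack[-1] if stack else top_level).append(block)
        else:
            (stack[-1] if stack else top_level).append(token)

    if stack:
        raise ValueError('Missing closing tag')

    return top_level

print(group_blocks(['{', 'a', '{', 'b', '}', 'c', '}', 'd']))
# [['a', ['b'], 'c'], 'd']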
Example #2
    def __evaluate_collection(self, collection):
        eval_tokens = TokenCollection()

        for token in collection:
            if token.t_type is TokenType.Block:
                evaluated = self.evaluate_block(token)

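                # evaluate_block returns None for discarded blocks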
                if evaluated:
                    eval_tokens.add_collection(evaluated)
            else:
                eval_tokens.add_token(token)

        return eval_tokens
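__evaluate_collection and evaluate_block (Example #3) call each other: plain tokens pass through, while Block tokens are expanded and their results spliced into the output. Here is a stripped-down sketch of that mutual recursion, with nested Python lists standing in for BlockToken and TokenCollection (illustrative only, assuming a trivial evaluate_block):

def evaluate_collection(collection):
    result = []
    for token in collection:
        if isinstance(token, list):        # a nested block
            evaluated = evaluate_block(token)
            if evaluated:                  # skip blocks that produced nothing
                result.extend(evaluated)   # splice the expansion into the output
        else:
            result.append(token)           # plain tokens pass through unchanged
    return result

def evaluate_block(block):
    # Stand-in for Example #3: here a block simply evaluates its contents
    return evaluate_collection(block)

print(evaluate_collection(['a', ['b', ['c']], 'd']))
# ['a', 'b', 'c', 'd']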
Example #3
    def evaluate_block(self, block):
        # Get the binding's name
        name = block.capture_call()

        if name is None:
            # This block does not call a binding
            return self.__evaluate_collection(block.tokens)

        if not self.bindings.contains(name):
            # Name is unknown, discard block
            Util.warning('Use of unknown binding {}:\n{}'.format(name, block))
            return None

        binding = self.bindings.get(name)
        eval_tokens = TokenCollection()

        if type(binding) is VariableBinding:
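            # A variable expands to its own evaluated token collection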
            eval_binding = self.__evaluate_collection(binding.tokens)
            eval_tokens.add_collection(eval_binding)
        elif type(binding) is MacroBinding:
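            # A macro evaluates its body with parameters bound to the call's arguments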
            args = block.capture_args(binding.num_params == 1)

            if len(args) != binding.num_params:
                Util.warning('Macro {} takes {} args, got {}:\n{}'
                    .format(name, binding.num_params, len(args), block))
                return None

            self.bindings.push()

            # Bind each parameter to the supplied argument
            for arg, param in zip(args, binding.params):
                self.bindings.add_variable(param.resolve(),
                                           TokenCollection([arg]))

            eval_binding = self.__evaluate_collection(binding.tokens)
            eval_tokens.add_collection(eval_binding)

            self.bindings.pop()
        elif type(binding) is FunctionBinding:
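            # A function binding calls binding.func, with raw tokens (lazy) or evaluated arguments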
            args = block.capture_args(binding.num_params == [1])

            if len(args) not in binding.num_params:
                Util.warning('Function {} takes {} args, got {}:\n{}'
                    .format(name, binding.num_params, len(args), block))
                return None

            if binding.lazy:
                # Feed block tokens directly to function
                result = binding.func(self, args)

                if result:
                    result = self.evaluate_block(result)
                    eval_tokens.add_collection(result)
            else:
                # Evaluate and resolve each argument
                arguments = [self.evaluate_block(a).resolve() for a in args]

                body = binding.func(self, arguments)
                eval_tokens.add_collection(self.tokenizer.tokenize(body))

        # Trim leading and trailing whitespace
        eval_tokens.trim()

        # Run page content through Markdown
        if binding.protected \
            and name == self.settings.Content \
            and self.imports.Md:

            content = eval_tokens.resolve()
            md = self.imports.Md.markdown(content, output_format = 'html5')
            md_token = Token(TokenType.Text, md)
            eval_tokens = TokenCollection([md_token])

        return eval_tokens
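The MacroBinding branch follows the classic scoped-substitution pattern: push a scope, bind each parameter to its argument, evaluate the body, then pop the scope so the bindings disappear again. Below is a self-contained sketch of just that pattern; the Scopes class and call_macro are illustrative stand-ins for siter's bindings stack, not the project's API.

class Scopes:
    # Illustrative scope stack: a list of dicts, innermost scope last
    def __init__(self):
        self.stack = [{}]

    def push(self):
        self.stack.append({})

    def pop(self):
        self.stack.pop()

    def add_variable(self, name, value):
        self.stack[-1][name] = value

    def contains(self, name):
        return any(name in scope for scope in self.stack)

    def get(self, name):
        for scope in reversed(self.stack):
            if name in scope:
                return scope[name]
        raise KeyError(name)

def call_macro(scopes, params, args, body):
    if len(args) != len(params):
        raise ValueError('macro takes {} args, got {}'
                         .format(len(params), len(args)))

    scopes.push()
    try:
        # Bind each parameter to the supplied argument in the new scope
        for param, arg in zip(params, args):
            scopes.add_variable(param, arg)
        # 'Evaluating' the body here just substitutes any bound name
        return [scopes.get(tok) if scopes.contains(tok) else tok for tok in body]
    finally:
        # The bindings vanish once the macro returns
        scopes.pop()

scopes = Scopes()
print(call_macro(scopes, ['x', 'y'], ['hello', 'world'], ['x', ',', 'y']))
# ['hello', ',', 'world']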