def run(self):
    """Render every page from the pages dir into the output dir and
    report how many were written and how long it took."""
    started_at = time.perf_counter()
    page_count = self.__work(self.dirs.pages, self.dirs.out)
    # Adding 0.05 before rounding makes the one-decimal time round half-up
    duration = round(time.perf_counter() - started_at + 0.05, 1)
    noun = 'page' if page_count == 1 else 'pages'
    Util.message('Done', '{} {} in {}s'.format(page_count, noun, duration))
def test_line(self, number, min_len = None, max_len = None):
    """Check that line `number` exists and, when bounds are given, that
    its length lies within [min_len, max_len].

    Returns True when the line passes all checks. On failure, reports
    via Util (a warning for Optional files, an error otherwise) and
    returns False. A file with no content always fails.
    """
    if self.content is None:
        return False
    line = self.get_line(number)
    problem = None
    if line is None:
        problem = '{}:{} line not found'.format(self.path, number)
    elif min_len and len(line) < min_len:
        problem = '{}:{} length must be at least {}, is {}: "{}"' \
            .format(self.path, number, min_len, len(line), line)
    elif max_len and len(line) > max_len:
        problem = '{}:{} length must not exceed {}, is {}: "{}"' \
            .format(self.path, number, max_len, len(line), line)
    if not problem:
        return True
    # Optional files only warn; required files escalate to an error
    if self.mode is FileMode.Optional:
        Util.warning(problem)
    else:
        Util.error(problem)
    return False
def __work(self, read_dir, write_dir):
    """Render each file under read_dir into write_dir, recursing into
    subdirectories; returns the total number of files written."""
    total = 0
    for page in read_dir.list_files():
        target = write_dir.add_file(page.get_name(), FileMode.Create)
        Util.message('Writing', target.get_path())
        # Each page renders in its own binding scope
        self.bindings.push()
        self.__set_local_bindings(page, read_dir)
        self.__set_file_bindings(page, True)
        # Expand the template: variables/functions replaced from bindings
        rendered = self.__apply_template()
        target.write(rendered)
        self.bindings.pop()
        total += 1
    # Mirror the directory tree depth-first
    for subdir in read_dir.list_dirs():
        mirror = write_dir.add_dir(subdir.get_name(), FileMode.Create)
        total += self.__work(subdir, mirror)
    return total
def __make_block_tokens(self, flat_tokens):
    """Fold a flat token stream into a TokenCollection where each
    open/close tag pair becomes a single nested BlockToken.

    Unbalanced tags are reported through Util.error.
    """
    open_blocks = []
    top_level = TokenCollection()
    for tok in flat_tokens:
        if tok.t_type is TokenType.TagOpen:
            # Begin a new (possibly nested) block
            open_blocks.append(BlockToken(self.special_tokens[TokenType.TagOpen],
                                          self.special_tokens[TokenType.TagClose],
                                          None))
            continue
        if tok.t_type is TokenType.TagClose:
            if not open_blocks:
                Util.error("Found extra closing tag")
            # The finished block is treated as one token from here on
            tok = open_blocks.pop()
        if open_blocks:
            open_blocks[-1].tokens.add_token(tok)
        else:
            top_level.add_token(tok)
    if open_blocks:
        Util.error("Missing closing tag")
    return top_level
def copy_to(self, dst_dir):
    """Mirror this directory into dst_dir, replacing any previous copy.

    The destination tree is removed first so the copy is exact.
    """
    src = self.path
    dst = dst_dir.path
    Util.message('Copy files', 'From {} to {}'.format(src, dst))
    # Tolerate a missing destination: the original unconditional rmtree
    # raised FileNotFoundError on a fresh output directory.
    try:
        shutil.rmtree(dst)
    except FileNotFoundError:
        pass
    shutil.copytree(src, dst)
def __from_files(self, files):
    """Override the default special tokens from optional config files.

    The eval hint comes from line 0 of the evalhint file (when exactly
    one character long — per test_line(0, 1, 1)); the open/close tags
    come from lines 0 and 1 of the tags file.
    """
    hint_file = files.evalhint
    if hint_file.test_line(0, 1, 1):
        self.EvalHint = hint_file.get_line(0)
        Util.info('Using {} as block eval hint'.format(self.EvalHint))
    tag_file = files.tags
    if tag_file.test_line(0, 1) and tag_file.test_line(1, 1):
        self.TagOpen = tag_file.get_line(0)
        self.TagClose = tag_file.get_line(1)
        Util.info('Using {} and {} as block tags'
                  .format(self.TagOpen, self.TagClose))
def __init__(self, path, mode):
    """Initialize a content-bearing file and eagerly read it.

    Files in Create mode are not read. A missing file is an error
    unless the mode is Optional.
    """
    File.__init__(self, path, mode)
    self.content = None  # full file text, or None when unread/missing
    self.lines = None    # lazily-populated line cache — TODO confirm against get_line
    if self.mode is not FileMode.Create:
        try:
            # Plain 'r' gives universal-newline text mode; the old 'rU'
            # flag was deprecated since 3.4 and removed in Python 3.11,
            # where it raises ValueError.
            with open(self.path, 'r') as f:
                self.content = f.read()
        except FileNotFoundError:
            if self.mode is not FileMode.Optional:
                Util.error('Required file {} not found'.format(self.path))
def process_file(self, in_file, read_dir, template_file, is_stub = False):
    """Render one source file through template_file and return the result.

    Runs inside a fresh binding scope that is popped before returning.
    """
    Util.message('Process', in_file.get_path())
    self.bindings.push()
    if not is_stub:
        # Stubs inherit the root path of the file that invoked them,
        # so local bindings are only set for non-stub files
        self.__set_local_bindings(in_file, read_dir)
    self.__set_file_bindings(in_file, True)
    # Expand the template: variables/functions replaced from bindings
    rendered = self.__apply_template(template_file)
    self.bindings.pop()
    return rendered
def get(self, name):
    """Return the binding stored under `name`; unknown names are
    reported through Util.error."""
    known = self.bindings
    if name in known:
        return known[name]
    Util.error('{} not in bindings'.format(name))
    # Util.error is expected not to return; mirror the original's
    # final lookup regardless
    return known[name]
def __add(self, name, binding, protected):
    """Register `binding` under `name` with the given protection flag.

    Overwriting an existing protected binding is reported via Util.error.
    """
    exists = self.contains(name)
    if exists and self.get(name).protected:
        Util.error('Cannot overwrite binding {}'.format(name))
    # Stamp the protection level, then (re)register the binding
    binding.protected = protected
    self.bindings[name] = binding
def __init__(self, path, mode):
    """Record the absolute path and access mode.

    Required files must already exist; a missing required file is
    reported immediately via Util.error.
    """
    self.mode = mode
    self.path = os.path.abspath(path)
    # Fail fast on a missing mandatory file (existence is only checked
    # for Required mode, preserving the short-circuit)
    if self.mode is FileMode.Required and not self.exists():
        Util.error('Required file {} not found'.format(self.path))
def __init__(self):
    """Load the optional third-party modules through Util.try_import
    and expose each result as an attribute."""
    optional_modules = (
        ('Md', 'markdown'),
        ('Pygments', 'pygments'),
        ('PygmentsLexers', 'pygments.lexers'),
        ('PygmentsFormatters', 'pygments.formatters'),
    )
    for attr, module_name in optional_modules:
        setattr(self, attr, Util.try_import(module_name))
def evaluate_block(self, block):
    """Evaluate one block token and return its expanded TokenCollection.

    A block that does not call a binding is evaluated in place. Otherwise
    the named binding is dispatched by type (variable, macro, function);
    unknown names and argument-count mismatches produce a warning and
    return None. When the evaluated binding is the protected page-content
    binding and markdown is available, the result is run through Markdown.
    """
    # Get the binding's name
    name = block.capture_call()
    if name is None:
        # This block does not call a binding
        return self.__evaluate_collection(block.tokens)
    if not self.bindings.contains(name):
        # Name is unknown, discard block
        Util.warning('Use of unknown binding {}:\n{}'.format(name, block))
        return None
    binding = self.bindings.get(name)
    eval_tokens = TokenCollection()
    if type(binding) is VariableBinding:
        # Variables expand to their (recursively evaluated) token list
        eval_binding = self.__evaluate_collection(binding.tokens)
        eval_tokens.add_collection(eval_binding)
    elif type(binding) is MacroBinding:
        # Single-parameter macros capture all args as one — TODO confirm
        # against capture_args' flag semantics
        args = block.capture_args(binding.num_params == 1)
        if len(args) != binding.num_params:
            Util.warning('Macro {} takes {} args, got {}:\n{}'
                         .format(name, binding.num_params, len(args), block))
            return None
        self.bindings.push()
        # Bind each parameter to the supplied argument
        for arg, param in zip(args, binding.params):
            self.bindings.add_variable(param.resolve(), TokenCollection([arg]))
        eval_binding = self.__evaluate_collection(binding.tokens)
        eval_tokens.add_collection(eval_binding)
        self.bindings.pop()
    elif type(binding) is FunctionBinding:
        # num_params is a collection of accepted arg counts here
        args = block.capture_args(binding.num_params == [1])
        if len(args) not in binding.num_params:
            Util.warning('Function {} takes {} args, got {}:\n{}'
                         .format(name, binding.num_params, len(args), block))
            return None
        if binding.lazy:
            # Feed block tokens directly to function
            result = binding.func(self, args)
            if result:
                result = self.evaluate_block(result)
                eval_tokens.add_collection(result)
        else:
            # Evaluate and resolve each argument
            arguments = [self.evaluate_block(a).resolve() for a in args]
            body = binding.func(self, arguments)
            eval_tokens.add_collection(self.tokenizer.tokenize(body))
    # Trim leading and trailing whitespace
    eval_tokens.trim()
    # Run page content through Markdown
    if binding.protected \
            and name == self.settings.Content \
            and self.imports.Md:
        content = eval_tokens.resolve()
        md = self.imports.Md.markdown(content, output_format = 'html5')
        md_token = Token(TokenType.Text, md)
        eval_tokens = TokenCollection([md_token])
    return eval_tokens