def atom(cache):
    """Generate the atom feed from the post index."""
    feed_data = _complement(index=cache.index())
    target = os.path.join(conf.get('build_path'), conf.get('atom_location'))
    logger.info(_to('atom feed', target))
    helpers.makedirs(os.path.dirname(target))
    templates.render(feed_data, 'atom.xml', target)
def archive(cache):
    """Build blog archive page."""
    location = conf.get('archive_location')
    target = os.path.join(conf.get('build_path'), location)
    logger.info('archive: ' + location)
    helpers.makedirs(os.path.dirname(target))
    payload = {'title': 'Archive', 'tags': cache.tags()}
    templates.render(_complement(payload, index=cache.index()), 'archive.html', target)
def run(path=None, port=None, browse=False):
    """Preview the generated website on a local HTTP server.

    Arguments:
        path -- configuration file location.
        port -- port to serve on; falls back to the configured value.
        browse -- True to open the site in the default web browser."""
    conf.load(path)
    helpers.check_build(conf.get('build_path'))
    if not port:
        port = conf.get('port')
    server = threading.Thread(target=_serve, args=[conf.get('build_path'), port])
    server.start()
    if browse:
        webbrowser.open_new("http://localhost:%d/" % port)
def js(cache):
    """Minify JavaScript assets to the build path.

    Falls back to a plain copy when minification is disabled or no
    minifier command is configured."""
    # Configuration is loop-invariant; read it once instead of per asset.
    minify_enabled = conf.get('min_js')
    command = conf.get('min_js_cmd')
    for source in cache.assets(ext='.js'):
        helpers.makedirs(source.dest_dir())
        if minify_enabled and command:
            logger.info('minifying JavaScript: ' + source.rel_path())
            helpers.execute(command, source.path(), source.dest())
        else:
            logger.info('copying: ' + source.rel_path())
            shutil.copyfile(source.path(), source.dest())
        # Preserve the source modification time on the output file.
        helpers.utime(source.dest(), source.updated())
        source.processed(True)
def deploy(path=None):
    """Deploy generated website to the remote web server.

    Raises an exception when no deploy command is configured; a failing
    deploy command is logged but not re-raised (best effort)."""
    conf.load(path)
    helpers.check_build(conf.get('build_path'))
    logger.info('deploying website...')
    cmd_template = conf.get('deploy_cmd')
    if not cmd_template:
        raise Exception('deploy command is not defined')
    cmd = cmd_template.format(build_path=conf.get('build_path'))
    try:
        output = subprocess.check_output(cmd, shell=True)
    except subprocess.CalledProcessError as e:
        logger.error(e)
        logger.debug("Command output:\n%s" % e.output.decode('utf-8'))
    else:
        logger.debug("Command output:\n%s" % output.decode('utf-8'))
        logger.info('done')
def less(cache):
    """Compile LESS sources, optionally minifying the resulting CSS."""
    for asset in cache.assets(ext='.less'):
        helpers.makedirs(asset.dest_dir())
        logger.info('compiling LESS: ' + asset.rel_path())
        if conf.get('min_css') and conf.get('min_css_cmd'):
            # Compile to a temporary file first, then minify into place.
            tmp_file = os.path.join(asset.dest_dir(), '_' + asset.basename())
            helpers.execute(conf.get('less_cmd'), asset.path(), tmp_file)
            logger.info('minifying CSS: ' + asset.rel_path())
            helpers.execute(conf.get('min_css_cmd'), tmp_file, asset.dest())
            os.remove(tmp_file)
        else:
            helpers.execute(conf.get('less_cmd'), asset.path(), asset.dest())
        # Preserve the source modification time on the output file.
        helpers.utime(asset.dest(), asset.updated())
        asset.processed(True)
def sitemap(cache):
    """Generate sitemap.xml from the full page/post index."""
    payload = _complement(index=cache.full_index())
    target = os.path.join(conf.get('build_path'), const.SITEMAP)
    logger.info(_to('sitemap', target))
    helpers.makedirs(os.path.dirname(target))
    templates.render(payload, 'sitemap.xml', target)
def post(path=None, name=None, force=False, edit=False):
    """Create a new post source file.

    Arguments:
        path -- configuration file location.
        name -- post name.
        force -- True to overwrite an existing file.
        edit -- True to open the new file in the configured editor."""
    conf.load(path)
    created_path = source.PostSource.create(name, force)
    logger.info('post created: ' + created_path)
    if edit:
        helpers.execute(conf.get('editor_cmd'), created_path)
def page(path=None, name=None, force=False, edit=False):
    """Create a new page source file.

    Arguments:
        path -- configuration file location.
        name -- page name.
        force -- True to overwrite an existing file.
        edit -- True to open the new file in the configured editor."""
    conf.load(path)
    try:
        created_path = source.PageSource.create(name, force)
    except source.PageExistsException:
        logger.error('page already exists, use -f to overwrite')
        return
    logger.info('page created: ' + created_path)
    if edit:
        helpers.execute(conf.get('editor_cmd'), created_path)
def parse_time(value, default=None):
    """Convert a string to datetime using the first of the preconfigured
    'time_format' values that will work.

    Arguments:
        value -- date/time string to parse.
        default -- value to return when [value] is empty.

    Raises Exception when [value] matches none of the configured formats."""
    if not value:
        # Nothing to parse. Previously an empty/None value without a
        # default fell through to value.strip() and crashed with
        # AttributeError instead of reporting a format error.
        if default is not None:
            return default
        raise Exception('bad date/time format')
    text = value.strip()
    for time_format in conf.get('time_format'):
        try:
            return datetime.strptime(text, time_format)
        except ValueError:
            pass  # try the next configured format
    raise Exception('bad date/time format')
def _parse(self):
    """Extract page header data and content from a list of lines
    and return the result as key-value couples.

    Header fields present in the source take precedence; missing ones
    are filled from the content, configuration, or file timestamps."""
    # meta: header key/values; desc: summary text; content: page body.
    meta, desc, content = ParseableSource._split(self.text())
    # All meta.get() defaults below are evaluated against the header
    # values as parsed, before this update is applied.
    meta.update(
        {
            "source": self._path,
            # Fall back to the first H1 of the content for the title.
            "title": meta.get("title", helpers.get_h1(content)),
            "template": meta.get("template", self.default_template()),
            "author": meta.get("author", conf.get("author")),
            "author_twitter": meta.get("author_twitter", conf.get("author_twitter")),
            "author_url": meta.get("author_url", conf.get("author_url")),
            "tags": list(ParseableSource._tags(meta.get("tags", ""))),
            "source_url": self.source_url(),
            # File timestamps are the fallback when the header has no dates.
            "created": helpers.parse_time(meta.get("created"), self._ctime),
            "updated": helpers.parse_time(meta.get("updated"), self._utime),
            "description": meta.get("description", desc),
            # Render the body from markdown to HTML.
            "content": md(content.strip()),
        }
    )
    return meta
def get_logger():
    """Build the root logger with a console handler and, when a log file
    is configured, a rotating file handler."""
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)

    def attach(handler):
        # Handler verbosity follows the 'verbose' configuration flag.
        verbose = conf.get('verbose', False)
        handler.setLevel(logging.DEBUG if verbose else logging.INFO)
        handler.setFormatter(logging.Formatter(const.LOG_FORMAT, const.LOG_DATE_FORMAT))
        root.addHandler(handler)

    attach(logging.StreamHandler())
    log_file = conf.get('log_file')
    if log_file is not None:
        helpers.makedirs(os.path.dirname(log_file))
        attach(RotatingFileHandler(
            log_file,
            maxBytes=conf.get('log_max_size', 0),
            backupCount=conf.get('log_backup_cnt', 0)))
    return root
def posts(cache):
    """Build blog posts and copy the latest post to the site root.

    A failure while building a single post is logged and skipped so the
    remaining posts are still generated."""
    for source in cache.posts():
        logger.info(_to('post', source.rel_path(), source.rel_dest()))
        helpers.makedirs(source.dest_dir())
        try:
            data = _complement(source.data())
            templates.render_page(data, source.dest())
        except Exception as ex:
            logger.error('post building error: ' + str(ex))
            logger.debug(traceback.format_exc())

    if conf.get('post_at_root_url'):
        # Put the latest post at site root url.
        last = cache.posts()[0]
        path = os.path.join(conf.get('build_path'), conf.get('index_page'))
        logger.info(_to('root', last.rel_dest(), conf.get('index_page')))
        if any(cache.pages(dest=conf.get('index_page'))):
            # logger.warn() is a deprecated alias; use warning() instead.
            logger.warning('root page will be overwritten by the latest post')
        try:
            shutil.copyfile(last.dest(), path)
        except FileNotFoundError:
            logger.error("latest post was not generated and can't be copied")
def build(path=None, output=None):
    """Generate web content from source.

    Arguments:
        path -- configuration file location.
        output -- optional override for the build directory."""
    conf.load(path)
    cache = Cache()
    if cache.processing_errors():
        template = "error processing source file '%s' - %s"
        for file_name, error in cache.processing_errors():
            logger.error(template % (file_name, error))
    if output:
        conf.set('build_path', output)
    logger.info('build directory: ' + conf.get('build_path'))
    for builder in builders.order():
        builder(cache)
def create(name, force=False):
    """Create a page file.

    Arguments:
        name -- page name (will be used for file name and URL).
        force -- True to overwrite existing file; False to throw exception."""
    # Backslashes in the name become path separators in the page URL.
    page_name = urlify(name, ext_map={ord(u"\\"): u"/"}) + ".md"
    file_name = os.path.join(pathes.pages(), page_name)
    if not force and os.path.exists(file_name):
        raise PageExistsException(path=file_name)
    stamp = datetime.now().strftime(conf.get("time_format")[0])
    helpers.newfile(file_name, const.PROTO_PAGE.format(title=name, created=stamp))
    return page_name
def create(name, force=False):
    """Create new post file placeholder with a unique name.

    Arguments:
        name -- post name.
        force -- True to overwrite existing file; False to raise an exception."""
    created = datetime.now()
    post_name = urlify(name) or const.UNTITLED_POST
    file_name = PostSource._ymd(POST_NAME_FORMAT, created, post_name)
    post_path = os.path.join(pathes.posts(), file_name)
    count = 0
    # Probe numeric suffixes until an unused file name is found;
    # with [force] the first candidate is taken and overwritten.
    while True:
        file_name = helpers.suffix(post_path, count)
        if force or not os.path.exists(file_name):
            # Render the creation timestamp using the primary time format.
            created = created.strftime(conf.get("time_format")[0])
            text = const.PROTO_POST
            text = text.format(title=name, created=created)
            helpers.newfile(file_name, text)
            break
        count += 1
    return os.path.basename(file_name)
def _rel(path):
    """Return [path] relative to the build directory when it points
    inside it; otherwise return the path unchanged."""
    build_path = conf.get('build_path')
    if path.startswith(build_path):
        return os.path.relpath(path, build_path)
    return path
def clean(path=None):
    """Delete all generated content from the build directory."""
    conf.load(path)
    logger.info('cleaning output...')
    helpers.rmdir(conf.get('build_path'))
    logger.info('done')
def tag_url(tag):
    """Return the relative URL of the page for the specified tag."""
    return conf.get('rel_root_url') + conf.get('tag_location').format(tag=urlify(tag))
def tag_path(tag):
    """Return the full file system path of the specified tag page."""
    relative = conf.get('tag_location').format(tag=urlify(tag))
    return os.path.join(conf.get('build_path'), relative)
def dest(self):
    """Return the fully qualified destination file path."""
    build_path = conf.get("build_path")
    return os.path.join(build_path, self.rel_dest())
def default_template(self):
    """Return the template name configured for posts."""
    template_name = conf.get("post_tpl")
    return template_name
def add_channel(channel):
    """Attach [channel] to the logger with the configured verbosity
    level and the standard log format."""
    if conf.get('verbose', False):
        level = logging.DEBUG
    else:
        level = logging.INFO
    channel.setLevel(level)
    channel.setFormatter(logging.Formatter(const.LOG_FORMAT, const.LOG_DATE_FORMAT))
    logger.addHandler(channel)
def rel_dest(self):
    """Return the post's destination path relative to the build root."""
    base = os.path.basename(self._rel_path)
    # Drop the leading date/ordering prefix and the file extension.
    stem = os.path.splitext(base.lstrip("0123456789-_"))[0]
    return PostSource._ymd(conf.get("post_location"), self.created(), stem)
def _tags(value):
    """Parse tags from a comma-separated string, falling back to the
    default tags set from configuration when none are given."""
    parsed = list(helpers.xsplit(",", value, strip=True, drop_empty=True))
    for tag in (parsed or conf.get("default_tags")):
        yield {"name": tag, "url": helpers.tag_url(tag)}
def _save(text, dest_path):
    """Apply optional HTML minification to the [text] and save it to file."""
    should_minify = conf.get('min_html') and helpers.ext(dest_path) == '.html'
    content = minify.minify_html(text) if should_minify else text
    with codecs.open(dest_path, mode='w', encoding='utf-8') as f:
        f.write(content)
def source_url(self):
    """Return the URL of this file in the source repository, or None
    when 'source_url' is not configured."""
    root = conf.get("source_url")
    if not root:
        return None
    return "{root}blob/master/{type}/{name}".format(
        root=root, type=self.source_type(), name=self.basename())
def url(self, full=False):
    """Returns an URL corresponding to the source file.

    Arguments:
        full -- True for an absolute URL, False for a root-relative one."""
    if full:
        root = conf.get("root_url")
    else:
        root = conf.get("rel_root_url")
    return root + self.rel_dest()