def serve(site, director):
    """Run a simple web server that serves the output directory and watches
    for changes to the site. When something changes, it is regenerated.
    """
    # Crank the log level up: with the dev server running, there is no
    # sense in being silent, so give the user interactive feedback.
    logger.setLevel(logging.INFO)

    # Start the watchdog.
    handler = SiteHandler(director)
    observer = Observer()
    observer.schedule(handler, site.path, recursive=True)
    observer.start()

    # SimpleHTTPRequestHandler only serves the current working directory,
    # so the only way to serve the right path is to change into it.
    outdir = director.outdir
    os.chdir(outdir)
    socketserver.TCPServer.allow_reuse_address = True
    httpd = socketserver.TCPServer(('', PORT), SimpleHTTPRequestHandler)
    logger.info(
        _('Serving {outdir} at http://localhost:{port}/.'
          '\nPress Ctrl-C to quit.').format(outdir=outdir, port=PORT))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        logger.info(_('\nBye.'))
        observer.stop()
        observer.join()
def register(self, subparsers):
    """Attach the build command's positional arguments to the CLI parser."""
    parser = super(BuildCommand, self).register(subparsers)
    parser.add_argument('site', nargs='?',
                        help=_('the path to your website'))
    parser.add_argument('outdir', nargs='?',
                        help=_('an optional output directory to create or'
                               ' update if it already exists'))
def register(self, subparsers):
    """Attach the scaffold command's positional arguments to the parser."""
    parser = super(ScaffoldCommand, self).register(subparsers)
    parser.add_argument('scaffold', nargs='?',
                        help=_('the scaffold to generate'))
    parser.add_argument('site', nargs='?', default='site',
                        help=_('the path to your website'))
def _split_content_with_frontmatter(self, first, source, source_file):
    """Separate frontmatter from source material."""
    # A leading %YAML directive means there are two document markers
    # instead of one, so one extra split is required.
    max_splits = 2 if first.startswith('%YAML') else 1
    content = source.split(self.document_marker, max_splits)
    try:
        # NOTE(review): yaml.load without SafeLoader can construct
        # arbitrary Python objects from tagged frontmatter; consider
        # yaml.safe_load if the input is not trusted.
        data = yaml.load(content[max_splits - 1])
    except ScannerError as ex:
        raise AbortError(
            _('There is invalid YAML in the frontmatter: {details}').format(
                details=str(ex)))
    try:
        source = content[max_splits]
    except IndexError:
        raise AbortError(
            _('A YAML marker was missing in {source}').format(
                source=source_file))
    if 'title' in data:
        data['title'] = escape(data['title'])
    return data, source
def register(self, subparsers):
    """Attach the watch command's positional arguments to the CLI parser."""
    parser = super(WatchCommand, self).register(subparsers)
    parser.add_argument('site', nargs='?',
                        help=_('the path to your website'))
    parser.add_argument('outdir', nargs='?',
                        help=_('an optional output directory to create or'
                               ' update if it already exists'))
def register(self, subparsers):
    """Register the scaffold subcommand's arguments."""
    parser = super(ScaffoldCommand, self).register(subparsers)
    parser.add_argument(
        'scaffold', nargs='?', help=_('the scaffold to generate'))
    parser.add_argument(
        'site', nargs='?', default='site',
        help=_('the path to your website'))
def make(scaffold, site):
    """Make a site from the scaffold.

    :param scaffold: name of a built-in scaffold
    :param site: destination directory for the new site
    :raises AbortError: if the scaffold is unknown or the site exists
    """
    if scaffold not in BUILTIN_SCAFFOLDS:
        # Format AFTER translation: calling .format() inside _() formats
        # the msgid before the catalog lookup, so the translation is
        # never found.
        raise AbortError(
            _('There is no {scaffold} scaffold.').format(scaffold=scaffold))
    if os.path.exists(site):
        raise AbortError(_('{site} already exists.').format(site=site))
    os.makedirs(site)
    scaffold_path = os.path.join(SCAFFOLDS_PATH, scaffold)
    shutil.copytree(scaffold_path, os.path.join(site, 'source'))
def make(scaffold, site):
    """Make a site from the scaffold.

    :param scaffold: name of a built-in scaffold
    :param site: destination directory for the new site
    :raises AbortError: if the scaffold is unknown or the site exists
    """
    if scaffold not in BUILTIN_SCAFFOLDS:
        # Format outside _() so gettext can look up the untranslated msgid.
        raise AbortError(
            _('There is no {scaffold} scaffold.').format(scaffold=scaffold))
    if os.path.exists(site):
        raise AbortError(_('{site} already exists.').format(site=site))
    os.makedirs(site)
    scaffold_path = os.path.join(SCAFFOLDS_PATH, scaffold)
    shutil.copytree(scaffold_path, os.path.join(site, 'source'))
def on_pre_composition(self, director):
    """Validate the open_graph configuration and cache the resolvers."""
    parser = self._config.parser
    if not parser.has_section('open_graph'):
        raise AbortError(_(
            'An open_graph section is missing in the configuration file.'))
    if not parser.has_option('open_graph', 'default_image'):
        raise AbortError(_(
            'A default image URL is missing in the configuration file.'))
    self._resolver = director.resolver
    self._url_resolver = URLResolver(
        self._config, parser.get('open_graph', 'default_image'))
def _should_skip(self, filename):
    """Determine if the file type should be skipped."""
    if filename.endswith(self.SKIP_EXTENSION):
        # The message must carry a {filename} placeholder; with a literal
        # in its place the .format(filename=...) call was a no-op and the
        # file name was never logged.
        logger.debug(
            _('Skipping {filename} with skipped file type ...').format(
                filename=filename))
        return True
    if filename.endswith(self.SKIP_FILES):
        logger.debug(
            _('Skipping special file {filename} ...').format(
                filename=filename))
        return True
    return False
def on_pre_composition(self, director):
    """Ensure open_graph configuration exists, then store the resolvers."""
    config = self._config
    if not config.parser.has_section('open_graph'):
        raise AbortError(
            _('An open_graph section is missing in the configuration file.'
              ))
    if not config.parser.has_option('open_graph', 'default_image'):
        raise AbortError(
            _('A default image URL is missing in the configuration file.'))
    self._resolver = director.resolver
    self._url_resolver = URLResolver(
        config, config.parser.get('open_graph', 'default_image'))
def _should_skip(self, filename):
    """Determine if the file type should be skipped."""
    if filename.endswith(self.SKIP_EXTENSION):
        # Restore the {filename} placeholder so the format argument is
        # actually interpolated into the log message.
        logger.debug(
            _('Skipping {filename} with skipped file type ...').format(
                filename=filename))
        return True
    if filename.endswith(self.SKIP_FILES):
        logger.debug(
            _('Skipping special file {filename} ...').format(
                filename=filename))
        return True
    return False
def compose(self, catalog, source_file, out_dir):
    """Compose an Atom feed from a JSON source file into the output dir."""
    root, ext = os.path.splitext(os.path.basename(source_file))
    filename = root + self.output_extension
    output_file = os.path.join(out_dir, filename)
    if self._needs_update(source_file, output_file):
        logger.info(_('Generating Atom XML for {source_file} ...').format(
            source_file=source_file))
        feed = self._parse_feed(source_file)
        with open(output_file, 'wb') as out:
            out.write(feed.to_string().encode('utf-8'))
            out.write(b'<!-- handrolled for excellence -->\n')
    else:
        # Restore the {filename} placeholder: a literal made the format
        # kwarg dead and the message useless.
        logger.debug(_('Skipping {filename} ... It is up to date.').format(
            filename=filename))
def _generate_atom_feed(self, director, blog_posts):
    """Generate the atom feed."""
    logger.info(_('Generating Atom XML feed ...'))
    feed_builder = FeedBuilder(self.atom_metadata)
    feed_builder.add(blog_posts)
    out_path = os.path.join(director.outdir, self.atom_output)
    feed_builder.write_to(out_path)
def build(cls, args):
    """Build a validated site."""
    site = cls(args.site)
    valid, message = site.is_valid()
    if valid:
        return site
    raise AbortError(_('Invalid site source: {0}').format(message))
def compose(self, catalog, source_file, out_dir):
    """Render a Jinja template source file into the output directory."""
    filename = os.path.basename(source_file)
    # str.rstrip('.j2') strips a *character set*, not a suffix, so a name
    # like 'raj.j2' collapsed to 'ra'. Strip the extension as a suffix.
    if filename.endswith('.j2'):
        filename = filename[:-len('.j2')]
    output_file = os.path.join(out_dir, filename)
    if self._needs_update(source_file, output_file):
        logger.info(_('Generating from template {source_file} ...').format(
            source_file=source_file))
        data, source = self.get_data(source_file)
        data['config'] = self._config
        template = jinja2.Template(source)
        with open(output_file, 'wb') as out:
            out.write(template.render(data).encode('utf-8'))
            # Frontmatter loading seems to munch the final line separator.
            out.write(os.linesep.encode('utf-8'))
    else:
        # Restore the {filename} placeholder so the format kwarg is used.
        logger.debug(_('Skipping {filename} ... It is up to date.').format(
            filename=filename))
def _generate_atom_feed(self, director, blog_posts):
    """Generate the atom feed."""
    logger.info(_("Generating Atom XML feed ..."))
    builder = FeedBuilder(self.atom_metadata)
    builder.add(blog_posts)
    builder.write_to(os.path.join(director.outdir, self.atom_output))
def _generate_list_page(self, director, blog_posts):
    """Generate the list page."""
    logger.info(_('Generating blog list page ...'))
    page_builder = ListPageBuilder(
        director.catalog.get_template(self.list_template))
    page_builder.add(blog_posts)
    out_path = os.path.join(director.outdir, self.list_output)
    page_builder.write_to(out_path)
def _is_post(self, frontmatter):
    """Check if the front matter looks like a blog post.

    :raises AbortError: when the blog flag is present but not a boolean
    """
    is_post = frontmatter.get('blog', False)
    # isinstance is the idiomatic type check; bool cannot be subclassed,
    # so this is behaviorally identical to comparing type objects.
    if not isinstance(is_post, bool):
        raise AbortError(
            _('Invalid blog frontmatter (expects True or False): '
              '{blog_value}').format(blog_value=is_post))
    return is_post
def _get_option(self, option):
    """Get an option out of the blog section."""
    try:
        return self._config.parser.get('blog', option)
    except configparser.NoOptionError:
        raise AbortError(
            _('The blog extension requires the {option} option.').format(
                option=option))
def on_pre_composition(self, director):
    """Validate the twitter configuration and cache the resolvers.

    :raises AbortError: when required twitter options are missing
    """
    parser = self._config.parser
    if not parser.has_section('twitter'):
        # Fixed grammar in the user-facing error ('An twitter' -> 'A').
        raise AbortError(
            _('A twitter section is missing in the configuration file.'))
    if not parser.has_option('twitter', 'default_image'):
        raise AbortError(
            _('A default image URL is missing in the configuration file.'))
    if not parser.has_option('twitter', 'site_username'):
        raise AbortError(
            _('A site username is missing in the configuration file.'))
    self._resolver = director.resolver
    self._url_resolver = URLResolver(
        self._config, parser.get('twitter', 'default_image'))
    self._site = parser.get('twitter', 'site_username')
def _create_output_directories(self, dirnames, output_dirpath):
    """Create new directories in output."""
    for dirname in dirnames:
        out = os.path.join(output_dirpath, dirname)
        if os.path.exists(out):
            # The directory may already exist for updates.
            continue
        logger.info(_('Creating directory {out} ...').format(out=out))
        os.mkdir(out)
def compose(self, catalog, source_file, out_dir):
    """Compose an Atom feed from a JSON source file into the output dir."""
    root, ext = os.path.splitext(os.path.basename(source_file))
    filename = root + self.output_extension
    output_file = os.path.join(out_dir, filename)
    if self._needs_update(source_file, output_file):
        logger.info(
            _("Generating Atom XML for {source_file} ...").format(
                source_file=source_file))
        feed = self._parse_feed(source_file)
        with open(output_file, "wb") as out:
            out.write(feed.to_string().encode("utf-8"))
            out.write(b"<!-- handrolled for excellence -->\n")
    else:
        # Restore the {filename} placeholder so the format kwarg is used.
        logger.debug(
            _("Skipping {filename} ... It is up to date.").format(
                filename=filename))
def _is_post(self, frontmatter):
    """Check if the front matter looks like a blog post.

    :raises AbortError: when the blog flag is present but not a boolean
    """
    is_post = frontmatter.get("blog", False)
    # Use isinstance rather than comparing type objects; identical
    # behavior since bool cannot be subclassed.
    if not isinstance(is_post, bool):
        raise AbortError(
            _("Invalid blog frontmatter (expects True or False): "
              "{blog_value}").format(blog_value=is_post))
    return is_post
def _generate_list_page(self, director, blog_posts):
    """Generate the list page."""
    logger.info(_("Generating blog list page ..."))
    template = director.catalog.get_template(self.list_template)
    builder = ListPageBuilder(template)
    builder.add(blog_posts)
    builder.write_to(os.path.join(director.outdir, self.list_output))
def _get_option(self, option):
    """Get an option out of the blog section."""
    try:
        return self._config.parser.get("blog", option)
    except configparser.NoOptionError:
        raise AbortError(
            _("The blog extension requires the {option} option.").format(
                option=option))
def _build_template(self, template_path):
    """Build a template. Abort if unknown type."""
    # Dispatch on the file extension; the first matching builder wins.
    for ext, builder in self._builders.items():
        if template_path.endswith(ext):
            return builder(template_path)
    raise AbortError(
        _('Unknown template type provided for {template}.').format(
            template=template_path))
def compose(self, catalog, source_file, out_dir):
    """Render a Jinja template source file into the output directory."""
    filename = os.path.basename(source_file)
    # rstrip('.j2') removes trailing characters from the set {'.','j','2'},
    # not the suffix — 'raj.j2' became 'ra'. Strip the suffix explicitly.
    if filename.endswith('.j2'):
        filename = filename[:-3]
    output_file = os.path.join(out_dir, filename)
    if self._needs_update(source_file, output_file):
        logger.info(
            _('Generating from template {source_file} ...').format(
                source_file=source_file))
        data, source = self.get_data(source_file)
        data['config'] = self._config
        template = jinja2.Template(source)
        with open(output_file, 'wb') as out:
            out.write(template.render(data).encode('utf-8'))
            # Frontmatter loading seems to munch the final line separator.
            out.write(os.linesep.encode('utf-8'))
    else:
        # Restore the {filename} placeholder so the format kwarg is used.
        logger.debug(
            _('Skipping {filename} ... It is up to date.').format(
                filename=filename))
def on_pre_composition(self, director):
    """Validate the twitter configuration and cache the resolvers.

    :raises AbortError: when required twitter options are missing
    """
    if not self._config.parser.has_section('twitter'):
        # Fixed grammar in the user-facing error ('An twitter' -> 'A').
        raise AbortError(_(
            'A twitter section is missing in the configuration file.'))
    if not self._config.parser.has_option('twitter', 'default_image'):
        raise AbortError(_(
            'A default image URL is missing in the configuration file.'))
    if not self._config.parser.has_option('twitter', 'site_username'):
        raise AbortError(_(
            'A site username is missing in the configuration file.'))
    self._resolver = director.resolver
    self._url_resolver = URLResolver(
        self._config, self._config.parser.get('twitter', 'default_image'))
    self._site = self._config.parser.get('twitter', 'site_username')
def on_post_composition(self, director):
    """Write the sitemap file if any URL changed since the last write."""
    if not self._dirty:
        return
    logger.info(_('Generating sitemap ...'))
    sitemap_path = os.path.join(director.outdir, 'sitemap.txt')
    with open(sitemap_path, 'w') as sitemap:
        sitemap.writelines(url + '\n' for url in sorted(self.urls))
    self._dirty = False
def _generate_output(self, outdir):
    """Walk the site source and produce output for every file."""
    if os.path.exists(outdir):
        logger.info(_('Updating {outdir} ...').format(outdir=outdir))
    else:
        logger.info(_('Creating {outdir} ...').format(outdir=outdir))
        os.mkdir(outdir)
    self._collect_frontmatter()
    for dirpath, dirnames, filenames in self.site.walk():
        output_dirpath = self._get_output_dirpath(dirpath, outdir)
        logger.info(
            _('Populating {dirpath} ...').format(dirpath=output_dirpath))
        self._create_output_directories(dirnames, output_dirpath)
        for filename in filenames:
            self._process_file(os.path.join(dirpath, filename),
                               output_dirpath)
def compose(self, catalog, source_file, out_dir):
    """Generate CSS from a Sass source file via the sass subprocess.

    :raises AbortError: when sass exits with a non-zero status
    """
    root, ext = os.path.splitext(os.path.basename(source_file))
    filename = root + self.output_extension
    output_file = os.path.join(out_dir, filename)
    logger.info(_('Generating CSS for {source_file} ...').format(
        source_file=source_file))
    command = self.build_command(source_file, output_file)
    process = subprocess.Popen(
        command, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, err = process.communicate()
    if out:
        # Format AFTER translation — formatting the msgid first defeats
        # the gettext catalog lookup.
        logger.debug(_('Received output from sass:\n{0}').format(out))
    if process.returncode != 0:
        raise AbortError(
            _('Sass failed to generate CSS:\n{0}').format(err))
def _generate_output(self, outdir):
    """Walk the site source and produce output for every file."""
    if not os.path.exists(outdir):
        logger.info(_('Creating {outdir} ...').format(outdir=outdir))
        os.mkdir(outdir)
    else:
        logger.info(_('Updating {outdir} ...').format(outdir=outdir))
    self._collect_frontmatter()
    for dirpath, dirnames, filenames in self.site.walk():
        output_dirpath = self._get_output_dirpath(dirpath, outdir)
        logger.info(_('Populating {dirpath} ...').format(
            dirpath=output_dirpath))
        self._create_output_directories(dirnames, output_dirpath)
        for filename in filenames:
            filepath = os.path.join(dirpath, filename)
            self._process_file(filepath, output_dirpath)
def _validate_post(self, source_file, frontmatter):
    """Validate that the post contains all the required fields.

    :raises AbortError: naming the file and the missing fields
    """
    required = {"date", "title"}
    missing = required - set(frontmatter.keys())
    if missing:
        # Two fixes: format AFTER translation (formatting inside _()
        # defeats the catalog lookup) and restore the {filename}
        # placeholder so the offending file is actually named.
        raise AbortError(
            _("The blog post, {filename}, "
              "is missing required fields: {missing_fields}").format(
                filename=source_file,
                missing_fields=", ".join(missing)))
def _parse_feed(self, source_file):
    """Load feed metadata from a JSON file and build an AtomFeed.

    :raises AbortError: when the JSON is invalid or entries are missing
    """
    try:
        with io.open(source_file, 'r', encoding='utf-8') as f:
            metadata = json.loads(f.read())
        if metadata.get('entries') is None:
            raise ValueError(_('Missing entries list.'))
        entries = metadata['entries']
        # AtomFeed expects FeedEntry objects for the entries keyword so
        # remove it from the metadata and add it after the feed is built.
        del metadata['entries']
        feed = AtomFeed(**metadata)
        # Plain loop: the old list comprehension built a throwaway list
        # purely for its side effects.
        for entry in entries:
            feed.add(self._make_entry(entry))
    except ValueError as error:
        raise AbortError(_('Invalid feed {source_file}: {error}').format(
            source_file=source_file, error=str(error)))
    return feed
class BuildCommand(Command):
    """CLI command that builds a site into an output directory."""

    name = 'build'
    description = _('Build a site in an output directory.')
    help = _('build a site')

    def register(self, subparsers):
        """Add the build command's arguments to the subparser."""
        parser = super(BuildCommand, self).register(subparsers)
        parser.add_argument('site', nargs='?',
                            help=_('the path to your website'))
        parser.add_argument('outdir', nargs='?',
                            help=_('an optional output directory to create or'
                                   ' update if it already exists'))

    def run(self, args):
        """Build the site once and report completion."""
        site = Site.build(args)
        director = prepare_director(args, site)
        director.produce()
        finish()
def compose(self, catalog, source_file, out_dir):
    """Copy a file to the destination if the file does not exist or was
    modified."""
    filename = os.path.basename(source_file)
    destination = os.path.join(out_dir, filename)
    # Do not copy files that are already there unless different.
    if os.path.exists(destination):
        if filecmp.cmp(source_file, destination):
            # Files are equal. Do nothing. The {filename} placeholder is
            # restored so the format kwargs are actually interpolated.
            logger.debug(_('Skipping {filename} ... It is the same as '
                           '{destination}.').format(
                filename=filename, destination=destination))
            return
        else:
            logger.info(
                _('{filename} differs from {destination} ...').format(
                    filename=filename, destination=destination))
    logger.info(_('Copying {filename} to {out_dir} ...').format(
        filename=filename, out_dir=out_dir))
    shutil.copy(source_file, out_dir)
def compose(self, catalog, source_file, out_dir):
    """Generate CSS from a Sass source file via the sass subprocess.

    :raises AbortError: when sass exits with a non-zero status
    """
    root, ext = os.path.splitext(os.path.basename(source_file))
    filename = root + self.output_extension
    output_file = os.path.join(out_dir, filename)
    logger.info(
        _('Generating CSS for {source_file} ...').format(
            source_file=source_file))
    command = self.build_command(source_file, output_file)
    process = subprocess.Popen(command, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    (out, err) = process.communicate()
    if out:
        # Format outside _() so the untranslated msgid is looked up in
        # the message catalog.
        logger.debug(_('Received output from sass:\n{0}').format(out))
    if process.returncode != 0:
        raise AbortError(
            _('Sass failed to generate CSS:\n{0}').format(err))
def compose(self, catalog, source_file, out_dir):
    """Compose an HTML document by generating HTML from the source file,
    merging it with a template, and write the result to output directory."""
    data, source = self.get_data(source_file)
    template = self.select_template(catalog, data)
    # Determine the output filename.
    root, ext = os.path.splitext(os.path.basename(source_file))
    filename = root + self.output_extension
    output_file = os.path.join(out_dir, filename)
    if self._needs_update(template, source_file, output_file):
        logger.info(_('Generating HTML for {source_file} ...').format(
            source_file=source_file))
        data['content'] = self._generate_content(source)
        self._render_to_output(template, data, output_file)
    else:
        # Restore the {filename} placeholder so the format kwarg is used.
        logger.debug(_('Skipping {filename} ... It is up to date.').format(
            filename=filename))
def _parse_feed(self, source_file):
    """Load feed metadata from a JSON file and build an AtomFeed.

    :raises AbortError: when the JSON is invalid or entries are missing
    """
    try:
        with io.open(source_file, "r", encoding="utf-8") as f:
            metadata = json.loads(f.read())
        if metadata.get("entries") is None:
            raise ValueError(_("Missing entries list."))
        entries = metadata["entries"]
        # AtomFeed expects FeedEntry objects for the entries keyword so
        # remove it from the metadata and add it after the feed is built.
        del metadata["entries"]
        feed = AtomFeed(**metadata)
        # Plain loop instead of a side-effect list comprehension.
        for entry in entries:
            feed.add(self._make_entry(entry))
    except ValueError as error:
        raise AbortError(
            _("Invalid feed {source_file}: {error}").format(
                source_file=source_file, error=str(error)))
    return feed
def _validate_post(self, source_file, frontmatter):
    """Validate that the post contains all the required fields.

    :raises AbortError: naming the file and the missing fields
    """
    required = {'date', 'title'}
    fields = set(frontmatter.keys())
    missing = required - fields
    if missing:
        # Format after translation and restore the {filename} placeholder
        # so the offending file is named in the error.
        raise AbortError(
            _('The blog post, {filename}, '
              'is missing required fields: {missing_fields}').format(
                filename=source_file, missing_fields=', '.join(missing)))
class WatchCommand(Command):
    """CLI command that watches a site and serves the generated output."""

    name = 'watch'
    description = _('watch the site for changes and'
                    ' run a web server in the output directory')
    help = _('watch a site and run a web server')

    def register(self, subparsers):
        """Add the watch command's arguments to the subparser."""
        parser = super(WatchCommand, self).register(subparsers)
        parser.add_argument('site', nargs='?',
                            help=_('the path to your website'))
        parser.add_argument('outdir', nargs='?',
                            help=_('an optional output directory to create or'
                                   ' update if it already exists'))

    def run(self, args):
        """Build the site, then watch and serve it."""
        site = Site.build(args)
        director = prepare_director(args, site)
        director.produce()
        serve(site, director)
def _find_extensions(self, parser):
    """Check if the site options have extensions to enable."""
    for option in parser.options('site'):
        # Only 'with_*' options name extensions.
        if not option.startswith('with_'):
            continue
        try:
            extension = option.split('with_', 1)[1] or option
            if parser.getboolean('site', option):
                self.active_extensions.add(extension)
        except ValueError:
            raise AbortError(_(
                'Cannot determine if {extension} is enabled.').format(
                extension=extension))
def main(argv=sys.argv):
    """Entry point: parse arguments, build the site, report the result."""
    # NOTE(review): sys.argv is captured at definition time here; passing
    # argv explicitly is the safe way to override it.
    args = parse_args(argv)
    if args.verbose:
        logger.setLevel(logging.INFO)
    if args.debug:
        logger.setLevel(logging.DEBUG)
    try:
        site = Site(args.site)
        valid, message = site.is_valid()
        if not valid:
            raise AbortError(_('Invalid site source: {0}').format(message))
        config = build_config(site.config_file, args)
        director = Director(config, site)
        director.produce()
        print(_('Complete.'))
    except AbortError as abort:
        logger.error(str(abort))
        sys.exit(_('Incomplete.'))
def _find_extensions(self, parser):
    """Check if the site options have extensions to enable."""
    for option in parser.options('site'):
        if option.startswith('with_'):
            try:
                extension = option.split('with_', 1)[1] or option
                enabled = parser.getboolean('site', option)
                if enabled:
                    self.active_extensions.add(extension)
            except ValueError:
                # getboolean raises ValueError for non-boolean values.
                raise AbortError(
                    _('Cannot determine if {extension} is enabled.').format(
                        extension=extension))
def build(self, template_path):
    """Build a Jinja template from the file path."""
    # Strip the templates path from the template path to get the relative
    # name that the ``FileSystemLoader`` wants.
    template_name = os.path.relpath(template_path, self.templates_path)
    try:
        template = self._env.get_template(template_name)
        template.last_modified = self._get_last_modified(
            template_name, template_path)
        return template
    except jinja2.exceptions.TemplateSyntaxError as e:
        raise AbortError(
            _('An error exists in the Jinja template at {template}:'
              ' {error}').format(template=template_path, error=str(e)))
def main(argv=sys.argv):
    """Entry point: parse arguments and dispatch to the chosen subcommand."""
    args = parse(argv)
    if args.verbose:
        logger.setLevel(logging.INFO)
    if args.debug:
        logger.setLevel(logging.DEBUG)
    try:
        args.func(args)
    except AbortError as abort:
        logger.error(str(abort))
        sys.exit(_('Incomplete.'))
def compose(self, catalog, source_file, out_dir):
    """Compose an HTML document by generating HTML from the source file,
    merging it with a template, and write the result to output directory."""
    data, source = self.get_data(source_file)
    template = self.select_template(catalog, data)
    # Determine the output filename.
    root, ext = os.path.splitext(os.path.basename(source_file))
    filename = root + self.output_extension
    output_file = os.path.join(out_dir, filename)
    if self._needs_update(template, source_file, output_file):
        logger.info(
            _('Generating HTML for {source_file} ...').format(
                source_file=source_file))
        data['content'] = self._generate_content(source)
        self._render_to_output(template, data, output_file)
    else:
        # Restore the {filename} placeholder so the format kwarg is used.
        logger.debug(
            _('Skipping {filename} ... It is up to date.').format(
                filename=filename))
def _validate_post(self, source_file, frontmatter):
    """Validate that the post contains all the required fields.

    :raises AbortError: naming the file and the missing fields
    """
    required = {
        'date',
        'title',
    }
    missing = required - set(frontmatter.keys())
    if missing:
        # Format after translation and restore the {filename} placeholder
        # so the offending file is named in the error.
        raise AbortError(
            _('The blog post, {filename}, '
              'is missing required fields: {missing_fields}').format(
                filename=source_file, missing_fields=', '.join(missing)))
class ScaffoldCommand(Command):
    """CLI command that makes a new site from a scaffold or lists them."""

    name = 'scaffold'
    description = _('Make a new handroll site from a scaffold '
                    'or list the available scaffolds')
    help = _('make a new handroll site')

    def register(self, subparsers):
        """Add the scaffold command's arguments to the subparser."""
        parser = super(ScaffoldCommand, self).register(subparsers)
        parser.add_argument('scaffold', nargs='?',
                            help=_('the scaffold to generate'))
        parser.add_argument('site', nargs='?', default='site',
                            help=_('the path to your website'))

    def run(self, args):
        """Make the named scaffold, or list the available ones."""
        if args.scaffold:
            scaffolder.make(args.scaffold, args.site)
            finish()
        else:
            scaffolder.list_scaffolds()
def _generate_output(self, outdir, timing):
    """Walk the site source and generate output for each file."""
    if os.path.exists(outdir):
        logger.info(_('Updating {outdir} ...').format(outdir=outdir))
    else:
        logger.info(_('Creating {outdir} ...').format(outdir=outdir))
        os.mkdir(outdir)
    for dirpath, dirnames, filenames in os.walk(self.site.path):
        # Prevent work on the output or templates directory and skip the
        # default template itself.
        if dirpath == self.site.path:
            if self.site.OUTPUT in dirnames:
                dirnames.remove(self.site.OUTPUT)
            if template.TEMPLATES_DIR in dirnames:
                dirnames.remove(template.TEMPLATES_DIR)
            if template.DEFAULT_TEMPLATE in filenames:
                filenames.remove(template.DEFAULT_TEMPLATE)
        self.prune_skip_directories(dirnames)
        output_dirpath = self._get_output_dirpath(dirpath, outdir)
        logger.info(_('Populating {dirpath} ...').format(
            dirpath=output_dirpath))
        # Create new directories in output.
        for dirname in dirnames:
            out_dir = os.path.join(output_dirpath, dirname)
            # The directory may already exist for updates.
            if not os.path.exists(out_dir):
                logger.info(_('Creating directory {out_dir} ...').format(
                    out_dir=out_dir))
                os.mkdir(out_dir)
        for filename in filenames:
            self._process_file(os.path.join(dirpath, filename),
                               output_dirpath, timing)
def _split_content_with_frontmatter(self, first, source, source_file):
    """Separate frontmatter from source material."""
    max_splits = 1
    # With a directive present, there must be two document markers.
    if first.startswith('%YAML'):
        max_splits = 2
    content = source.split(self.document_marker, max_splits)
    try:
        # NOTE(review): yaml.load without SafeLoader can construct
        # arbitrary objects from tagged input; consider yaml.safe_load
        # if the frontmatter is not trusted.
        data = yaml.load(content[max_splits - 1])
    except ScannerError as ex:
        raise AbortError(_(
            'There is invalid YAML in the frontmatter: {details}').format(
                details=str(ex)))
    try:
        source = content[max_splits]
    except IndexError:
        raise AbortError(_('A YAML marker was missing in {source}').format(
            source=source_file))
    if 'title' in data:
        data['title'] = escape(data['title'])
    return data, source
def on_pre_composition(self, director):
    """Check that all the required configuration exists."""
    parser = self._config.parser
    if not parser.has_section('blog'):
        raise AbortError(
            _('A blog section is missing in the configuration file.'))
    # Collect atom feed configuration.
    for metadata, option in self.required_metadata.items():
        self._add_atom_metadata(metadata, option)
    self.atom_output = self._get_option('atom_output')
    # Collect HTML listing configuration.
    if parser.has_option('blog', 'list_template'):
        self.list_template = self._get_option('list_template')
        self.list_output = self._get_option('list_output')
    # Grab the resolver from the director for determining URLs for posts.
    self._resolver = director.resolver