def run(self):
    """Build the shared context, run every generator, and write the site."""
    context = self.settings.copy()
    # Both entries are shared, mutable state for all generators.
    context['filenames'] = {}
    context['localsiteurl'] = self.settings.get('SITEURL')

    generators = []
    for cls in self.get_generator_classes():
        generators.append(cls(
            context,
            self.settings,
            self.path,
            self.theme,
            self.output_path,
            self.markup,
        ))

    for gen in generators:
        if hasattr(gen, 'generate_context'):
            gen.generate_context()

    # Wipe the output directory only when explicitly requested, and never
    # when the source tree lives inside it.
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path)

    writer = self.get_writer()
    for gen in generators:
        if hasattr(gen, 'generate_output'):
            gen.generate_output(writer)

    signals.finalized.send(self)
def run(self):
    """Run the generators and return"""
    context = self.settings.copy()
    generators = [
        cls(context, self.settings, self.path, self.theme,
            self.output_path, self.markup, self.delete_outputdir)
        for cls in self.get_generator_classes()
    ]

    for gen in generators:
        if hasattr(gen, "generate_context"):
            gen.generate_context()

    # Erase the output directory, but only on explicit request and only
    # when the source tree is not located inside it.
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path)

    writer = self.get_writer()

    # Share the webassets environment built by the first generator with the
    # next two (positional coupling with get_generator_classes() ordering).
    if self.settings["WEBASSETS"]:
        generators[1].env.assets_environment = generators[0].assets_env
        generators[2].env.assets_environment = generators[0].assets_env

    for gen in generators:
        if hasattr(gen, "generate_output"):
            gen.generate_output(writer)

    signals.finalized.send(self)
def test_clean_output_dir_is_file(self):
    """clean_output_dir must remove a plain *file* occupying the output path."""
    test_directory = os.path.join(os.path.dirname(__file__),
                                  'this_is_a_file')
    # Create an empty file (not a directory) at the output location.
    # A context manager closes the handle even if write() raises; the
    # original open()/close() pair leaked it on failure.
    with open(test_directory, 'w') as f:
        f.write('')
    utils.clean_output_dir(test_directory)
    # assertFalse reads better than assertTrue(not ...).
    self.assertFalse(os.path.exists(test_directory))
def run(self):
    """Run the generators and return"""
    # The context starts as a copy of the settings and is shared (and
    # mutated in place) by every generator instantiated below.
    context = self.settings.copy()
    generators = [
        cls(context, self.settings, self.path, self.theme,
            self.output_path, self.markup, self.delete_outputdir)
        for cls in self.get_generator_classes()
    ]

    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()

    # erase the directory if it is not the source and if that's
    # explicitly asked
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path)

    writer = self.get_writer()

    # pass the assets environment to the generators
    # NOTE(review): indices 0-2 assume a fixed ordering of the classes
    # returned by get_generator_classes() -- confirm before reordering.
    if self.settings['WEBASSETS']:
        generators[1].env.assets_environment = generators[0].assets_env
        generators[2].env.assets_environment = generators[0].assets_env

    for p in generators:
        if hasattr(p, 'generate_output'):
            p.generate_output(writer)

    signals.finalized.send(self)
def run(self):
    """Run the generators and return.

    Builds the shared context, instantiates every generator, lets each one
    populate the context, optionally wipes the output directory, then asks
    each generator to write its output.
    """
    context = self.settings.copy()
    generators = [
        cls(
            context,
            self.settings,
            self.path,
            self.theme,
            self.output_path,
            self.markup,
            self.keep
        ) for cls in self.get_generator_classes()
    ]

    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()

    # Erase the output directory only when the source tree does NOT live
    # inside it, and only when --keep was not requested.  The original
    # condition was inverted (missing `not`): it cleaned precisely when the
    # source was inside the output directory, which would have deleted the
    # sources -- the opposite of its own comment and of every sibling
    # implementation of this method.
    if (not os.path.realpath(self.path).startswith(self.output_path)
            and not self.keep):
        clean_output_dir(self.output_path)

    writer = self.get_writer()
    for p in generators:
        if hasattr(p, 'generate_output'):
            p.generate_output(writer)
def run(self):
    """Run every generator: build the shared context, then write output."""
    context = self.settings.copy()
    generators = [
        klass(context, self.settings, self.path, self.theme,
              self.output_path, self.markup, self.delete_outputdir)
        for klass in self.get_generator_classes()
    ]

    for gen in generators:
        if hasattr(gen, 'generate_context'):
            gen.generate_context()

    # Clean the output directory only when explicitly requested, and never
    # when the source tree lives inside it.
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path)

    writer = self.get_writer()
    for gen in generators:
        if hasattr(gen, 'generate_output'):
            gen.generate_output(writer)
def test_clean_output_dir_is_file(self):
    """A plain file sitting at the output path is deleted."""
    retention = ()
    target = os.path.join(os.path.dirname(__file__), "this_is_a_file")
    # Drop an empty file where the output directory would normally be.
    with open(target, "w") as stub:
        stub.write("")
    utils.clean_output_dir(target, retention)
    self.assertFalse(os.path.exists(target))
def test_clean_output_dir(self):
    """clean_output_dir empties the directory but keeps it in place."""
    base = os.path.dirname(__file__)
    target = os.path.join(base, 'clean_output')
    source = os.path.join(base, 'content')
    shutil.copytree(source, target)
    utils.clean_output_dir(target)
    # Directory must survive, but with no entries left inside.
    self.assertTrue(os.path.isdir(target))
    self.assertListEqual([], os.listdir(target))
    shutil.rmtree(target)
def test_clean_output_dir(self):
    """With an empty retention list, everything inside the dir is removed."""
    retention = ()
    base = os.path.dirname(__file__)
    target = os.path.join(base, "clean_output")
    source = os.path.join(base, "content")
    shutil.copytree(source, target)
    utils.clean_output_dir(target, retention)
    # The directory itself survives, emptied of all entries.
    self.assertTrue(os.path.isdir(target))
    self.assertListEqual([], os.listdir(target))
    shutil.rmtree(target)
def test_clean_output_dir_is_file(self):
    """A regular file at the output path must be removed."""
    retention = ()
    target = os.path.join(os.path.dirname(__file__), 'this_is_a_file')
    with open(target, 'w') as stub:
        stub.write('')
    utils.clean_output_dir(target, retention)
    self.assertFalse(os.path.exists(target))
def write_site(ctx):
    # Render the whole site: static root assets, one page per published
    # post, then the index page and the feed.
    output_post_dir = os.path.join(SITE_DIR, 'posts')
    clean_output_dir(SITE_DIR, [])   # wipe the previous build, retain nothing
    copy(SITE_ROOT_DIR, SITE_DIR)    # copy static root files into the site
    mkdir_p(output_post_dir)
    write_post_to_dir = functools.partial(write_post, output_post_dir, ctx)
    # NOTE: itertools.imap/ifilter are Python 2 only (lazy map/filter).
    # Every post is written to disk first; unpublished ones are then
    # filtered out of the index/feed listing.
    posts = itertools.imap(write_post_to_dir, get_posts())
    posts = itertools.ifilter(lambda x: x.get('published'), posts)
    # Newest first for the index page and the feed.
    posts = sorted(list(posts), key=lambda x: x.get('date'), reverse=True)
    write_index(SITE_DIR, ctx, posts)
    write_feed(SITE_DIR, ctx, posts)
def run(self):
    """Run the generators and return"""
    start_time = time.time()
    context = self.settings.copy()
    # Share these among all the generators and content objects:
    context['filenames'] = {}  # maps source path to Content object or None
    context['localsiteurl'] = self.settings['SITEURL']

    generators = [
        cls(
            context=context,
            settings=self.settings,
            path=self.path,
            theme=self.theme,
            output_path=self.output_path,
        ) for cls in self.get_generator_classes()
    ]

    # erase the directory if it is not the source and if that's
    # explicitly asked
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path, self.output_retention)

    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()

    signals.all_generators_finalized.send(generators)

    writer = self.get_writer()

    for p in generators:
        if hasattr(p, 'generate_output'):
            p.generate_output(writer)

    signals.finalized.send(self)

    # Report how many items were built and the elapsed wall-clock time.
    articles_generator = next(g for g in generators
                              if isinstance(g, ArticlesGenerator))
    pages_generator = next(g for g in generators
                           if isinstance(g, PagesGenerator))
    print('Done: Processed {} article(s), {} draft(s) and {} page(s) in '
          '{:.2f} seconds.'.format(
              len(articles_generator.articles) +
              len(articles_generator.translations),
              len(articles_generator.drafts) +
              len(articles_generator.drafts_translations),
              len(pages_generator.pages) +
              len(pages_generator.translations),
              time.time() - start_time))
def run(self):
    """Run the generators and return"""
    start_time = time.time()
    context = self.settings.copy()
    context['filenames'] = {}  # share the dict between all the generators
    context['localsiteurl'] = self.settings.get('SITEURL')  # share

    generators = [
        cls(
            context,
            self.settings,
            self.path,
            self.theme,
            self.output_path,
            self.markup,
        ) for cls in self.get_generator_classes()
    ]

    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()

    # erase the directory if it is not the source and if that's
    # explicitly asked
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path)

    writer = self.get_writer()

    for p in generators:
        if hasattr(p, 'generate_output'):
            p.generate_output(writer)

    signals.finalized.send(self)

    # Print a summary of what was processed and how long it took.
    articles_generator = next(g for g in generators
                              if isinstance(g, ArticlesGenerator))
    pages_generator = next(g for g in generators
                           if isinstance(g, PagesGenerator))
    print('Done: Processed {} articles and {} pages in {:.2f} seconds.'.
          format(
              len(articles_generator.articles) +
              len(articles_generator.translations),
              len(pages_generator.pages) +
              len(pages_generator.translations),
              time.time() - start_time))
def generate_output(self, writer=None):
    ''' Generate redirect files '''
    logger.info('Generating permalink files in %r',
                self.permalink_output_path)
    clean_output_dir(self.permalink_output_path, [])
    mkdir_p(self.permalink_output_path)
    # Load the redirect template shipped next to this module.
    path = os.path.dirname(os.path.realpath(__file__))
    env = Environment(loader=FileSystemLoader(path))
    template = env.get_template('permalink.html')
    settings = self.settings.copy()
    if settings.get('RELATIVE_URLS', False):
        # Rewrite SITEURL so links resolve relative to the permalink dir.
        settings['SITEURL'] = path_to_url(
            get_relative_path(
                os.path.join(settings['PERMALINK_PATH'], 'dummy.html')))
    # One .htaccess collects a Redirect rule per permalink; alongside it,
    # an HTML redirect page is rendered per permalink id.
    with open(os.path.join(self.permalink_output_path, '.htaccess'),
              'w') as redirect_file:
        for content in itertools.chain(self.context['articles'],
                                       self.context['pages']):
            for permalink_id in content.get_permalink_ids_iter():
                relative_permalink_path = os.path.join(
                    self.settings['PERMALINK_PATH'], permalink_id) + '.html'
                permalink_path = os.path.join(self.output_path,
                                              relative_permalink_path)
                localcontext = settings.copy()
                localcontext['content'] = content
                localcontext['page'] = content
                # NOTE(review): the file is opened in binary mode but
                # template.render() presumably returns text -- confirm the
                # rendered value is bytes (or that this targets Python 2).
                with open(permalink_path, 'wb') as f:
                    f.write(template.render(**localcontext))
                signals.content_written.send(permalink_path,
                                             context=localcontext)
                redirect_file.write(
                    'Redirect permanent "/{relative_permalink_path}" "{url}"\n'
                    .format(
                        url=article_url(content),
                        permalink_id=permalink_id,
                        relative_permalink_path=relative_permalink_path,
                    ))
def run(self):
    """Build the site: run every generator, then report timing stats."""
    start_time = time.time()
    context = self.settings.copy()
    # Both entries below are shared, mutable state for all generators.
    context['filenames'] = {}
    context['localsiteurl'] = self.settings['SITEURL']

    generators = []
    for cls in self.get_generator_classes():
        generators.append(cls(
            context=context,
            settings=self.settings,
            path=self.path,
            theme=self.theme,
            output_path=self.output_path,
            markup=self.markup,
        ))

    for gen in generators:
        if hasattr(gen, 'generate_context'):
            gen.generate_context()

    # Wipe the output dir only on explicit request, and only when the
    # source tree is not located inside it.
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path)

    writer = self.get_writer()
    for gen in generators:
        if hasattr(gen, 'generate_output'):
            gen.generate_output(writer)

    signals.finalized.send(self)

    articles_generator = next(g for g in generators
                              if isinstance(g, ArticlesGenerator))
    pages_generator = next(g for g in generators
                           if isinstance(g, PagesGenerator))
    article_count = (len(articles_generator.articles) +
                     len(articles_generator.translations))
    page_count = (len(pages_generator.pages) +
                  len(pages_generator.translations))
    print('Done: Processed {} articles and {} pages in {:.2f} seconds.'.format(
        article_count, page_count, time.time() - start_time))
def generate_output(self, writer=None):
    """Generate redirect files"""
    logger.info("Generating permalink files in %r",
                self.permalink_output_path)
    clean_output_dir(self.permalink_output_path, [])
    mkdir_p(self.permalink_output_path)
    # One HTML redirect page per permalink id of every article and page.
    for content in itertools.chain(self.context["articles"],
                                   self.context["pages"]):
        for permalink_id in content.get_permalink_ids_iter():
            permalink_path = (
                os.path.join(self.permalink_output_path, permalink_id)
                + ".html"
            )
            redirect_string = REDIRECT_STRING.format(
                url=article_url(content), title=content.title
            )
            # Context manager guarantees the handle is flushed and closed
            # even if write() raises; the original `open(...).write(...)`
            # leaked the file object.
            with open(permalink_path, "w") as redirect_file:
                redirect_file.write(redirect_string)
def run(self):
    """Run the generators and return"""
    context = self.settings.copy()
    generators = [
        cls(
            context, self.settings, self.path, self.theme,
            self.output_path, self.markup, self.delete_outputdir
        ) for cls in self.get_generator_classes()
    ]

    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()

    # erase the directory if it is not the source and if that's
    # explicitly asked
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path)

    # pass the assets environment to the generators
    # (indices assume the fixed ordering of get_generator_classes())
    if self.settings['WEBASSETS']:
        generators[1].env.assets_environment = generators[0].assets_env
        generators[2].env.assets_environment = generators[0].assets_env

    # Write the site once per requested flavour; (None,) means a single
    # default pass with no OUTPUT_FLAVOUR set in the context.
    flavours = self.settings.get('OUTPUT_FLAVOURS', (None,))
    for flavour in flavours:
        # PEP 8: comparisons against the None singleton must use identity
        # (`is not None`), not `!= None`.
        if flavour is not None:
            context.update(OUTPUT_FLAVOUR=str(flavour))
        writer = self.get_writer(flavour)
        for p in generators:
            if hasattr(p, 'generate_output'):
                p.generate_output(writer)
def generate_output(self, writer=None):
    ''' Generate redirect files '''
    logger.info('Generating permalink files in %r',
                self.permalink_output_path)
    clean_output_dir(self.permalink_output_path, [])
    mkdir_p(self.permalink_output_path)
    # One HTML redirect page per permalink id of every article and page.
    for content in itertools.chain(self.context['articles'],
                                   self.context['pages']):
        for permalink_id in content.get_permalink_ids_iter():
            permalink_path = os.path.join(self.permalink_output_path,
                                          permalink_id) + '.html'
            redirect_string = REDIRECT_STRING.format(
                url=article_url(content), title=content.title)
            # Context manager flushes and closes the handle even if
            # write() raises; the bare open().write() leaked it.
            with open(permalink_path, 'w') as redirect_file:
                redirect_file.write(redirect_string)
def generate_output(self, writer=None):
    ''' Generate redirect files '''
    logger.info(
        'Generating permalink files in %r', self.permalink_output_path)
    clean_output_dir(self.permalink_output_path, [])
    mkdir_p(self.permalink_output_path)
    # One HTML redirect page per permalink id of every article and page.
    for content in itertools.chain(
            self.context['articles'], self.context['pages']):
        for permalink_id in content.get_permalink_ids_iter():
            permalink_path = os.path.join(
                self.permalink_output_path, permalink_id) + '.html'
            redirect_string = REDIRECT_STRING.format(
                url=article_url(content), title=content.title)
            # Context manager flushes and closes the handle even if
            # write() raises; the bare open().write() leaked it.
            with open(permalink_path, 'w') as redirect_file:
                redirect_file.write(redirect_string)
def run(self):
    """Run the generators and return"""
    start_time = time.time()
    context = self.settings.copy()
    context["filenames"] = {}  # share the dict between all the generators
    context["localsiteurl"] = self.settings["SITEURL"]  # share

    generators = [
        cls(context=context,
            settings=self.settings,
            path=self.path,
            theme=self.theme,
            output_path=self.output_path)
        for cls in self.get_generator_classes()
    ]

    # erase the directory if it is not the source and if that's
    # explicitly asked
    if self.delete_outputdir and not os.path.realpath(
            self.path).startswith(self.output_path):
        clean_output_dir(self.output_path, self.output_retention)

    for p in generators:
        if hasattr(p, "generate_context"):
            p.generate_context()

    writer = self.get_writer()

    for p in generators:
        if hasattr(p, "generate_output"):
            p.generate_output(writer)

    signals.finalized.send(self)

    # Summary line: counts of articles, drafts and pages plus elapsed time.
    articles_generator = next(g for g in generators
                              if isinstance(g, ArticlesGenerator))
    pages_generator = next(g for g in generators
                           if isinstance(g, PagesGenerator))
    print(
        "Done: Processed {} article(s), {} draft(s) and {} page(s) in "
        "{:.2f} seconds.".format(
            len(articles_generator.articles) +
            len(articles_generator.translations),
            len(articles_generator.drafts) +
            len(articles_generator.drafts_translations),
            len(pages_generator.pages) +
            len(pages_generator.translations),
            time.time() - start_time,
        )
    )
def run(self):
    """Run the generators and return"""
    context = self.settings.copy()
    generators = [
        cls(context, self.settings, self.path, self.theme,
            self.output_path, self.markup, self.delete_outputdir)
        for cls in self.get_generator_classes()
    ]

    for p in generators:
        if hasattr(p, "generate_context"):
            p.generate_context()

    # erase the directory if it is not the source and if that's
    # explicitly asked.  The original condition was missing the `not`, so
    # it cleaned the output directory exactly when the source tree lived
    # inside it -- the one case where cleaning must be skipped (it would
    # have deleted the sources).
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path)

    writer = self.get_writer()
    for p in generators:
        if hasattr(p, "generate_output"):
            p.generate_output(writer)
def run(self):
    """Run the generators and return"""
    context = self.settings.copy()
    generators = [
        cls(context, self.settings, self.path, self.theme,
            self.output_path, self.markup, self.delete_outputdir)
        for cls in self.get_generator_classes()
    ]

    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()

    # erase the directory if it is not the source and if that's
    # explicitly asked
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path)

    # pass the assets environment to the generators
    # (indices assume the fixed ordering of get_generator_classes())
    if self.settings['WEBASSETS']:
        generators[1].env.assets_environment = generators[0].assets_env
        generators[2].env.assets_environment = generators[0].assets_env

    # Write the site once per requested flavour; (None,) means a single
    # default pass with no OUTPUT_FLAVOUR set in the context.
    flavours = self.settings.get('OUTPUT_FLAVOURS', (None,))
    for flavour in flavours:
        # PEP 8: comparisons against the None singleton must use identity
        # (`is not None`), not `!= None`.
        if flavour is not None:
            context.update(OUTPUT_FLAVOUR=str(flavour))
        writer = self.get_writer(flavour)
        for p in generators:
            if hasattr(p, 'generate_output'):
                p.generate_output(writer)
def test_clean_output_dir_not_there(self):
    """Cleaning a non-existent directory is a no-op, not an error."""
    retention = ()
    missing = os.path.join(os.path.dirname(__file__), 'does_not_exist')
    utils.clean_output_dir(missing, retention)
    self.assertFalse(os.path.exists(missing))
def run(self):
    """Run the generators and return"""
    start_time = time.time()
    context = self.settings.copy()
    # Share these among all the generators and content objects
    # They map source paths to Content objects or None
    context['generated_content'] = {}
    context['static_links'] = set()
    context['static_content'] = {}
    context['localsiteurl'] = self.settings['SITEURL']

    generators = [
        cls(
            context=context,
            settings=self.settings,
            path=self.path,
            theme=self.theme,
            output_path=self.output_path,
        ) for cls in self.get_generator_classes()
    ]

    # erase the directory if it is not the source and if that's
    # explicitly asked
    if (self.delete_outputdir and not
            os.path.realpath(self.path).startswith(self.output_path)):
        clean_output_dir(self.output_path, self.output_retention)

    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()

    # Second pass: let generators fix up inter-site links now that every
    # generator has populated the shared context.
    for p in generators:
        if hasattr(p, 'refresh_metadata_intersite_links'):
            p.refresh_metadata_intersite_links()

    signals.all_generators_finalized.send(generators)

    writer = self.get_writer()

    for p in generators:
        if hasattr(p, 'generate_output'):
            p.generate_output(writer)

    signals.finalized.send(self)

    # Build a human-readable summary with correctly pluralized nouns.
    articles_generator = next(g for g in generators
                              if isinstance(g, ArticlesGenerator))
    pages_generator = next(g for g in generators
                           if isinstance(g, PagesGenerator))

    pluralized_articles = maybe_pluralize(
        (len(articles_generator.articles) +
         len(articles_generator.translations)),
        'article',
        'articles')
    pluralized_drafts = maybe_pluralize(
        (len(articles_generator.drafts) +
         len(articles_generator.drafts_translations)),
        'draft',
        'drafts')
    pluralized_pages = maybe_pluralize(
        (len(pages_generator.pages) +
         len(pages_generator.translations)),
        'page',
        'pages')
    pluralized_hidden_pages = maybe_pluralize(
        (len(pages_generator.hidden_pages) +
         len(pages_generator.hidden_translations)),
        'hidden page',
        'hidden pages')
    pluralized_draft_pages = maybe_pluralize(
        (len(pages_generator.draft_pages) +
         len(pages_generator.draft_translations)),
        'draft page',
        'draft pages')

    print('Done: Processed {}, {}, {}, {} and {} in {:.2f} seconds.'
          .format(
              pluralized_articles,
              pluralized_drafts,
              pluralized_pages,
              pluralized_hidden_pages,
              pluralized_draft_pages,
              time.time() - start_time))
def test_clean_output_dir_not_there(self):
    """Cleaning a directory that does not exist must not raise."""
    test_directory = os.path.join(os.path.dirname(__file__),
                                  'does_not_exist')
    utils.clean_output_dir(test_directory)
    # assertFalse is the idiomatic form of assertTrue(not ...) and gives a
    # clearer failure message.
    self.assertFalse(os.path.exists(test_directory))
def run(self):
    """Run the generators and return"""
    start_time = time.time()
    context = self.settings.copy()
    # Share these among all the generators and content objects
    # They map source paths to Content objects or None
    context['generated_content'] = {}
    context['static_links'] = set()
    context['static_content'] = {}
    context['localsiteurl'] = self.settings['SITEURL']

    generators = [
        cls(
            context=context,
            settings=self.settings,
            path=self.path,
            theme=self.theme,
            output_path=self.output_path,
        ) for cls in self._get_generator_classes()
    ]

    # Delete the output directory if (1) the appropriate setting is True
    # and (2) that directory is not the parent of the source directory
    # (commonpath([output]) == commonpath([output, source]) exactly when
    # output is an ancestor of -- or equal to -- the source path, so the
    # inequality below means "output does NOT contain the sources").
    if (self.delete_outputdir and os.path.commonpath(
            [os.path.realpath(self.output_path)]) != os.path.commonpath([
                os.path.realpath(self.output_path),
                os.path.realpath(self.path)
            ])):
        clean_output_dir(self.output_path, self.output_retention)

    for p in generators:
        if hasattr(p, 'generate_context'):
            p.generate_context()

    # Second pass: fix up inter-site links once the shared context is full.
    for p in generators:
        if hasattr(p, 'refresh_metadata_intersite_links'):
            p.refresh_metadata_intersite_links()

    signals.all_generators_finalized.send(generators)

    writer = self._get_writer()

    for p in generators:
        if hasattr(p, 'generate_output'):
            p.generate_output(writer)

    signals.finalized.send(self)

    # Compose the end-of-run summary with correctly pluralized counts.
    articles_generator = next(g for g in generators
                              if isinstance(g, ArticlesGenerator))
    pages_generator = next(g for g in generators
                           if isinstance(g, PagesGenerator))

    pluralized_articles = maybe_pluralize(
        (len(articles_generator.articles) +
         len(articles_generator.translations)),
        'article',
        'articles')
    pluralized_drafts = maybe_pluralize(
        (len(articles_generator.drafts) +
         len(articles_generator.drafts_translations)),
        'draft',
        'drafts')
    pluralized_hidden_articles = maybe_pluralize(
        (len(articles_generator.hidden_articles) +
         len(articles_generator.hidden_translations)),
        'hidden article',
        'hidden articles')
    pluralized_pages = maybe_pluralize(
        (len(pages_generator.pages) +
         len(pages_generator.translations)),
        'page',
        'pages')
    pluralized_hidden_pages = maybe_pluralize(
        (len(pages_generator.hidden_pages) +
         len(pages_generator.hidden_translations)),
        'hidden page',
        'hidden pages')
    pluralized_draft_pages = maybe_pluralize(
        (len(pages_generator.draft_pages) +
         len(pages_generator.draft_translations)),
        'draft page',
        'draft pages')

    console.print(
        'Done: Processed {}, {}, {}, {}, {} and {} in {:.2f} seconds.'.
        format(pluralized_articles, pluralized_drafts,
               pluralized_hidden_articles, pluralized_pages,
               pluralized_hidden_pages, pluralized_draft_pages,
               time.time() - start_time))