def test_get_relative_path(self):
    """get_relative_path should climb back to the site root from a path."""
    samples = (('/test/test', '../../.'),
               ('/test/test/', '../../../.'),
               ('/', '../.'))
    for value, expected in samples:
        # assertEquals is a deprecated alias; assertEqual is canonical.
        self.assertEqual(utils.get_relative_path(value), expected)
def write_feed(self, elements, context, filename=None, feed_type='atom'):
    """Generate a feed with the list of articles provided.

    Return the feed. If no filename is specified, just return the
    feed object without writing anything to disk.

    :param elements: the articles to put on the feed.
    :param context: the context to get the feed metadata.
    :param filename: the filename to output (relative to self.output_path).
    :param feed_type: the feed type to use (atom or rss)
    """
    # Fall back to a relative site URL when SITEURL is not configured.
    self.site_url = context.get('SITEURL', get_relative_path(filename))
    self.feed_url = '%s/%s' % (self.site_url, filename)

    feed = self._create_new_feed(feed_type, context)

    for item in elements:
        self._add_item_to_the_feed(feed, item)

    if filename:
        complete_path = os.path.join(self.output_path, filename)
        try:
            os.makedirs(os.path.dirname(complete_path))
        except Exception:
            # Best-effort: the directory may already exist.
            pass
        # NOTE(review): Python 2 era code (print statement); the file is
        # opened in text mode and closed manually rather than via `with`.
        fp = open(complete_path, 'w')
        feed.write(fp, 'utf-8')
        print u' [ok] writing %s' % complete_path
        fp.close()
    return feed
def write_file(self, name, template, context, relative_urls=True, **kwargs):
    """Render the template and write the file.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolute ones
    :param **kwargs: additional variables to pass to the templates
    """
    # Work on a copy so the shared context is not mutated.
    localcontext = context.copy()
    if relative_urls:
        localcontext['SITEURL'] = get_relative_path(name)
    localcontext.update(kwargs)
    # Patch content objects so their links resolve relative to `name`.
    self.update_context_contents(name, localcontext)

    output = template.render(localcontext)
    filename = os.sep.join((self.output_path, name))
    try:
        os.makedirs(os.path.dirname(filename))
    except Exception:
        # Best-effort: the directory may already exist.
        pass
    # NOTE(review): `open` with an encoding kwarg here presumably comes
    # from `codecs.open` on Python 2 — confirm against the file's imports.
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(output)
    print u' [ok] writing %s' % filename
def _update_content(name, input): """Change all the relatives paths of the input content to relatives paths suitable fot the ouput content :param name: path of the output. :param input: input resource that will be passed to the templates. """ content = input._content hrefs = re.compile(r'<\s*[^\>]*href\s*=(^!#)\s*(["\'])(.*?)\1') srcs = re.compile(r'<\s*[^\>]*src\s*=\s*(["\'])(.*?)\1') matches = hrefs.findall(content) matches.extend(srcs.findall(content)) relative_paths = [] for found in matches: found = found[1] if found not in relative_paths: relative_paths.append(found) for relative_path in relative_paths: if not ":" in relative_path: # we don't want to rewrite protocols dest_path = os.sep.join( (get_relative_path(name), "static", relative_path)) content = content.replace(relative_path, dest_path) return content
def _update_object_content(name, input): """Change all the relatives paths of the input content to relatives paths suitable fot the ouput content :param name: path of the output. :param input: input resource that will be passed to the templates. """ content = input._content hrefs = re.compile(r'<\s*[^\>]*href\s*=\s*(["\'])(.*?)\1') srcs = re.compile(r'<\s*[^\>]*src\s*=\s*(["\'])(.*?)\1') matches = hrefs.findall(content) matches.extend(srcs.findall(content)) relative_paths = [] for found in matches: found = found[1] if found not in relative_paths: relative_paths.append(found) for relative_path in relative_paths: if not "://" in relative_path: # we don't want to rewrite protocols dest_path = os.sep.join((get_relative_path(name), "static", relative_path)) content = content.replace(relative_path, dest_path) return content
def replacer(m):
    # Join the climb-back prefix, the static dir and the matched path,
    # then normalise the result before splicing it back into the markup.
    target = os.sep.join(
        (get_relative_path(name), "static", m.group('path')))
    quote = m.group('quote')
    return m.group('markup') + quote + os.path.normpath(target) + quote
def update_context_contents(self, name, context):
    """Recursively run the context to find elements (articles, pages, etc)
    whose content getter needs to be modified in order to deal with
    relative paths.

    :param name: name of the file to output.
    :param context: dict that will be passed to the templates, which need
                    to be updated.
    """
    def _update_content(name, input):
        """Change all the relative paths of the input content to relative
        paths suitable for the output content.

        :param name: path of the output.
        :param input: input resource that will be passed to the templates.
        """
        content = input._content
        # Verbose-mode pattern: captures the leading markup, the quote
        # character and the URL value; skips fragments, queries and
        # anything with a protocol scheme.
        hrefs = re.compile(r"""
            (?P<markup><\s*[^\>]*  # match tag with src and href attr
                (?:href|src)\s*=\s*
            )
            (?P<quote>["\'])       # require value to be quoted
            (?![#?])               # don't match fragment or query URLs
            (?![a-z]+:)            # don't match protocol URLS
            (?P<path>.*?)          # the url value
            \2""", re.X)

        def replacer(m):
            relative_path = m.group('path')
            # Static location is configurable via CONTENT_STATIC_LOC.
            replace_with = self.settings.get('CONTENT_STATIC_LOC', 'static')
            dest_path = os.path.normpath(
                os.sep.join((get_relative_path(name), replace_with,
                             relative_path)))
            return m.group('markup') + m.group('quote') + dest_path \
                + m.group('quote')

        return hrefs.sub(replacer, content)

    if context is None:
        return

    if hasattr(context, 'values'):
        context = context.values()

    for item in context:
        # run recursively on iterables
        if hasattr(item, '__iter__'):
            self.update_context_contents(name, item)
        # if it is a content, patch it
        elif hasattr(item, '_content'):
            relative_path = get_relative_path(name)
            # self.reminder tracks which relative paths each item was
            # already patched for, so a content object is only rebound
            # once per distinct output location.
            paths = self.reminder.setdefault(item, [])
            if relative_path not in paths:
                paths.append(relative_path)
                setattr(item, "_get_content",
                        partial(_update_content, name, item))
def write_feed(self, elements, context, path=None, url=None,
               feed_type='atom', override_output=False, feed_title=None):
    """Generate a feed with the list of articles provided.

    Return the feed. If no path or output_path is specified, just
    return the feed object.

    :param elements: the articles to put on the feed.
    :param context: the context to get the feed metadata.
    :param path: the path to output.
    :param url: the publicly visible feed URL; if None, path is used
        instead
    :param feed_type: the feed type to use (atom or rss)
    :param override_output: boolean telling if we can override previous
        output with the same name (and if next files written with the
        same name should be skipped to keep that one)
    :param feed_title: the title of the feed.
    """
    if not is_selected_for_writing(self.settings, path):
        return

    # SITEURL falls back to a relative URL computed from the output path.
    self.site_url = context.get('SITEURL',
                                path_to_url(get_relative_path(path)))
    self.feed_domain = context.get('FEED_DOMAIN')
    self.feed_url = self.urljoiner(self.feed_domain, url if url else path)

    feed = self._create_new_feed(feed_type, feed_title, context)

    # Honour FEED_MAX_ITEMS by truncating the number of entries written.
    max_items = len(elements)
    if self.settings['FEED_MAX_ITEMS']:
        max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
    for i in range(max_items):
        self._add_item_to_the_feed(feed, elements[i])
    signals.feed_generated.send(context, feed=feed)

    if path:
        # sanitised_join guards against the path escaping output_path.
        complete_path = sanitised_join(self.output_path, path)

        try:
            os.makedirs(os.path.dirname(complete_path))
        except Exception:
            # Best-effort: the directory may already exist.
            pass

        encoding = 'utf-8' if six.PY3 else None
        with self._open_w(complete_path, encoding, override_output) as fp:
            feed.write(fp, 'utf-8')
            logger.info('Writing %s', complete_path)
        signals.feed_written.send(complete_path, context=context, feed=feed)
    return feed
def update_context_contents(self, name, context):
    """Recursively run the context to find elements (articles, pages, etc)
    whose content getter needs to be modified in order to deal with
    relative paths.

    :param name: name of the file to output.
    :param context: dict that will be passed to the templates, which need
                    to be updated.
    """
    def _update_content(name, input):
        """Change all the relative paths of the input content to relative
        paths suitable for the output content.

        :param name: path of the output.
        :param input: input resource that will be passed to the templates.
        """
        content = input._content
        # Verbose-mode pattern: captures markup prefix, quote and URL;
        # skips fragments, query-only URLs and protocol-prefixed links.
        hrefs = re.compile(
            r"""
            (?P<markup><\s*[^\>]*  # match tag with src and href attr
                (?:href|src)\s*=\s*
            )
            (?P<quote>["\'])       # require value to be quoted
            (?![#?])               # don't match fragment or query URLs
            (?![a-z]+:)            # don't match protocol URLS
            (?P<path>.*?)          # the url value
            \2""", re.X)

        def replacer(m):
            relative_path = m.group('path')
            dest_path = os.path.normpath(
                os.sep.join(
                    (get_relative_path(name), "static", relative_path)))
            return m.group('markup') + m.group('quote') + dest_path \
                + m.group('quote')

        return hrefs.sub(replacer, content)

    if context is None:
        return

    if hasattr(context, 'values'):
        context = context.values()

    for item in context:
        # run recursively on iterables
        if hasattr(item, '__iter__'):
            self.update_context_contents(name, item)
        # if it is a content, patch it
        elif hasattr(item, '_content'):
            relative_path = get_relative_path(name)
            # self.reminder records the paths each item was already
            # patched for, so the getter is rebound at most once per
            # distinct output location.
            paths = self.reminder.setdefault(item, [])
            if relative_path not in paths:
                paths.append(relative_path)
                setattr(item, "_get_content",
                        partial(_update_content, name, item))
def replacer(m):
    # Build "<relative prefix>/static/<path>", normalise it and splice
    # it back between the original markup and quote characters.
    quote = m.group('quote')
    rebuilt = os.path.normpath(
        os.sep.join((get_relative_path(name), "static", m.group('path'))))
    return m.group('markup') + quote + rebuilt + quote
def write_file(self, name, template, context, relative_urls=True,
               paginated=None, **kwargs):
    """Render the template and write the file.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolute ones
    :param paginated: dict of article list to paginate - must have the
        same length (same list in different orders)
    :param **kwargs: additional variables to pass to the templates
    """
    def _write_file(template, localcontext, output_path, name):
        """Render the template and write the file to disk."""
        output = template.render(localcontext)
        filename = os.sep.join((output_path, name))
        try:
            os.makedirs(os.path.dirname(filename))
        except Exception:
            # Best-effort: the directory may already exist.
            pass
        # NOTE(review): Python 2 era code; `open` with an encoding kwarg
        # presumably comes from `codecs.open` — confirm imports.
        with open(filename, "w", encoding="utf-8") as f:
            f.write(output)
        print u" [ok] writing %s" % filename

    # Work on a copy so the shared context is not mutated.
    localcontext = context.copy()
    if relative_urls:
        localcontext["SITEURL"] = get_relative_path(name)
    localcontext.update(kwargs)
    self.update_context_contents(name, localcontext)

    # check paginated
    paginated = paginated or {}
    if self.settings.get("WITH_PAGINATION") and paginated:
        # pagination needed, init paginators
        paginators = {}
        for key in paginated.iterkeys():
            object_list = paginated[key]
            paginators[key] = Paginator(
                object_list,
                self.settings.get("DEFAULT_PAGINATION"),
                self.settings.get("DEFAULT_ORPHANS"))

        # generated pages, and write
        # NOTE(review): assumes every paginator has the same num_pages;
        # only the first one is consulted for the page count.
        for page_num in range(paginators.values()[0].num_pages):
            paginated_localcontext = localcontext.copy()
            paginated_name = name
            for key in paginators.iterkeys():
                paginator = paginators[key]
                page = paginator.page(page_num + 1)
                paginated_localcontext.update(
                    {"%s_paginator" % key: paginator,
                     "%s_page" % key: page})
            if page_num > 0:
                # Insert the page number before the file extension,
                # e.g. index.html -> index2.html.
                ext = "." + paginated_name.rsplit(".")[-1]
                paginated_name = paginated_name.replace(
                    ext, "%s%s" % (page_num + 1, ext))

            _write_file(template, paginated_localcontext,
                        self.output_path, paginated_name)
    else:
        # no pagination
        _write_file(template, localcontext, self.output_path, name)
def replacer(m):
    # The static location is configurable through CONTENT_STATIC_LOC.
    static_dir = self.settings.get('CONTENT_STATIC_LOC', 'static')
    quote = m.group('quote')
    rewritten = os.path.normpath(os.sep.join(
        (get_relative_path(name), static_dir, m.group('path'))))
    return m.group('markup') + quote + rewritten + quote
def test_get_relative_path(self):
    """Relative prefixes for nested output paths."""
    samples = ((os.path.join('test', 'test.html'), os.pardir),
               (os.path.join('test', 'test', 'test.html'),
                os.path.join(os.pardir, os.pardir)),
               ('test.html', os.curdir))
    for value, expected in samples:
        # assertEquals is a deprecated alias; assertEqual is canonical.
        self.assertEqual(utils.get_relative_path(value), expected)
def update_context_contents(self, name, context):
    """Recursively run the context to find elements (articles, pages, etc)
    whose content getter needs to be modified in order to deal with
    relative paths.

    :param name: name of the file to output.
    :param context: dict that will be passed to the templates, which need
                    to be updated.
    """
    def _update_content(name, input):
        """Change all the relative paths of the input content to relative
        paths suitable for the output content.

        :param name: path of the output.
        :param input: input resource that will be passed to the templates.
        """
        content = input._content
        # BUG FIX: the href pattern previously contained '(^!#)', a
        # literal capturing group (not the intended lookahead) that also
        # shifted the group numbering so \1 and found[1] referenced the
        # wrong groups.  Use the same well-formed shape as the src
        # pattern.
        hrefs = re.compile(r'<\s*[^\>]*href\s*=\s*(["\'])(.*?)\1')
        srcs = re.compile(r'<\s*[^\>]*src\s*=\s*(["\'])(.*?)\1')

        matches = hrefs.findall(content)
        matches.extend(srcs.findall(content))
        relative_paths = []
        for found in matches:
            found = found[1]  # group 2 holds the quoted path
            if found not in relative_paths:
                relative_paths.append(found)

        for relative_path in relative_paths:
            if not ":" in relative_path:  # don't rewrite protocols
                dest_path = os.sep.join(
                    (get_relative_path(name), "static", relative_path))
                content = content.replace(relative_path, dest_path)
        return content

    if context is None:
        return

    if hasattr(context, 'values'):
        context = context.values()

    for item in context:
        # run recursively on iterables
        if hasattr(item, '__iter__'):
            self.update_context_contents(name, item)
        # if it is a content, patch it
        elif hasattr(item, '_content'):
            relative_path = get_relative_path(name)
            paths = self.reminder.setdefault(item, [])
            if relative_path not in paths:
                paths.append(relative_path)
                setattr(item, "_get_content",
                        partial(_update_content, name, item))
def update_context_contents(self, name, context):
    """Recursively run the context to find elements (articles, pages, etc)
    whose content getter needs to be modified in order to deal with
    relative paths.

    :param name: name of the file to output.
    :param context: dict that will be passed to the templates, which need
                    to be updated.
    """
    def _update_content(name, input):
        """Change all the relative paths of the input content to relative
        paths suitable for the output content.

        :param name: path of the output.
        :param input: input resource that will be passed to the templates.
        """
        content = input._content
        # BUG FIX: '(^!#)' in the previous href pattern was a literal
        # group rather than the intended lookahead, and it shifted the
        # group numbering so \1 and found[1] hit the wrong groups.  Use
        # the same shape as the src pattern.
        hrefs = re.compile(r'<\s*[^\>]*href\s*=\s*(["\'])(.*?)\1')
        srcs = re.compile(r'<\s*[^\>]*src\s*=\s*(["\'])(.*?)\1')

        matches = hrefs.findall(content)
        matches.extend(srcs.findall(content))
        relative_paths = []
        for found in matches:
            found = found[1]  # group 2 holds the quoted path
            if found not in relative_paths:
                relative_paths.append(found)

        for relative_path in relative_paths:
            if not ":" in relative_path:  # don't rewrite protocols
                dest_path = os.sep.join(
                    (get_relative_path(name), "static", relative_path))
                content = content.replace(relative_path, dest_path)
        return content

    if context is None:
        return

    if hasattr(context, 'values'):
        context = context.values()

    for item in context:
        # run recursively on iterables
        if hasattr(item, '__iter__'):
            self.update_context_contents(name, item)
        # if it is a content, patch it
        elif hasattr(item, '_content'):
            relative_path = get_relative_path(name)
            paths = self.reminder.setdefault(item, [])
            if relative_path not in paths:
                paths.append(relative_path)
                setattr(item, "_get_content",
                        partial(_update_content, name, item))
def _get_localcontext(context, name, kwargs, relative_urls): localcontext = context.copy() localcontext['localsiteurl'] = localcontext.get('localsiteurl', None) if relative_urls: relative_url = path_to_url(get_relative_path(name)) localcontext['SITEURL'] = relative_url localcontext['localsiteurl'] = relative_url localcontext['output_file'] = name localcontext.update(kwargs) return localcontext
def write_feed(self, elements, context, path=None, feed_type='atom',
               feed_title=None):
    """Generate a feed with the list of articles provided.

    Return the feed. If no path or output_path is specified, just
    return the feed object.

    :param elements: the articles to put on the feed.
    :param context: the context to get the feed metadata.
    :param path: the path to output.
    :param feed_type: the feed type to use (atom or rss)
    :param feed_title: the title of the feed.
    """
    if not is_selected_for_writing(self.settings, path):
        return

    # Force the C locale while generating the feed so dates are
    # formatted per the feed specs; restore the old locale afterwards.
    old_locale = locale.setlocale(locale.LC_ALL)
    locale.setlocale(locale.LC_ALL, str('C'))
    try:
        self.site_url = context.get('SITEURL',
                                    path_to_url(get_relative_path(path)))
        self.feed_domain = context.get('FEED_DOMAIN')
        self.feed_url = '{}/{}'.format(self.feed_domain, path)

        feed = self._create_new_feed(feed_type, feed_title, context)

        # Honour FEED_MAX_ITEMS by truncating the entries written.
        max_items = len(elements)
        if self.settings['FEED_MAX_ITEMS']:
            max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
        for i in range(max_items):
            self._add_item_to_the_feed(feed, elements[i])

        if path:
            complete_path = os.path.join(self.output_path, path)

            try:
                os.makedirs(os.path.dirname(complete_path))
            except Exception:
                # Best-effort: the directory may already exist.
                pass

            encoding = 'utf-8' if six.PY3 else None
            with self._open_w(complete_path, encoding) as fp:
                feed.write(fp, 'utf-8')
                logger.info('Writing %s', complete_path)
            signals.feed_written.send(complete_path, context=context,
                                      feed=feed)
        return feed
    finally:
        locale.setlocale(locale.LC_ALL, old_locale)
def _get_localcontext(context, name, kwargs, relative_urls): localcontext = context.copy() localcontext['localsiteurl'] = localcontext.get( 'localsiteurl', None) if relative_urls: relative_url = path_to_url(get_relative_path(name)) localcontext['SITEURL'] = relative_url localcontext['localsiteurl'] = relative_url localcontext['output_file'] = name localcontext.update(kwargs) return localcontext
def replacer(m):
    quote = m.group('quote')
    joined = os.sep.join(
        (get_relative_path(name), "static", m.group('path')))
    fixed = os.path.normpath(joined)
    # On Windows, make sure we end up with Unix-like paths.
    if os.name == 'nt':
        fixed = fixed.replace('\\', '/')
    return m.group('markup') + quote + fixed + quote
def replacer(m):
    # Rebuild the URL as "<relative prefix>/static/<path>", normalised.
    candidate = os.path.normpath(os.sep.join(
        (get_relative_path(name), "static", m.group('path'))))
    # On Windows, make sure we end up with Unix-like paths.
    if os.name == 'nt':
        candidate = candidate.replace('\\', '/')
    q = m.group('quote')
    return m.group('markup') + q + candidate + q
def __init__(self, article_generator):
    """Capture everything the feed writer needs from the generator.

    :param article_generator: the Pelican article generator providing
        articles, settings and the shared context.
    """
    self.generator = article_generator
    self.articles = article_generator.articles
    self.settings = article_generator.settings
    self.context = article_generator.context
    self.path = self.settings.get('JSON_FEED')
    # Fall back to a relative site URL when SITEURL is not configured.
    fallback = path_to_url(get_relative_path(self.path))
    self.site_url = self.context.get('SITEURL', fallback)
    self.feed_domain = self.context.get('FEED_DOMAIN')
def __init__(self, article_generator):
    """Pull articles, settings and context off the generator.

    :param article_generator: the Pelican article generator this feed
        writer is attached to.
    """
    gen = article_generator
    self.generator = gen
    self.articles = gen.articles
    self.settings = gen.settings
    self.context = gen.context
    self.path = self.settings.get('JSON_FEED')
    # Default SITEURL is a relative URL computed from the feed path.
    self.site_url = self.context.get(
        'SITEURL', path_to_url(get_relative_path(self.path)))
    self.feed_domain = self.context.get('FEED_DOMAIN')
def write_feed(self, elements, context, path=None, url=None,
               feed_type='atom', override_output=False, feed_title=None):
    """Generate a feed with the list of articles provided.

    Return the feed. If no path or output_path is specified, just
    return the feed object.

    :param elements: the articles to put on the feed.
    :param context: the context to get the feed metadata.
    :param path: the path to output.
    :param url: the publicly visible feed URL; if None, path is used
        instead
    :param feed_type: the feed type to use (atom or rss)
    :param override_output: boolean telling if we can override previous
        output with the same name (and if next files written with the
        same name should be skipped to keep that one)
    :param feed_title: the title of the feed.
    """
    if not is_selected_for_writing(self.settings, path):
        return

    # SITEURL falls back to a relative URL computed from the output path.
    self.site_url = context.get(
        'SITEURL', path_to_url(get_relative_path(path)))

    self.feed_domain = context.get('FEED_DOMAIN')
    self.feed_url = self.urljoiner(self.feed_domain, url if url else path)

    feed = self._create_new_feed(feed_type, feed_title, context)

    # Honour FEED_MAX_ITEMS by truncating the number of entries written.
    max_items = len(elements)
    if self.settings['FEED_MAX_ITEMS']:
        max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
    for i in range(max_items):
        self._add_item_to_the_feed(feed, elements[i])
    signals.feed_generated.send(context, feed=feed)

    if path:
        # sanitised_join guards against the path escaping output_path.
        complete_path = sanitised_join(self.output_path, path)

        try:
            os.makedirs(os.path.dirname(complete_path))
        except Exception:
            # Best-effort: the directory may already exist.
            pass

        encoding = 'utf-8' if six.PY3 else None
        with self._open_w(complete_path, encoding, override_output) as fp:
            feed.write(fp, 'utf-8')
            logger.info('Writing %s', complete_path)
        signals.feed_written.send(
            complete_path, context=context, feed=feed)
    return feed
def test_get_relative_path(self):
    """Relative and rooted inputs yield the same climb-back prefix."""
    cases = (
        (os.path.join('test', 'test.html'), os.pardir),
        (os.path.join('test', 'test', 'test.html'),
         os.path.join(os.pardir, os.pardir)),
        ('test.html', os.curdir),
        (os.path.join('/test', 'test.html'), os.pardir),
        (os.path.join('/test', 'test', 'test.html'),
         os.path.join(os.pardir, os.pardir)),
        ('/test.html', os.curdir),
    )
    for given, wanted in cases:
        self.assertEqual(utils.get_relative_path(given), wanted)
def test_get_relative_path(self):
    """Check the climb-back prefix for nested and rooted paths."""
    expectations = [
        (os.path.join("test", "test.html"), os.pardir),
        (os.path.join("test", "test", "test.html"),
         os.path.join(os.pardir, os.pardir)),
        ("test.html", os.curdir),
        (os.path.join("/test", "test.html"), os.pardir),
        (os.path.join("/test", "test", "test.html"),
         os.path.join(os.pardir, os.pardir)),
        ("/test.html", os.curdir),
    ]
    for path, prefix in expectations:
        self.assertEqual(utils.get_relative_path(path), prefix)
def write_feed(self, elements, context, path=None, feed_type='atom',
               feed_title=None):
    """Generate a feed with the list of articles provided.

    Return the feed. If no path or output_path is specified, just
    return the feed object.

    :param elements: the articles to put on the feed.
    :param context: the context to get the feed metadata.
    :param path: the path to output.
    :param feed_type: the feed type to use (atom or rss)
    :param feed_title: the title of the feed.
    """
    if not is_selected_for_writing(self.settings, path):
        return

    # Force the C locale so feed dates follow the spec; restore after.
    old_locale = locale.setlocale(locale.LC_ALL)
    locale.setlocale(locale.LC_ALL, str('C'))
    try:
        self.site_url = context.get(
            'SITEURL', path_to_url(get_relative_path(path)))

        self.feed_domain = context.get('FEED_DOMAIN')
        self.feed_url = '{}/{}'.format(self.feed_domain, path)

        feed = self._create_new_feed(feed_type, feed_title, context)

        # Honour FEED_MAX_ITEMS by truncating the entries written.
        max_items = len(elements)
        if self.settings['FEED_MAX_ITEMS']:
            max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
        for i in range(max_items):
            self._add_item_to_the_feed(feed, elements[i])

        if path:
            complete_path = os.path.join(self.output_path, path)

            try:
                os.makedirs(os.path.dirname(complete_path))
            except Exception:
                # Best-effort: the directory may already exist.
                pass

            encoding = 'utf-8' if six.PY3 else None
            with self._open_w(complete_path, encoding) as fp:
                feed.write(fp, 'utf-8')
                logger.info('Writing %s', complete_path)
            signals.feed_written.send(complete_path, context=context,
                                      feed=feed)
        return feed
    finally:
        locale.setlocale(locale.LC_ALL, old_locale)
def process_content(article):
    """
    Get a list of PDF, PS, and EPS files for which PNG previews must be
    generated. Also make the substitutions in article content so that the
    PNG will be used as a preview and provide a link to the original.
    """
    try:
        # Prefer the lxml parser, fall back to the stdlib html.parser.
        soup = BeautifulSoup(article._content, 'lxml')
    except FeatureNotFound:
        soup = BeautifulSoup(article._content, 'html.parser')
    for img in soup.find_all('img', src=FORMAT_RE):
        # Strip any intrasite-link prefix (e.g. "{static}") from the src.
        src = re.sub(article.settings['INTRASITE_LINK_REGEX'], '',
                     img['src'].strip())
        if src.startswith(('http://', 'https://', 'ftp://')):
            # External resources are left untouched.
            continue
        if src.startswith('/'):
            src = src[1:]
        else:
            # relative to the source path of this content
            src = article.get_relative_source_path(
                os.path.join(article.relative_dir, src))
        if src not in article._context['static_content']:
            # Retry with URL-encoded spaces decoded.
            unquoted_path = src.replace('%20', ' ')
            if unquoted_path in article._context['static_content']:
                src = unquoted_path
        linked_content = article._context['static_content'].get(src)
        if not linked_content:
            continue
        # Wrap the image in a link pointing at the original document.
        link = img.wrap(soup.new_tag("a"))
        link['href'] = img['src']
        # NOTE(review): preview_dir and pdf_imgs come from an enclosing
        # scope not visible here — presumably plugin-level state mapping
        # source documents to the PNG files to generate; confirm.
        png_save_as = os.path.join(preview_dir,
                                   linked_content.save_as + '.png')
        pdf_imgs[linked_content.source_path] = png_save_as
        siteurl = article.get_siteurl()
        if article.settings['RELATIVE_URLS']:
            siteurl = path_to_url(get_relative_path(article.save_as))
        png_url = '/'.join((siteurl, preview_dir,
                            linked_content.url + '.png'))
        # Normalise Windows separators into URL separators.
        png_url = png_url.replace('\\', '/')
        img['src'] = png_url
        # Tag the image so CSS can style generated previews.
        class_list = 'pdf-img'
        if 'class' in img.attrs:
            img['class'].append(class_list)
        else:
            img['class'] = [class_list]
    article._content = unicode(soup)
def generate_output(self, writer=None):
    '''
    Generate redirect files

    Renders one permalink.html page per permalink id of every article
    and page, and writes a matching .htaccess with Redirect rules.
    The *writer* argument is accepted for generator-interface
    compatibility but not used.
    '''
    logger.info('Generating permalink files in %r',
                self.permalink_output_path)
    # Start from a clean permalink output directory.
    clean_output_dir(self.permalink_output_path, [])
    mkdir_p(self.permalink_output_path)
    # Load the redirect template shipped next to this module.
    path = os.path.dirname(os.path.realpath(__file__))
    env = Environment(loader=FileSystemLoader(path))
    template = env.get_template('permalink.html')
    settings = self.settings.copy()
    if settings.get('RELATIVE_URLS', False):
        # Compute SITEURL relative to where the permalink pages live.
        settings['SITEURL'] = path_to_url(
            get_relative_path(
                os.path.join(settings['PERMALINK_PATH'], 'dummy.html')))
    with open(os.path.join(self.permalink_output_path, '.htaccess'),
              'w') as redirect_file:
        for content in itertools.chain(self.context['articles'],
                                       self.context['pages']):
            for permalink_id in content.get_permalink_ids_iter():
                relative_permalink_path = os.path.join(
                    self.settings['PERMALINK_PATH'],
                    permalink_id) + '.html'
                permalink_path = os.path.join(self.output_path,
                                              relative_permalink_path)
                localcontext = settings.copy()
                localcontext['content'] = content
                localcontext['page'] = content
                # NOTE(review): file opened in binary mode but the
                # rendered template is written directly — presumably
                # bytes on this Python version; confirm.
                with open(permalink_path, 'wb') as f:
                    f.write(template.render(**localcontext))
                signals.content_written.send(permalink_path,
                                             context=localcontext)
                redirect_file.write(
                    'Redirect permanent "/{relative_permalink_path}" "{url}"\n'
                    .format(
                        url=article_url(content),
                        permalink_id=permalink_id,
                        relative_permalink_path=relative_permalink_path,
                    ))
def process_content(article):
    """
    Get a list of PDF, PS, and EPS files for which PNG previews must be
    generated. Also make the substitutions in article content so that the
    PNG will be used as a preview and provide a link to the original.
    """
    try:
        # Prefer the lxml parser, fall back to the stdlib html.parser.
        soup = BeautifulSoup(article._content, "lxml")
    except FeatureNotFound:
        soup = BeautifulSoup(article._content, "html.parser")
    for img in soup.find_all("img", src=FORMAT_RE):
        # Strip any intrasite-link prefix (e.g. "{static}") from the src.
        src = re.sub(article.settings["INTRASITE_LINK_REGEX"], "",
                     img["src"].strip())
        if src.startswith(("http://", "https://", "ftp://")):
            # External resources are left untouched.
            continue
        if src.startswith("/"):
            src = src[1:]
        else:
            # relative to the source path of this content
            src = article.get_relative_source_path(
                os.path.join(article.relative_dir, src))
        if src not in article._context["filenames"]:
            # Retry with URL-encoded spaces decoded.
            unquoted_path = src.replace("%20", " ")
            if unquoted_path in article._context["filenames"]:
                src = unquoted_path
        linked_content = article._context["filenames"].get(src)
        if not linked_content:
            continue
        # Wrap the image in a link pointing at the original document.
        link = img.wrap(soup.new_tag("a"))
        link["href"] = img["src"]
        # NOTE(review): preview_dir and pdf_imgs come from an enclosing
        # scope not visible here — presumably plugin-level state mapping
        # source documents to the PNG files to generate; confirm.
        png_save_as = os.path.join(preview_dir,
                                   linked_content.save_as + ".png")
        pdf_imgs[linked_content.source_path] = png_save_as
        siteurl = article.get_siteurl()
        if article.settings["RELATIVE_URLS"]:
            siteurl = path_to_url(get_relative_path(article.save_as))
        png_url = "/".join((siteurl, preview_dir,
                            linked_content.url + ".png"))
        # Normalise Windows separators into URL separators.
        png_url = png_url.replace("\\", "/")
        img["src"] = png_url
    article._content = unicode(soup)
def write_feed(self, elements, context, path=None, feed_type="atom"):
    """Generate a feed with the list of articles provided.

    Return the feed. If no path or output_path is specified, just
    return the feed object.

    :param elements: the articles to put on the feed.
    :param context: the context to get the feed metadata.
    :param path: the path to output.
    :param feed_type: the feed type to use (atom or rss)
    """
    # Force the C locale so feed dates follow the spec; restore after.
    old_locale = locale.setlocale(locale.LC_ALL)
    locale.setlocale(locale.LC_ALL, str("C"))
    try:
        self.site_url = context.get("SITEURL",
                                    path_to_url(get_relative_path(path)))
        self.feed_domain = context.get("FEED_DOMAIN")
        self.feed_url = "{}/{}".format(self.feed_domain, path)

        feed = self._create_new_feed(feed_type, context)

        # Honour FEED_MAX_ITEMS by truncating the entries written.
        max_items = len(elements)
        if self.settings["FEED_MAX_ITEMS"]:
            max_items = min(self.settings["FEED_MAX_ITEMS"], max_items)
        for i in range(max_items):
            self._add_item_to_the_feed(feed, elements[i])

        if path:
            complete_path = os.path.join(self.output_path, path)
            try:
                os.makedirs(os.path.dirname(complete_path))
            except Exception:
                # Best-effort: the directory may already exist.
                pass

            encoding = "utf-8" if six.PY3 else None
            with self._open_w(complete_path, encoding) as fp:
                feed.write(fp, "utf-8")
                logger.info("writing %s" % complete_path)
        return feed
    finally:
        locale.setlocale(locale.LC_ALL, old_locale)
def write_feed(self, elements, context, filename=None, feed_type='atom'):
    """Generate a feed with the list of articles provided.

    Return the feed. If no filename is specified, just return the
    feed object without writing anything to disk.

    :param elements: the articles to put on the feed.
    :param context: the context to get the feed metadata.
    :param filename: the filename to output (relative to self.output_path).
    :param feed_type: the feed type to use (atom or rss)
    """
    # Force the C locale so feed dates follow the spec; restore after.
    old_locale = locale.setlocale(locale.LC_ALL)
    locale.setlocale(locale.LC_ALL, 'C')
    try:
        self.site_url = context.get('SITEURL',
                                    get_relative_path(filename))
        self.feed_domain = context.get('FEED_DOMAIN')
        self.feed_url = '%s/%s' % (self.feed_domain, filename)

        feed = self._create_new_feed(feed_type, context)

        # Honour FEED_MAX_ITEMS by truncating the entries written.
        max_items = len(elements)
        if self.settings['FEED_MAX_ITEMS']:
            max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
        for i in xrange(max_items):
            self._add_item_to_the_feed(feed, elements[i])

        if filename:
            complete_path = os.path.join(self.output_path, filename)
            try:
                os.makedirs(os.path.dirname(complete_path))
            except Exception:
                # Best-effort: the directory may already exist.
                pass
            # NOTE(review): Python 2 era code; the file is opened in
            # text mode and closed manually rather than via `with`.
            fp = open(complete_path, 'w')
            feed.write(fp, 'utf-8')
            logger.info('writing %s' % complete_path)
            fp.close()
        return feed
    finally:
        locale.setlocale(locale.LC_ALL, old_locale)
def update_context_contents(self, name, context):
    """Recursively run the context to find elements (articles, pages, etc)
    whose content getter needs to be modified in order to deal with
    relative paths.

    :param name: name of the file to output.
    :param context: dict that will be passed to the templates.
    """
    if context is None:
        return None
    if type(context) == tuple:
        context = list(context)
    if type(context) == dict:
        context = list(context.values())
    for i in xrange(len(context)):
        # Recurse into nested sequences and mappings.
        if type(context[i]) == tuple or type(context[i]) == list:
            context[i] = self.update_context_contents(name, context[i])
        elif type(context[i]) == dict:
            context[i] = self.update_context_contents(
                name, context[i].values())
        elif hasattr(context[i], '_content'):
            relative_path = get_relative_path(name)
            item = context[i]
            if item in self.reminder:
                # Only re-patch when this output location is new.
                if relative_path not in self.reminder[item]:
                    l = self.reminder[item]
                    l.append(relative_path)
                    self.inject_update_method(name, item)
            else:
                # BUG FIX: `list(relative_path)` split the path string
                # into individual characters, so the "seen" check above
                # could never match the stored value.  Store a
                # one-element list instead.
                self.reminder[item] = [relative_path]
                self.inject_update_method(name, item)
    return context
def process_content(article):
    """
    Get a list of PDF, PS, and EPS files for which PNG previews must be
    generated. Also make the substitutions in article content so that the
    PNG will be used as a preview and provide a link to the original.
    """
    try:
        # Prefer the lxml parser, fall back to the stdlib html.parser.
        soup = BeautifulSoup(article._content,'lxml')
    except FeatureNotFound:
        soup = BeautifulSoup(article._content,'html.parser')
    for img in soup.find_all('img',src=FORMAT_RE):
        # Strip any intrasite-link prefix (e.g. "{static}") from the src.
        src = re.sub(article.settings['INTRASITE_LINK_REGEX'],'',
                     img['src'].strip())
        if src.startswith(('http://','https://','ftp://')):
            # External resources are left untouched.
            continue
        if src.startswith('/'):
            src = src[1:]
        else:
            # relative to the source path of this content
            src = article.get_relative_source_path(
                os.path.join(article.relative_dir, src))
        if src not in article._context['filenames']:
            # Retry with URL-encoded spaces decoded.
            unquoted_path = src.replace('%20', ' ')
            if unquoted_path in article._context['filenames']:
                src = unquoted_path
        linked_content = article._context['filenames'].get(src)
        if not linked_content:
            continue
        # Wrap the image in a link pointing at the original document.
        link = img.wrap(soup.new_tag("a"))
        link['href'] = img['src']
        # NOTE(review): preview_dir and pdf_imgs come from an enclosing
        # scope not visible here — presumably plugin-level state mapping
        # source documents to the PNG files to generate; confirm.
        png_save_as = os.path.join(preview_dir,
                                   linked_content.save_as + '.png')
        pdf_imgs[linked_content.source_path] = png_save_as
        siteurl = article.get_siteurl()
        if article.settings['RELATIVE_URLS']:
            siteurl = path_to_url(get_relative_path(article.save_as))
        png_url = '/'.join((siteurl, preview_dir,
                            linked_content.url + '.png'))
        # Normalise Windows separators into URL separators.
        png_url = png_url.replace('\\', '/')
        img['src'] = png_url
    article._content = unicode(soup)
def update_context_contents(self, name, context):
    """Recursively walk the context to find elements (articles, pages,
    etc.) whose content getter needs to be modified in order to deal
    with relative paths.

    :param name: name of the file to output.
    :param context: dict that will be passed to the templates.
    :returns: the (possibly converted-to-list) context, or None.
    """
    if context is None:
        return None
    # Normalize tuples and dicts to a mutable list of values so the
    # loop below can assign back into ``context[i]``.
    if isinstance(context, tuple):
        context = list(context)
    if isinstance(context, dict):
        context = list(context.values())
    for i, item in enumerate(context):
        if isinstance(item, (tuple, list)):
            context[i] = self.update_context_contents(name, item)
        elif isinstance(item, dict):
            context[i] = self.update_context_contents(name, item.values())
        elif hasattr(item, "_content"):
            relative_path = get_relative_path(name)
            if item in self.reminder:
                # Only patch the item once per distinct output path.
                seen_paths = self.reminder[item]
                if relative_path not in seen_paths:
                    seen_paths.append(relative_path)
                    self.inject_update_method(name, item)
            else:
                # Bug fix: the previous code used list(relative_path),
                # which split the path string into single characters and
                # made the membership test above never succeed.
                self.reminder[item] = [relative_path]
                self.inject_update_method(name, item)
    return context
def write_feed(self, elements, context, filename=None, feed_type='atom'): """Generate a feed with the list of articles provided Return the feed. If no output_path or filename is specified, just return the feed object. :param articles: the articles to put on the feed. :param context: the context to get the feed metadata. :param output_path: where to output the file. :param filename: the filename to output. :param feed_type: the feed type to use (atom or rss) """ old_locale = locale.setlocale(locale.LC_ALL) locale.setlocale(locale.LC_ALL, 'C') try: self.site_url = context.get('SITEURL', get_relative_path(filename)) self.feed_url = '%s/%s' % (self.site_url, filename) feed = self._create_new_feed(feed_type, context) for item in elements: self._add_item_to_the_feed(feed, item) if filename: complete_path = os.path.join(self.output_path, filename) try: os.makedirs(os.path.dirname(complete_path)) except Exception: pass fp = open(complete_path, 'w') feed.write(fp, 'utf-8') print u' [ok] writing %s' % complete_path fp.close() return feed finally: locale.setlocale(locale.LC_ALL, old_locale)
def write_feed(self, elements, context, filename=None, feed_type="atom"): """Generate a feed with the list of articles provided Return the feed. If no output_path or filename is specified, just return the feed object. :param articles: the articles to put on the feed. :param context: the context to get the feed metadata. :param output_path: where to output the file. :param filename: the filename to output. :param feed_type: the feed type to use (atom or rss) """ old_locale = locale.setlocale(locale.LC_ALL) locale.setlocale(locale.LC_ALL, "C") try: self.site_url = context.get("SITEURL", get_relative_path(filename)) self.feed_url = "%s/%s" % (self.site_url, filename) feed = self._create_new_feed(feed_type, context) for item in elements: self._add_item_to_the_feed(feed, item) if filename: complete_path = os.path.join(self.output_path, filename) try: os.makedirs(os.path.dirname(complete_path)) except Exception: pass fp = open(complete_path, "w") feed.write(fp, "utf-8") print u" [ok] writing %s" % complete_path fp.close() return feed finally: locale.setlocale(locale.LC_ALL, old_locale)
def write_file(self, name, template, context, relative_urls=True,
               paginated=None, **kwargs):
    """Render the template and write the file.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolutes ones
    :param paginated: dict of article list to paginate - must have the
        same length (same list in different orders)
    :param **kwargs: additional variables to pass to the templates
    """
    # ``name is False`` is an explicit "skip this output" marker, distinct
    # from an empty/None name.
    if name is False:
        return
    elif not name:
        # other stuff, just return for now
        return

    def _write_file(template, localcontext, output_path, name):
        """Render the template write the file."""
        # Render under the C locale for stable formatting, then restore
        # the caller's locale even if rendering raises.
        old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, str('C'))
        try:
            output = template.render(localcontext)
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
        path = os.path.join(output_path, name)
        try:
            os.makedirs(os.path.dirname(path))
        except Exception:
            # directory may already exist; best-effort creation
            pass
        with open(path, 'w', encoding='utf-8') as f:
            f.write(output)
        logger.info('writing {}'.format(path))

    localcontext = context.copy()
    if relative_urls:
        relative_path = get_relative_path(name)
        # Deliberate mutation of the caller's context so later writes can
        # see the local site url too.
        context['localsiteurl'] = relative_path
        localcontext['SITEURL'] = relative_path
    localcontext.update(kwargs)

    # check paginated
    paginated = paginated or {}
    if paginated:
        # pagination needed, init paginators
        paginators = {}
        for key in paginated.keys():
            object_list = paginated[key]
            if self.settings.get('DEFAULT_PAGINATION'):
                paginators[key] = Paginator(
                    object_list,
                    self.settings.get('DEFAULT_PAGINATION'),
                    self.settings.get('DEFAULT_ORPHANS'))
            else:
                # no pagination configured: one page holding everything
                paginators[key] = Paginator(object_list, len(object_list))
        # generated pages, and write
        name_root, ext = os.path.splitext(name)
        for page_num in range(list(paginators.values())[0].num_pages):
            paginated_localcontext = localcontext.copy()
            for key in paginators.keys():
                paginator = paginators[key]
                page = paginator.page(page_num + 1)
                paginated_localcontext.update(
                    {'%s_paginator' % key: paginator,
                     '%s_page' % key: page})
            # First page keeps the plain name; later pages get the page
            # number inserted before the extension (index2.html, ...).
            if page_num > 0:
                paginated_name = '%s%s%s' % (
                    name_root, page_num + 1, ext)
            else:
                paginated_name = name
            _write_file(template, paginated_localcontext,
                        self.output_path, paginated_name)
    else:
        # no pagination
        _write_file(template, localcontext, self.output_path, name)
def write_file(self, name, template, context, relative_urls=True,
               paginated=None, urlwrapper=None, **kwargs):
    """Render the template and write the file.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolutes ones
    :param paginated: dict of article list to paginate - must have the
        same length (same list in different orders)
    :param urlwrapper: object that computes paginated/async save-as names
        and next/previous URLs.
        NOTE(review): assumed non-None whenever ``paginated`` is given --
        a None urlwrapper would crash below; confirm callers.
    :param **kwargs: additional variables to pass to the templates
    """
    # ``name is False`` is an explicit "skip this output" marker.
    if name is False:
        return
    elif not name:
        # other stuff, just return for now
        return
    localcontext = context.copy()
    if relative_urls:
        relative_path = get_relative_path(name)
        # Deliberate mutation of the caller's context.
        context['localsiteurl'] = relative_path
        localcontext['SITEURL'] = relative_path
    localcontext.update(kwargs)

    # check paginated
    paginated = paginated or {}
    if paginated:
        # pagination needed, init paginators
        paginators = {}
        for key in paginated.keys():
            object_list = paginated[key]
            if self.settings.get('DEFAULT_PAGINATION'):
                paginators[key] = Paginator(
                    object_list,
                    self.settings.get('DEFAULT_PAGINATION'),
                    self.settings.get('DEFAULT_ORPHANS'))
            else:
                # no pagination configured: one page holding everything
                paginators[key] = Paginator(object_list, len(object_list))
        # generated pages, and write
        # NOTE(review): name_root/ext appear unused in this variant; the
        # output name comes from urlwrapper.paginated_save_as below.
        name_root, ext = os.path.splitext(name)
        for page_num in range(list(paginators.values())[0].num_pages):
            paginated_localcontext = localcontext.copy()
            for key in paginators.keys():
                paginator = paginators[key]
                page = paginator.page(page_num + 1)
                # Decorate the page with navigation URLs so templates can
                # link between pages (and their async variants).
                if page.has_next():
                    next_page = paginator.page(page_num + 2)
                    page.next_url = urlwrapper.paginated_url(next_page)
                    if self.settings['GENERATE_ASYNC_FILES']:
                        page.next_async_url = urlwrapper.async_url(next_page)
                if page.has_previous():
                    previous_page = paginator.page(page_num)
                    page.previous_url = urlwrapper.paginated_url(previous_page)
                    if self.settings['GENERATE_ASYNC_FILES']:
                        page.previous_async_url = \
                            urlwrapper.async_url(previous_page)
                paginated_localcontext.update(
                    {'%s_paginator' % key: paginator,
                     '%s_page' % key: page})
            # NOTE(review): the output name is derived specifically from
            # the 'articles_page' entry -- assumes 'articles' is always
            # one of the paginated keys; confirm with callers.
            paginated_name = urlwrapper.paginated_save_as(
                paginated_localcontext['articles_page'])
            self.writer.write_file(template, paginated_localcontext,
                                   self.output_path, paginated_name)
            if self.settings['GENERATE_ASYNC_FILES']:
                self.writer.write_file(
                    self.get_template(urlwrapper.async_template),
                    paginated_localcontext, self.output_path,
                    urlwrapper.async_save_as(
                        paginated_localcontext['articles_page']))
    else:
        # no pagination
        self.writer.write_file(template, localcontext,
                               self.output_path, name)
def write_file(self, name, template, context, relative_urls=True,
               paginated=None, **kwargs):
    """Render the template and write the file.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolutes ones
    :param paginated: dict of article list to paginate - must have the
        same length (same list in different orders)
    :param **kwargs: additional variables to pass to the templates
    """
    def _write_file(template, localcontext, output_path, name):
        """Render the template write the file."""
        # Render under the C locale for stable formatting, then restore
        # the caller's locale even if rendering raises.
        old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, 'C')
        try:
            output = template.render(localcontext)
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
        filename = os.sep.join((output_path, name))
        try:
            os.makedirs(os.path.dirname(filename))
        except Exception:
            # directory may already exist; best-effort creation
            pass
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(output)
        info(u'writing %s' % filename)

    localcontext = context.copy()
    if relative_urls:
        localcontext['SITEURL'] = get_relative_path(name)
    localcontext.update(kwargs)
    if relative_urls:
        self.update_context_contents(name, localcontext)

    # check paginated
    paginated = paginated or {}
    if paginated:
        # pagination needed, init paginators
        paginators = {}
        for key in paginated.iterkeys():
            object_list = paginated[key]
            if self.settings.get('WITH_PAGINATION'):
                paginators[key] = Paginator(
                    object_list,
                    self.settings.get('DEFAULT_PAGINATION'),
                    self.settings.get('DEFAULT_ORPHANS'))
            else:
                # pagination disabled: one page holding everything
                paginators[key] = Paginator(object_list,
                                            len(object_list), 0)
        # Fix: compute the per-page output name once per page with
        # os.path.splitext (as the other write_file variants do) instead
        # of string-replacing the extension inside the key loop, which
        # corrupted the name when several paginated lists were given
        # (e.g. 'index22.html') and broke for names without a dot.
        name_root, ext = os.path.splitext(name)
        for page_num in range(paginators.values()[0].num_pages):
            paginated_localcontext = localcontext.copy()
            for key in paginators.iterkeys():
                paginator = paginators[key]
                page = paginator.page(page_num + 1)
                paginated_localcontext.update(
                    {'%s_paginator' % key: paginator,
                     '%s_page' % key: page})
            if page_num > 0:
                paginated_name = '%s%s%s' % (name_root, page_num + 1, ext)
            else:
                paginated_name = name
            _write_file(template, paginated_localcontext,
                        self.output_path, paginated_name)
    else:
        # no pagination
        _write_file(template, localcontext, self.output_path, name)
def write_file(self, name, template, context, relative_urls=False,
               paginated=None, **kwargs):
    """Render the template and write the file.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolutes ones
    :param paginated: dict of article list to paginate - must have the
        same length (same list in different orders)
    :param **kwargs: additional variables to pass to the templates
    """
    # ``name is False`` is an explicit "skip this output" marker.
    if name is False:
        return
    elif not name:
        # other stuff, just return for now
        return

    def _write_file(template, localcontext, output_path, name):
        """Render the template write the file."""
        # Render under the C locale for stable formatting, then restore
        # the caller's locale even if rendering raises.
        old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, str('C'))
        try:
            output = template.render(localcontext)
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
        path = os.path.join(output_path, name)
        try:
            os.makedirs(os.path.dirname(path))
        except Exception:
            # directory may already exist; best-effort creation
            pass
        # self._open_w presumably opens the path for utf-8 text writing.
        with self._open_w(path, 'utf-8') as f:
            f.write(output)
        logger.info('writing {}'.format(path))

    localcontext = context.copy()
    if relative_urls:
        relative_url = path_to_url(get_relative_path(name))
        # Deliberate mutation of the shared context so other writers can
        # pick up the local site url.
        context['localsiteurl'] = relative_url
        localcontext['SITEURL'] = relative_url
    localcontext['output_file'] = name
    localcontext.update(kwargs)

    # check paginated
    paginated = paginated or {}
    if paginated:
        name_root = os.path.splitext(name)[0]
        # pagination needed, init paginators
        paginators = {}
        for key in paginated.keys():
            object_list = paginated[key]
            # This Paginator variant derives per-page save_as names from
            # name_root itself (see page.save_as below).
            paginators[key] = Paginator(
                name_root,
                object_list,
                self.settings,
            )
        # generated pages, and write
        for page_num in range(list(paginators.values())[0].num_pages):
            paginated_localcontext = localcontext.copy()
            for key in paginators.keys():
                paginator = paginators[key]
                # previous/next are None at the ends of the page range
                previous_page = paginator.page(page_num) \
                    if page_num > 0 else None
                page = paginator.page(page_num + 1)
                next_page = paginator.page(page_num + 2) \
                    if page_num + 1 < paginator.num_pages else None
                paginated_localcontext.update(
                    {'%s_paginator' % key: paginator,
                     '%s_page' % key: page,
                     '%s_previous_page' % key: previous_page,
                     '%s_next_page' % key: next_page})
            # The paginator page knows its own output name.
            _write_file(template, paginated_localcontext,
                        self.output_path, page.save_as)
    else:
        # no pagination
        _write_file(template, localcontext, self.output_path, name)
def write_file(self, name, template, context, relative_urls=False,
               paginated=None, override_output=False, **kwargs):
    """Render the template and write the file.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolutes ones
    :param paginated: dict of article list to paginate - must have the
        same length (same list in different orders)
    :param override_output: boolean telling if we can override previous
        output with the same name (and if next files written with the
        same name should be skipped to keep that one)
    :param **kwargs: additional variables to pass to the templates
    """
    # ``name is False`` (or an explicitly empty name) means: skip output.
    if name is False or name == "":
        return
    elif not name:
        # other stuff, just return for now
        return

    def _write_file(template, localcontext, output_path, name, override):
        """Render the template write the file."""
        # Render under the C locale for stable formatting, then restore
        # the caller's locale even if rendering raises.
        old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, str('C'))
        try:
            output = template.render(localcontext)
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
        path = os.path.join(output_path, name)
        try:
            os.makedirs(os.path.dirname(path))
        except Exception:
            # directory may already exist; best-effort creation
            pass
        # self._open_w presumably honors the override flag when a file
        # with the same name was written before.
        with self._open_w(path, 'utf-8', override=override) as f:
            f.write(output)
        logger.info('writing {}'.format(path))

        # Send a signal to say we're writing a file with some specific
        # local context.
        signals.content_written.send(path, context=localcontext)

    localcontext = context.copy()
    if relative_urls:
        relative_url = path_to_url(get_relative_path(name))
        # Deliberate mutation of the shared context so other writers can
        # pick up the local site url.
        context['localsiteurl'] = relative_url
        localcontext['SITEURL'] = relative_url
    localcontext['output_file'] = name
    localcontext.update(kwargs)

    # pagination
    if paginated:
        # pagination needed, init paginators
        paginators = {key: Paginator(name, val, self.settings)
                      for key, val in paginated.items()}

        # generated pages, and write
        for page_num in range(list(paginators.values())[0].num_pages):
            paginated_localcontext = localcontext.copy()
            for key in paginators.keys():
                paginator = paginators[key]
                # previous/next are None at the ends of the page range
                previous_page = paginator.page(page_num) \
                    if page_num > 0 else None
                page = paginator.page(page_num + 1)
                next_page = paginator.page(page_num + 2) \
                    if page_num + 1 < paginator.num_pages else None
                paginated_localcontext.update(
                    {'%s_paginator' % key: paginator,
                     '%s_page' % key: page,
                     '%s_previous_page' % key: previous_page,
                     '%s_next_page' % key: next_page})
            # The paginator page knows its own output name.
            _write_file(template, paginated_localcontext, self.output_path,
                        page.save_as, override_output)
    else:
        # no pagination
        _write_file(template, localcontext, self.output_path, name,
                    override_output)
def _get_static_path(match_obj):
    # Regex-substitution callback: resolves one matched static-path
    # reference into its final URL, records the source->destination
    # mapping, and returns the (escaped) URL to splice into the content.
    # NOTE(review): closes over ``instance``, ``detected_autostatic_paths``,
    # ``autostatic_path_found`` and ``StaticPath`` from an enclosing scope
    # not visible here.
    path = match_obj.group("path")
    extra = html_unescape(match_obj.group("extra"))
    extra_dict = {}
    instance_destination_dir = os.path.dirname(instance.save_as)
    relative_path = False
    using_relative_urls = instance._context.get("RELATIVE_URLS")

    if extra:
        # Parse key="value" (or bare key=value) options from the extra
        # part of the match into a dict, e.g. output=... url=...
        for match in re.finditer(
                r'(\w+)="?((?:(?<!")[^\s|]+|(?<=")(?:\\.|[^"\\])*(?=")))"?',
                extra):
            extra_dict[match.group(1)] = match.group(2)

    if path.startswith('/'):
        # Site-absolute reference: strip the leading slash.
        source_path = path[1:]
        destination_path = source_path
        relative_path = False
    else:
        # Relative reference: resolve against this content's directory.
        source_path = instance.get_relative_source_path(
            os.path.join(instance.relative_dir, path))
        destination_path = os.path.join(instance_destination_dir, path)
        relative_path = True

    if "output" in extra_dict:
        # Explicit output override, again absolute or relative.
        output_override = extra_dict["output"]
        if output_override.startswith('/'):
            destination_path = output_override[1:]
            relative_path = False
        else:
            destination_path = os.path.join(instance_destination_dir,
                                            output_override)
            relative_path = True

    if using_relative_urls:
        siteurl = get_relative_path(instance.save_as)
    else:
        siteurl = instance._context.get("localsiteurl", "")

    if relative_path and using_relative_urls:
        # Both relative: link straight from the page's own directory.
        url = os.path.relpath(destination_path, instance_destination_dir)
    else:
        url = siteurl + "/" + destination_path

    if "url" in extra_dict:
        # Explicit URL override wins over the computed one.
        url_override = extra_dict["url"]
        if url_override.startswith('/'):
            url = siteurl + url_override
        else:
            url = url_override
            if not using_relative_urls:
                # A relative override is resolved against the page's
                # output directory when absolute URLs are in use.
                url = siteurl + "/" + os.path.dirname(instance.save_as) \
                    + "/" + url

    url = url.replace('\\', '/')  # for Windows paths.

    static_path_obj = StaticPath(source_path, destination_path, url,
                                 extra_dict)

    # Let plugins react to the discovered static path.
    autostatic_path_found.send(autostatic_path=static_path_obj)

    logger.debug("Detected autostatic path: {} -> {} ({})".format(
        static_path_obj.source,
        static_path_obj.destination,
        static_path_obj.url))

    detected_autostatic_paths[static_path_obj.destination] = \
        static_path_obj.source

    return html_escape(static_path_obj.url)
def write_file(self, name, template, context, relative_urls=False,
               paginated=None, override_output=False, **kwargs):
    """Render the template and write the file.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolutes ones
    :param paginated: dict of article list to paginate - must have the
        same length (same list in different orders)
    :param override_output: boolean telling if we can override previous
        output with the same name (and if next files written with the
        same name should be skipped to keep that one)
    :param **kwargs: additional variables to pass to the templates
    """
    # ``name is False`` is an explicit "skip this output" marker.
    if name is False:
        return
    elif not name:
        # other stuff, just return for now
        return

    def _write_file(template, localcontext, output_path, name, override):
        """Render the template write the file."""
        # Render under the C locale for stable formatting, then restore
        # the caller's locale even if rendering raises.
        old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, str('C'))
        try:
            output = template.render(localcontext)
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
        path = os.path.join(output_path, name)
        try:
            os.makedirs(os.path.dirname(path))
        except Exception:
            # directory may already exist; best-effort creation
            pass
        # self._open_w presumably honors the override flag when a file
        # with the same name was written before.
        with self._open_w(path, 'utf-8', override=override) as f:
            f.write(output)
        logger.info('writing {}'.format(path))

        # Send a signal to say we're writing a file with some specific
        # local context.
        signals.content_written.send(path, context=localcontext)

    localcontext = context.copy()
    if relative_urls:
        relative_url = path_to_url(get_relative_path(name))
        # Deliberate mutation of the shared context so other writers can
        # pick up the local site url.
        context['localsiteurl'] = relative_url
        localcontext['SITEURL'] = relative_url
    localcontext['output_file'] = name
    localcontext.update(kwargs)

    # check paginated
    paginated = paginated or {}
    if paginated:
        name_root = os.path.splitext(name)[0]
        # pagination needed, init paginators
        paginators = {}
        for key in paginated.keys():
            object_list = paginated[key]
            # This Paginator variant derives per-page save_as names from
            # name_root itself (see page.save_as below).
            paginators[key] = Paginator(
                name_root,
                object_list,
                self.settings,
            )
        # generated pages, and write
        for page_num in range(list(paginators.values())[0].num_pages):
            paginated_localcontext = localcontext.copy()
            for key in paginators.keys():
                paginator = paginators[key]
                # previous/next are None at the ends of the page range
                previous_page = paginator.page(page_num) \
                    if page_num > 0 else None
                page = paginator.page(page_num + 1)
                next_page = paginator.page(page_num + 2) \
                    if page_num + 1 < paginator.num_pages else None
                paginated_localcontext.update(
                    {'%s_paginator' % key: paginator,
                     '%s_page' % key: page,
                     '%s_previous_page' % key: previous_page,
                     '%s_next_page' % key: next_page})
            # The paginator page knows its own output name.
            _write_file(template, paginated_localcontext, self.output_path,
                        page.save_as, override_output)
    else:
        # no pagination
        _write_file(template, localcontext, self.output_path, name,
                    override_output)
def write_file(self, name, template, context, relative_urls=False,
               paginated=None, **kwargs):
    """Render ``template`` against ``context`` and write it to disk.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolutes ones
    :param paginated: dict of article list to paginate - must have the
        same length (same list in different orders)
    :param **kwargs: additional variables to pass to the templates
    """
    # An explicit False or an empty name both mean: nothing to write.
    if not name:
        return

    def _render_to_disk(tpl, ctx, out_dir, out_name):
        """Render ``tpl`` under the C locale and write it under ``out_dir``."""
        saved_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, str('C'))
        try:
            rendered = tpl.render(ctx)
        finally:
            # Always restore the caller's locale, even on render errors.
            locale.setlocale(locale.LC_ALL, saved_locale)
        target = os.path.join(out_dir, out_name)
        try:
            os.makedirs(os.path.dirname(target))
        except Exception:
            # Directory may already exist; creation is best-effort.
            pass
        with open(target, 'w', encoding='utf-8') as fobj:
            fobj.write(rendered)
        logger.info('writing {}'.format(target))

    localcontext = context.copy()
    if relative_urls:
        relative_url = path_to_url(get_relative_path(name))
        # Shared context is mutated on purpose so other writers see it.
        context['localsiteurl'] = relative_url
        localcontext['SITEURL'] = relative_url
    localcontext['output_file'] = name
    localcontext.update(kwargs)

    if not paginated:
        # Unpaginated: a single straight render and we are done.
        _render_to_disk(template, localcontext, self.output_path, name)
        return

    # Build one paginator per provided article list.
    paginators = {}
    for key, object_list in paginated.items():
        per_page = self.settings.get('DEFAULT_PAGINATION')
        if per_page:
            paginators[key] = Paginator(
                object_list, per_page,
                self.settings.get('DEFAULT_ORPHANS'))
        else:
            # Pagination disabled: a single page holding everything.
            paginators[key] = Paginator(object_list, len(object_list))

    name_root, ext = os.path.splitext(name)
    total_pages = list(paginators.values())[0].num_pages
    for page_num in range(total_pages):
        page_context = localcontext.copy()
        for key, paginator in paginators.items():
            current = paginator.page(page_num + 1)
            page_context['%s_paginator' % key] = paginator
            page_context['%s_page' % key] = current
        # First page keeps the plain name; later pages get their number
        # inserted before the extension (index2.html, index3.html, ...).
        if page_num:
            target_name = '%s%s%s' % (name_root, page_num + 1, ext)
        else:
            target_name = name
        _render_to_disk(template, page_context, self.output_path,
                        target_name)
def write_file(self, name, template, context, relative_urls=True,
               paginated=None, **kwargs):
    """Render the template and write the file.

    :param name: name of the file to output
    :param template: template to use to generate the content
    :param context: dict to pass to the templates.
    :param relative_urls: use relative urls or absolutes ones
    :param paginated: dict of article list to paginate - must have the
        same length (same list in different orders)
    :param **kwargs: additional variables to pass to the templates
    """
    def _write_file(template, localcontext, output_path, name):
        """Render the template write the file."""
        # Render under the C locale for stable formatting, then restore
        # the caller's locale even if rendering raises.
        old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, 'C')
        try:
            output = template.render(localcontext)
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
        filename = os.sep.join((output_path, name))
        try:
            os.makedirs(os.path.dirname(filename))
        except Exception:
            # directory may already exist; best-effort creation
            pass
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(output)
        logger.info(u'writing %s' % filename)

    localcontext = context.copy()
    if relative_urls:
        localcontext['SITEURL'] = get_relative_path(name)
    localcontext.update(kwargs)
    if relative_urls:
        self.update_context_contents(name, localcontext)

    # check paginated
    paginated = paginated or {}
    if paginated:
        # pagination needed, init paginators
        paginators = {}
        for key in paginated.iterkeys():
            object_list = paginated[key]
            if self.settings.get('DEFAULT_PAGINATION'):
                paginators[key] = Paginator(
                    object_list,
                    self.settings.get('DEFAULT_PAGINATION'),
                    self.settings.get('DEFAULT_ORPHANS'))
            else:
                # no pagination configured: one page holding everything
                paginators[key] = Paginator(object_list, len(object_list))
        # Fix: compute the per-page output name once per page with
        # os.path.splitext instead of string-replacing the extension
        # inside the key loop, which corrupted the name when several
        # paginated lists were given (e.g. 'index22.html') and broke
        # for names without a dot.
        name_root, ext = os.path.splitext(name)
        for page_num in range(paginators.values()[0].num_pages):
            paginated_localcontext = localcontext.copy()
            for key in paginators.iterkeys():
                paginator = paginators[key]
                page = paginator.page(page_num + 1)
                paginated_localcontext.update(
                    {'%s_paginator' % key: paginator,
                     '%s_page' % key: page})
            if page_num > 0:
                paginated_name = '%s%s%s' % (name_root, page_num + 1, ext)
            else:
                paginated_name = name
            _write_file(template, paginated_localcontext,
                        self.output_path, paginated_name)
    else:
        # no pagination
        _write_file(template, localcontext, self.output_path, name)