Code example #1
File: content.py  Project: iksteen/pelican-albums
def build_url(content, path, settings):
    if not settings['RELATIVE_URLS']:
        return '/'.join((settings['SITEURL'], path_to_url(path)))
    else:
        return path_to_url(os.path.relpath(
            os.path.abspath(os.path.join(settings['PATH'], path)),
            os.path.dirname(content.source_path)
        ))
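
For orientation, the sketch below is not taken from any of the projects listed here; it simply illustrates the two branches of build_url() above: an absolute URL joined onto SITEURL when RELATIVE_URLS is disabled, and a path relative to the linking document otherwise. The settings values, the FakeContent stand-in, and the simplified path_to_url() are assumptions for illustration only.

import os


def path_to_url(path):
    # Assumed behavior of pelican's path_to_url: use '/' as the separator.
    return path.replace(os.sep, '/') if path else path


class FakeContent:
    # Hypothetical linking document.
    source_path = os.path.join('content', 'articles', 'post.md')


settings = {'PATH': 'content', 'SITEURL': 'https://example.com',
            'RELATIVE_URLS': False}

# Absolute branch: 'https://example.com/images/photo.jpg'
print('/'.join((settings['SITEURL'],
                path_to_url(os.path.join('images', 'photo.jpg')))))

# Relative branch: '../images/photo.jpg' (relative to the linking document)
settings['RELATIVE_URLS'] = True
print(path_to_url(os.path.relpath(
    os.path.abspath(os.path.join(settings['PATH'], 'images', 'photo.jpg')),
    os.path.dirname(FakeContent.source_path))))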
Code example #2
File: content.py  Project: iksteen/pelican-albums
def build_url(content, path, settings):
    if not settings['RELATIVE_URLS']:
        return '/'.join((settings['SITEURL'], path_to_url(path)))
    else:
        return path_to_url(
            os.path.relpath(
                os.path.abspath(os.path.join(settings['PATH'], path)),
                os.path.dirname(content.source_path)))
Code example #3
    def write_feed(self,
                   elements,
                   context,
                   path=None,
                   url=None,
                   feed_type='atom',
                   override_output=False,
                   feed_title=None):
        """Generate a feed with the list of articles provided

        Return the feed. If no path or output_path is specified, just
        return the feed object.

        :param elements: the articles to put on the feed.
        :param context: the context to get the feed metadata.
        :param path: the path to output.
        :param url: the publicly visible feed URL; if None, path is used
            instead
        :param feed_type: the feed type to use (atom or rss)
        :param override_output: boolean telling if we can override previous
            output with the same name (and if next files written with the same
            name should be skipped to keep that one)
        :param feed_title: the title of the feed.
        """
        if not is_selected_for_writing(self.settings, path):
            return

        self.site_url = context.get('SITEURL',
                                    path_to_url(get_relative_path(path)))

        self.feed_domain = context.get('FEED_DOMAIN')
        self.feed_url = self.urljoiner(self.feed_domain, url if url else path)

        feed = self._create_new_feed(feed_type, feed_title, context)

        max_items = len(elements)
        if self.settings['FEED_MAX_ITEMS']:
            max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
        for i in range(max_items):
            self._add_item_to_the_feed(feed, elements[i])

        signals.feed_generated.send(context, feed=feed)
        if path:
            complete_path = sanitised_join(self.output_path, path)

            try:
                os.makedirs(os.path.dirname(complete_path))
            except Exception:
                pass

            encoding = 'utf-8' if six.PY3 else None
            with self._open_w(complete_path, encoding, override_output) as fp:
                feed.write(fp, 'utf-8')
                logger.info('Writing %s', complete_path)

            signals.feed_written.send(complete_path,
                                      context=context,
                                      feed=feed)
        return feed
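
As a hedged aside, the self-contained sketch below shows the SITEURL fallback used at the top of write_feed(): when the context carries no SITEURL, the site URL becomes a relative prefix derived from the feed path. The helper definitions are simplified stand-ins for the pelican.utils functions, and the context and path values are hypothetical.

import os


def get_relative_path(path):
    # Assumed behavior: climb back to the site root from the directory
    # containing `path` ('..' per directory level, or '.' at the root).
    depth = len([part for part in path.split(os.sep) if part]) - 1
    return os.path.join(*([os.pardir] * depth)) if depth > 0 else os.curdir


def path_to_url(path):
    # Assumed behavior: swap os.sep for '/'.
    return path.replace(os.sep, '/') if path else path


context = {}  # hypothetical context without a SITEURL entry
path = os.path.join('feeds', 'all.atom.xml')
site_url = context.get('SITEURL', path_to_url(get_relative_path(path)))
print(site_url)  # '..' -- feed links resolve relative to the output root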
Code example #4
File: writers.py  Project: FashtimeDotCom/pelican
 def _get_localcontext(context, name, kwargs, relative_urls):
     localcontext = context.copy()
     localcontext['localsiteurl'] = localcontext.get('localsiteurl', None)
     if relative_urls:
         relative_url = path_to_url(get_relative_path(name))
         localcontext['SITEURL'] = relative_url
         localcontext['localsiteurl'] = relative_url
     localcontext['output_file'] = name
     localcontext.update(kwargs)
     return localcontext
Code example #5
    def test_attach_to_parent_dir(self):
        """attach_to() preserves dirs inside the linking document dir.
        """
        page = Page(content="fake page", metadata={'title': 'fakepage'},
                    settings=self.settings, source_path='fakepage.md')
        self.static.attach_to(page)

        expected_save_as = os.path.join('outpages', 'dir', 'foo.jpg')
        self.assertEqual(self.static.save_as, expected_save_as)
        self.assertEqual(self.static.url, path_to_url(expected_save_as))
Code example #6
File: writers.py  Project: davidlesieur/pelican
    def write_feed(self,
                   elements,
                   context,
                   path=None,
                   feed_type='atom',
                   feed_title=None):
        """Generate a feed with the list of articles provided

        Return the feed. If no path or output_path is specified, just
        return the feed object.

        :param elements: the articles to put on the feed.
        :param context: the context to get the feed metadata.
        :param path: the path to output.
        :param feed_type: the feed type to use (atom or rss)
        :param feed_title: the title of the feed.
        """
        if not is_selected_for_writing(self.settings, path):
            return
        old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, str('C'))
        try:
            self.site_url = context.get('SITEURL',
                                        path_to_url(get_relative_path(path)))

            self.feed_domain = context.get('FEED_DOMAIN')
            self.feed_url = '{}/{}'.format(self.feed_domain, path)

            feed = self._create_new_feed(feed_type, feed_title, context)

            max_items = len(elements)
            if self.settings['FEED_MAX_ITEMS']:
                max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
            for i in range(max_items):
                self._add_item_to_the_feed(feed, elements[i])

            if path:
                complete_path = os.path.join(self.output_path, path)
                try:
                    os.makedirs(os.path.dirname(complete_path))
                except Exception:
                    pass

                encoding = 'utf-8' if six.PY3 else None
                with self._open_w(complete_path, encoding) as fp:
                    feed.write(fp, 'utf-8')
                    logger.info('Writing %s', complete_path)

                signals.feed_written.send(complete_path,
                                          context=context,
                                          feed=feed)
            return feed
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
Code example #7
File: test_contents.py  Project: Windeal/pelican
    def test_attach_to_same_dir(self):
        """attach_to() overrides a static file's save_as and url.
        """
        page = Page(content="fake page",
            metadata={'title': 'fakepage'}, settings=self.settings,
            source_path=os.path.join('dir', 'fakepage.md'))
        self.static.attach_to(page)

        expected_save_as = os.path.join('outpages', 'foo.jpg')
        self.assertEqual(self.static.save_as, expected_save_as)
        self.assertEqual(self.static.url, path_to_url(expected_save_as))
Code example #8
 def _get_localcontext(context, name, kwargs, relative_urls):
     localcontext = context.copy()
     localcontext['localsiteurl'] = localcontext.get(
         'localsiteurl', None)
     if relative_urls:
         relative_url = path_to_url(get_relative_path(name))
         localcontext['SITEURL'] = relative_url
         localcontext['localsiteurl'] = relative_url
     localcontext['output_file'] = name
     localcontext.update(kwargs)
     return localcontext
Code example #9
File: test_contents.py  Project: webglider/pelican
    def test_attach_to_same_dir(self):
        """attach_to() overrides a static file's save_as and url.
        """
        page = Page(content="fake page",
            metadata={'title': 'fakepage'}, settings=self.settings,
            source_path=os.path.join('dir', 'fakepage.md'))
        self.static.attach_to(page)

        expected_save_as = os.path.join('outpages', 'foo.jpg')
        self.assertEqual(self.static.save_as, expected_save_as)
        self.assertEqual(self.static.url, path_to_url(expected_save_as))
Code example #10
File: json_feed.py  Project: rjames86/myblog
    def __init__(self, article_generator):
        self.articles = article_generator.articles
        self.settings = article_generator.settings
        self.context = article_generator.context
        self.generator = article_generator

        self.path = self.settings.get('JSON_FEED')

        self.site_url = self.context.get('SITEURL',
                                         path_to_url(get_relative_path(self.path)))

        self.feed_domain = self.context.get('FEED_DOMAIN')
Code example #11
File: contents.py  Project: W7PEA/pelican
    def attach_to(self, content):
        """Override our output directory with that of the given content object.
        """

        # Determine our file's new output path relative to the linking
        # document. If it currently lives beneath the linking
        # document's source directory, preserve that relationship on output.
        # Otherwise, make it a sibling.

        linking_source_dir = os.path.dirname(content.source_path)
        tail_path = os.path.relpath(self.source_path, linking_source_dir)
        if tail_path.startswith(os.pardir + os.sep):
            tail_path = os.path.basename(tail_path)
        new_save_as = os.path.join(os.path.dirname(content.save_as), tail_path)

        # We do not build our new url by joining tail_path with the linking
        # document's url, because we cannot know just by looking at the latter
        # whether it points to the document itself or to its parent directory.
        # (An url like 'some/content' might mean a directory named 'some'
        # with a file named 'content', or it might mean a directory named
        # 'some/content' with a file named 'index.html'.) Rather than trying
        # to figure it out by comparing the linking document's url and save_as
        # path, we simply build our new url from our new save_as path.

        new_url = path_to_url(new_save_as)

        def _log_reason(reason):
            logger.warning(
                "The {attach} link in %s cannot relocate "
                "%s because %s. Falling back to "
                "{filename} link behavior instead.",
                content.get_relative_source_path(),
                self.get_relative_source_path(),
                reason,
                extra={"limit_msg": "More {attach} warnings silenced."},
            )

        # We never override an override, because we don't want to interfere
        # with user-defined overrides that might be in EXTRA_PATH_METADATA.
        if hasattr(self, "override_save_as") or hasattr(self, "override_url"):
            if new_save_as != self.save_as or new_url != self.url:
                _log_reason("its output location was already overridden")
            return

        # We never change an output path that has already been referenced,
        # because we don't want to break links that depend on that path.
        if self._output_location_referenced:
            if new_save_as != self.save_as or new_url != self.url:
                _log_reason("another link already referenced its location")
            return

        self.override_save_as = new_save_as
        self.override_url = new_url
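
The sketch below (hypothetical paths only) walks through the relocation rule described in the comments above: a file beneath the linking document's source directory keeps its relative structure, while anything outside that directory is collapsed to a sibling.

import os

linking_source_dir = os.path.dirname(
    os.path.join('content', 'pages', 'page.md'))       # 'content/pages'

# Case 1: the attached file lives beneath the linking document's directory,
# so the intermediate directory survives in the output path.
tail = os.path.relpath(os.path.join('content', 'pages', 'img', 'foo.jpg'),
                       linking_source_dir)
print(tail)  # os.path.join('img', 'foo.jpg')

# Case 2: the attached file lives outside that directory, so it is reduced
# to its basename and becomes a sibling of the linking document's output.
tail = os.path.relpath(os.path.join('content', 'foo.jpg'), linking_source_dir)
if tail.startswith(os.pardir + os.sep):
    tail = os.path.basename(tail)
print(tail)  # 'foo.jpg'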
Code example #12
File: json_feed.py  Project: iqbalali/myblog
    def __init__(self, article_generator):
        self.articles = article_generator.articles
        self.settings = article_generator.settings
        self.context = article_generator.context
        self.generator = article_generator

        self.path = self.settings.get('JSON_FEED')

        self.site_url = self.context.get(
            'SITEURL', path_to_url(get_relative_path(self.path)))

        self.feed_domain = self.context.get('FEED_DOMAIN')
Code example #13
File: writers.py  Project: Starch/pelican
    def write_feed(self, elements, context, path=None, url=None,
                   feed_type='atom', override_output=False, feed_title=None):
        """Generate a feed with the list of articles provided

        Return the feed. If no path or output_path is specified, just
        return the feed object.

        :param elements: the articles to put on the feed.
        :param context: the context to get the feed metadata.
        :param path: the path to output.
        :param url: the publicly visible feed URL; if None, path is used
            instead
        :param feed_type: the feed type to use (atom or rss)
        :param override_output: boolean telling if we can override previous
            output with the same name (and if next files written with the same
            name should be skipped to keep that one)
        :param feed_title: the title of the feed.
        """
        if not is_selected_for_writing(self.settings, path):
            return

        self.site_url = context.get(
            'SITEURL', path_to_url(get_relative_path(path)))

        self.feed_domain = context.get('FEED_DOMAIN')
        self.feed_url = self.urljoiner(self.feed_domain, url if url else path)

        feed = self._create_new_feed(feed_type, feed_title, context)

        max_items = len(elements)
        if self.settings['FEED_MAX_ITEMS']:
            max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
        for i in range(max_items):
            self._add_item_to_the_feed(feed, elements[i])

        signals.feed_generated.send(context, feed=feed)
        if path:
            complete_path = sanitised_join(self.output_path, path)

            try:
                os.makedirs(os.path.dirname(complete_path))
            except Exception:
                pass

            encoding = 'utf-8' if six.PY3 else None
            with self._open_w(complete_path, encoding, override_output) as fp:
                feed.write(fp, 'utf-8')
                logger.info('Writing %s', complete_path)

            signals.feed_written.send(
                complete_path, context=context, feed=feed)
        return feed
Code example #14
    def attach_to(self, content):
        """Override our output directory with that of the given content object.
        """

        # Determine our file's new output path relative to the linking
        # document. If it currently lives beneath the linking
        # document's source directory, preserve that relationship on output.
        # Otherwise, make it a sibling.

        linking_source_dir = os.path.dirname(content.source_path)
        tail_path = os.path.relpath(self.source_path, linking_source_dir)
        if tail_path.startswith(os.pardir + os.sep):
            tail_path = os.path.basename(tail_path)
        new_save_as = os.path.join(os.path.dirname(content.save_as), tail_path)

        # We do not build our new url by joining tail_path with the linking
        # document's url, because we cannot know just by looking at the latter
        # whether it points to the document itself or to its parent directory.
        # (An url like 'some/content' might mean a directory named 'some'
        # with a file named 'content', or it might mean a directory named
        # 'some/content' with a file named 'index.html'.) Rather than trying
        # to figure it out by comparing the linking document's url and save_as
        # path, we simply build our new url from our new save_as path.

        new_url = path_to_url(new_save_as)

        def _log_reason(reason):
            logger.warning(
                "The {attach} link in %s cannot relocate "
                "%s because %s. Falling back to "
                "{filename} link behavior instead.",
                content.get_relative_source_path(),
                self.get_relative_source_path(),
                reason,
                extra={'limit_msg': "More {attach} warnings silenced."})

        # We never override an override, because we don't want to interfere
        # with user-defined overrides that might be in EXTRA_PATH_METADATA.
        if hasattr(self, 'override_save_as') or hasattr(self, 'override_url'):
            if new_save_as != self.save_as or new_url != self.url:
                _log_reason("its output location was already overridden")
            return

        # We never change an output path that has already been referenced,
        # because we don't want to break links that depend on that path.
        if self._output_location_referenced:
            if new_save_as != self.save_as or new_url != self.url:
                _log_reason("another link already referenced its location")
            return

        self.override_save_as = new_save_as
        self.override_url = new_url
Code example #15
File: test_contents.py  Project: Windeal/pelican
    def test_attach_to_does_nothing_after_save_as_referenced(self):
        """attach_to() does nothing if the save_as was already referenced.
        (For example, by a {filename} link in a document processed earlier.)
        """
        original_save_as = self.static.save_as

        page = Page(content="fake page",
            metadata={'title': 'fakepage'}, settings=self.settings,
            source_path=os.path.join('dir', 'fakepage.md'))
        self.static.attach_to(page)

        self.assertEqual(self.static.save_as, original_save_as)
        self.assertEqual(self.static.url, path_to_url(original_save_as))
Code example #16
File: test_contents.py  Project: yuyuexi/pelican
    def test_not_save_as_draft(self):
        """Static.save_as is not affected by draft status."""

        static = Static(content=None,
                        metadata=dict(status='draft', ),
                        settings=self.settings,
                        source_path=os.path.join('dir', 'foo.jpg'),
                        context=self.settings.copy())

        expected_save_as = posixize_path(os.path.join('dir', 'foo.jpg'))
        self.assertEqual(static.status, 'draft')
        self.assertEqual(static.save_as, expected_save_as)
        self.assertEqual(static.url, path_to_url(expected_save_as))
Code example #17
File: test_contents.py  Project: webglider/pelican
    def test_attach_to_does_nothing_after_save_as_referenced(self):
        """attach_to() does nothing if the save_as was already referenced.
        (For example, by a {filename} link in a document processed earlier.)
        """
        original_save_as = self.static.save_as

        page = Page(content="fake page",
            metadata={'title': 'fakepage'}, settings=self.settings,
            source_path=os.path.join('dir', 'fakepage.md'))
        self.static.attach_to(page)

        self.assertEqual(self.static.save_as, original_save_as)
        self.assertEqual(self.static.url, path_to_url(original_save_as))
Code example #18
File: contents.py  Project: 52M/pelican
 def url_format(self):
     """Returns the URL, formatted with the proper values"""
     metadata = copy.copy(self.metadata)
     path = self.metadata.get('path', self.get_relative_source_path())
     metadata.update({
         'path': path_to_url(path),
         'slug': getattr(self, 'slug', ''),
         'lang': getattr(self, 'lang', 'en'),
         'date': getattr(self, 'date', SafeDatetime.now()),
         'author': self.author.slug if hasattr(self, 'author') else '',
         'category': self.category.slug if hasattr(self, 'category') else ''
     })
     return metadata
Code example #19
File: contents.py  Project: chandrikachowdary/pelican
 def url_format(self):
     """Returns the URL, formatted with the proper values"""
     metadata = copy.copy(self.metadata)
     path = self.metadata.get('path', self.get_relative_source_path())
     metadata.update({
         'path': path_to_url(path),
         'slug': getattr(self, 'slug', ''),
         'lang': getattr(self, 'lang', 'en'),
         'date': getattr(self, 'date', datetime.datetime.now()),
         'author': self.author.slug if hasattr(self, 'author') else '',
         'category': self.category.slug if hasattr(self, 'category') else ''
     })
     return metadata
Code example #20
File: test_contents.py  Project: getpelican/pelican
    def test_not_save_as_draft(self):
        """Static.save_as is not affected by draft status."""

        static = Static(
            content=None,
            metadata=dict(status='draft',),
            settings=self.settings,
            source_path=os.path.join('dir', 'foo.jpg'),
            context=self.settings.copy())

        expected_save_as = os.path.join('dir', 'foo.jpg')
        self.assertEqual(static.status, 'draft')
        self.assertEqual(static.save_as, expected_save_as)
        self.assertEqual(static.url, path_to_url(expected_save_as))
Code example #21
File: contents.py  Project: bartaelterman/snippets
 def url_format(self):
     """Returns the URL, formatted with the proper values"""
     metadata = copy.copy(self.metadata)
     path = self.metadata.get('path', self.get_relative_source_path())
     metadata.update({
         'path': path_to_url(path),
         'slug': getattr(self, 'slug', ''),
         'lang': getattr(self, 'lang', 'en'),
         'date': getattr(self, 'date', datetime.now()),
         'author': getattr(self, 'author', ''),
         'category': getattr(self, 'category',
                             self.settings['DEFAULT_CATEGORY']),
     })
     return metadata
Code example #22
 def url_format(self):
     """Returns the URL, formatted with the proper values"""
     metadata = copy.copy(self.metadata)
     path = self.metadata.get('path', self.get_relative_source_path())
     metadata.update({
         'path': path_to_url(path),
         'slug': getattr(self, 'slug', ''),
         'lang': getattr(self, 'lang', 'en'),
         'date': getattr(self, 'date', datetime.now()),
         'author': getattr(self, 'author', ''),
         'category': getattr(self, 'category',
             self.settings['DEFAULT_CATEGORY']),
         })
     return metadata
Code example #23
File: writers.py  Project: davidlesieur/pelican
    def write_feed(self, elements, context, path=None, feed_type='atom',
                   feed_title=None):
        """Generate a feed with the list of articles provided

        Return the feed. If no path or output_path is specified, just
        return the feed object.

        :param elements: the articles to put on the feed.
        :param context: the context to get the feed metadata.
        :param path: the path to output.
        :param feed_type: the feed type to use (atom or rss)
        :param feed_title: the title of the feed.
        """
        if not is_selected_for_writing(self.settings, path):
            return
        old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, str('C'))
        try:
            self.site_url = context.get(
                'SITEURL', path_to_url(get_relative_path(path)))

            self.feed_domain = context.get('FEED_DOMAIN')
            self.feed_url = '{}/{}'.format(self.feed_domain, path)

            feed = self._create_new_feed(feed_type, feed_title, context)

            max_items = len(elements)
            if self.settings['FEED_MAX_ITEMS']:
                max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
            for i in range(max_items):
                self._add_item_to_the_feed(feed, elements[i])

            if path:
                complete_path = os.path.join(self.output_path, path)
                try:
                    os.makedirs(os.path.dirname(complete_path))
                except Exception:
                    pass

                encoding = 'utf-8' if six.PY3 else None
                with self._open_w(complete_path, encoding) as fp:
                    feed.write(fp, 'utf-8')
                    logger.info('Writing %s', complete_path)

                signals.feed_written.send(complete_path, context=context, feed=feed)
            return feed
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
Code example #24
def update_author(article_generator, author):
    """Updates the Author metadata objects with extra information."""
    settings = article_generator.settings

    author_metadata = settings.get("AUTHOR_METADATA", {})
    author_dict = author_metadata.get(author.name, {})

    author.tagline = author_dict.get("tagline")
    author.discourse = author_dict.get("discourse")
    author.discourse_url = ("https://mixxx.discourse.group/u/{}/".format(
        author.discourse) if author.discourse else "")
    author.facebook = author_dict.get("facebook")
    author.facebook_url = ("https://www.facebook.com/{}/".format(
        author.facebook) if author.facebook else "")
    author.github = author_dict.get("github")
    author.github_url = ("https://github.com/{}".format(author.github)
                         if author.github else "")
    author.twitter = author_dict.get("twitter")
    author.twitter_url = ("https://twitter.com/{}/".format(author.twitter)
                          if author.twitter else "")
    author.email = author_dict.get("email")

    author.avatar_url = None
    if not author.github or "images" not in settings["STATIC_PATHS"]:
        return
    path = os.path.join(article_generator.path, "images", "avatars")
    avatar_path = os.path.join(path, f"{author.github}.png")
    if not os.path.exists(avatar_path):
        logger.warning(
            "Missing avatar for author '%s', expected %s",
            author.name,
            avatar_path,
        )
        logger.debug(
            "Downloading missing avatar for GitHub user %s...",
            author.github,
        )
        try:
            data = download_avatar(author.github)
        except urllib.error.URLError:
            logger.warning("Download of avatar failed, skipping...")
            return
        mkdir_p(path)
        with open(avatar_path, "w+b") as fp:
            fp.write(data)
        logger.warning("Downloaded missing avatar to: %s", avatar_path)
    author.avatar_url = path_to_url(
        os.path.relpath(avatar_path, article_generator.path))
Code example #25
File: contents.py  Project: W7PEA/pelican
 def url_format(self):
     """Returns the URL, formatted with the proper values"""
     metadata = copy.copy(self.metadata)
     path = self.metadata.get("path", self.get_relative_source_path())
     metadata.update(
         {
             "path": path_to_url(path),
             "slug": getattr(self, "slug", ""),
             "lang": getattr(self, "lang", "en"),
             "date": getattr(self, "date", SafeDatetime.now()),
             "author": self.author.slug if hasattr(self, "author") else "",
             "tag": self.tag.slug if hasattr(self, "tag") else "",
             "category": self.category.slug if hasattr(self, "category") else "",
         }
     )
     return metadata
Code example #26
File: test_contents.py  Project: Windeal/pelican
    def test_attach_link_syntax(self):
        """{attach} link syntax triggers output path override & url replacement.
        """
        html = '<a href="{attach}../foo.jpg">link</a>'
        page = Page(content=html,
            metadata={'title': 'fakepage'}, settings=self.settings,
            source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
            context=self.context)
        content = page.get_content('')

        self.assertNotEqual(content, html,
            "{attach} link syntax did not trigger URL replacement.")

        expected_save_as = os.path.join('outpages', 'foo.jpg')
        self.assertEqual(self.static.save_as, expected_save_as)
        self.assertEqual(self.static.url, path_to_url(expected_save_as))
Code example #27
File: test_contents.py  Project: webglider/pelican
    def test_attach_link_syntax(self):
        """{attach} link syntax triggers output path override & url replacement.
        """
        html = '<a href="{attach}../foo.jpg">link</a>'
        page = Page(content=html,
            metadata={'title': 'fakepage'}, settings=self.settings,
            source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
            context=self.context)
        content = page.get_content('')

        self.assertNotEqual(content, html,
            "{attach} link syntax did not trigger URL replacement.")

        expected_save_as = os.path.join('outpages', 'foo.jpg')
        self.assertEqual(self.static.save_as, expected_save_as)
        self.assertEqual(self.static.url, path_to_url(expected_save_as))
Code example #28
def process_content(article):
    """
    Get a list of PDF, PS, and EPS files for which PNG previews must be generated.
    Also make the substitutions in article content so that the PNG will be used as
    a preview and provide a link to the original.
    """
    try:
        soup = BeautifulSoup(article._content, 'lxml')
    except FeatureNotFound:
        soup = BeautifulSoup(article._content, 'html.parser')

    for img in soup.find_all('img', src=FORMAT_RE):
        src = re.sub(article.settings['INTRASITE_LINK_REGEX'], '',
                     img['src'].strip())
        if src.startswith(('http://', 'https://', 'ftp://')): continue
        if src.startswith('/'):
            src = src[1:]
        else:
            # relative to the source path of this content
            src = article.get_relative_source_path(
                os.path.join(article.relative_dir, src))
        if src not in article._context['static_content']:
            unquoted_path = src.replace('%20', ' ')
            if unquoted_path in article._context['static_content']:
                src = unquoted_path
        linked_content = article._context['static_content'].get(src)
        if not linked_content:
            continue
        link = img.wrap(soup.new_tag("a"))
        link['href'] = img['src']
        png_save_as = os.path.join(preview_dir,
                                   linked_content.save_as + '.png')
        pdf_imgs[linked_content.source_path] = png_save_as
        siteurl = article.get_siteurl()
        if article.settings['RELATIVE_URLS']:
            siteurl = path_to_url(get_relative_path(article.save_as))
        png_url = '/'.join((siteurl, preview_dir, linked_content.url + '.png'))
        png_url = png_url.replace('\\', '/')
        img['src'] = png_url
        class_list = 'pdf-img'
        if 'class' in img.attrs:
            img['class'].append(class_list)
        else:
            img['class'] = [class_list]

    article._content = unicode(soup)
Code example #29
    def generate_output(self, writer=None):
        '''
        Generate redirect files
        '''
        logger.info('Generating permalink files in %r',
                    self.permalink_output_path)

        clean_output_dir(self.permalink_output_path, [])
        mkdir_p(self.permalink_output_path)

        path = os.path.dirname(os.path.realpath(__file__))
        env = Environment(loader=FileSystemLoader(path))
        template = env.get_template('permalink.html')

        settings = self.settings.copy()
        if settings.get('RELATIVE_URLS', False):
            settings['SITEURL'] = path_to_url(
                get_relative_path(
                    os.path.join(settings['PERMALINK_PATH'], 'dummy.html')))

        with open(os.path.join(self.permalink_output_path, '.htaccess'),
                  'w') as redirect_file:
            for content in itertools.chain(self.context['articles'],
                                           self.context['pages']):

                for permalink_id in content.get_permalink_ids_iter():
                    relative_permalink_path = os.path.join(
                        self.settings['PERMALINK_PATH'],
                        permalink_id) + '.html'
                    permalink_path = os.path.join(self.output_path,
                                                  relative_permalink_path)

                    localcontext = settings.copy()
                    localcontext['content'] = content
                    localcontext['page'] = content
                    with open(permalink_path, 'wb') as f:
                        f.write(template.render(**localcontext))
                    signals.content_written.send(permalink_path,
                                                 context=localcontext)
                    redirect_file.write(
                        'Redirect permanent "/{relative_permalink_path}" "{url}"\n'
                        .format(
                            url=article_url(content),
                            permalink_id=permalink_id,
                            relative_permalink_path=relative_permalink_path,
                        ))
Code example #30
File: conference.py  Project: agile-france/site-2013
 def generate_context(self):
     bio_pic_path = self.settings['BIO_PIC_PATH']
     for f in self.get_files(
             os.path.join(self.path, bio_pic_path), extensions=False):
         f_rel = os.path.relpath(f, self.path)
         content, metadata = read_file(
             f, fmt='static', settings=self.settings)
         basename = os.path.splitext(os.path.basename(f))[0]
         metadata['save_as'] = f_rel
         metadata['url'] = path_to_url(metadata['save_as'])
         metadata['slug'] = slugify(basename)
         sc = BioPic(
             content=None,
             metadata=metadata,
             settings=self.settings,
             source_path=f_rel)
         conference.bio_pics[sc.slug] = sc
         self.add_source_path(sc)
Code example #31
def process_content(article):
    """
    Get a list of PDF, PS, and EPS files for which PNG previews must be generated.
    Also make the substitutions in article content so that the PNG will be used as
    a preview and provide a link to the original.
    """
    try:
        soup = BeautifulSoup(article._content, "lxml")
    except FeatureNotFound:
        soup = BeautifulSoup(article._content, "html.parser")

    for img in soup.find_all("img", src=FORMAT_RE):
        src = re.sub(article.settings["INTRASITE_LINK_REGEX"], "",
                     img["src"].strip())
        if src.startswith(("http://", "https://", "ftp://")):
            continue
        if src.startswith("/"):
            src = src[1:]
        else:
            # relative to the source path of this content
            src = article.get_relative_source_path(
                os.path.join(article.relative_dir, src))
        if src not in article._context["filenames"]:
            unquoted_path = src.replace("%20", " ")
            if unquoted_path in article._context["filenames"]:
                src = unquoted_path
        linked_content = article._context["filenames"].get(src)
        if not linked_content:
            continue
        link = img.wrap(soup.new_tag("a"))
        link["href"] = img["src"]
        png_save_as = os.path.join(preview_dir,
                                   linked_content.save_as + ".png")
        pdf_imgs[linked_content.source_path] = png_save_as
        siteurl = article.get_siteurl()
        if article.settings["RELATIVE_URLS"]:
            siteurl = path_to_url(get_relative_path(article.save_as))
        png_url = "/".join((siteurl, preview_dir, linked_content.url + ".png"))
        png_url = png_url.replace("\\", "/")
        img["src"] = png_url

    article._content = unicode(soup)
Code example #32
File: writers.py  Project: jiqimao05/blogx.github.io
    def write_feed(self, elements, context, path=None, feed_type="atom"):
        """Generate a feed with the list of articles provided

        Return the feed. If no path or output_path is specified, just
        return the feed object.

        :param elements: the articles to put on the feed.
        :param context: the context to get the feed metadata.
        :param path: the path to output.
        :param feed_type: the feed type to use (atom or rss)
        """
        old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, str("C"))
        try:
            self.site_url = context.get("SITEURL", path_to_url(get_relative_path(path)))

            self.feed_domain = context.get("FEED_DOMAIN")
            self.feed_url = "{}/{}".format(self.feed_domain, path)

            feed = self._create_new_feed(feed_type, context)

            max_items = len(elements)
            if self.settings["FEED_MAX_ITEMS"]:
                max_items = min(self.settings["FEED_MAX_ITEMS"], max_items)
            for i in range(max_items):
                self._add_item_to_the_feed(feed, elements[i])

            if path:
                complete_path = os.path.join(self.output_path, path)
                try:
                    os.makedirs(os.path.dirname(complete_path))
                except Exception:
                    pass

                encoding = "utf-8" if six.PY3 else None
                with self._open_w(complete_path, encoding) as fp:
                    feed.write(fp, "utf-8")
                    logger.info("writing %s" % complete_path)
            return feed
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
Code example #33
File: contents.py  Project: BluePandaLi/pelican
 def url_format(self):
     """Returns the URL, formatted with the proper values"""
     metadata = copy.copy(self.metadata)
     path = self.metadata.get('path', self.get_relative_source_path())
     default_category = self.settings['DEFAULT_CATEGORY']
     slug_substitutions = self.settings.get('SLUG_SUBSTITUTIONS', ())
     metadata.update({
         'path': path_to_url(path),
         'slug': getattr(self, 'slug', ''),
         'lang': getattr(self, 'lang', 'en'),
         'date': getattr(self, 'date', datetime.now()),
         'author': slugify(
             getattr(self, 'author', ''),
             slug_substitutions
         ),
         'category': slugify(
             getattr(self, 'category', default_category),
             slug_substitutions
         )
     })
     return metadata
Code example #34
File: contents.py  Project: runarberg/pelican
 def url_format(self):
     """Returns the URL, formatted with the proper values"""
     metadata = copy.copy(self.metadata)
     path = self.metadata.get('path', self.get_relative_source_path())
     default_category = self.settings['DEFAULT_CATEGORY']
     slug_substitutions = self.settings.get('SLUG_SUBSTITUTIONS', ())
     metadata.update({
         'path': path_to_url(path),
         'slug': getattr(self, 'slug', ''),
         'lang': getattr(self, 'lang', 'en'),
         'date': getattr(self, 'date', datetime.now()),
         'author': slugify(
             getattr(self, 'author', ''),
             slug_substitutions
         ),
         'category': slugify(
             getattr(self, 'category', default_category),
             slug_substitutions
         )
     })
     return metadata
Code example #35
File: test_contents.py  Project: Windeal/pelican
    def test_attach_to_ignores_subsequent_calls(self):
        """attach_to() does nothing when called a second time.
        """
        page = Page(content="fake page",
            metadata={'title': 'fakepage'}, settings=self.settings,
            source_path=os.path.join('dir', 'fakepage.md'))

        self.static.attach_to(page)

        otherdir_settings = self.settings.copy()
        otherdir_settings.update(dict(
            PAGE_SAVE_AS=os.path.join('otherpages', '{slug}.html'),
            PAGE_URL='otherpages/{slug}.html'))
        otherdir_page = Page(content="other page",
            metadata={'title': 'otherpage'}, settings=otherdir_settings,
            source_path=os.path.join('dir', 'otherpage.md'))

        self.static.attach_to(otherdir_page)

        otherdir_save_as = os.path.join('otherpages', 'foo.jpg')
        self.assertNotEqual(self.static.save_as, otherdir_save_as)
        self.assertNotEqual(self.static.url, path_to_url(otherdir_save_as))
Code example #36
File: test_contents.py  Project: webglider/pelican
    def test_attach_to_ignores_subsequent_calls(self):
        """attach_to() does nothing when called a second time.
        """
        page = Page(content="fake page",
            metadata={'title': 'fakepage'}, settings=self.settings,
            source_path=os.path.join('dir', 'fakepage.md'))

        self.static.attach_to(page)

        otherdir_settings = self.settings.copy()
        otherdir_settings.update(dict(
            PAGE_SAVE_AS=os.path.join('otherpages', '{slug}.html'),
            PAGE_URL='otherpages/{slug}.html'))
        otherdir_page = Page(content="other page",
            metadata={'title': 'otherpage'}, settings=otherdir_settings,
            source_path=os.path.join('dir', 'otherpage.md'))

        self.static.attach_to(otherdir_page)

        otherdir_save_as = os.path.join('otherpages', 'foo.jpg')
        self.assertNotEqual(self.static.save_as, otherdir_save_as)
        self.assertNotEqual(self.static.url, path_to_url(otherdir_save_as))
Code example #37
File: pdf_img.py  Project: cmacmackin/pdf-img
def process_content(article):
    """
    Get a list of PDF, PS, and EPS files for which PNG previews must be generated.
    Also make the substitutions in article content so that the PNG will be used as
    a preview and provide a link to the original.
    """
    try:
        soup = BeautifulSoup(article._content, 'lxml')
    except FeatureNotFound:
        soup = BeautifulSoup(article._content, 'html.parser')

    for img in soup.find_all('img', src=FORMAT_RE):
        src = re.sub(article.settings['INTRASITE_LINK_REGEX'], '',
                     img['src'].strip())
        if src.startswith(('http://', 'https://', 'ftp://')):
            continue
        if src.startswith('/'):
            src = src[1:]
        else:
            # relative to the source path of this content
            src = article.get_relative_source_path(
                os.path.join(article.relative_dir, src))
        if src not in article._context['filenames']:
            unquoted_path = src.replace('%20', ' ')
            if unquoted_path in article._context['filenames']:
                src = unquoted_path
        linked_content = article._context['filenames'].get(src)
        if not linked_content:
            continue
        link = img.wrap(soup.new_tag("a"))
        link['href'] = img['src']
        png_save_as = os.path.join(preview_dir, linked_content.save_as + '.png')
        pdf_imgs[linked_content.source_path] = png_save_as
        siteurl = article.get_siteurl()
        if article.settings['RELATIVE_URLS']:
            siteurl = path_to_url(get_relative_path(article.save_as))
        png_url = '/'.join((siteurl, preview_dir, linked_content.url + '.png'))
        png_url = png_url.replace('\\', '/')
        img['src'] = png_url
    
    article._content = unicode(soup)
Code example #38
File: writers.py  Project: AndreLesa/pelican
    def write_file(self, name, template, context, relative_urls=False,
        paginated=None, **kwargs):
        """Render the template and write the file.

        :param name: name of the file to output
        :param template: template to use to generate the content
        :param context: dict to pass to the templates.
        :param relative_urls: use relative urls or absolute ones
        :param paginated: dict of article list to paginate - must have the
            same length (same list in different orders)
        :param **kwargs: additional variables to pass to the templates
        """

        if name is False:
            return
        elif not name:
            # other stuff, just return for now
            return

        def _write_file(template, localcontext, output_path, name):
            """Render the template write the file."""
            old_locale = locale.setlocale(locale.LC_ALL)
            locale.setlocale(locale.LC_ALL, str('C'))
            try:
                output = template.render(localcontext)
            finally:
                locale.setlocale(locale.LC_ALL, old_locale)
            path = os.path.join(output_path, name)
            try:
                os.makedirs(os.path.dirname(path))
            except Exception:
                pass
            with self._open_w(path, 'utf-8') as f:
                f.write(output)
            logger.info('writing {}'.format(path))

        localcontext = context.copy()
        if relative_urls:
            relative_url = path_to_url(get_relative_path(name))
            context['localsiteurl'] = relative_url
            localcontext['SITEURL'] = relative_url

        localcontext['output_file'] = name
        localcontext.update(kwargs)

        # check paginated
        paginated = paginated or {}
        if paginated:
            # pagination needed, init paginators
            paginators = {}
            for key in paginated.keys():
                object_list = paginated[key]

                if self.settings.get('DEFAULT_PAGINATION'):
                    paginators[key] = Paginator(object_list,
                        self.settings.get('DEFAULT_PAGINATION'),
                        self.settings.get('DEFAULT_ORPHANS'))
                else:
                    paginators[key] = Paginator(object_list, len(object_list))

            # generated pages, and write
            name_root, ext = os.path.splitext(name)
            for page_num in range(list(paginators.values())[0].num_pages):
                paginated_localcontext = localcontext.copy()
                for key in paginators.keys():
                    paginator = paginators[key]
                    page = paginator.page(page_num + 1)
                    paginated_localcontext.update(
                            {'%s_paginator' % key: paginator,
                             '%s_page' % key: page})
                if page_num > 0:
                    paginated_name = '%s%s%s' % (
                        name_root, page_num + 1, ext)
                else:
                    paginated_name = name

                _write_file(template, paginated_localcontext, self.output_path,
                    paginated_name)
        else:
            # no pagination
            _write_file(template, localcontext, self.output_path, name)
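
A small sketch (hypothetical output name) of the paginated file naming used above: the first page keeps the original name, and later pages insert the page number before the extension.

import os

name = os.path.join('blog', 'index.html')   # hypothetical output name
name_root, ext = os.path.splitext(name)     # ('blog/index', '.html')
for page_num in range(3):
    if page_num > 0:
        paginated_name = '%s%s%s' % (name_root, page_num + 1, ext)
    else:
        paginated_name = name
    print(paginated_name)
# -> blog/index.html, blog/index2.html, blog/index3.html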
Code example #39
    def write_file(self,
                   name,
                   template,
                   context,
                   relative_urls=False,
                   paginated=None,
                   **kwargs):
        """Render the template and write the file.

        :param name: name of the file to output
        :param template: template to use to generate the content
        :param context: dict to pass to the templates.
        :param relative_urls: use relative urls or absolute ones
        :param paginated: dict of article list to paginate - must have the
            same length (same list in different orders)
        :param **kwargs: additional variables to pass to the templates
        """

        if name is False:
            return
        elif not name:
            # other stuff, just return for now
            return

        def _write_file(template, localcontext, output_path, name):
            """Render the template write the file."""
            old_locale = locale.setlocale(locale.LC_ALL)
            locale.setlocale(locale.LC_ALL, str('C'))
            try:
                output = template.render(localcontext)
            finally:
                locale.setlocale(locale.LC_ALL, old_locale)
            path = os.path.join(output_path, name)
            try:
                os.makedirs(os.path.dirname(path))
            except Exception:
                pass
            with open(path, 'w', encoding='utf-8') as f:
                f.write(output)
            logger.info('writing {}'.format(path))

        localcontext = context.copy()
        if relative_urls:
            relative_url = path_to_url(get_relative_path(name))
            context['localsiteurl'] = relative_url
            localcontext['SITEURL'] = relative_url

        localcontext['output_file'] = name
        localcontext.update(kwargs)

        # check paginated
        paginated = paginated or {}
        if paginated:
            # pagination needed, init paginators
            paginators = {}
            for key in paginated.keys():
                object_list = paginated[key]

                if self.settings.get('DEFAULT_PAGINATION'):
                    paginators[key] = Paginator(
                        object_list, self.settings.get('DEFAULT_PAGINATION'),
                        self.settings.get('DEFAULT_ORPHANS'))
                else:
                    paginators[key] = Paginator(object_list, len(object_list))

            # generated pages, and write
            name_root, ext = os.path.splitext(name)
            for page_num in range(list(paginators.values())[0].num_pages):
                paginated_localcontext = localcontext.copy()
                for key in paginators.keys():
                    paginator = paginators[key]
                    page = paginator.page(page_num + 1)
                    paginated_localcontext.update({
                        '%s_paginator' % key: paginator,
                        '%s_page' % key: page
                    })
                if page_num > 0:
                    paginated_name = '%s%s%s' % (name_root, page_num + 1, ext)
                else:
                    paginated_name = name

                _write_file(template, paginated_localcontext, self.output_path,
                            paginated_name)
        else:
            # no pagination
            _write_file(template, localcontext, self.output_path, name)
Code example #40
File: writers.py  Project: wk2008520/pelican
    def write_file(self, name, template, context, relative_urls=False,
        paginated=None, override_output=False, **kwargs):
        """Render the template and write the file.

        :param name: name of the file to output
        :param template: template to use to generate the content
        :param context: dict to pass to the templates.
        :param relative_urls: use relative urls or absolute ones
        :param paginated: dict of article list to paginate - must have the
            same length (same list in different orders)
        :param override_output: boolean telling if we can override previous
            output with the same name (and if next files written with the same
            name should be skipped to keep that one)
        :param **kwargs: additional variables to pass to the templates
        """

        if name is False:
            return
        elif not name:
            # other stuff, just return for now
            return

        def _write_file(template, localcontext, output_path, name, override):
            """Render the template write the file."""
            old_locale = locale.setlocale(locale.LC_ALL)
            locale.setlocale(locale.LC_ALL, str('C'))
            try:
                output = template.render(localcontext)
            finally:
                locale.setlocale(locale.LC_ALL, old_locale)
            path = os.path.join(output_path, name)
            try:
                os.makedirs(os.path.dirname(path))
            except Exception:
                pass

            with self._open_w(path, 'utf-8', override=override) as f:
                f.write(output)
            logger.info('writing {}'.format(path))

            # Send a signal to say we're writing a file with some specific
            # local context.
            signals.content_written.send(path, context=localcontext)

        localcontext = context.copy()
        if relative_urls:
            relative_url = path_to_url(get_relative_path(name))
            context['localsiteurl'] = relative_url
            localcontext['SITEURL'] = relative_url

        localcontext['output_file'] = name
        localcontext.update(kwargs)

        # check paginated
        paginated = paginated or {}
        if paginated:
            name_root = os.path.splitext(name)[0]

            # pagination needed, init paginators
            paginators = {}
            for key in paginated.keys():
                object_list = paginated[key]

                paginators[key] = Paginator(
                    name_root,
                    object_list,
                    self.settings,
                )

            # generated pages, and write
            for page_num in range(list(paginators.values())[0].num_pages):
                paginated_localcontext = localcontext.copy()
                for key in paginators.keys():
                    paginator = paginators[key]
                    previous_page = paginator.page(page_num) \
                            if page_num > 0 else None
                    page = paginator.page(page_num + 1)
                    next_page = paginator.page(page_num + 2) \
                            if page_num + 1 < paginator.num_pages else None
                    paginated_localcontext.update(
                            {'%s_paginator' % key: paginator,
                             '%s_page' % key: page,
                             '%s_previous_page' % key: previous_page,
                             '%s_next_page' % key: next_page})

                _write_file(template, paginated_localcontext, self.output_path,
                            page.save_as, override_output)
        else:
            # no pagination
            _write_file(template, localcontext, self.output_path, name,
                        override_output)
Code example #41
File: writers.py  Project: AlexandreMoulti/pelican
    def write_file(self, name, template, context, relative_urls=False,
                   paginated=None, override_output=False, **kwargs):
        """Render the template and write the file.

        :param name: name of the file to output
        :param template: template to use to generate the content
        :param context: dict to pass to the templates.
        :param relative_urls: use relative urls or absolute ones
        :param paginated: dict of article list to paginate - must have the
            same length (same list in different orders)
        :param override_output: boolean telling if we can override previous
            output with the same name (and if next files written with the same
            name should be skipped to keep that one)
        :param **kwargs: additional variables to pass to the templates
        """

        if name is False or name == "":
            return
        elif not name:
            # other stuff, just return for now
            return

        def _write_file(template, localcontext, output_path, name, override):
            """Render the template write the file."""
            old_locale = locale.setlocale(locale.LC_ALL)
            locale.setlocale(locale.LC_ALL, str('C'))
            try:
                output = template.render(localcontext)
            finally:
                locale.setlocale(locale.LC_ALL, old_locale)
            path = os.path.join(output_path, name)
            try:
                os.makedirs(os.path.dirname(path))
            except Exception:
                pass

            with self._open_w(path, 'utf-8', override=override) as f:
                f.write(output)
            logger.info('writing {}'.format(path))

            # Send a signal to say we're writing a file with some specific
            # local context.
            signals.content_written.send(path, context=localcontext)

        localcontext = context.copy()
        if relative_urls:
            relative_url = path_to_url(get_relative_path(name))
            context['localsiteurl'] = relative_url
            localcontext['SITEURL'] = relative_url

        localcontext['output_file'] = name
        localcontext.update(kwargs)

        # pagination
        if paginated:

            # pagination needed, init paginators
            paginators = {key: Paginator(name, val, self.settings)
                          for key, val in paginated.items()}

            # generated pages, and write
            for page_num in range(list(paginators.values())[0].num_pages):
                paginated_localcontext = localcontext.copy()
                for key in paginators.keys():
                    paginator = paginators[key]
                    previous_page = paginator.page(page_num) \
                        if page_num > 0 else None
                    page = paginator.page(page_num + 1)
                    next_page = paginator.page(page_num + 2) \
                        if page_num + 1 < paginator.num_pages else None
                    paginated_localcontext.update(
                        {'%s_paginator' % key: paginator,
                         '%s_page' % key: page,
                         '%s_previous_page' % key: previous_page,
                         '%s_next_page' % key: next_page})

                _write_file(template, paginated_localcontext, self.output_path,
                            page.save_as, override_output)
        else:
            # no pagination
            _write_file(template, localcontext, self.output_path, name,
                        override_output)