Example #1
def _getWipData(path, site, fs_endpoints):
    # Skip files whose extension is not one of the site's auto formats.
    auto_formats = site.piecrust_app.config.get('site/auto_formats', ['html'])
    pathname, pathext = os.path.splitext(path)
    if pathext not in auto_formats:
        return None

    # Find the content source whose filesystem endpoint contains this path.
    source = None
    for endpoint, s in fs_endpoints.items():
        if path.startswith(endpoint):
            source = s
            break
    if source is None:
        return None

    # Build the page and resolve its route so we can compute a URI and slug.
    fac = source.buildPageFactory(os.path.join(site.root_dir, path))
    route = site.piecrust_app.getSourceRoute(source.name, fac.metadata)
    if not route:
        return None

    qp = QualifiedPage(fac.buildPage(), route, fac.metadata)
    uri = qp.getUri()
    _, slug = split_uri(site.piecrust_app, uri)

    # Read the raw page, split off its config header and extract a short
    # text preview of the body.
    with open(fac.path, 'r', encoding='utf8') as fp:
        raw_text = fp.read()

    header, offset = parse_config_header(raw_text)
    extract = text_preview(raw_text, offset=offset)
    return {
            'title': qp.config.get('title'),
            'slug': slug,
            'url': url_for('.edit_page', slug=slug),
            'text': extract
            }
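Every example on this page revolves around the same call: parse_config_header(raw) takes the raw page text, splits off the configuration header at the top, and returns the parsed header together with the offset at which the page body begins (the callers then use raw[offset:] or parse_segments(raw, offset) on the remainder). The snippet below is only a rough sketch of that contract as it can be inferred from the usage here; the '---' front-matter delimiter, the YAML parsing, and the parse_config_header_sketch name are assumptions, not PieCrust's actual implementation.

import yaml


def parse_config_header_sketch(raw_text):
    # Hypothetical stand-in for parse_config_header(): returns the parsed
    # front matter as a dict plus the offset where the page body begins.
    if not raw_text.startswith('---'):
        return {}, 0
    end = raw_text.find('\n---', 3)
    if end < 0:
        return {}, 0
    header = yaml.safe_load(raw_text[3:end]) or {}
    # The body starts on the line after the closing '---' delimiter.
    body_start = raw_text.find('\n', end + 1)
    offset = len(raw_text) if body_start < 0 else body_start + 1
    return header, offset


raw = "---\ntitle: Hello\nformat: markdown\n---\nThis is the body.\n"
config, offset = parse_config_header_sketch(raw)
print(config)        # {'title': 'Hello', 'format': 'markdown'}
print(raw[offset:])  # This is the body.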
Example #2
File: page.py Project: qman1989/PieCrust2
def _do_load_page(app, path, path_mtime):
    # Check the cache first.
    cache = app.cache.getCache('pages')
    cache_path = hashlib.md5(path.encode('utf8')).hexdigest() + '.json'
    page_time = path_mtime or os.path.getmtime(path)
    if cache.isValid(cache_path, page_time):
        cache_data = json.loads(cache.read(cache_path),
                object_pairs_hook=collections.OrderedDict)
        config = PageConfiguration(values=cache_data['config'],
                validate=False)
        content = json_load_segments(cache_data['content'])
        return config, content, True

    # Nope, load the page from the source file.
    logger.debug("Loading page configuration from: %s" % path)
    with codecs.open(path, 'r', 'utf-8') as fp:
        raw = fp.read()
    header, offset = parse_config_header(raw)

    if 'format' not in header:
        auto_formats = app.config.get('site/auto_formats')
        name, ext = os.path.splitext(path)
        header['format'] = auto_formats.get(ext, None)

    config = PageConfiguration(header)
    content = parse_segments(raw, offset)
    config.set('segments', list(content.keys()))

    # Save to the cache.
    cache_data = {
            'config': config.get(),
            'content': json_save_segments(content)}
    cache.write(cache_path, json.dumps(cache_data))

    return config, content, False
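The _do_load_page variants above and below all share the same caching scheme: the page path (or source spec) is hashed into a stable cache file name, the cached JSON is reused when it is at least as new as the source, and otherwise the page is re-parsed and the cache rewritten. A stripped-down, standalone sketch of that idea follows; it uses plain files instead of PieCrust's cache object, and the names load_cached_json, cache_dir and build_data are made up for illustration.

import hashlib
import json
import os


def load_cached_json(cache_dir, path, build_data):
    # Stable cache file name derived from the source path, as in the
    # examples on this page.
    cache_name = hashlib.md5(path.encode('utf8')).hexdigest() + '.json'
    cache_path = os.path.join(cache_dir, cache_name)

    # Reuse the cached data only if it is at least as new as the source file.
    src_time = os.path.getmtime(path)
    if os.path.isfile(cache_path) and os.path.getmtime(cache_path) >= src_time:
        with open(cache_path, 'r', encoding='utf8') as fp:
            return json.load(fp), True

    # Otherwise rebuild the data and refresh the cache.
    data = build_data(path)
    os.makedirs(cache_dir, exist_ok=True)
    with open(cache_path, 'w', encoding='utf8') as fp:
        json.dump(data, fp)
    return data, False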
Example #3
File: page.py Project: thhgcn/PieCrust2
def _do_load_page(app, path, path_mtime):
    # Check the cache first.
    cache = app.cache.getCache("pages")
    cache_path = hashlib.md5(path.encode("utf8")).hexdigest() + ".json"
    page_time = path_mtime or os.path.getmtime(path)
    if cache.isValid(cache_path, page_time):
        cache_data = json.loads(cache.read(cache_path), object_pairs_hook=collections.OrderedDict)
        config = PageConfiguration(values=cache_data["config"], validate=False)
        content = json_load_segments(cache_data["content"])
        return config, content, True

    # Nope, load the page from the source file.
    logger.debug("Loading page configuration from: %s" % path)
    with open(path, "r", encoding="utf-8") as fp:
        raw = fp.read()
    header, offset = parse_config_header(raw)

    if "format" not in header:
        auto_formats = app.config.get("site/auto_formats")
        name, ext = os.path.splitext(path)
        header["format"] = auto_formats.get(ext, None)

    config = PageConfiguration(header)
    content = parse_segments(raw, offset)
    config.set("segments", list(content.keys()))

    # Save to the cache.
    cache_data = {"config": config.getAll(), "content": json_save_segments(content)}
    cache.write(cache_path, json.dumps(cache_data))

    return config, content, False
Example #4
def _do_load_page(app, path, path_mtime):
    # Check the cache first.
    cache = app.cache.getCache('pages')
    cache_path = hashlib.md5(path.encode('utf8')).hexdigest() + '.json'
    page_time = path_mtime or os.path.getmtime(path)
    if cache.isValid(cache_path, page_time):
        cache_data = json.loads(cache.read(cache_path),
                                object_pairs_hook=collections.OrderedDict)
        config = PageConfiguration(values=cache_data['config'], validate=False)
        content = json_load_segments(cache_data['content'])
        return config, content, True

    # Nope, load the page from the source file.
    logger.debug("Loading page configuration from: %s" % path)
    with codecs.open(path, 'r', 'utf-8') as fp:
        raw = fp.read()
    header, offset = parse_config_header(raw)

    if 'format' not in header:
        auto_formats = app.config.get('site/auto_formats')
        name, ext = os.path.splitext(path)
        header['format'] = auto_formats.get(ext, None)

    config = PageConfiguration(header)
    content = parse_segments(raw, offset)
    config.set('segments', list(content.keys()))

    # Save to the cache.
    cache_data = {
        'config': config.get(),
        'content': json_save_segments(content)
    }
    cache.write(cache_path, json.dumps(cache_data))

    return config, content, False
Example #5
File: page.py Project: zaxebo1/PieCrust2
def _do_load_page(source, content_item):
    # Check the cache first.
    app = source.app
    cache = app.cache.getCache('pages')
    cache_token = "%s@%s" % (source.name, content_item.spec)
    cache_path = hashlib.md5(cache_token.encode('utf8')).hexdigest() + '.json'
    page_time = source.getItemMtime(content_item)
    if cache.isValid(cache_path, page_time):
        cache_data = json.loads(cache.read(cache_path),
                                object_pairs_hook=collections.OrderedDict)
        config = PageConfiguration(values=cache_data['config'], validate=False)
        content = json_load_segments(cache_data['content'])
        return config, content, True

    # Nope, load the page from the source file.
    logger.debug("Loading page configuration from: %s" % content_item.spec)
    with source.openItem(content_item, 'r', encoding='utf-8') as fp:
        raw = fp.read()
    header, offset = parse_config_header(raw)

    config = PageConfiguration(header)
    content = parse_segments(raw, offset)
    config.set('segments', list(content.keys()))

    # Save to the cache.
    cache_data = {
        'config': config.getAll(),
        'content': json_save_segments(content)
    }
    cache.write(cache_path, json.dumps(cache_data))

    app.env.stats.stepCounter('PageLoads')

    return config, content, False
Example #6
    def convertStatic(self, app, path, rel_path, strip_header=False):
        logger.debug("  Converting static: %s" % rel_path)
        out_path = os.path.join(app.root_dir, 'assets', rel_path)
        logger.debug("  %s -> %s" % (path, out_path))
        os.makedirs(os.path.dirname(out_path), 0o755, True)

        if strip_header:
            # Strip the config header and copy only the body of the file.
            with open(path, 'r', encoding='utf8') as fp:
                content = fp.read()
            config, offset = parse_config_header(content)
            content = content[offset:]
            with open(out_path, 'w', encoding='utf8') as fp:
                fp.write(content)
            return

        shutil.copy2(path, out_path)
Example #7
def _do_load_page(source, content_item):
    # Check the cache first.
    app = source.app
    cache = app.cache.getCache('pages')
    cache_token = "%s@%s" % (source.name, content_item.spec)
    cache_path = hashlib.md5(cache_token.encode('utf8')).hexdigest() + '.json'
    page_time = source.getItemMtime(content_item)
    if cache.isValid(cache_path, page_time):
        cache_data = json.loads(
            cache.read(cache_path),
            object_pairs_hook=collections.OrderedDict)
        config = PageConfiguration(
            values=cache_data['config'],
            validate=False)
        content = json_load_segments(cache_data['content'])
        return config, content, True

    # Nope, load the page from the source file.
    logger.debug("Loading page configuration from: %s" % content_item.spec)
    with source.openItem(content_item, 'r', encoding='utf-8') as fp:
        raw = fp.read()
    header, offset = parse_config_header(raw)

    config = PageConfiguration(header)
    content = parse_segments(raw, offset)
    config.set('segments', list(content.keys()))

    # Save to the cache.
    cache_data = {
        'config': config.getAll(),
        'content': json_save_segments(content)}
    cache.write(cache_path, json.dumps(cache_data))

    app.env.stats.stepCounter('PageLoads')

    return config, content, False
Example #8
    def _doConvertPage(self, app, path, out_path, is_template=False):
        logger.debug("  %s -> %s" % (path, out_path))
        os.makedirs(os.path.dirname(out_path), 0o755, True)

        with open(path, 'r', encoding='utf8') as fp:
            contents = fp.read()

        config, offset = parse_config_header(contents)
        text = contents[offset:]
        text_before = text

        wrap_content_tag = True

        if is_template:
            if 'layout' in config:
                # Liquid doesn't support template inheritance but
                # Jinja does.
                text = ("{%% extends '%s.html' %%}\n\n"
                        "{%% block jekyllcontent %%}\n"
                        "%s\n"
                        "{%% endblock %%}\n" % (config['layout'], text))
                wrap_content_tag = False
        else:
            if 'layout' in config:
                if config['layout'] == 'nil':
                    config['layout'] = 'none'

        # Convert the template stuff we can:
        # - content tag may have to be wrapped in a `jekyllcontent`
        #   because Jekyll uses implicit layout inheritance
        #   placements.
        if wrap_content_tag:
            text = re.sub(
                    r'{{\s*content\s*}}',
                    r'{% block jekyllcontent %}{{ content }}{% endblock %}',
                    text)
        # - list of posts
        text = re.sub(
            r'(?<=\{%|\{\{)([^\}]*)site.posts',
            r'\1blog.posts',
            text)
        text = re.sub(
            r'(?<=\{%|\{\{)([^\}]*)paginator.posts',
            r'\1pagination.posts',
            text)
        # - list of categories or tags
        text = re.sub(
            r'(?<=\{%|\{\{)([^\}]*)site.categories',
            r'\1blog.categories',
            text)
        text = re.sub(
            r'(?<=\{%|\{\{)([^\}]*)site.tags',
            r'\1blog.tags',
            text)
        # - list of related posts
        text = re.sub(
            r'(?<=\{%|\{\{)([^\}]*)site.related_posts',
            r'\1pagination.related_posts',
            text)
        # - enumeration limits
        text = re.sub(
            r'{%\s*for\s+([^}]+)\s+limit\:\s*(\d+)',
            r'{% for \1[:\2]',
            text)
        text = re.sub(
            r'{%\s*for\s+([^}]+)\s+offset\:\s*(\d+)',
            r'{% for \1[\2:]',
            text)
        # - code highlighting
        text = re.sub(
            r'{%\s*highlight\s+([\w\d]+)\s*%}',
            r"{% geshi '\1' %}",
            text)
        text = re.sub(
            r'{%\s*endhighlight\s*%}',
            '{% endgeshi %}',
            text)
        # - unless tag
        text = re.sub(
            r'{%\s*unless\s+([^}]+)\s*%}',
            r'{% if not \1 %}',
            text)
        text = re.sub(
            r'{%\s*endunless\s*%}',
            '{% endif %}',
            text)
        # - variable assignment
        text = re.sub(
            r'\{%\s*assign\s+',
            '{% set ',
            text)
        # - include tag
        text = re.sub(
            r'\{%\s*include\s+([\w\d\.\-_]+)\s*%}',
            r'{% include "\1" %}',
            text)
        # - truncate filter
        text = re.sub(
            r'\|\s*truncate\:\s*(\d+)',
            r'|truncate(\1)',
            text)
        # - date filter
        text = re.sub(
            r'\|\s*date\:\s*"([^"]+)"',
            r'|date("\1")',
            text)
        # - some filters we don't need
        text = re.sub(
            r'\|\s*date_to_string',
            '',
            text)

        if text != text_before:
            # We changed the text, so create a backup.
            shutil.copy2(path, '%s.orig' % out_path)

        with open(out_path, 'w', encoding='utf8') as fp:
            if not is_template:
                fp.write("---\n")
                fp.write(yaml.dump(config))
                fp.write("---\n")
            fp.write(text)
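To make the Liquid-to-Jinja substitutions in the last example more concrete, here is what a few of the same rules produce when applied to a small Jekyll-style snippet. Only the sample template and the surrounding script are invented for illustration; the patterns are copied from _doConvertPage above.

import re

sample = ("{% unless page.hidden %}{{ content }}{% endunless %}\n"
          "{% assign author = 'piecrust' %}\n"
          "{% for post in site.posts limit: 5 %}{{ post.title }}{% endfor %}")

# Apply a handful of the conversion rules in isolation.
sample = re.sub(r'{%\s*unless\s+([^}]+)\s*%}', r'{% if not \1 %}', sample)
sample = re.sub(r'{%\s*endunless\s*%}', '{% endif %}', sample)
sample = re.sub(r'\{%\s*assign\s+', '{% set ', sample)
sample = re.sub(r'(?<=\{%|\{\{)([^\}]*)site.posts', r'\1blog.posts', sample)
sample = re.sub(r'{%\s*for\s+([^}]+)\s+limit\:\s*(\d+)', r'{% for \1[:\2]', sample)

print(sample)
# Roughly:
#   {% if not page.hidden %}{{ content }}{% endif %}
#   {% set author = 'piecrust' %}
#   {% for post in blog.posts[:5] %}{{ post.title }}{% endfor %}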