Example #1
def build_page_data(ctx):
    app = ctx.app
    page = ctx.page
    first_uri, _ = split_sub_uri(app, ctx.uri)

    pc_data = PieCrustData()
    pgn_source = ctx.pagination_source or get_default_pagination_source(page)
    paginator = Paginator(page, pgn_source, page_num=ctx.page_num, pgn_filter=ctx.pagination_filter)
    assetor = Assetor(page, first_uri)
    linker = PageLinkerData(page.source, page.rel_path)
    data = {"piecrust": pc_data, "page": {}, "assets": assetor, "pagination": paginator, "family": linker}
    page_data = data["page"]
    page_data.update(copy.deepcopy(page.source_metadata))
    page_data.update(page.config.getDeepcopy(app.debug))
    page_data["url"] = ctx.uri
    page_data["timestamp"] = time.mktime(page.datetime.timetuple())
    date_format = app.config.get("site/date_format")
    if date_format:
        page_data["date"] = page.datetime.strftime(date_format)

    # TODO: handle slugified taxonomy terms.

    site_data = build_site_data(page)
    merge_dicts(data, site_data)

    # Do this at the end because we want all the data to be ready to be
    # displayed in the debugger window.
    if app.config.get("site/show_debug_info") and not app.config.get("baker/is_baking"):
        pc_data._enableDebugInfo(page, data)

    return data
Example #2
    def importWebsite(self):
        ctx = self._open()

        # Site configuration.
        logger.info("Generating site configuration...")
        site_config = self._getSiteConfig(ctx)
        site_config.setdefault('site', {})
        site_config['site'].update({
            'post_url': '%year%/%month%/%slug%',
            'category_url': 'category/%category%'
        })

        site_config_path = os.path.join(self.app.root_dir, CONFIG_PATH)
        with open(site_config_path, 'r') as fp:
            cfg_data = yaml.load(fp, Loader=ConfigurationLoader)

        cfg_data = cfg_data or {}
        merge_dicts(cfg_data, site_config)

        with open(site_config_path, 'w') as fp:
            yaml.dump(cfg_data,
                      fp,
                      default_flow_style=False,
                      allow_unicode=True,
                      Dumper=ConfigurationDumper)

        # Content
        for p in self._getPosts(ctx):
            if p['type'] == 'attachment':
                self._createAsset(p)
            else:
                self._createPost(p)

        self._close(ctx)
Example #3
    def importWebsite(self):
        ctx = self._open()

        # Site configuration.
        logger.info("Generating site configuration...")
        site_config = self._getSiteConfig(ctx)
        site_config.setdefault('site', {})
        site_config['site'].update({
                'post_url': '%year%/%month%/%slug%',
                'category_url': 'category/%category%'})

        site_config_path = os.path.join(self.app.root_dir, CONFIG_PATH)
        with open(site_config_path, 'r') as fp:
            cfg_data = yaml.load(fp, Loader=ConfigurationLoader)

        cfg_data = cfg_data or {}
        merge_dicts(cfg_data, site_config)

        with open(site_config_path, 'w') as fp:
            yaml.dump(cfg_data, fp, default_flow_style=False,
                      allow_unicode=True,
                      Dumper=ConfigurationDumper)

        # Content
        for p in self._getPosts(ctx):
            if p['type'] == 'attachment':
                self._createAsset(p)
            else:
                self._createPost(p)

        self._close(ctx)
Example #4
    def _load(self):
        if self.paths is None:
            self._values = self._validateAll({})
            return

        path_times = [os.path.getmtime(p) for p in self.paths]

        cache_key_hash = hashlib.md5(
                ("version=%s&cache=%d" % (
                    APP_VERSION, CACHE_VERSION)).encode('utf8'))
        for p in self.paths:
            cache_key_hash.update(("&path=%s" % p).encode('utf8'))
        cache_key = cache_key_hash.hexdigest()

        if self.cache.isValid('config.json', path_times):
            logger.debug("Loading configuration from cache...")
            config_text = self.cache.read('config.json')
            self._values = json.loads(
                    config_text,
                    object_pairs_hook=collections.OrderedDict)

            actual_cache_key = self._values.get('__cache_key')
            if actual_cache_key == cache_key:
                self._values['__cache_valid'] = True
                return
            logger.debug("Outdated cache key '%s' (expected '%s')." % (
                    actual_cache_key, cache_key))

        logger.debug("Loading configuration from: %s" % self.paths)
        values = {}
        try:
            for i, p in enumerate(self.paths):
                with open(p, 'r', encoding='utf-8') as fp:
                    loaded_values = yaml.load(
                            fp.read(),
                            Loader=ConfigurationLoader)
                if loaded_values is None:
                    loaded_values = {}
                for fixup in self.fixups:
                    fixup(i, loaded_values)
                merge_dicts(values, loaded_values)

            for fixup in self.fixups:
                fixup(len(self.paths), values)

            self._values = self._validateAll(values)
        except Exception as ex:
            raise Exception("Error loading configuration from: %s" %
                            ', '.join(self.paths)) from ex

        logger.debug("Caching configuration...")
        self._values['__cache_key'] = cache_key
        config_text = json.dumps(self._values)
        self.cache.write('config.json', config_text)

        self._values['__cache_valid'] = False
Example #5
    def runtest(self):
        fs = mock_fs()

        # Website config.
        config = {
                'site': {
                    'default_format': 'none',
                    'default_page_layout': 'none',
                    'default_post_layout': 'none'}
                }
        test_config = self.spec.get('config')
        if test_config is not None:
            merge_dicts(config, test_config)
        fs.withConfig(config)

        # Input file-system.
        input_files = self.spec.get('in')
        if input_files is not None:
            _add_mock_files(fs, '/kitchen', input_files)

        # Output file-system.
        expected_output_files = self.spec.get('out')
        expected_partial_files = self.spec.get('outfiles')

        # Bake!
        from piecrust.baking.baker import Baker
        with mock_fs_scope(fs):
            out_dir = fs.path('kitchen/_counter')
            app = fs.getApp()
            baker = Baker(app, out_dir)
            baker.bake()

        if expected_output_files:
            actual = fs.getStructure('kitchen/_counter')
            error = _compare_dicts(expected_output_files, actual)
            if error:
                raise ExpectedBakeOutputError(error)

        if expected_partial_files:
            keys = list(sorted(expected_partial_files.keys()))
            for key in keys:
                try:
                    actual = fs.getFileEntry('kitchen/_counter/' +
                                             key.lstrip('/'))
                except Exception as e:
                    raise ExpectedBakeOutputError([
                        "Can't access output file %s: %s" % (key, e)])

                expected = expected_partial_files[key]
                # HACK because for some reason PyYAML adds a new line for those
                # and I have no idea why.
                actual = actual.rstrip('\n')
                expected = expected.rstrip('\n')
                cmpres = _compare_str(expected, actual, key)
                if cmpres:
                    raise ExpectedBakeOutputError(cmpres)
Example #6
def _merge_route_configs(values, from_default):
    actual_routes = values.get('site', {}).get('routes', [])
    default_routes = from_default.get('site', {}).get('routes', [])
    for dr in list(default_routes):  # copy because we'll trim it as we go.
        ar = next((i for i in actual_routes
                   if i.get('source') == dr['source']),
                  None)
        if ar is not None:
            merge_dicts(ar, dr)
            default_routes.remove(dr)
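
A small worked illustration of what this helper is expected to do may help. The route dictionaries below are invented, and the sketch assumes merge_dicts copies keys from its second argument into its first, merging nested structures instead of replacing them:

# Invented data: one user-defined route for 'posts', two default routes.
values = {'site': {'routes': [
    {'source': 'posts', 'url': '/blog/%slug%'}]}}
from_default = {'site': {'routes': [
    {'source': 'posts', 'func': 'pcposturl'},
    {'source': 'pages', 'url': '/%slug%'}]}}

_merge_route_configs(values, from_default)
# The user's 'posts' route keeps its custom URL and gains 'func' from the
# matching default route; that default entry is then removed, leaving only
# the unmatched 'pages' route in from_default['site']['routes'].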
Example #7
    def _processThemeLayer(self, theme_values, values):
        # Generate the default theme model.
        gen_default_theme_model = bool(try_get_dict_values(
            (theme_values, 'site/use_default_theme_content'),
            default=True))
        if gen_default_theme_model:
            logger.debug("Generating default theme content model...")
            cc = copy.deepcopy(default_theme_content_model_base)
            merge_dicts(values, cc)

        # Merge the theme config into the result config.
        merge_dicts(values, theme_values)
Example #8
    def _processThemeLayer(self, theme_values, values):
        # Generate the default theme model.
        gen_default_theme_model = bool(
            try_get_dict_values(
                (theme_values, 'site/use_default_theme_content'),
                default=True))
        if gen_default_theme_model:
            logger.debug("Generating default theme content model...")
            cc = copy.deepcopy(default_theme_content_model_base)
            merge_dicts(values, cc)

        # Merge the theme config into the result config.
        merge_dicts(values, theme_values)
Example #9
    def _processSiteLayer(self, site_values, values):
        # Default site content.
        gen_default_site_model = bool(
            try_get_dict_values((site_values, 'site/use_default_content'),
                                (values, 'site/use_default_content'),
                                default=True))
        if gen_default_site_model:
            logger.debug("Generating default content model...")
            cc = copy.deepcopy(default_content_model_base)
            merge_dicts(values, cc)

            dcm = get_default_content_model(site_values, values)
            merge_dicts(values, dcm)

            blogsc = try_get_dict_values((site_values, 'site/blogs'),
                                         (values, 'site/blogs'))
            if blogsc is None:
                blogsc = ['posts']
                set_dict_value(site_values, 'site/blogs', blogsc)

            is_only_blog = (len(blogsc) == 1)
            for blog_name in reversed(blogsc):
                blog_cfg = get_default_content_model_for_blog(
                    blog_name,
                    is_only_blog,
                    site_values,
                    values,
                    theme_site=self.theme_config)
                merge_dicts(values, blog_cfg)

        # Merge the site config into the result config.
        merge_dicts(values, site_values)
Example #10
    def _processSiteLayer(self, site_values, values):
        # Default site content.
        gen_default_site_model = bool(try_get_dict_values(
            (site_values, 'site/use_default_content'),
            (values, 'site/use_default_content'),
            default=True))
        if gen_default_site_model:
            logger.debug("Generating default content model...")
            cc = copy.deepcopy(default_content_model_base)
            merge_dicts(values, cc)

            dcm = get_default_content_model(site_values, values)
            merge_dicts(values, dcm)

            blogsc = try_get_dict_values(
                (site_values, 'site/blogs'),
                (values, 'site/blogs'))
            if blogsc is None:
                blogsc = ['posts']
                set_dict_value(site_values, 'site/blogs', blogsc)

            is_only_blog = (len(blogsc) == 1)
            for blog_name in reversed(blogsc):
                blog_cfg = get_default_content_model_for_blog(
                    blog_name, is_only_blog, site_values, values,
                    theme_site=self.theme_config)
                merge_dicts(values, blog_cfg)

        # Merge the site config into the result config.
        _merge_route_configs(values, site_values)
        merge_dicts(values, site_values)
Example #11
    def scm(self):
        if self._scm is None:
            cfg = copy.deepcopy(self._global_config.get('scm', {}))
            merge_dicts(cfg, self.piecrust_app.config.get('scm', {}))

            if os.path.isdir(os.path.join(self.root_dir, '.hg')):
                from .scm.mercurial import MercurialSourceControl
                self._scm = MercurialSourceControl(self.root_dir, cfg)
            elif os.path.isdir(os.path.join(self.root_dir, '.git')):
                from .scm.git import GitSourceControl
                self._scm = GitSourceControl(self.root_dir, cfg)
            else:
                self._scm = False

        return self._scm
Example #12
    def _load(self):
        if self.paths is None:
            self._values = self._validateAll({})
            return

        path_times = [os.path.getmtime(p) for p in self.paths]
        cache_key_hash = hashlib.md5(
            ("version=%s&cache=%d" %
             (APP_VERSION, CACHE_VERSION)).encode('utf8'))
        for p in self.paths:
            cache_key_hash.update(("&path=%s" % p).encode('utf8'))
        cache_key = cache_key_hash.hexdigest()

        if self.cache.isValid('config.json', path_times):
            logger.debug("Loading configuration from cache...")
            config_text = self.cache.read('config.json')
            self._values = json.loads(
                config_text, object_pairs_hook=collections.OrderedDict)

            actual_cache_key = self._values.get('__cache_key')
            if actual_cache_key == cache_key:
                self._values['__cache_valid'] = True
                return
            logger.debug("Outdated cache key '%s' (expected '%s')." %
                         (actual_cache_key, cache_key))

        values = {}
        logger.debug("Loading configuration from: %s" % self.paths)
        for i, p in enumerate(self.paths):
            with codecs.open(p, 'r', 'utf-8') as fp:
                loaded_values = yaml.load(fp.read(),
                                          Loader=ConfigurationLoader)
            if loaded_values is None:
                loaded_values = {}
            for fixup in self.fixups:
                fixup(i, loaded_values)
            merge_dicts(values, loaded_values)

        for fixup in self.fixups:
            fixup(len(self.paths), values)

        self._values = self._validateAll(values)

        logger.debug("Caching configuration...")
        self._values['__cache_key'] = cache_key
        config_text = json.dumps(self._values)
        self.cache.write('config.json', config_text)
        self._values['__cache_valid'] = False
Example #13
    def _validateAll(self, values):
        if values is None:
            values = {}

        values = merge_dicts(copy.deepcopy(default_configuration), values)

        return values
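
This pattern relies on merge_dicts merging into its first argument and returning it (several of these examples assign its result), so loaded values can be layered over a deep copy of the defaults without touching the defaults themselves. A minimal sketch with invented configuration keys:

import copy

# Illustration only; assumes merge_dicts is in scope as in the examples above.
defaults = {'site': {'title': 'Untitled', 'posts_per_page': 5}}
loaded = {'site': {'title': 'My Blog'}}

values = merge_dicts(copy.deepcopy(defaults), loaded)
# values['site'] == {'title': 'My Blog', 'posts_per_page': 5}
# The incoming 'title' wins, 'posts_per_page' survives from the defaults,
# and `defaults` itself stays untouched thanks to the deepcopy.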
Example #14
    def _generateDefaultContentModel(self, values):
        dcmcopy = copy.deepcopy(default_content_model_base)
        values = merge_dicts(dcmcopy, values)

        blogsc = values['site'].get('blogs')
        if blogsc is None:
            blogsc = ['posts']
            values['site']['blogs'] = blogsc

        is_only_blog = (len(blogsc) == 1)
        for blog_name in blogsc:
            blog_cfg = get_default_content_model_for_blog(
                    blog_name, is_only_blog, values)
            values = merge_dicts(blog_cfg, values)

        dcm = get_default_content_model(values)
        values = merge_dicts(dcm, values)

        return values
Example #15
    def _prepareMockFs(self):
        fs = mock_fs()

        if self.spec.get('no_kitchen', False):
            fs.withDir('/')
            return fs

        # Suppress any formatting or layout so we can compare
        # much simpler strings.
        config = {
            'site': {
                'default_format': 'none',
                'default_page_layout': 'none',
                'default_post_layout': 'none'
            }
        }

        # Website or theme config.
        test_theme_config = self.spec.get('theme_config')
        if test_theme_config is not None:
            merge_dicts(config, test_theme_config)
            fs.withThemeConfig(config)
        else:
            test_config = self.spec.get('config')
            if test_config is not None:
                merge_dicts(config, test_config)
            fs.withConfig(config)

        # Input file-system.
        input_files = self.spec.get('in')
        if input_files is not None:
            _add_mock_files(fs, '/kitchen', input_files)

        if self.mock_debug:
            res = '\nMock File-System:\n'
            res += 'At: %s\n' % fs.path('')
            res += '\n'.join(print_fs_tree(fs.path('')))
            res += '\n'
            print(res)

        return fs
Example #16
def build_page_data(ctx):
    app = ctx.app
    page = ctx.page
    first_uri, _ = split_sub_uri(app, ctx.uri)

    pc_data = PieCrustData()
    pgn_source = ctx.pagination_source or get_default_pagination_source(page)
    paginator = Paginator(page,
                          pgn_source,
                          page_num=ctx.page_num,
                          pgn_filter=ctx.pagination_filter)
    assetor = Assetor(page, first_uri)
    linker = PageLinkerData(page.source, page.rel_path)
    data = {
        'piecrust': pc_data,
        'page': {},
        'assets': assetor,
        'pagination': paginator,
        'family': linker
    }
    page_data = data['page']
    page_data.update(copy.deepcopy(page.source_metadata))
    page_data.update(page.config.getDeepcopy(app.debug))
    page_data['url'] = ctx.uri
    page_data['timestamp'] = time.mktime(page.datetime.timetuple())
    date_format = app.config.get('site/date_format')
    if date_format:
        page_data['date'] = page.datetime.strftime(date_format)

    # TODO: handle slugified taxonomy terms.

    site_data = build_site_data(page)
    merge_dicts(data, site_data)

    # Do this at the end because we want all the data to be ready to be
    # displayed in the debugger window.
    if (app.config.get('site/show_debug_info')
            and not app.config.get('baker/is_baking')):
        pc_data._enableDebugInfo(page, data)

    return data
Example #17
    def _prepareMockFs(self):
        fs = mock_fs()

        # Website config.
        config = {
                'site': {
                    'default_format': 'none',
                    'default_page_layout': 'none',
                    'default_post_layout': 'none'}
                }
        test_config = self.spec.get('config')
        if test_config is not None:
            merge_dicts(config, test_config)
        fs.withConfig(config)

        # Input file-system.
        input_files = self.spec.get('in')
        if input_files is not None:
            _add_mock_files(fs, '/kitchen', input_files)

        return fs
Example #18
    def _prepareMockFs(self):
        fs = mock_fs()

        if self.spec.get('no_kitchen', False):
            fs.withDir('/')
            return fs

        # Suppress any formatting or layout so we can compare
        # much simpler strings.
        config = {
            'site': {
                'default_format': 'none',
                'default_page_layout': 'none',
                'default_post_layout': 'none'}
        }

        # Website or theme config.
        test_theme_config = self.spec.get('theme_config')
        if test_theme_config is not None:
            merge_dicts(config, test_theme_config)
            fs.withThemeConfig(config)
        else:
            test_config = self.spec.get('config')
            if test_config is not None:
                merge_dicts(config, test_config)
            fs.withConfig(config)

        # Input file-system.
        input_files = self.spec.get('in')
        if input_files is not None:
            _add_mock_files(fs, '/kitchen', input_files)

        if self.mock_debug:
            res = '\nMock File-System:\n'
            res += 'At: %s\n' % fs.path('')
            res += '\n'.join(print_fs_tree(fs.path('')))
            res += '\n'
            print(res)

        return fs
Example #19
    def _prepareMockFs(self):
        fs = mock_fs()

        # Website config.
        config = {
            'site': {
                'default_format': 'none',
                'default_page_layout': 'none',
                'default_post_layout': 'none'
            }
        }
        test_config = self.spec.get('config')
        if test_config is not None:
            merge_dicts(config, test_config)
        fs.withConfig(config)

        # Input file-system.
        input_files = self.spec.get('in')
        if input_files is not None:
            _add_mock_files(fs, '/kitchen', input_files)

        return fs
Example #20
    def _combineConfigs(self, theme_values, site_values):
        # Start with the default configuration.
        values = copy.deepcopy(default_configuration)

        if not self.theme_config:
            # If the theme config wants the default model, add it.
            theme_sitec = theme_values.setdefault(
                    'site', collections.OrderedDict())
            gen_default_theme_model = bool(theme_sitec.setdefault(
                    'use_default_theme_content', True))
            if gen_default_theme_model:
                self._generateDefaultThemeModel(values)

            # Now override with the actual theme config values.
            values = merge_dicts(values, theme_values)

        # Make all sources belong to the "theme" realm at this point.
        srcc = values['site'].get('sources')
        if srcc:
            for sn, sc in srcc.items():
                sc['realm'] = REALM_THEME

        # If the site config wants the default model, add it.
        site_sitec = site_values.setdefault(
                'site', collections.OrderedDict())
        gen_default_site_model = bool(site_sitec.setdefault(
                'use_default_content', True))
        if gen_default_site_model:
            self._generateDefaultSiteModel(values, site_values)

        # And override with the actual site config values.
        values = merge_dicts(values, site_values)

        # Set the theme site flag.
        if self.theme_config:
            values['site']['theme_site'] = True

        return values
Example #21
    def _validateAll(self, values):
        if values is None:
            values = {}

        # Add the loaded values to the default configuration.
        values = merge_dicts(copy.deepcopy(default_configuration), values)

        # Figure out if we need to generate the configuration for the
        # default content model.
        sitec = values.setdefault('site', {})
        if (
                ('sources' not in sitec and
                 'routes' not in sitec and
                 'taxonomies' not in sitec) or
                sitec.get('use_default_content')):
            logger.debug("Generating default content model...")
            values = self._generateDefaultContentModel(values)

        # Add a section for our cached information.
        cachec = collections.OrderedDict()
        values['__cache'] = cachec
        cache_writer = _ConfigCacheWriter(cachec)
        globs = globals()

        def _visitor(path, val, parent_val, parent_key):
            callback_name = '_validate_' + path.replace('/', '_')
            callback = globs.get(callback_name)
            if callback:
                try:
                    val2 = callback(val, values, cache_writer)
                except Exception as ex:
                    raise Exception("Error raised in validator '%s'." %
                                    callback_name) from ex
                if val2 is None:
                    raise Exception("Validator '%s' isn't returning a "
                                    "coerced value." % callback_name)
                parent_val[parent_key] = val2

        visit_dict(values, _visitor)

        return values
Example #22
    def _generateDefaultSiteModel(self, values, user_overrides):
        logger.debug("Generating default content model...")
        cc = copy.deepcopy(default_content_model_base)
        merge_dicts(values, cc)

        dcm = get_default_content_model(values, user_overrides)
        merge_dicts(values, dcm)

        blogsc = try_get_dict_value(user_overrides, 'site/blogs')
        if blogsc is None:
            blogsc = ['posts']
            set_dict_value(user_overrides, 'site/blogs', blogsc)

        is_only_blog = (len(blogsc) == 1)
        for blog_name in blogsc:
            blog_cfg = get_default_content_model_for_blog(
                    blog_name, is_only_blog, values, user_overrides,
                    theme_site=self.theme_config)
            merge_dicts(values, blog_cfg)
Example #23
    def _load(self):
        # Figure out where to load this configuration from.
        paths = []
        if self._theme_path:
            paths.append(self._theme_path)
        if self._path:
            paths.append(self._path)
        paths += self._custom_paths

        # Build the cache-key.
        path_times = [os.path.getmtime(p) for p in paths]
        cache_key_hash = hashlib.md5(
            ("version=%s&cache=%d" % (
                APP_VERSION, CACHE_VERSION)).encode('utf8'))
        for p in paths:
            cache_key_hash.update(("&path=%s" % p).encode('utf8'))
        if self._cache_hash_mod:
            cache_key_hash.update(self._cache_hash_mod.encode('utf8'))
        cache_key = cache_key_hash.hexdigest()

        # Check the cache for a valid version.
        if path_times and self._cache.isValid('config.json', path_times):
            logger.debug("Loading configuration from cache...")
            config_text = self._cache.read('config.json')
            self._values = json.loads(
                config_text,
                object_pairs_hook=collections.OrderedDict)

            actual_cache_key = self._values.get('__cache_key')
            if actual_cache_key == cache_key:
                # The cached version has the same key! Awesome!
                self._values['__cache_valid'] = True
                return
            logger.debug("Outdated cache key '%s' (expected '%s')." % (
                actual_cache_key, cache_key))

        # Nope, load from the paths.
        try:
            # Theme values.
            theme_values = None
            if self._theme_path:
                logger.debug("Loading theme layer from: %s" % self._theme_path)
                theme_values = self._loadFrom(self._theme_path)

            # Site and variant values.
            site_paths = []
            if self._path:
                site_paths.append(self._path)
            site_paths += self._custom_paths

            site_values = {}
            for path in site_paths:
                logger.debug("Loading config layer from: %s" % path)
                cur_values = self._loadFrom(path)
                merge_dicts(site_values, cur_values)

            # Do it!
            values = self._processConfigs(theme_values, site_values)
            self._values = self._validateAll(values)
        except Exception as ex:
            logger.exception(ex)
            raise Exception(
                "Error loading configuration from: %s" %
                ', '.join(paths)) from ex

        logger.debug("Caching configuration...")
        self._values['__cache_key'] = cache_key
        config_text = json.dumps(self._values)
        self._cache.write('config.json', config_text)

        self._values['__cache_valid'] = False
Example #24
    def _generateDefaultThemeModel(self, values):
        logger.debug("Generating default theme content model...")
        cc = copy.deepcopy(default_theme_content_model_base)
        merge_dicts(values, cc)
Example #25
    def _load(self):
        # Figure out where to load this configuration from.
        paths = []
        if self._theme_path:
            paths.append(self._theme_path)
        if self._path:
            paths.append(self._path)
        paths += self._custom_paths

        # Build the cache-key.
        path_times = [os.path.getmtime(p) for p in paths]
        cache_key_hash = hashlib.md5(
            ("version=%s&cache=%d" %
             (APP_VERSION, CACHE_VERSION)).encode('utf8'))
        for p in paths:
            cache_key_hash.update(("&path=%s" % p).encode('utf8'))
        if self._cache_hash_mod:
            cache_key_hash.update(self._cache_hash_mod.encode('utf8'))
        cache_key = cache_key_hash.hexdigest()

        # Check the cache for a valid version.
        if path_times and self._cache.isValid('config.json', path_times):
            logger.debug("Loading configuration from cache...")
            config_text = self._cache.read('config.json')
            self._values = json.loads(
                config_text, object_pairs_hook=collections.OrderedDict)

            actual_cache_key = self._values.get('__cache_key')
            if actual_cache_key == cache_key:
                # The cached version has the same key! Awesome!
                self._values['__cache_valid'] = True
                return
            logger.debug("Outdated cache key '%s' (expected '%s')." %
                         (actual_cache_key, cache_key))

        # Nope, load from the paths.
        try:
            # Theme values.
            theme_values = None
            if self._theme_path:
                logger.debug("Loading theme layer from: %s" % self._theme_path)
                theme_values = self._loadFrom(self._theme_path)

            # Site and variant values.
            site_paths = []
            if self._path:
                site_paths.append(self._path)
            site_paths += self._custom_paths

            site_values = {}
            for path in site_paths:
                logger.debug("Loading config layer from: %s" % path)
                cur_values = self._loadFrom(path)
                merge_dicts(site_values, cur_values)

            # Do it!
            values = self._processConfigs(theme_values, site_values)
            self._values = self._validateAll(values)
        except Exception as ex:
            logger.exception(ex)
            raise Exception("Error loading configuration from: %s" %
                            ', '.join(paths)) from ex

        logger.debug("Caching configuration...")
        self._values['__cache_key'] = cache_key
        config_text = json.dumps(self._values)
        self._cache.write('config.json', config_text)

        self._values['__cache_valid'] = False
Example #26
    def _load(self):
        # Figure out where to load this configuration from.
        paths = [self._theme_path, self._path] + self._custom_paths
        paths = list(filter(lambda i: i is not None, paths))

        # Build the cache-key.
        path_times = [os.path.getmtime(p) for p in paths]
        cache_key_hash = hashlib.md5(
                ("version=%s&cache=%d" % (
                    APP_VERSION, CACHE_VERSION)).encode('utf8'))
        for p in paths:
            cache_key_hash.update(("&path=%s" % p).encode('utf8'))
        cache_key = cache_key_hash.hexdigest()

        # Check the cache for a valid version.
        if self._cache.isValid('config.json', path_times):
            logger.debug("Loading configuration from cache...")
            config_text = self._cache.read('config.json')
            self._values = json.loads(
                    config_text,
                    object_pairs_hook=collections.OrderedDict)

            actual_cache_key = self._values.get('__cache_key')
            if actual_cache_key == cache_key:
                # The cached version has the same key! Awesome!
                self._values['__cache_valid'] = True
                return
            logger.debug("Outdated cache key '%s' (expected '%s')." % (
                    actual_cache_key, cache_key))

        # Nope, load from the paths.
        try:
            # Theme config.
            theme_values = {}
            if self._theme_path:
                theme_values = self._loadFrom(self._theme_path)

            # Site config.
            site_values = {}
            if self._path:
                site_values = self._loadFrom(self._path)

            # Combine!
            logger.debug("Processing loaded configurations...")
            values = self._combineConfigs(theme_values, site_values)

            # Load additional paths.
            if self._custom_paths:
                logger.debug("Loading %d additional configuration paths." %
                             len(self._custom_paths))
                for p in self._custom_paths:
                    loaded = self._loadFrom(p)
                    if loaded:
                        merge_dicts(values, loaded)

            # Run final fixups
            if self._post_fixups:
                logger.debug("Applying %d configuration fixups." %
                             len(self._post_fixups))
                for f in self._post_fixups:
                    f(values)

            self._values = self._validateAll(values)
        except Exception as ex:
            logger.exception(ex)
            raise Exception(
                    "Error loading configuration from: %s" %
                    ', '.join(paths)) from ex

        logger.debug("Caching configuration...")
        self._values['__cache_key'] = cache_key
        config_text = json.dumps(self._values)
        self._cache.write('config.json', config_text)

        self._values['__cache_valid'] = False
Example #27
def _create_hentry(data):
    name = _mf2get(data, 'name')
    summary = _mf2get(data, 'summary')
    location = _mf2get(data, 'location')
    reply_to = _mf2get(data, 'in-reply-to')
    status = _mf2get(data, 'post-status')
    # pubdate = _mf2get(data, 'published') or 'now'

    categories = data.get('category')

    # Get the content.
    post_format = None
    content = _mf2get(data, 'content')
    if isinstance(content, dict):
        content = content.get('html')
        post_format = 'none'
    if not content:
        logger.error("No content specified!")
        logger.error(data)
        abort(400)

    # Clean-up stuff.
    # TODO: setting to conserve Windows-type line endings?
    content = content.replace('\r\n', '\n')
    if summary:
        summary = summary.replace('\r\n', '\n')

    # Get the slug.
    slug = _mf2get(data, 'slug') or _mf2get(data, 'mp-slug')
    now = datetime.datetime.now()
    if not slug:
        slug = '%02d%02d%02d' % (now.hour, now.minute, now.second)

    # Create the post in the correct content source.
    # Note that this won't actually write anything to disk yet, we're
    # just creating it in memory.
    pcapp = g.site.piecrust_app
    source_name = pcapp.config.get('micropub/source', 'posts')
    source = pcapp.getSource(source_name)

    metadata = {
        'date': now,
        'slug': slug
    }
    logger.debug("Creating item with metadata: %s" % metadata)
    content_item = source.createContent(metadata)
    if content_item is None:
        logger.error("Can't create item for: %s" % metadata)
        abort(500)

    paths_to_commit = []

    # Get the media to attach to the post.
    photos = None
    if 'photo' in request.files:
        photos = [request.files['photo']]
    elif 'photo[]' in request.files:
        photos = request.files.getlist('photo[]')
    photo_urls = data.get('photo')

    # Create the assets folder if we have anything to put there.
    # TODO: add proper APIs for creating related assets.
    if photo_urls or photos:
        photo_dir, _ = os.path.splitext(content_item.spec)
        photo_dir += '-assets'
        try:
            os.makedirs(photo_dir, mode=0o775, exist_ok=True)
        except OSError:
            # An `OSError` can still be raised in older versions of Python
            # if the permissions don't match an existing folder.
            # Let's ignore it.
            pass

    # Photo URLs come from files uploaded via the media endpoint...
    # They're waiting for us in the upload cache folder, so let's
    # move them to the post's assets folder.
    photo_names = []
    if photo_urls:
        photo_cache_dir = os.path.join(
            g.site.root_dir,
            CACHE_DIR, g.site.piecrust_factory.cache_key,
            'uploads')

        p_thumb_size = pcapp.config.get('micropub/resize_photos', 800)

        for p_url in photo_urls:
            _, __, p_fn = p_url.rpartition('/')
            p_cache_path = os.path.join(photo_cache_dir, p_fn)
            p_asset_path = os.path.join(photo_dir, p_fn)
            logger.info("Moving upload '%s' to '%s'." %
                        (p_cache_path, p_asset_path))
            try:
                os.rename(p_cache_path, p_asset_path)
                paths_to_commit.append(p_asset_path)
            except OSError:
                logger.error("Can't move '%s' to '%s'." %
                             (p_cache_path, p_asset_path))
                raise

            p_fn_no_ext, _ = os.path.splitext(p_fn)
            if p_thumb_size > 0:
                from PIL import Image
                im = Image.open(p_asset_path)
                im.thumbnail((p_thumb_size, p_thumb_size))
                p_thumb_path = os.path.join(photo_dir,
                                            '%s_thumb.jpg' % p_fn_no_ext)
                im.save(p_thumb_path)
                paths_to_commit.append(p_thumb_path)

                p_thumb_no_ext = '%s_thumb' % p_fn_no_ext
                photo_names.append((p_thumb_no_ext, p_fn_no_ext))
            else:
                photo_names.append((p_fn_no_ext, None))

    # There could also be some files uploaded along with the post
    # so upload them right now.
    if photos:
        for photo in photos:
            if not photo or not photo.filename:
                logger.warning("Got empty photo in request files... skipping.")
                continue

            fn = secure_filename(photo.filename)
            fn = re_unsafe_asset_char.sub('_', fn)
            photo_path = os.path.join(photo_dir, fn)
            logger.info("Uploading file to: %s" % photo_path)
            photo.save(photo_path)
            paths_to_commit.append(photo_path)

            # TODO: generate thumbnail.

            fn_no_ext, _ = os.path.splitext(fn)
            photo_names.append((fn_no_ext, None))

    # Build the config.
    do_publish = True
    post_config = {}
    if name:
        post_config['title'] = name
    if categories:
        post_config['tags'] = categories
    if location:
        post_config['location'] = location
    if reply_to:
        post_config['reply_to'] = reply_to
    if status and status != 'published':
        post_config['draft'] = True
        do_publish = False
    if post_format:
        post_config['format'] = post_format
    post_config['time'] = '%02d:%02d:%02d' % (now.hour, now.minute, now.second)

    # If there's no title, this is a "microblogging" post.
    if not name:
        micro_config = pcapp.config.get('micropub/microblogging')
        if micro_config:
            merge_dicts(post_config, micro_config)

    logger.debug("Writing to item: %s" % content_item.spec)
    paths_to_commit.append(content_item.spec)
    with source.openItem(content_item, mode='w', encoding='utf8') as fp:
        fp.write('---\n')
        yaml.dump(post_config, fp,
                  default_flow_style=False,
                  allow_unicode=True)
        fp.write('---\n')

        if summary:
            fp.write(summary)
            fp.write('\n')
            fp.write('<!--break-->\n\n')
        fp.write(content)

        if photo_names:
            fp.write('\n\n')
            for pthumb, pfull in photo_names:
                if pfull:
                    fp.write('<a href="{{assets["%s"]}}">'
                             '<img src="{{assets["%s"]}}" alt="%s"/>'
                             '</a>\n\n' %
                             (pfull, pthumb, pthumb))
                else:
                    fp.write('<img src="{{assets["%s"]}}" alt="%s"/>\n\n' %
                             (pthumb, pthumb))

        if os.supports_fd:
            import stat
            try:
                os.chmod(
                    fp.fileno(),
                    stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP)
            except OSError:
                pass

    autocommit = pcapp.config.get('micropub/autocommit', False)
    if autocommit:
        scm = g.site.scm
        if scm:
            commit_msg = None
            if isinstance(autocommit, dict):
                commit_msg = autocommit.get('message')
            if not commit_msg:
                post_title = post_config.get('title')
                if post_title:
                    commit_msg = "New post: %s" % post_title
                else:
                    commit_msg = "New post"
            logger.debug("Commit files: %s" % paths_to_commit)
            scm.commit(paths_to_commit, commit_msg)

    return source_name, content_item, do_publish
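
The micropub/microblogging merge near the end is what gives title-less entries their microblog-style front matter. A hedged sketch of that single step, with invented configuration keys (the code only assumes that whatever sits under that setting is a dictionary it can merge in):

# Invented values: post_config as built above for an untitled post, plus a
# hypothetical 'micropub/microblogging' section from the site configuration.
post_config = {'time': '08:15:30'}
micro_config = {'format': 'none', 'tags': ['micro']}  # hypothetical keys
merge_dicts(post_config, micro_config)
# post_config == {'time': '08:15:30', 'format': 'none', 'tags': ['micro']},
# which then gets dumped as YAML into the new post's front matter.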
Example #28
def test_merge_dicts(local, incoming, expected):
    local2 = copy.deepcopy(local)
    merge_dicts(local2, incoming)
    assert local2 == expected
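
Taken together, these examples suggest merge semantics along the following lines: keys from the second dictionary are copied into the first, nested dictionaries are merged recursively, and incoming values override existing ones. The sketch below captures that assumption (it is not PieCrust's actual implementation), along with the kind of cases the test above could be parametrized with:

# Assumed semantics only; the real merge_dicts may handle more cases
# (lists, validators, several incoming dicts at once).
def merge_dicts_sketch(local, incoming):
    for key, val in incoming.items():
        if isinstance(val, dict) and isinstance(local.get(key), dict):
            merge_dicts_sketch(local[key], val)  # recurse into nested dicts
        else:
            local[key] = val  # incoming value wins
    return local


# Invented (local, incoming, expected) triples in the spirit of the test:
assert merge_dicts_sketch({'a': 1}, {'a': 2, 'b': 3}) == {'a': 2, 'b': 3}
expected = {'site': {'title': 'x', 'author': 'y'}}
assert merge_dicts_sketch({'site': {'title': 'x'}},
                          {'site': {'author': 'y'}}) == expected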