Code example #1
File: worker.py  Project: sinistersnare/PieCrust2
    def initialize(self):
        # Create the app local to this worker.
        app = PieCrust(self.ctx.root_dir, debug=self.ctx.debug)
        app._useSubCacheDir(self.ctx.sub_cache_dir)
        app.config.set('baker/is_baking', True)
        app.env.base_asset_url_format = '%uri%'
        app.env.fs_cache_only_for_main_page = True
        app.env.registerTimer("BakeWorker_%d_Total" % self.wid)
        app.env.registerTimer("BakeWorkerInit")
        app.env.registerTimer("JobReceive")
        apply_variant_and_values(app, self.ctx.config_variant,
                                 self.ctx.config_values)
        self.ctx.app = app

        # Load previous record
        if self.ctx.previous_record_path:
            self.ctx.previous_record = BakeRecord.load(
                    self.ctx.previous_record_path)
            self.ctx.previous_record_index = {}
            for e in self.ctx.previous_record.entries:
                key = _get_transition_key(e.path, e.taxonomy_info)
                self.ctx.previous_record_index[key] = e

        # Create the job handlers.
        job_handlers = {
                JOB_LOAD: LoadJobHandler(self.ctx),
                JOB_RENDER_FIRST: RenderFirstSubJobHandler(self.ctx),
                JOB_BAKE: BakeJobHandler(self.ctx)}
        for jt, jh in job_handlers.items():
            app.env.registerTimer(type(jh).__name__)
        self.job_handlers = job_handlers

        app.env.stepTimerSince("BakeWorkerInit", self.work_start_time)
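
Example #1 builds a dictionary index over the previous bake record so each page's earlier state can be found without scanning every entry. A minimal, hypothetical illustration of that lookup (the helper name below is assumed, not part of the code above):

    # Hypothetical sketch: constant-time lookup into the index built in
    # initialize(); returns None when the page has no previous entry.
    def find_previous_entry(previous_record_index, path, taxonomy_info):
        key = _get_transition_key(path, taxonomy_info)
        return previous_record_index.get(key)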
Code example #2
    def initialize(self):
        # Create the app local to this worker.
        app = PieCrust(self.ctx.root_dir, debug=self.ctx.debug)
        app._useSubCacheDir(self.ctx.sub_cache_dir)
        app.config.set('baker/is_baking', True)
        app.config.set('baker/worker_id', self.wid)
        app.env.base_asset_url_format = '%uri%'
        app.env.fs_cache_only_for_main_page = True
        app.env.registerTimer("BakeWorker_%d_Total" % self.wid)
        app.env.registerTimer("BakeWorkerInit")
        app.env.registerTimer("JobReceive")
        apply_variant_and_values(app, self.ctx.config_variant,
                                 self.ctx.config_values)
        self.ctx.app = app

        # Load previous record
        if self.ctx.previous_record_path:
            self.ctx.previous_record = BakeRecord.load(
                    self.ctx.previous_record_path)
            self.ctx.previous_record_index = {}
            for e in self.ctx.previous_record.entries:
                key = _get_transition_key(e.path, e.taxonomy_info)
                self.ctx.previous_record_index[key] = e

        # Create the job handlers.
        job_handlers = {
                JOB_LOAD: LoadJobHandler(self.ctx),
                JOB_RENDER_FIRST: RenderFirstSubJobHandler(self.ctx),
                JOB_BAKE: BakeJobHandler(self.ctx)}
        for jt, jh in job_handlers.items():
            app.env.registerTimer(type(jh).__name__)
        self.job_handlers = job_handlers

        app.env.stepTimerSince("BakeWorkerInit", self.work_start_time)
Code example #3
File: worker.py  Project: germanschnyder/PieCrust2
    def initialize(self):
        # Create the app local to this worker.
        app = self.ctx.appfactory.create()
        app.config.set('baker/is_baking', True)
        app.config.set('baker/worker_id', self.wid)
        app.env.base_asset_url_format = '%uri%'
        app.env.fs_cache_only_for_main_page = True
        app.env.registerTimer("BakeWorker_%d_Total" % self.wid)
        app.env.registerTimer("BakeWorkerInit")
        app.env.registerTimer("JobReceive")
        app.env.registerCounter("SourceUseAbortions")
        app.env.registerManifest("LoadJobs")
        app.env.registerManifest("RenderJobs")
        app.env.registerManifest("BakeJobs")
        self.ctx.app = app

        # Load previous record
        if self.ctx.previous_record_path:
            self.ctx.previous_record = BakeRecord.load(
                    self.ctx.previous_record_path)
            self.ctx.previous_record_index = {}
            for e in self.ctx.previous_record.entries:
                key = _get_transition_key(e.path, e.extra_key)
                self.ctx.previous_record_index[key] = e

        # Create the job handlers.
        job_handlers = {
                JOB_LOAD: LoadJobHandler(self.ctx),
                JOB_RENDER_FIRST: RenderFirstSubJobHandler(self.ctx),
                JOB_BAKE: BakeJobHandler(self.ctx)}
        for jt, jh in job_handlers.items():
            app.env.registerTimer(type(jh).__name__)
        self.job_handlers = job_handlers

        app.env.stepTimerSince("BakeWorkerInit", self.work_start_time)
Code example #4
File: baking.py  Project: germanschnyder/PieCrust2
    def _getBakeRecord(self, ctx, record_name):
        record_cache = ctx.app.cache.getCache('baker')
        if not record_cache.has(record_name):
            logger.warning(
                    "No page bake record has been created for this output "
                    "path.")
            return None

        record = BakeRecord.load(record_cache.getCachePath(record_name))
        return record
Code example #5
File: baking.py  Project: thhgcn/PieCrust2
    def _showBakeRecord(self, ctx, record_name, pattern, out_pattern):
        # Show the bake record.
        record_cache = ctx.app.cache.getCache('baker')
        if not record_cache.has(record_name):
            raise Exception("No record has been created for this output path. "
                            "Did you bake there yet?")

        record = BakeRecord.load(record_cache.getCachePath(record_name))
        logging.info("Bake record for: %s" % record.out_dir)
        logging.info("From: %s" % record_name)
        logging.info("Last baked: %s" %
                     datetime.datetime.fromtimestamp(record.bake_time))
        if record.success:
            logging.info("Status: success")
        else:
            logging.error("Status: failed")
        logging.info("Entries:")
        for entry in record.entries:
            if pattern and not fnmatch.fnmatch(entry.path, pattern):
                continue
            if out_pattern and not (
                    any([o for o in entry.out_paths
                         if fnmatch.fnmatch(o, out_pattern)])):
                continue

            flags = _get_flag_descriptions(
                    entry.flags,
                    {
                        BakeRecordEntry.FLAG_NEW: 'new',
                        BakeRecordEntry.FLAG_SOURCE_MODIFIED: 'modified',
                        BakeRecordEntry.FLAG_OVERRIDEN: 'overriden'})

            logging.info(" - ")

            rel_path = os.path.relpath(entry.path, ctx.app.root_dir)
            logging.info("   path:      %s" % rel_path)
            logging.info("   source:    %s" % entry.source_name)
            if entry.taxonomy_info:
                ti = entry.taxonomy_info
                logging.info("   taxonomy:  %s = %s (in %s)" %
                             (ti.taxonomy_name, ti.term, ti.source_name))
            else:
                logging.info("   taxonomy:  <none>")
            logging.info("   flags:     %s" % _join(flags))
            logging.info("   config:    %s" % entry.config)

            if entry.errors:
                logging.error("   errors: %s" % entry.errors)

            logging.info("   %d sub-pages:" % len(entry.subs))
            for sub in entry.subs:
                sub_flags = _get_flag_descriptions(
                        sub.flags,
                        {
                            SubPageBakeInfo.FLAG_BAKED: 'baked',
                            SubPageBakeInfo.FLAG_FORCED_BY_SOURCE:
                                'forced by source',
                            SubPageBakeInfo.FLAG_FORCED_BY_NO_PREVIOUS:
                                'forced by missing previous record entry',
                            SubPageBakeInfo.FLAG_FORCED_BY_PREVIOUS_ERRORS:
                                'forced by previous errors',
                            SubPageBakeInfo.FLAG_FORMATTING_INVALIDATED:
                                'formatting invalidated'})

                logging.info("   - ")
                logging.info("     URL:    %s" % sub.out_uri)
                logging.info("     path:   %s" % os.path.relpath(
                        sub.out_path, record.out_dir))
                logging.info("     flags:  %s" % _join(sub_flags))

                if sub.render_info:
                    pass_names = {
                            PASS_FORMATTING: 'formatting pass',
                            PASS_RENDERING: 'rendering pass'}
                    for p, ri in sub.render_info.items():
                        logging.info("     - %s" % pass_names[p])
                        logging.info("       used sources:  %s" %
                                     _join(ri.used_source_names))
                        pgn_info = 'no'
                        if ri.used_pagination:
                            pgn_info = 'yes'
                        if ri.pagination_has_more:
                            pgn_info += ', has more'
                        logging.info("       used pagination: %s", pgn_info)
                        logging.info("       used assets: %s",
                                     'yes' if ri.used_assets else 'no')
                        logging.info("       used terms: %s" %
                                     _join(
                                            ['%s=%s (%s)' % (tn, t, sn)
                                             for sn, tn, t in
                                             ri.used_taxonomy_terms]))
                else:
                    logging.info("     no render info")

                if sub.errors:
                    logging.error("   errors: %s" % sub.errors)
Code example #6
File: baking.py  Project: kinow/PieCrust2
    def _showBakeRecord(self, ctx, record_name, pattern, out_pattern):
        # Show the bake record.
        record_cache = ctx.app.cache.getCache('baker')
        if not record_cache.has(record_name):
            raise Exception("No record has been created for this output path. "
                            "Did you bake there yet?")

        record = BakeRecord.load(record_cache.getCachePath(record_name))
        logging.info("Bake record for: %s" % record.out_dir)
        logging.info("From: %s" % record_name)
        logging.info("Last baked: %s" %
                     datetime.datetime.fromtimestamp(record.bake_time))
        if record.success:
            logging.info("Status: success")
        else:
            logging.error("Status: failed")
        logging.info("Entries:")
        for entry in record.entries:
            if pattern and not fnmatch.fnmatch(entry.path, pattern):
                continue
            if out_pattern and not (any([
                    o for o in entry.out_paths
                    if fnmatch.fnmatch(o, out_pattern)
            ])):
                continue

            flags = _get_flag_descriptions(
                entry.flags, {
                    BakeRecordEntry.FLAG_NEW: 'new',
                    BakeRecordEntry.FLAG_SOURCE_MODIFIED: 'modified',
                    BakeRecordEntry.FLAG_OVERRIDEN: 'overriden'
                })

            logging.info(" - ")

            rel_path = os.path.relpath(entry.path, ctx.app.root_dir)
            logging.info("   path:      %s" % rel_path)
            logging.info("   source:    %s" % entry.source_name)
            if entry.taxonomy_info:
                ti = entry.taxonomy_info
                logging.info("   taxonomy:  %s = %s (in %s)" %
                             (ti.taxonomy_name, ti.term, ti.source_name))
            else:
                logging.info("   taxonomy:  <none>")
            logging.info("   flags:     %s" % _join(flags))
            logging.info("   config:    %s" % entry.config)

            if entry.errors:
                logging.error("   errors: %s" % entry.errors)

            logging.info("   %d sub-pages:" % len(entry.subs))
            for sub in entry.subs:
                sub_flags = _get_flag_descriptions(
                    sub.flags, {
                        SubPageBakeInfo.FLAG_BAKED:
                        'baked',
                        SubPageBakeInfo.FLAG_FORCED_BY_SOURCE:
                        'forced by source',
                        SubPageBakeInfo.FLAG_FORCED_BY_NO_PREVIOUS:
                        'forced by missing previous record entry',
                        SubPageBakeInfo.FLAG_FORCED_BY_PREVIOUS_ERRORS:
                        'forced by previous errors',
                        SubPageBakeInfo.FLAG_FORMATTING_INVALIDATED:
                        'formatting invalidated'
                    })

                logging.info("   - ")
                logging.info("     URL:    %s" % sub.out_uri)
                logging.info("     path:   %s" %
                             os.path.relpath(sub.out_path, record.out_dir))
                logging.info("     flags:  %s" % _join(sub_flags))

                if sub.render_info:
                    pass_names = {
                        PASS_FORMATTING: 'formatting pass',
                        PASS_RENDERING: 'rendering pass'
                    }
                    for p, ri in sub.render_info.items():
                        logging.info("     - %s" % pass_names[p])
                        logging.info("       used sources:  %s" %
                                     _join(ri.used_source_names))
                        pgn_info = 'no'
                        if ri.used_pagination:
                            pgn_info = 'yes'
                        if ri.pagination_has_more:
                            pgn_info += ', has more'
                        logging.info("       used pagination: %s", pgn_info)
                        logging.info("       used assets: %s",
                                     'yes' if ri.used_assets else 'no')
                        logging.info("       used terms: %s" % _join([
                            '%s=%s (%s)' % (tn, t, sn)
                            for sn, tn, t in ri.used_taxonomy_terms
                        ]))
                else:
                    logging.info("     no render info")

                if sub.errors:
                    logging.error("   errors: %s" % sub.errors)
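
Examples #5 and #6 call a _get_flag_descriptions helper that is not shown on this page. A plausible minimal sketch, assuming it simply maps the set bits of a flags bitmask to their labels (this is an assumption, not the project's actual implementation):

    def _get_flag_descriptions(flags, descriptions):
        # Assumed behaviour: collect the label of every flag bit that is set.
        res = []
        for flag, label in descriptions.items():
            if flags & flag:
                res.append(label)
        return res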
Code example #7
File: baking.py  Project: qman1989/PieCrust2
    def run(self, ctx):
        out_dir = ctx.args.output or os.path.join(ctx.app.root_dir, '_counter')
        record_id = hashlib.md5(out_dir.encode('utf8')).hexdigest()
        suffix = '' if ctx.args.last == 0 else '.%d' % ctx.args.last
        record_name = '%s%s.record' % (record_id, suffix)

        pattern = None
        if ctx.args.path:
            pattern = '*%s*' % ctx.args.path.strip('*')

        out_pattern = None
        if ctx.args.out:
            out_pattern = '*%s*' % ctx.args.out.strip('*')

        record_cache = ctx.app.cache.getCache('baker')
        if not record_cache.has(record_name):
            raise Exception("No record has been created for this output path. "
                            "Did you bake there yet?")

        # Show the bake record.
        record = BakeRecord.load(record_cache.getCachePath(record_name))
        logging.info("Bake record for: %s" % record.out_dir)
        logging.info("From: %s" % record_name)
        logging.info("Last baked: %s" %
                     datetime.datetime.fromtimestamp(record.bake_time))
        if record.success:
            logging.info("Status: success")
        else:
            logging.error("Status: failed")
        logging.info("Entries:")
        for entry in record.entries:
            if pattern and not fnmatch.fnmatch(entry.rel_path, pattern):
                continue
            if out_pattern and not (
                    any([o for o in entry.out_paths
                         if fnmatch.fnmatch(o, out_pattern)])):
                continue

            flags = []
            if entry.flags & BakeRecordPageEntry.FLAG_OVERRIDEN:
                flags.append('overriden')

            passes = {PASS_RENDERING: 'render', PASS_FORMATTING: 'format'}

            logging.info(" - ")
            logging.info("   path:      %s" % entry.rel_path)
            logging.info("   spec:      %s:%s" % (entry.source_name,
                                                  entry.rel_path))
            if entry.taxonomy_info:
                tn, t, sn = entry.taxonomy_info
                logging.info("   taxonomy:  %s (%s:%s)" %
                             (t, sn, tn))
            else:
                logging.info("   taxonomy:  <none>")
            logging.info("   flags:     %s" % ', '.join(flags))
            logging.info("   config:    %s" % entry.config)

            logging.info("   %d sub-pages:" % len(entry.subs))
            for sub in entry.subs:
                logging.info("   - ")
                logging.info("     URL:    %s" % sub.out_uri)
                logging.info("     path:   %s" % os.path.relpath(sub.out_path,
                                                                 out_dir))
                logging.info("     baked?: %s" % sub.was_baked)

                sub_flags = []
                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_SOURCE:
                    sub_flags.append('forced by source')
                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_NO_PREVIOUS:
                    sub_flags.append('forced by missing previous record entry')
                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_PREVIOUS_ERRORS:
                    sub_flags.append('forced by previous errors')
                logging.info("     flags:  %s" % ', '.join(sub_flags))

                for p, pi in sub.render_passes.items():
                    logging.info("     %s pass:"******"       used srcs:  %s" %
                                 ', '.join(pi.used_source_names))
                    logging.info("       used terms: %s" %
                                 ', '.join(
                                        ['%s (%s:%s)' % (t, sn, tn)
                                         for sn, tn, t in pi.used_taxonomy_terms]))

                if sub.errors:
                    logging.error("   errors: %s" % sub.errors)

            logging.info("   assets:    %s" % ', '.join(entry.assets))
            if entry.errors:
                logging.error("   errors: %s" % entry.errors)

        record_cache = ctx.app.cache.getCache('proc')
        if not record_cache.has(record_name):
            return

        # Show the pipeline record.
        record = ProcessorPipelineRecord.load(
                record_cache.getCachePath(record_name))
        logging.info("")
        logging.info("Processing record for: %s" % record.out_dir)
        logging.info("Last baked: %s" %
                     datetime.datetime.fromtimestamp(record.process_time))
        if record.success:
            logging.info("Status: success")
        else:
            logging.error("Status: failed")
        logging.info("Entries:")
        for entry in record.entries:
            if pattern and not fnmatch.fnmatch(entry.rel_input, pattern):
                continue
            if out_pattern and not (
                    any([o for o in entry.rel_outputs
                         if fnmatch.fnmatch(o, out_pattern)])):
                continue

            flags = []
            if entry.flags & FLAG_PREPARED:
                flags.append('prepared')
            if entry.flags & FLAG_PROCESSED:
                flags.append('processed')
            if entry.flags & FLAG_OVERRIDEN:
                flags.append('overriden')
            if entry.flags & FLAG_BYPASSED_STRUCTURED_PROCESSING:
                flags.append('external')
            logger.info(" - ")
            logger.info("   path:      %s" % entry.rel_input)
            logger.info("   out paths: %s" % entry.rel_outputs)
            logger.info("   flags:     %s" % flags)
            logger.info("   proc tree: %s" % format_proc_tree(
                    entry.proc_tree, 14*' '))
            if entry.errors:
                logger.error("   errors: %s" % entry.errors)
Code example #8
File: baking.py  Project: qman1989/PieCrust2
    def run(self, ctx):
        out_dir = ctx.args.output or os.path.join(ctx.app.root_dir, '_counter')
        record_id = hashlib.md5(out_dir.encode('utf8')).hexdigest()
        suffix = '' if ctx.args.last == 0 else '.%d' % ctx.args.last
        record_name = '%s%s.record' % (record_id, suffix)

        pattern = None
        if ctx.args.path:
            pattern = '*%s*' % ctx.args.path.strip('*')

        out_pattern = None
        if ctx.args.out:
            out_pattern = '*%s*' % ctx.args.out.strip('*')

        record_cache = ctx.app.cache.getCache('baker')
        if not record_cache.has(record_name):
            raise Exception("No record has been created for this output path. "
                            "Did you bake there yet?")

        # Show the bake record.
        record = BakeRecord.load(record_cache.getCachePath(record_name))
        logging.info("Bake record for: %s" % record.out_dir)
        logging.info("From: %s" % record_name)
        logging.info("Last baked: %s" %
                     datetime.datetime.fromtimestamp(record.bake_time))
        if record.success:
            logging.info("Status: success")
        else:
            logging.error("Status: failed")
        logging.info("Entries:")
        for entry in record.entries:
            if pattern and not fnmatch.fnmatch(entry.rel_path, pattern):
                continue
            if out_pattern and not (any([
                    o for o in entry.out_paths
                    if fnmatch.fnmatch(o, out_pattern)
            ])):
                continue

            flags = []
            if entry.flags & BakeRecordPageEntry.FLAG_OVERRIDEN:
                flags.append('overriden')

            passes = {PASS_RENDERING: 'render', PASS_FORMATTING: 'format'}

            logging.info(" - ")
            logging.info("   path:      %s" % entry.rel_path)
            logging.info("   spec:      %s:%s" %
                         (entry.source_name, entry.rel_path))
            if entry.taxonomy_info:
                tn, t, sn = entry.taxonomy_info
                logging.info("   taxonomy:  %s (%s:%s)" % (t, sn, tn))
            else:
                logging.info("   taxonomy:  <none>")
            logging.info("   flags:     %s" % ', '.join(flags))
            logging.info("   config:    %s" % entry.config)

            logging.info("   %d sub-pages:" % len(entry.subs))
            for sub in entry.subs:
                logging.info("   - ")
                logging.info("     URL:    %s" % sub.out_uri)
                logging.info("     path:   %s" %
                             os.path.relpath(sub.out_path, out_dir))
                logging.info("     baked?: %s" % sub.was_baked)

                sub_flags = []
                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_SOURCE:
                    sub_flags.append('forced by source')
                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_NO_PREVIOUS:
                    sub_flags.append('forced by missing previous record entry')
                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_PREVIOUS_ERRORS:
                    sub_flags.append('forced by previous errors')
                logging.info("     flags:  %s" % ', '.join(sub_flags))

                for p, pi in sub.render_passes.items():
                    logging.info("     %s pass:"******"       used srcs:  %s" %
                                 ', '.join(pi.used_source_names))
                    logging.info("       used terms: %s" % ', '.join([
                        '%s (%s:%s)' % (t, sn, tn)
                        for sn, tn, t in pi.used_taxonomy_terms
                    ]))

                if sub.errors:
                    logging.error("   errors: %s" % sub.errors)

            logging.info("   assets:    %s" % ', '.join(entry.assets))
            if entry.errors:
                logging.error("   errors: %s" % entry.errors)

        record_cache = ctx.app.cache.getCache('proc')
        if not record_cache.has(record_name):
            return

        # Show the pipeline record.
        record = ProcessorPipelineRecord.load(
            record_cache.getCachePath(record_name))
        logging.info("")
        logging.info("Processing record for: %s" % record.out_dir)
        logging.info("Last baked: %s" %
                     datetime.datetime.fromtimestamp(record.process_time))
        if record.success:
            logging.info("Status: success")
        else:
            logging.error("Status: failed")
        logging.info("Entries:")
        for entry in record.entries:
            if pattern and not fnmatch.fnmatch(entry.rel_input, pattern):
                continue
            if out_pattern and not (any([
                    o for o in entry.rel_outputs
                    if fnmatch.fnmatch(o, out_pattern)
            ])):
                continue

            flags = []
            if entry.flags & FLAG_PREPARED:
                flags.append('prepared')
            if entry.flags & FLAG_PROCESSED:
                flags.append('processed')
            if entry.flags & FLAG_OVERRIDEN:
                flags.append('overriden')
            if entry.flags & FLAG_BYPASSED_STRUCTURED_PROCESSING:
                flags.append('external')
            logger.info(" - ")
            logger.info("   path:      %s" % entry.rel_input)
            logger.info("   out paths: %s" % entry.rel_outputs)
            logger.info("   flags:     %s" % flags)
            logger.info("   proc tree: %s" %
                        format_proc_tree(entry.proc_tree, 14 * ' '))
            if entry.errors:
                logger.error("   errors: %s" % entry.errors)
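
Examples #7 and #8 derive the record file name from the output directory before loading it from the 'baker' cache. That naming scheme, extracted into a standalone sketch (the helper name get_record_name is ours, not PieCrust2's):

    import hashlib

    def get_record_name(out_dir, last=0):
        # The cache key is the MD5 hex digest of the output directory;
        # older records get a numeric suffix before the '.record' extension.
        record_id = hashlib.md5(out_dir.encode('utf8')).hexdigest()
        suffix = '' if last == 0 else '.%d' % last
        return '%s%s.record' % (record_id, suffix)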