Example #1
    def _createLoadJobs(self, ctx):
        # Here we load all the pages in the source, making sure they all
        # have a valid cache for their configuration and contents.
        jobs = []
        for item in self.source.getAllContents():
            jobs.append(create_job(self, item.spec))
        if len(jobs) > 0:
            return jobs
        return None
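
Throughout these examples, create_job is called as create_job(pipeline,
item_spec, **kwargs). A minimal sketch of such a helper, assuming jobs are
plain dicts that carry the source name, the item spec, and any extra
parameters; this is an illustration inferred from the call sites, not
necessarily the real implementation:

    def create_job(pipeline, item_spec, **kwargs):
        # Identify the job by the originating source and the item spec;
        # extra keyword arguments (year=..., term=..., pass_num=...)
        # travel along as job data. (Hypothetical sketch.)
        job = {'job_spec': (pipeline.source.name, item_spec)}
        job.update(kwargs)
        return job
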
Example #2
    def createJobs(self, ctx):
        logger.debug("Building blog archives for: %s" % self.inner_source.name)
        self._buildDirtyYears(ctx)
        logger.debug("Got %d dirty years out of %d." %
                     (len(self._dirty_years), len(self._all_years)))

        jobs = []
        rec_fac = self.createRecordEntry
        current_record = ctx.current_record

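        # Queue one render job per dirty year, and register a record entry
        # for it right away so it shows up in the current bake record.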
        for y in self._dirty_years:
            item_spec = '_index[%04d]' % y

            jobs.append(create_job(self, item_spec, year=y))

            entry = rec_fac(item_spec)
            current_record.addEntry(entry)

        if len(jobs) > 0:
            return jobs, "archive"
        return None, None
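
Note the item spec convention here: '_index[%04d]' % y names the synthetic
archive page for a given year (e.g. '_index[2023]'), and the taxonomy
pipeline below uses the same '_index[...]' pattern with a slugified term
in place of the year.
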
Example #3
    def createJobs(self, ctx):
        logger.debug("Building '%s' taxonomy pages for source: %s" %
                     (self.taxonomy.name, self.inner_source.name))
        self._analyzer = _TaxonomyTermsAnalyzer(self, ctx.record_histories)
        self._analyzer.analyze()

        logger.debug(
            "Queuing %d '%s' jobs." %
            (len(self._analyzer.dirty_slugified_terms), self.taxonomy.name))
        jobs = []
        rec_fac = self.createRecordEntry
        current_record = ctx.current_record

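        # Same pattern as the archive pipeline above: one job per dirty
        # term, plus a record entry so the bake record tracks the page.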
        for slugified_term in self._analyzer.dirty_slugified_terms:
            item_spec = '_index[%s]' % slugified_term

            jobs.append(create_job(self, item_spec, term=slugified_term))

            entry = rec_fac(item_spec)
            current_record.addEntry(entry)

        if len(jobs) > 0:
            return jobs, "taxonomize"
        return None, None
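
Unlike the simple load-jobs helper in Example #1, both the archive and
taxonomy pipelines return a (jobs, label) pair; the second element
("archive" or "taxonomize") appears to tag the kind of work being queued,
with (None, None) signalling that there is nothing to do.
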
Example #4
    def _createSegmentJobs(self, ctx):
        jobs = []

        app = self.app
        pass_num = ctx.pass_num
        out_dir = self.ctx.out_dir
        uri_getter = self.source.route.getUri
        pretty_urls = app.config.get('site/pretty_urls')

        history = ctx.record_histories.getHistory(ctx.record_name).copy()
        history.build()

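        # Remember every output path this pipeline claims, so that other
        # pipelines (and the override check below) can detect collisions.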
        cur_rec_used_paths = {}
        history.current.user_data['used_paths'] = cur_rec_used_paths
        all_records = ctx.record_histories.current.records

        for prev, cur in history.diffs:
            # Ignore pages that disappeared since last bake.
            if cur is None:
                continue

            # Skip draft pages.
            if cur.hasFlag(PagePipelineRecordEntry.FLAG_IS_DRAFT):
                continue

            # Skip pages that haven't changed since last bake.
            if (prev and not cur.hasFlag(
                    PagePipelineRecordEntry.FLAG_SOURCE_MODIFIED)):
                continue

            # For pages that are known to use other sources in their own
            # content segments (we don't care about the layout yet), we
            # postpone them to the next pipeline pass immediately, because they
            # might need populated render caches for those sources' pages.
            if prev:
                usn1, _ = prev.getAllUsedSourceNames()
                if usn1:
                    logger.debug("Postponing: %s" % cur.item_spec)
                    cur.flags |= \
                        PagePipelineRecordEntry.FLAG_ABORTED_FOR_SOURCE_USE
                    continue

            # Check if this item has been overridden by a previous pipeline
            # run... for instance, we could be the pipeline for a "theme
            # pages" source, and some of our pages have been overridden by
            # a user page that writes out to the same URL.
            uri = uri_getter(cur.route_params)
            out_path = get_output_path(app, out_dir, uri, pretty_urls)
            override = _find_used_path_spec(all_records, out_path)
            if override is not None:
                override_source_name, override_entry_spec = override
                override_source = app.getSource(override_source_name)
                if override_source.config['realm'] == \
                        self.source.config['realm']:
                    logger.error(
                        "Page '%s' would get baked to '%s' "
                        "but is overriden by '%s'." %
                        (cur.item_spec, out_path, override_entry_spec))
                else:
                    logger.debug(
                        "Page '%s' would get baked to '%s' "
                        "but is overriden by '%s'." %
                        (cur.item_spec, out_path, override_entry_spec))

                cur.flags |= PagePipelineRecordEntry.FLAG_OVERRIDEN
                continue

            # Nope, all good, let's create a job for this item.
            cur.flags |= PagePipelineRecordEntry.FLAG_SEGMENTS_RENDERED
            cur_rec_used_paths[out_path] = cur.item_spec

            jobs.append(create_job(self, cur.item_spec, pass_num=pass_num))

        if len(jobs) > 0:
            return jobs
        return None
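
A note on _find_used_path_spec, used above: it looks up an output path in
the 'used_paths' user data that each record publishes (see the
cur_rec_used_paths setup at the top of this method) and yields a
(source_name, item_spec) pair, or None. One plausible implementation,
inferred only from the call sites (hypothetical; the real helper may
differ, e.g. in how the source name is derived from the record):

    def _find_used_path_spec(records, path):
        # Scan every record for a 'used_paths' map claiming this path.
        for rec in records:
            used_paths = rec.user_data.get('used_paths')
            if used_paths is not None:
                item_spec = used_paths.get(path)
                if item_spec is not None:
                    # Assume the record's name identifies its source.
                    return (rec.name, item_spec)
        return None
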
Example #5
    def _createLayoutJobs(self, ctx):
        # Get the list of all sources that had anything baked.
        dirty_source_names = set()
        all_records = ctx.record_histories.current.records
        for rec in all_records:
            rec_dsn = rec.user_data.get('dirty_source_names')
            if rec_dsn:
                dirty_source_names |= rec_dsn

        jobs = []
        pass_num = ctx.pass_num
        history = ctx.record_histories.getHistory(ctx.record_name).copy()
        history.build()
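        # Compare this bake's record against the previous one; entries
        # that disappeared or were overridden need no layout work.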
        for prev, cur in history.diffs:
            if not cur or cur.hasFlag(PagePipelineRecordEntry.FLAG_OVERRIDEN):
                continue

            do_bake = False
            force_segments = False
            force_layout = False

            # Make sure we bake the layout for pages that got their segments
            # re-rendered.
            if cur.hasFlag(PagePipelineRecordEntry.FLAG_SEGMENTS_RENDERED):
                do_bake = True

            # Now look at the stuff we baked for our own source on the second
            # pass.  For anything that wasn't baked (i.e. it was considered 'up
            # to date') we look at the records from last time, and if they say
            # that some page was using a source that is "dirty", then we force
            # bake it.
            #
            # The common example for this is a blog index page which hasn't
            # been touched, but needs to be re-baked because someone added or
            # edited a post.
            if prev:
                usn1, usn2 = prev.getAllUsedSourceNames()
                force_segments = any(
                    u in dirty_source_names for u in usn1)
                force_layout = any(
                    u in dirty_source_names for u in usn2)

                if force_segments or force_layout:
                    # Yep, we need to force-rebake some aspect of this page.
                    do_bake = True

                elif not do_bake:
                    # This page uses other sources, but no source was dirty
                    # this time around (it was a null build, maybe). We
                    # don't have any work to do, but we need to carry over
                    # any information we have, otherwise the post bake step
                    # will think we need to delete last bake's outputs.
                    cur.subs = copy.deepcopy(prev.subs)
                    for cur_sub in cur.subs:
                        cur_sub['flags'] = \
                            SubPageFlags.FLAG_COLLAPSED_FROM_LAST_RUN

            if do_bake:
                jobs.append(
                    create_job(self,
                               cur.item_spec,
                               pass_num=pass_num,
                               force_segments=force_segments,
                               force_layout=force_layout))

        if len(jobs) > 0:
            return jobs
        return None