def queue_index_actions(blog):
    '''
    Pushes to the publishing queue all the index pages for a given blog
    that are marked for Immediate publishing.

    :param blog: The blog object whose Immediate-mode index templates
        will be pushed to the publishing queue.
    :raises Template.DoesNotExist: if the blog has no index templates
        set to Immediate publishing mode.
    '''
    templates = Template.select().where(
        Template.blog == blog,
        Template.template_type == template_type.index,
        Template.publishing_mode == publishing_mode.immediate)

    # FIX: the original raised a bare Template.DoesNotExist inside a try
    # block purely so its own except clause could catch it and re-raise
    # with a message. Raise the descriptive exception directly instead.
    if templates.count() == 0:
        raise Template.DoesNotExist(
            "No index templates exist for blog {}.".format(blog.for_log))

    mappings = TemplateMapping.select().where(
        TemplateMapping.template << templates)

    fileinfos = FileInfo.select().where(
        FileInfo.template_mapping << mappings)

    # One queue job per index fileinfo; priority 1 (indexes publish early).
    for f in fileinfos:
        push_to_queue(job_type=job_type.index,
                      priority=1,
                      blog=blog,
                      site=blog.site,
                      data_integer=f.id)
def delete_page_fileinfo(page):
    '''
    Deletes the fileinfo entry associated with a specific page. This does
    not delete fileinfos that are general archives for that page, only
    fileinfos associated with page templates.

    :param page: The page object to remove from the fileinfo index.
    '''
    # We should probably move this to models.Page
    from core.models import Queue
    from core.cms.queue import job_type

    # Query FileInfo directly rather than using page.fileinfos, because
    # that property automatically regenerates any missing fileinfos.
    # TODO: have an option, false by default, to suppress that
    doomed_fileinfos = FileInfo.select().where(FileInfo.page == page)

    for fileinfo in doomed_fileinfos:
        # Purge any pending page-type queue jobs that reference this
        # fileinfo before removing the fileinfo itself.
        Queue.delete().where(
            Queue.job_type == job_type.page,
            Queue.data_integer == fileinfo.id,
            Queue.blog == page.blog,
        ).execute()
        fileinfo.delete_instance()
def queue_index_actions(blog, include_manual=False):
    '''
    Pushes to the publishing queue all the index pages for a given blog
    that are marked for Immediate publishing.

    :param blog: The blog object whose index templates will be pushed
        to the queue.
    :param include_manual: If set to True, all templates, including
        those set to the Manual publishing mode, will be pushed to the
        queue. Default is False, since those templates are not pushed
        in most publishing actions.
    :raises Template.DoesNotExist: if no publishable index templates
        exist for the blog.
    '''
    index_templates = blog.index_templates.select().where(
        Template.publishing_mode != publishing_mode.do_not_publish)

    # Unless manual templates were explicitly requested, restrict the
    # set to Immediate-mode templates only.
    if include_manual is False:
        index_templates = index_templates.select().where(
            Template.publishing_mode == publishing_mode.immediate)

    if index_templates.count() == 0:
        raise Template.DoesNotExist(
            "No valid index templates exist for blog {}.".format(
                blog.for_log))

    template_mappings = TemplateMapping.select().where(
        TemplateMapping.template << index_templates)

    index_fileinfos = FileInfo.select().where(
        FileInfo.template_mapping << template_mappings)

    for fileinfo in index_fileinfos:
        Queue.push(job_type=job_type.index,
                   priority=1,
                   blog=blog,
                   site=blog.site,
                   data_integer=fileinfo.id)
def queue_index_actions(blog, include_manual=False):
    '''
    Pushes to the publishing queue all the index pages for a given blog
    that are marked for Immediate publishing.

    :param blog: The blog object whose index templates will be pushed
        to the queue.
    :param include_manual: If set to True, all templates, including
        those set to the Manual publishing mode, will be pushed to the
        queue. Default is False, since those templates are not pushed
        in most publishing actions.
    :raises Template.DoesNotExist: if no publishable index templates
        exist for the blog.
    '''
    # Start with every index template that is publishable at all.
    publishable = blog.index_templates.select().where(
        Template.publishing_mode != publishing_mode.do_not_publish)

    if include_manual is False:
        # Narrow to Immediate-mode templates for ordinary publishing runs.
        publishable = publishable.select().where(
            Template.publishing_mode == publishing_mode.immediate)

    if publishable.count() == 0:
        raise Template.DoesNotExist(
            "No valid index templates exist for blog {}.".format(blog.for_log))

    related_mappings = TemplateMapping.select().where(
        TemplateMapping.template << publishable)

    related_fileinfos = FileInfo.select().where(
        FileInfo.template_mapping << related_mappings)

    for info in related_fileinfos:
        Queue.push(job_type=job_type.index,
                   priority=1,
                   blog=blog,
                   site=blog.site,
                   data_integer=info.id)
def queue_page_archive_actions(page):
    '''
    Pushes to the publishing queue all the page archives for a given
    page object.

    :param page: The page object whose archives will be pushed to the
        publishing queue.
    :raises QueueAddError: if an archive template for the page could
        not be queued; the original exception is chained as the cause.
    '''
    #===========================================================================
    # NOTE: I tried to speed this up by checking the list of fileinfos
    # related to mappings for the page (if any), and then pushing those
    # if they exist, but I haven't seen evidence it does anything tangible
    # for performance.
    # I need to double-check that old mappings are in fact invalidated
    # when they are changed.
    #===========================================================================
    archive_templates = page.blog.archive_templates
    # NOTE(review): tags appears unused below; template_tags() may have
    # side effects relied on elsewhere — confirm before removing.
    tags = template_tags(page=page)

    for n in archive_templates:
        try:
            if n.publishing_mode != publishing_mode.do_not_publish:
                fileinfo_mappings = FileInfo.select().where(
                    FileInfo.page == page,
                    FileInfo.template_mapping << n.mappings)
                if fileinfo_mappings.count() == 0:
                    # No cached fileinfos for this template; build them now.
                    fileinfo_mappings = build_archives_fileinfos_by_mappings(
                        n, (page, ))
                if len(fileinfo_mappings) == 0:
                    logger.info(
                        'No archive fileinfos could be built for page {} with template {}'
                        .format(page.for_log, n.for_log))
                else:
                    for fileinfo_mapping in fileinfo_mappings:
                        Queue.push(job_type=job_type.archive,
                                   blog=page.blog,
                                   site=page.blog.site,
                                   priority=7,
                                   data_integer=fileinfo_mapping.id)
        except Exception as e:
            from core.error import QueueAddError
            # FIX: the original format string had only two placeholders for
            # three arguments, so the underlying exception was silently
            # dropped from the message. Include it and chain the cause.
            raise QueueAddError(
                'Archive template {} for page {} could not be queued: {}'.format(
                    n, page.for_log, e)) from e
def queue_page_archive_actions(page):
    '''
    Pushes to the publishing queue all the page archives for a given
    page object.

    :param page: The page object whose archives will be pushed to the
        publishing queue.
    :raises QueueAddError: if an archive template for the page could
        not be queued; the original exception is chained as the cause.
    '''
    #===========================================================================
    # NOTE: I tried to speed this up by checking the list of fileinfos
    # related to mappings for the page (if any), and then pushing those
    # if they exist, but I haven't seen evidence it does anything tangible
    # for performance.
    # I need to double-check that old mappings are in fact invalidated
    # when they are changed.
    #===========================================================================
    archive_templates = page.blog.archive_templates
    # NOTE(review): tags appears unused below; template_tags() may have
    # side effects relied on elsewhere — confirm before removing.
    tags = template_tags(page=page)

    for n in archive_templates:
        try:
            if n.publishing_mode != publishing_mode.do_not_publish:
                fileinfo_mappings = FileInfo.select().where(
                    FileInfo.page == page,
                    FileInfo.template_mapping << n.mappings)
                if fileinfo_mappings.count() == 0:
                    # No cached fileinfos for this template; build them now.
                    fileinfo_mappings = build_archives_fileinfos_by_mappings(
                        n, (page,))
                if len(fileinfo_mappings) == 0:
                    logger.info(
                        'No archive fileinfos could be built for page {} with template {}'.format(
                            page.for_log, n.for_log))
                else:
                    for fileinfo_mapping in fileinfo_mappings:
                        Queue.push(job_type=job_type.archive,
                                   blog=page.blog,
                                   site=page.blog.site,
                                   priority=7,
                                   data_integer=fileinfo_mapping.id)
        except Exception as e:
            from core.error import QueueAddError
            # FIX: the original format string had only two placeholders for
            # three arguments, so the underlying exception was silently
            # dropped from the message. Include it and chain the cause.
            raise QueueAddError(
                'Archive template {} for page {} could not be queued: {}'.format(
                    n, page.for_log, e)) from e
def queue_archive_template_fast(template_id, action, pass_id=0):
    '''
    Queues a template's archive fileinfos in batches of 50, chaining
    successive passes via an HTTP Refresh header until no fileinfos
    remain, then redirects to the publish or queue page.
    '''
    from core.models import Template, Queue
    from core import cms
    from core.libs.bottle import HTTPResponse

    template = Template.load(template_id)
    blog = template.blog

    response = HTTPResponse()

    batch = FileInfo.select().where(
        FileInfo.template_mapping << template.mappings).paginate(pass_id, 50)

    # TODO: if action is fast and no fileinfos present, redirect to full rebuild?

    if batch.count() == 0:
        # Nothing left to queue; bounce the browser to the follow-up page.
        response.body = "Queue insertion finished."
        redir = 'publish' if action == 'publish' else 'queue'
        response.add_header('Refresh', "0;{}/blog/{}/{}".format(
            BASE_PATH, blog.id, redir))
        return response

    response.body = "Adding {}".format(pass_id * 50)
    for fileinfo in batch:
        Queue.push(job_type=cms.queue.job_type.archive,
                   blog=blog,
                   site=blog.site,
                   data_integer=fileinfo.id)
    # Refresh immediately into the next pass of the same handler.
    response.add_header('Refresh', "0;{}/template/{}/{}/fast/{}".format(
        BASE_PATH, template_id, action, pass_id + 1))
    return response
def queue_archive_template_fast(template_id, action, pass_id=0):
    '''
    Queues a template's archive fileinfos 50 at a time, using an HTTP
    Refresh header to re-enter this handler for the next batch until
    every fileinfo has been pushed, then redirects appropriately.
    '''
    from core.models import Template, Queue
    from core import cms
    from core.libs.bottle import HTTPResponse

    template = Template.load(template_id)
    blog = template.blog

    reply = HTTPResponse()

    current_batch = FileInfo.select().where(
        FileInfo.template_mapping << template.mappings).paginate(pass_id, 50)

    # TODO: if action is fast and no fileinfos present, redirect to full rebuild?

    if current_batch.count() > 0:
        reply.body = "Adding {}".format(pass_id * 50)
        for entry in current_batch:
            Queue.push(job_type=cms.queue.job_type.archive,
                       blog=blog,
                       site=blog.site,
                       data_integer=entry.id)
        # Chain into the next pass of the same handler.
        next_pass = pass_id + 1
        reply.add_header('Refresh', "0;{}/template/{}/{}/fast/{}".format(
            BASE_PATH, template_id, action, next_pass))
    else:
        reply.body = "Queue insertion finished."
        # Send the browser to the publish page only for publish actions.
        if action == 'publish':
            destination = 'publish'
        else:
            destination = 'queue'
        reply.add_header('Refresh', "0;{}/blog/{}/{}".format(
            BASE_PATH, blog.id, destination))

    return reply
def queue_page_actions(page, no_neighbors=False, no_archive=False):
    '''
    Pushes a Page object along with all its related items into the queue
    for publication. This includes any archive indices associated with the
    page, and the page's next and previous entries in its respective
    categories.

    Note that this will only queue items that are actually set to be
    published.

    :param page: The Page object whose actions are to be queued.
    :param no_neighbors: Set to True to suppress generation of
        next/previous posts. Useful if you've loaded all the posts for a
        blog into a queue.
    :param no_archive: Set to True to suppress generation of archive pages
        associated with this page. Also useful for mass-queued actions.
    '''
    blog = page.blog
    site = blog.site

    def _push_page_jobs(fileinfos):
        # Push one page-type queue job per fileinfo.
        for fileinfo in fileinfos:
            push_to_queue(job_type=job_type.page,
                          blog=blog,
                          site=site,
                          data_integer=fileinfo.id)

    # page.fileinfos is used here (rather than a raw FileInfo.select())
    # because it regenerates any missing fileinfos.
    _push_page_jobs(page.fileinfos)

    if no_archive is False:
        queue_page_archive_actions(page)

    if no_neighbors is False:
        # Next and previous across categories should be done through this
        # mechanism somehow
        for neighbor in (page.next_page, page.previous_page):
            if neighbor is not None:
                _push_page_jobs(
                    FileInfo.select().where(FileInfo.page == neighbor))
                queue_page_archive_actions(neighbor)