def blog_publish(blog_id):
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_publisher(user, blog)

    queue_length = Queue.job_counts(blog=blog)

    if queue_length > 0:
        start_message = template('queue/queue_run_include',
                                 queue=Queue.jobs(blog),
                                 percentage_complete=0,
                                 blog=blog,
                                 break_path='{}/blog/{}/publish/break'.format(
                                     BASE_URL, blog.id))
        Queue.start(blog, queue_length)
    else:
        start_message = "Queue empty."

    tags = template_tags(blog_id=blog.id, user=user)

    return template('queue/queue_run_ui',
                    start=queue_length,
                    start_message=start_message,
                    action_url="../../blog/{}/publish/progress/{}".format(
                        blog.id, queue_length),
                    title='Publishing queue progress',
                    # search_context=(search_context['blog_queue'], blog),
                    menu=generate_menu('blog_queue', blog),
                    **tags.__dict__)
def blog_publish_process(blog_id):
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_publisher(user, blog)

    control_jobs = Queue.control_jobs(blog)

    if control_jobs.count() > 0:
        queue_count = q_p(blog)
        # queue_count = transaction(queue.process_queue)(blog)
        time.sleep(RETRY_INTERVAL * 5)
    else:
        jobs = Queue.jobs(blog)
        if jobs.count() > 0:
            queue_count = jobs.count()
            Queue.start(blog, queue_count)
            queue_count = q_p(blog)
            # queue_count = transaction(queue.process_queue)(blog)
            time.sleep(RETRY_INTERVAL * 5)
        else:
            queue_count = 0

    import settings
    return template('queue/queue_counter_include',
                    blog=blog,
                    settings=settings,
                    queue_count=queue_count)
def queue_index_actions(blog, include_manual=False):
    '''
    Pushes to the publishing queue all the index pages for a given blog
    that are marked for Immediate publishing.

    :param blog:
        The blog object whose index templates will be pushed to the queue.
    :param include_manual:
        If set to True, all templates, including those set to the Manual
        publishing mode, will be pushed to the queue. Default is False,
        since those templates are not pushed in most publishing actions.
    '''
    templates = blog.index_templates.select().where(
        Template.publishing_mode != publishing_mode.do_not_publish)

    if include_manual is False:
        templates = templates.select().where(
            Template.publishing_mode == publishing_mode.immediate)

    if templates.count() == 0:
        raise Template.DoesNotExist(
            "No valid index templates exist for blog {}.".format(blog.for_log))

    mappings = TemplateMapping.select().where(
        TemplateMapping.template << templates)

    fileinfos = FileInfo.select().where(FileInfo.template_mapping << mappings)

    for f in fileinfos:
        Queue.push(job_type=job_type.index,
                   priority=1,
                   blog=blog,
                   site=blog.site,
                   data_integer=f.id)
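# Hypothetical usage sketch for queue_index_actions: queue every eligible
# index template for a blog, then kick off a queue run. `example_blog` is
# an assumed Blog instance; the other names come from this module.
example_blog = Blog.load(1)
try:
    queue_index_actions(example_blog, include_manual=True)
except Template.DoesNotExist:
    pass  # the blog has no index templates eligible for publishing
else:
    Queue.start(example_blog, Queue.job_counts(blog=example_blog))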
def delete_page_fileinfo(page):
    '''
    Deletes the fileinfo entry associated with a specific page.
    This does not delete fileinfos that are general archives for that page,
    only fileinfos associated with page templates.

    :param page:
        The page object to remove from the fileinfo index.
    '''
    # We should probably move this to models.Page

    from core.models import Queue
    from core.cms.queue import job_type

    fileinfos_to_delete = FileInfo.select().where(FileInfo.page == page)
    # We don't use page.fileinfos because that automatically
    # regenerates any missing fileinfos.
    # TODO: have an option, false by default, to suppress that

    for n in fileinfos_to_delete:
        Queue.delete().where(
            Queue.job_type == job_type.page,
            Queue.data_integer == n.id,
            Queue.blog == page.blog,
        ).execute()
        n.delete_instance()
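# Hypothetical delete flow using delete_page_fileinfo: purge queued page
# jobs and fileinfo entries before removing the page itself. `Page.load`
# is assumed to mirror the Blog.load/Template.load loaders used here.
page = Page.load(42)
delete_page_fileinfo(page)
page.delete_instance()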
def blog_queue_clear(blog_id):
    '''
    Clear all pending jobs out of the queue
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_publisher(user, blog)

    Queue.clear(blog)
def process_queue(blog):
    '''
    Processes the jobs currently in the queue for the selected blog.
    '''
    with db.atomic():

        queue_control = publishing_lock(blog, True)

        if queue_control is None:
            return 0

        queue_control.data_string = 'Running'
        queue_control.save()

        queue = Queue.select().order_by(
            Queue.priority.desc(),
            Queue.date_touched.desc()).where(
                Queue.blog == blog,
                Queue.is_control == False).limit(MAX_BATCH_OPS)

        queue_length = queue.count()

        if queue_length > 0:
            logger.info("Queue job #{} @ {} (blog #{}, {} items) started.".format(
                queue_control.id,
                date_format(queue_control.date_touched),
                queue_control.blog.id,
                queue_length))

        for q in queue:
            try:
                job_type.action[q.job_type](q)
            except BaseException:
                raise
            else:
                remove_from_queue(q.id)

        queue_control = Queue.get(Queue.blog == blog,
                                  Queue.is_control == True)

        queue_control.data_integer -= queue_length

        if queue_control.data_integer <= 0:
            queue_control.delete_instance()
            logger.info("Queue job #{} @ {} (blog #{}) finished.".format(
                queue_control.id,
                date_format(queue_control.date_touched),
                queue_control.blog.id))
        else:
            queue_control.data_string = 'Paused'
            queue_control.save()
            logger.info("Queue job #{} @ {} (blog #{}) continuing with {} items left.".format(
                queue_control.id,
                date_format(queue_control.date_touched),
                queue_control.blog.id,
                queue_length))

    return queue_control.data_integer
def process_queue(blog):
    '''
    Processes the jobs currently in the queue for the selected blog.
    '''
    queue_control = Queue.acquire(blog, True)

    if queue_control is None:
        return 0

    if queue_control.job_type == job_type.control:
        process_queue_publish(queue_control, blog)
    elif queue_control.job_type == job_type.insert:
        process_queue_insert(queue_control, blog)

    return Queue.job_counts(blog=blog)
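# Hypothetical end-to-end publish cycle built from the functions in this
# module: queue a blog's pages and indexes, start a control job sized to
# the queue, then drain it in batches until nothing remains. `example_blog`
# and the use of blog.pages.published as a Page iterable are assumptions.
example_blog = Blog.load(1)
queue_page_actions(example_blog.pages.published, no_neighbors=True)
queue_index_actions(example_blog)
Queue.start(example_blog, Queue.job_counts(blog=example_blog))
while process_queue(example_blog) > 0:
    time.sleep(RETRY_INTERVAL)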
def blog_publish_progress(blog_id, original_queue_length):
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_publisher(user, blog)

    queue_count = 0

    control_jobs = Queue.control_jobs(blog)

    if control_jobs.count() > 0:
        # queue_count = queue.process_queue(blog)
        # queue_count = transaction(queue.process_queue)(blog)
        queue_count = q_p(blog)
        time.sleep(RETRY_INTERVAL * 5)
    else:
        queue_count = 0

    percentage_complete = int(
        (1 - (int(queue_count) / int(original_queue_length))) * 100)

    import settings
    return template('queue/queue_run_include',
                    queue_count=queue_count,
                    blog=blog,
                    break_path='{}/blog/{}/publish/break'.format(
                        BASE_URL, blog.id),
                    settings=settings,
                    percentage_complete=percentage_complete)
def system_queue():
    user = auth.is_logged_in(request)
    permission = auth.is_sys_admin(user)

    queue = Queue.select().order_by(Queue.site.asc(),
                                    Queue.blog.asc(),
                                    Queue.job_type.asc(),
                                    Queue.date_touched.desc())

    return listing(request, None, queue, 'queue', 'system_queue', user=user)
def queue_page_archive_actions(page):
    '''
    Pushes to the publishing queue all the page archives for a given page object.

    :param page:
        The page object whose archives will be pushed to the publishing queue.
    '''
    #===========================================================================
    # NOTE: I tried to speed this up by checking the list of fileinfos
    # related to mappings for the page (if any), and then pushing those
    # if they exist, but I haven't seen evidence it does anything tangible
    # for performance.
    # I need to double-check that old mappings are in fact invalidated
    # when they are changed.
    #===========================================================================
    archive_templates = page.blog.archive_templates
    tags = template_tags(page=page)

    for n in archive_templates:
        try:
            if n.publishing_mode != publishing_mode.do_not_publish:
                fileinfo_mappings = FileInfo.select().where(
                    FileInfo.page == page,
                    FileInfo.template_mapping << n.mappings)
                if fileinfo_mappings.count() == 0:
                    fileinfo_mappings = build_archives_fileinfos_by_mappings(
                        n, (page,))
                if len(fileinfo_mappings) == 0:
                    logger.info(
                        'No archive fileinfos could be built for page {} with template {}'.format(
                            page.for_log, n.for_log))
                else:
                    for fileinfo_mapping in fileinfo_mappings:
                        Queue.push(job_type=job_type.archive,
                                   blog=page.blog,
                                   site=page.blog.site,
                                   priority=7,
                                   data_integer=fileinfo_mapping.id)
        except Exception as e:
            from core.error import QueueAddError
            # The format string previously dropped the exception text;
            # a third placeholder is needed for `e`.
            raise QueueAddError(
                'Archive template {} for page {} could not be queued: {}'.format(
                    n, page.for_log, e))
def queue_archive_template_fast(template_id, action, pass_id=0):
    from core.models import Template, Queue
    template = Template.load(template_id)
    blog = template.blog
    from core import cms
    from core.libs.bottle import HTTPResponse
    r = HTTPResponse()

    fileinfos = FileInfo.select().where(
        FileInfo.template_mapping << template.mappings).paginate(pass_id, 50)

    # TODO: if action is fast and no fileinfos present, redirect to full rebuild?

    if fileinfos.count() > 0:
        r.body = "Adding {}".format(pass_id * 50)
        for f in fileinfos:
            Queue.push(job_type=cms.queue.job_type.archive,
                       blog=blog,
                       site=blog.site,
                       data_integer=f.id)
        pass_id += 1
        r.add_header('Refresh', "0;{}/template/{}/{}/fast/{}".format(
            BASE_PATH, template_id, action, pass_id))
    else:
        r.body = "Queue insertion finished."
        if action == 'publish':
            redir = 'publish'
        else:
            redir = 'queue'
        r.add_header('Refresh', "0;{}/blog/{}/{}".format(
            BASE_PATH, blog.id, redir))

    return r
def remove_from_queue(queue_id):
    '''
    Removes a specific job ID from the queue.

    :param queue_id:
        The ID number of the job queue item to remove.
    '''
    queue_delete = Queue.get(Queue.id == queue_id)
    return queue_delete.delete_instance()
def queue_ssi_actions(blog):
    '''
    Pushes to the publishing queue all the SSIs for a given blog.

    :param blog:
        The blog object whose SSI templates will be pushed to the queue.
    '''
    templates = blog.ssi_templates.select()
    if templates.count() == 0:
        return None

    for n in templates:
        for f in n.fileinfos:
            Queue.push(job_type=job_type.include,
                       priority=10,
                       blog=blog,
                       site=blog.site,
                       data_integer=f.id)
def check_publishing_lock(blog, action_description, warn_only=False):
    '''
    Checks for a publishing lock and returns a status message if busy.
    '''
    try:
        Queue.acquire(blog)
    except QueueInProgressException as e:
        msg = "{} is not available right now. Proceed with caution. Reason: {}".format(
            action_description, e)
        if warn_only is True:
            return Status(
                type='warning',
                message=msg,
                no_sure=True,
            )
        else:
            raise QueueInProgressException(msg)
    else:
        return None
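# Hypothetical guard at the top of an editing view: surface a warning
# Status when a publish run holds the lock, but let the user proceed.
# Attaching the Status to `tags.status` is an assumption for illustration.
status = check_publishing_lock(blog, 'Template editing', warn_only=True)
if status is not None:
    tags.status = status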
def queue_archive_template_all(template_id, action, pass_id=0):
    from core.models import Template, Queue
    template = Template.load(template_id)
    blog = template.blog
    from core.cms import fileinfo, queue
    from core.libs.bottle import HTTPResponse
    r = HTTPResponse()

    pages = blog.pages.published.paginate(pass_id, 50)

    if pages.count() > 0:
        r.body = "Adding {}".format(pass_id * 50)
        for f in fileinfo.build_archives_fileinfos_by_mappings(template, pages):
            Queue.push(job_type=queue.job_type.archive,
                       blog=blog,
                       site=blog.site,
                       data_integer=f.id)
        pass_id += 1
        r.add_header('Refresh', "0;{}/template/{}/{}/all/{}".format(
            BASE_PATH, template_id, action, pass_id))
    else:
        r.body = "Queue insertion finished."
        if action == 'publish':
            redir = 'publish'
        else:
            redir = 'queue'
        r.add_header('Refresh', "0;{}/blog/{}/{}".format(
            BASE_PATH, blog.id, redir))

    return r
def blog_break_queue(blog_id):
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_publisher(user, blog)

    Queue.stop(blog)

    tags = template_tags(blog=blog, user=user)

    return template('queue/queue_run_ui',
                    start=None,
                    action_url='',
                    start_message='''
<p>Queue publishing stopped. Note that queued items are still in the queue,
and may still be processed on the next queue run.</p>
<p><a href="{}/blog/{}/queue/clear"><button class="btn">Clear the queue</button></a>
to remove them entirely.</p>
'''.format(BASE_URL, blog_id),
                    title='Publishing queue progress',
                    # search_context=(search_context['blog_queue'], blog),
                    menu=generate_menu('blog_queue', blog),
                    **tags.__dict__)
def publishing_lock(blog, return_queue=False):
    '''
    Checks to see if a publishing job for a given blog is currently running.
    If it is, it raises an exception.
    If the return_queue flag is set, it returns the queue_control object instead.
    If no job is locked, then it returns None.
    '''
    try:
        queue_control = Queue.get(Queue.blog == blog,
                                  Queue.is_control == True)
    except Queue.DoesNotExist:
        return None

    if return_queue is True:
        return queue_control
    else:
        raise QueueInProgressException(
            "Publishing job currently running for blog {}".format(
                queue_control.blog.for_log))
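# Hypothetical helper showing both call patterns of publishing_lock;
# the wrapper function itself is an assumption for illustration.
def describe_lock(blog):
    # As a guard: raises QueueInProgressException when a run is live.
    try:
        publishing_lock(blog)
    except QueueInProgressException as e:
        print("Busy: {}".format(e))
    # As a lookup: returns the control job, or None when idle.
    queue_control = publishing_lock(blog, return_queue=True)
    if queue_control is not None:
        print("{} jobs remaining".format(queue_control.data_integer))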
def manage_queue(request, queue_id=None):
    # Try to locate an existing Queue to use as the form instance;
    # if none is found, create a new one for a blank form.
    # common_data = common_ticket_data()
    if request.user.is_superuser:
        if queue_id:
            actual_queue = get_object_or_404(Queue, pk=queue_id)
        else:
            actual_queue = Queue()

        # POST mode
        if request.method == 'POST':
            form = QueueForm(request.POST, instance=actual_queue)
            if form.is_valid():
                form.save()
                return redirect("/settings/queue")
        else:
            # Non-POST mode, show only
            form = QueueForm(instance=actual_queue)

        return render(request, 'queues/create_edit_queue.html', locals())
    else:
        raise Http404
def template_save(request, user, cms_template, blog=None):
    '''
    Core logic for saving changes to a template.
    '''
    # TODO: move the bulk of this into the actual model
    # the .getunicode stuff should be moved out,
    # make that part of the ui
    # we should just submit cms_template as self,
    # make whatever mods to it are needed in the ui func,
    # and perform the validation we did elsewhere, perhaps

    from core.cms import fileinfo, invalidate_cache
    from core.utils import is_blank
    from core.error import TemplateSaveException, PageNotChanged
    import datetime

    status = []

    _forms = request.forms

    cms_template.title = _forms.getunicode('template_title')
    cms_template.body = _forms.getunicode('template_body')

    if is_blank(cms_template.title):
        cms_template.title = "New Template (#{})".format(cms_template.id)

    mode = _forms.getunicode('publishing_mode')

    if mode in publishing_mode.modes:
        cms_template.publishing_mode = mode
    else:
        raise TemplateSaveException("Invalid publishing mode selected.")

    cms_template.modified_date = datetime.datetime.utcnow()

    try:
        cms_template.save(user)
    except PageNotChanged as e:
        status.append("(Template unchanged.)")
    except Exception as e:
        raise e

    new_mappings = []

    for n in _forms:
        if n.startswith('template_mapping_'):
            mapping_id = int(n[len('template_mapping_'):])
            try:
                template_mapping = TemplateMapping.get(
                    TemplateMapping.id == mapping_id)
            except TemplateMapping.DoesNotExist:
                raise TemplateSaveException(
                    'Template mapping with ID #{} does not exist.'.format(
                        mapping_id))
            else:
                if is_blank(_forms.getunicode(n)):
                    raise TemplateSaveException(
                        'Template mapping #{} ({}) cannot be blank. Use None to specify no mapping.'.format(
                            mapping_id, template_mapping.path_string))
                else:
                    if _forms.getunicode(n) != template_mapping.path_string:
                        template_mapping.path_string = _forms.getunicode(n)
                        new_mappings.append(template_mapping)

    for n in new_mappings:
        n.save()
        status.append("Mapping #{} ({}) rebuilt.".format(n.id, n.path_string))

    if new_mappings:
        fileinfo.build_mapping_xrefs(new_mappings)
        build_action = "all"
    else:
        build_action = "fast"

    invalidate_cache()

    # TODO: eventually everything after this will be removed b/c of AJAX save
    # tags = template_tags(template_id=cms_template.id, user=user)

    save_action = _forms.getunicode('save')

    from core.libs.bottle import response
    from settings import BASE_URL
    from core.models import Queue

    x_open = False

    if int(save_action) in (2, 3):

        if cms_template.template_type == template_type.page:
            x_open = True
            response.add_header('X-Open',
                                '{}/template/{}/queue/{}'.format(
                                    BASE_URL, cms_template.id, build_action))

        if cms_template.template_type == template_type.archive:
            x_open = True
            response.add_header('X-Open',
                                '{}/template/{}/queue/{}'.format(
                                    BASE_URL, cms_template.id, build_action))

        if cms_template.template_type in (template_type.include,
                                          template_type.index):

            # I don't think this is needed anymore, we can remove it
            # TODO: test it
            # if new_mappings:
            #     cms.build_archives_fileinfos_by_mappings(cms_template)

            for f in cms_template.fileinfos_published:
                Queue.push(job_type=f.template_mapping.template.template_type,
                           blog=cms_template.blog,
                           site=cms_template.blog.site,
                           data_integer=f.id)

            status.append(
                "{} files regenerated from template and sent to publishing queue.".format(
                    cms_template.fileinfos_published.count()))

    if blog is not None:
        blog.theme_modified = True
        blog.save()

    from core.log import logger
    logger.info("Template {} edited by user {}.".format(
        cms_template.for_log, user.for_log))

    response.body = ' '.join(status)

    if x_open:
        return response
    else:
        return response.body
def queue_page_actions(pages, no_neighbors=False, no_archive=False):
    '''
    Pushes a Page object along with all its related items into the queue
    for publication. This includes any archive indices associated with the
    page, and the page's next and previous entries in its respective
    categories.

    Note that this will only queue items that are actually set to be
    published.

    :param pages:
        An iterable of Page objects whose actions are to be queued.
    :param no_neighbors:
        Set to True to suppress generation of next/previous posts. Useful
        if you've loaded all the posts for a blog into a queue and don't
        need to have this performed here.
    :param no_archive:
        Set to True to suppress generation of archive pages associated
        with this page. Also useful for mass-queued actions.
    '''
    if pages is None:
        return

    for page in pages:

        if page is None:
            continue

        try:
            blog, site = page.blog, page.blog.site

            for f in page.fileinfos:
                if f.template_mapping.template.publishing_mode != publishing_mode.do_not_publish:
                    Queue.push(job_type=job_type.page,
                               blog=blog,
                               site=site,
                               priority=8,
                               data_integer=f.id)

            if no_archive is False:
                queue_page_archive_actions(page)

            if no_neighbors is False:

                next_page = page.next_page
                previous_page = page.previous_page

                if next_page is not None:
                    for f in next_page.fileinfos:
                        if f.template_mapping.template.publishing_mode != publishing_mode.do_not_publish:
                            Queue.push(job_type=job_type.page,
                                       blog=blog,
                                       site=site,
                                       priority=8,
                                       data_integer=f.id)
                    queue_page_archive_actions(next_page)

                if previous_page is not None:
                    for f in previous_page.fileinfos:
                        if f.template_mapping.template.publishing_mode != publishing_mode.do_not_publish:
                            Queue.push(job_type=job_type.page,
                                       blog=blog,
                                       site=site,
                                       priority=8,
                                       data_integer=f.id)
                    queue_page_archive_actions(previous_page)

        except OperationalError as e:
            raise e
        except Exception as e:
            from core.error import QueueAddError
            # The format string previously dropped the exception text;
            # a second placeholder is needed for `e`.
            raise QueueAddError('Page {} could not be queued: {}'.format(
                page.for_log, e))
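# Hypothetical call after saving a single page: wrap the page in a tuple;
# during a bulk run, suppress neighbor regeneration since every page will
# be queued anyway. `page` is assumed to be a saved Page instance.
queue_page_actions((page,), no_neighbors=True)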
scheduled_pages = Page.select().where(
    Page.status == page_status.scheduled,
    Page.publication_date <= datetime.datetime.utcnow()).order_by(
        Page.publication_date.desc())

total_pages = scheduled_pages.count()
print('{} pages scheduled'.format(total_pages))

if total_pages > 0:
    for p in scheduled_pages.select(Page.blog).distinct():
        b = p.blog
        blogs_to_check[b.id] = b

queue_count = Queue.select(Queue.blog).distinct()
if queue_count.count() > 0:
    for n in queue_count:
        b = n.blog
        print('Blog {} has existing queue items'.format(b.id))
        blogs_to_check[b.id] = b

if blogs_to_check:
    print("Starting run.")
    from core.cms.queue import (queue_page_actions, queue_index_actions,
                                queue_ssi_actions)
    from core.models import db
    from core.log import logger
    from time import sleep
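# The snippet above ends at its imports; this is a hypothetical sketch of
# the per-blog loop they suggest, modeled on the queue functions in this
# module rather than taken from the original source.
for b in blogs_to_check.values():
    pages_for_blog = scheduled_pages.where(Page.blog == b)
    queue_page_actions(pages_for_blog, no_neighbors=True)
    queue_index_actions(b)
    queue_ssi_actions(b)
    Queue.start(b, Queue.job_counts(blog=b))
    while process_queue(b) > 0:
        sleep(1)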
def _remove_from_queue(queue_deletes):
    '''
    Batch deletion of queue jobs.
    '''
    deletes = Queue.delete().where(Queue.id << queue_deletes)
    return deletes.execute()
def process_queue_publish(queue_control, blog):
    '''
    Processes the publishing queue for a given blog.
    Takes in a queue_control entry, and returns an integer of the number of
    jobs remaining in the queue for that blog.
    Typically invoked by the process_queue function.

    :param queue_control:
        The queue_control entry, from the queue, to use for this publishing queue run.
    :param blog:
        The blog object that is in context for this job.
    '''
    # from . import invalidate_cache
    # invalidate_cache()

    queue_control.lock()

    queue_original = Queue.select().order_by(
        Queue.priority.desc(),
        Queue.date_touched.desc()).where(
            Queue.blog == blog,
            Queue.is_control == False)

    queue = queue_original.limit(MAX_BATCH_OPS).naive()

    queue_original_length = queue_original.count()
    queue_length = queue.count()

    # time.clock() was removed in Python 3.8; time.monotonic() is the
    # appropriate replacement for measuring elapsed wall time here.
    start_queue = time.monotonic()

    if queue_length > 0:
        logger.info("Queue job #{} @ {} (blog #{}, {} items) started.".format(
            queue_control.id,
            date_format(queue_control.date_touched),
            queue_control.blog.id,
            queue_original_length))

    removed_jobs = []

    start = time.monotonic()

    for q in queue:
        job_type.action[q.job_type](q)
        removed_jobs.append(q.id)
        if (time.monotonic() - start) > LOOP_TIMEOUT:
            break

    Queue.remove(removed_jobs)

    # we don't need to have an entirely new job!
    # we should recycle the existing one, yes?
    new_queue_control = Queue.control_job(blog)
    # new_queue_control = Queue.get(Queue.blog == blog,
    #                               Queue.is_control == True)

    queue_original_length -= len(removed_jobs)
    new_queue_control.data_integer = queue_original_length

    end_queue = time.monotonic()
    total_time = end_queue - start_queue

    if new_queue_control.data_integer <= 0:
        new_queue_control.delete_instance()
        logger.info("Queue job #{} @ {} (blog #{}) finished ({:.4f} secs).".format(
            new_queue_control.id,
            date_format(new_queue_control.date_touched),
            new_queue_control.blog.id,
            total_time))
    else:
        # new_queue_control.is_running = False
        # new_queue_control.save()
        new_queue_control.unlock()
        logger.info("Queue job #{} @ {} (blog #{}) processed {} items ({:.4f} secs, {} remaining).".format(
            new_queue_control.id,
            date_format(new_queue_control.date_touched),
            new_queue_control.blog.id,
            len(removed_jobs),
            total_time,
            queue_original_length,
        ))

    return new_queue_control.data_integer
def push_to_queue(**ka):
    '''
    Inserts a single job item into the work queue.

    :param job_type:
        A string representing the type of job to be inserted.
        'Page','Index', eventually 'Fileinfo'
    :param data_integer:
        Any integer data passed along with the job. For a job control item,
        this is the number of items remaining for that particular job.
    :param blog:
        The blog object associated with the job.
    :param site:
        The site object associated with the job.
    :param priority:
        An integer, from 0-9, representing the processing priority associated
        with the job. Higher-priority jobs are processed first. Most individual
        pages are given a high priority; indexes are lower.
    '''
    try:
        queue_job = Queue.get(Queue.job_type == ka['job_type'],
                              Queue.data_integer == ka['data_integer'],
                              Queue.blog == ka['blog'],
                              Queue.site == ka['site'])
    except Queue.DoesNotExist:
        queue_job = Queue()
    else:
        return

    queue_job.job_type = ka['job_type']
    # int(None) raises TypeError, so only coerce when a value was supplied.
    data_integer = ka.get('data_integer', None)
    queue_job.data_integer = int(data_integer) if data_integer is not None else None
    queue_job.blog = ka.get('blog', Blog()).id
    queue_job.site = ka.get('site', Site()).id
    queue_job.priority = ka.get('priority', 9)
    queue_job.is_control = ka.get('is_control', False)

    if queue_job.is_control:
        queue_job.data_string = (queue_job.job_type + ": Blog {}".format(
            queue_job.blog.for_log))
    else:
        queue_job.data_string = (queue_job.job_type + ": " +
                                 FileInfo.get(
                                     FileInfo.id == queue_job.data_integer).file_path)

    queue_job.date_touched = datetime.datetime.now()
    queue_job.save()
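# Hypothetical direct insertion of a single high-priority page job;
# `blog` and `fi` (a FileInfo row) are assumed to be loaded elsewhere.
push_to_queue(job_type=job_type.page,
              priority=8,
              blog=blog,
              site=blog.site,
              data_integer=fi.id)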