def process_queue(blog):
    '''
    Processes the jobs currently in the queue for the selected blog.

    Acquires the blog's queue-control job (via publishing_lock), marks it
    'Running', processes up to MAX_BATCH_OPS queued items in priority order,
    then decrements the control job's remaining-item counter. The control
    job is deleted when the counter reaches zero, otherwise marked 'Paused'
    for a later batch. The whole batch runs in one transaction, so a failed
    job rolls back the batch and leaves the queue intact.

    :param blog:
        The blog object whose queue should be processed.
    :return:
        0 if no control job exists (nothing to do); otherwise the number
        of queue items still remaining after this batch.
    '''
    with db.atomic():

        queue_control = publishing_lock(blog, True)

        if queue_control is None:
            # No control job for this blog — nothing queued.
            return 0

        queue_control.data_string = 'Running'
        queue_control.save()

        # Highest-priority, most-recently-touched jobs first, capped at
        # one batch so a huge queue is worked off incrementally.
        queue = Queue.select().order_by(
            Queue.priority.desc(),
            Queue.date_touched.desc()).where(
                Queue.blog == blog,
                Queue.is_control == False).limit(MAX_BATCH_OPS)

        queue_length = queue.count()

        if queue_length > 0:
            logger.info(
                "Queue job #{} @ {} (blog #{}, {} items) started.".format(
                    queue_control.id,
                    date_format(queue_control.date_touched),
                    queue_control.blog.id,
                    queue_length))

        for q in queue:
            # Dispatch to the registered handler for this job type.
            # NOTE: the original wrapped this in
            # `try/except BaseException: raise/else:` — a no-op wrapper,
            # removed here. An exception still propagates before the
            # dequeue, aborting the transaction and keeping the job.
            job_type.action[q.job_type](q)
            # Only a successfully processed job is removed from the queue.
            remove_from_queue(q.id)

        # Re-fetch the control job in case handlers touched it, then
        # account for the items just processed.
        queue_control = Queue.get(Queue.blog == blog,
                                  Queue.is_control == True)

        queue_control.data_integer -= queue_length

        if queue_control.data_integer <= 0:
            # Batch finished the queue — retire the control job.
            queue_control.delete_instance()
            logger.info("Queue job #{} @ {} (blog #{}) finished.".format(
                queue_control.id,
                date_format(queue_control.date_touched),
                queue_control.blog.id))
        else:
            # Work remains; pause until the next processing pass.
            queue_control.data_string = 'Paused'
            queue_control.save()
            logger.info(
                "Queue job #{} @ {} (blog #{}) continuing with {} items left.".
                format(queue_control.id,
                       date_format(queue_control.date_touched),
                       queue_control.blog.id,
                       queue_length))

        return queue_control.data_integer
def remove_from_queue(queue_id):
    '''
    Removes a specific job ID from the queue.

    :param queue_id:
        The ID number of the job queue item to remove.
    '''
    # Fetch-then-delete so a nonexistent ID raises Queue.DoesNotExist
    # instead of silently deleting nothing.
    job = Queue.get(Queue.id == queue_id)
    return job.delete_instance()
def push_to_queue(**ka):
    '''
    Inserts a single job item into the work queue.

    If an identical job (same job_type, data_integer, blog, and site) is
    already queued, this is a no-op — the duplicate is not inserted.

    :param job_type: A string representing the type of job to be inserted.
        'Page','Index', eventually 'Fileinfo'
    :param data_integer: Any integer data passed along with the job. For a
        job control item, this is the number of items remaining for that
        particular job. Required — the dedup lookup indexes it directly.
    :param blog: The blog object associated with the job.
    :param site: The site object associated with the job.
    :param priority: An integer, from 0-9, representing the processing
        priority associated with the job. Higher-priority jobs are
        processed first. Most individual pages are given a high priority;
        indexes are lower. Defaults to 9.
    :param is_control: Whether this is a queue-control job. Defaults to
        False.
    '''
    try:
        queue_job = Queue.get(
            Queue.job_type == ka['job_type'],
            Queue.data_integer == ka['data_integer'],
            Queue.blog == ka['blog'],
            Queue.site == ka['site'])
    except Queue.DoesNotExist:
        queue_job = Queue()
    else:
        # An identical job is already queued; don't insert a duplicate.
        return

    queue_job.job_type = ka['job_type']
    # data_integer is effectively required (the lookup above already
    # indexed ka['data_integer']), so index directly here too. The
    # original `int(ka.get('data_integer', None))` would have masked a
    # missing key as a TypeError from int(None).
    queue_job.data_integer = int(ka['data_integer'])
    queue_job.blog = ka.get('blog', Blog()).id
    queue_job.site = ka.get('site', Site()).id
    queue_job.priority = ka.get('priority', 9)
    queue_job.is_control = ka.get('is_control', False)

    if queue_job.is_control:
        queue_job.data_string = (queue_job.job_type +
                                 ": Blog {}".format(queue_job.blog.for_log))
    else:
        # Non-control jobs reference a FileInfo row by data_integer;
        # record its path for human-readable queue listings.
        queue_job.data_string = (
            queue_job.job_type + ": " +
            FileInfo.get(FileInfo.id == queue_job.data_integer).file_path)

    queue_job.date_touched = datetime.datetime.now()
    queue_job.save()
def publishing_lock(blog, return_queue=False):
    '''
    Checks to see if a publishing job for a given blog is currently
    running (i.e. whether a queue-control row exists for the blog).

    :param blog:
        The blog object to check for an active publishing job.
    :param return_queue:
        If True, return the control job instead of raising when one is
        found.
    :return:
        The queue-control object when return_queue is True and a job
        exists; None when no job exists.
    :raises QueueInProgressException:
        When a job exists and return_queue is False.
    '''
    try:
        queue_control = Queue.get(Queue.blog == blog,
                                  Queue.is_control == True)
    except Queue.DoesNotExist:
        # No control row — nothing is publishing for this blog.
        return None

    if return_queue is True:
        return queue_control

    raise QueueInProgressException(
        "Publishing job currently running for blog {}".format(
            queue_control.blog.for_log))