def save_task(task, broker):
    """
    Save a finished task package to the Django ORM.

    :param dict task: the task package (id, name, func, result, success, ...)
    :param broker: broker instance, forwarded when continuing a chain

    SAVE_LIMIT semantics:
      < 0 : successful tasks are not saved
      = 0 : save everything, never prune
      > 0 : keep at most SAVE_LIMIT successful results, pruning the oldest
    """
    # SAVE LIMIT < 0 : Don't save success
    # Bug fix: the default for 'save' must be True whenever SAVE_LIMIT >= 0
    # (it previously used "> 0", which silently dropped successful tasks when
    # SAVE_LIMIT == 0, i.e. the "save everything" setting). This matches the
    # other save_task variants in this file.
    if not task.get('save', Conf.SAVE_LIMIT >= 0) and task['success']:
        return
    # async next in a chain
    if task.get('chain', None):
        tasks.async_chain(task['chain'],
                          group=task['group'],
                          cached=task['cached'],
                          sync=task['sync'],
                          broker=broker)
    # SAVE LIMIT > 0: Prune database, SAVE_LIMIT 0: No pruning
    db.close_old_connections()
    try:
        if task['success'] and 0 < Conf.SAVE_LIMIT <= Success.objects.count():
            # drop the oldest success to stay within the limit
            Success.objects.last().delete()
        Task.objects.create(id=task['id'],
                            name=task['name'],
                            func=task['func'],
                            hook=task.get('hook'),
                            args=task['args'],
                            kwargs=task['kwargs'],
                            started=task['started'],
                            stopped=task['stopped'],
                            result=task['result'],
                            group=task.get('group'),
                            success=task['success'])
    except Exception as e:
        # best-effort persistence: a failed save must not kill the worker
        logger.error(e)
def save_cached(task, broker):
    """
    Persist a finished task package to the broker's cache backend.

    When the task belongs to a group and saving is enabled, its cache key
    is appended to the group's key list. A successful task with a chain
    triggers the next chained task. Finally the pickled package itself is
    stored under its own key (again only when saving is enabled).
    """
    task_key = '{}:{}'.format(broker.list_key, task['id'])
    try:
        group = task.get('group', None)
        # group member: register this task's key under the group's key list
        if group:
            if task.get('save', False):
                group_key = '{}:{}:keys'.format(broker.list_key, group)
                raw = broker.cache.get(group_key)
                key_list = signing.PickleSerializer.loads(raw) if raw else []
                # save the group list
                key_list.append(task_key)
                broker.cache.set(group_key, signing.PickleSerializer.dumps(key_list))
        # async next in a chain
        if task.get('chain', None) and task['success']:
            tasks.async_chain(task['chain'],
                              group=group,
                              cached=task['cached'],
                              sync=task['sync'],
                              priority=task['priority'],
                              broker=broker)
        # save the task
        if task.get('save', False):
            broker.cache.set(task_key, signing.PickleSerializer.dumps(task))
    except Exception as e:
        # cache persistence is best-effort; never raise out of the worker
        logger.error(e)
def save_task(task, broker):
    """
    Saves the task package to Django or the cache.

    :param dict task: the task package (id, name, func, result, success,
        task_status, progress fields, ...)
    :param broker: broker instance, forwarded when continuing a chain

    A pre-existing Task row (same id and name) is updated in place, but
    only while it has not yet succeeded; otherwise a new row is created.
    """
    # SAVE LIMIT < 0 : Don't save success
    if not task.get('save', Conf.SAVE_LIMIT >= 0) and task['success']:
        return
    # async next in a chain
    if task.get('chain', None):
        tasks.async_chain(task['chain'],
                          group=task['group'],
                          cached=task['cached'],
                          sync=task['sync'],
                          broker=broker)
    # SAVE LIMIT > 0: Prune database, SAVE_LIMIT 0: No pruning
    db.close_old_connections()
    try:
        if task['success'] and 0 < Conf.SAVE_LIMIT <= Success.objects.count():
            Success.objects.last().delete()
        # single query instead of exists() + get(): avoids a second round
        # trip and the race where the row disappears between the two calls
        existing_task = Task.objects.filter(id=task['id'], name=task['name']).first()
        if existing_task is not None:
            # only update the result if it hasn't succeeded yet
            if not existing_task.success:
                existing_task.stopped = task['stopped']
                existing_task.result = task['result']
                existing_task.success = task['success']
                existing_task.task_status = task['task_status']
                existing_task.progress_fraction = task.get('progress_fraction', 0)
                existing_task.progress_data = task.get('progress_data')
                existing_task.save()
        else:
            Task.objects.create(
                id=task['id'],
                name=task['name'],
                func=task['func'],
                hook=task.get('hook'),
                args=task['args'],
                kwargs=task['kwargs'],
                started=task['started'],
                stopped=task['stopped'],
                result=task['result'],
                group=task.get('group'),
                success=task['success'],
                worker_process_pid=task.get('worker_process_pid'),
                progress_fraction=task.get('progress_fraction', 0),
                progress_data=task.get('progress_data'),
                task_status=task['task_status'],
            )
    except Exception as e:
        # logger.exception records the full traceback through the logging
        # framework, replacing the debugging leftover traceback.print_exc()
        logger.exception("Got exception while saving task: {}".format(e))
def save_cached(task, broker):
    """
    Persist a finished task package to the broker's cache backend.

    Group members accumulate their keys under a shared group key list.
    When the final member of an iteration group arrives, all member
    results are collated into a single task result which is then saved
    (to cache or to the ORM) and the per-member cache entries removed.
    Chained tasks are scheduled before the package itself is stored.
    """
    task_key = '{}:{}'.format(broker.list_key, task['id'])
    # cached=True means "keep forever"; otherwise it is the timeout value
    timeout = None if task['cached'] is True else task['cached']
    try:
        group = task.get('group', None)
        iter_count = task.get('iter_count', 0)
        # if it's a group append to the group list
        if group:
            group_key = '{}:{}:keys'.format(broker.list_key, group)
            group_list = broker.cache.get(group_key) or []
            # if it's an iter group, check if we are ready
            if iter_count and len(group_list) == iter_count - 1:
                group_args = '{}:{}:args'.format(broker.list_key, group)
                # collate the results into a Task result
                results = []
                for member_key in group_list:
                    package = signing.SignedPackage.loads(broker.cache.get(member_key))
                    results.append(package['result'])
                results.append(task['result'])
                task['result'] = results
                task['id'] = group
                task['args'] = signing.SignedPackage.loads(broker.cache.get(group_args))
                task.pop('iter_count', None)
                task.pop('group', None)
                if task.get('iter_cached', None):
                    # re-save the collated task under the iter_cached policy
                    task['cached'] = task.pop('iter_cached', None)
                    save_cached(task, broker=broker)
                else:
                    save_task(task, broker)
                # clean up the member entries and the group bookkeeping keys
                broker.cache.delete_many(group_list)
                broker.cache.delete_many([group_key, group_args])
                return
            # save the group list
            group_list.append(task_key)
            broker.cache.set(group_key, group_list, timeout)
        # async next in a chain
        if task.get('chain', None):
            tasks.async_chain(task['chain'],
                              group=group,
                              cached=task['cached'],
                              sync=task['sync'],
                              broker=broker)
        # save the task
        broker.cache.set(task_key, signing.SignedPackage.dumps(task), timeout)
    except Exception as e:
        # cache persistence is best-effort; never raise out of the worker
        logger.error(e)
def save_task(task, broker):
    """
    Saves the task package to Django or the cache.

    :param dict task: the task package (id, name, func, result, success,
        task_status, progress fields, ...)
    :param broker: broker instance, forwarded when continuing a chain

    A pre-existing Task row (same id and name) is updated in place, but
    only while it has not yet succeeded; otherwise a new row is created.
    """
    # SAVE LIMIT < 0 : Don't save success
    if not task.get('save', Conf.SAVE_LIMIT >= 0) and task['success']:
        return
    # async next in a chain
    if task.get('chain', None):
        tasks.async_chain(task['chain'],
                          group=task['group'],
                          cached=task['cached'],
                          sync=task['sync'],
                          broker=broker)
    # SAVE LIMIT > 0: Prune database, SAVE_LIMIT 0: No pruning
    db.close_old_connections()
    try:
        if task['success'] and 0 < Conf.SAVE_LIMIT <= Success.objects.count():
            Success.objects.last().delete()
        # single query instead of exists() + get(): avoids a second round
        # trip and the race where the row disappears between the two calls
        existing_task = Task.objects.filter(id=task['id'], name=task['name']).first()
        if existing_task is not None:
            # only update the result if it hasn't succeeded yet
            if not existing_task.success:
                existing_task.stopped = task['stopped']
                existing_task.result = task['result']
                existing_task.success = task['success']
                existing_task.task_status = task['task_status']
                existing_task.progress_fraction = task.get('progress_fraction', 0)
                existing_task.progress_data = task.get('progress_data')
                existing_task.save()
        else:
            Task.objects.create(
                id=task['id'],
                name=task['name'],
                func=task['func'],
                hook=task.get('hook'),
                args=task['args'],
                kwargs=task['kwargs'],
                started=task['started'],
                stopped=task['stopped'],
                result=task['result'],
                group=task.get('group'),
                success=task['success'],
                worker_process_pid=task.get('worker_process_pid'),
                progress_fraction=task.get('progress_fraction', 0),
                progress_data=task.get('progress_data'),
                task_status=task['task_status'],
            )
    except Exception as e:
        # logger.exception records the full traceback through the logging
        # framework, replacing the debugging leftover traceback.print_exc()
        logger.exception("Got exception while saving task: {}".format(e))
def save_cached(task, broker):
    """
    Persist a finished task package to the broker's cache backend.

    Group members are stored under a group-scoped key and registered in
    the group's key list. When the final member of an iteration group
    arrives, all member results are collated into one task result which
    is then saved (to cache or to the ORM) and the member entries purged.
    Chained tasks are scheduled before the package itself is stored.
    """
    task_key = '{}:{}'.format(broker.list_key, task['id'])
    # cached=True means "keep forever"; otherwise it is the timeout value
    timeout = task['cached']
    if timeout is True:
        timeout = None
    try:
        group = task.get('group', None)
        iter_count = task.get('iter_count', 0)
        # if it's a group append to the group list
        if group:
            # group members live under a group-scoped cache key
            task_key = '{}:{}:{}'.format(broker.list_key, group, task['id'])
            group_key = '{}:{}:keys'.format(broker.list_key, group)
            members = broker.cache.get(group_key) or []
            # if it's an iter group, check if we are ready
            ready = iter_count and len(members) == iter_count - 1
            if ready:
                group_args = '{}:{}:args'.format(broker.list_key, group)
                # collate the results into a Task result
                collated = [
                    signing.SignedPackage.loads(broker.cache.get(key))['result']
                    for key in members
                ]
                collated.append(task['result'])
                task['result'] = collated
                task['id'] = group
                task['args'] = signing.SignedPackage.loads(broker.cache.get(group_args))
                task.pop('iter_count', None)
                task.pop('group', None)
                if task.get('iter_cached', None):
                    # re-save the collated task under the iter_cached policy
                    task['cached'] = task.pop('iter_cached', None)
                    save_cached(task, broker=broker)
                else:
                    save_task(task, broker)
                # clean up the member entries and the group bookkeeping keys
                broker.cache.delete_many(members)
                broker.cache.delete_many([group_key, group_args])
                return
            # save the group list
            members.append(task_key)
            broker.cache.set(group_key, members, timeout)
        # async next in a chain
        if task.get('chain', None):
            tasks.async_chain(task['chain'],
                              group=group,
                              cached=task['cached'],
                              sync=task['sync'],
                              broker=broker)
        # save the task
        broker.cache.set(task_key, signing.SignedPackage.dumps(task), timeout)
    except Exception as e:
        # cache persistence is best-effort; never raise out of the worker
        logger.error(e)