def task_update_api():
    """
    Update one or more tasks, e.g. toggle deferred mode or archive them.
    """
    spec_list = [
        {'task_id': {
            'kind': int,
            'permission': 'task'
        }},
        {'task_ids': {
            'kind': list,
            'permission': 'task',
            'required': False
        }},
        {'mode': {
            'kind': str,
            'valid_values_list': ['toggle_deferred']
        }},
        {'status': {
            'kind': str,
            'valid_values_list': ['archived']
        }}
    ]

    log, input, untrusted_input = regular_input.master(
        request=request,
        spec_list=spec_list)
    if len(log["error"].keys()) >= 1:
        return jsonify(log=log), 400

    with sessionMaker.session_scope() as session:

        task_list = []
        if input['task_id']:
            task = Task.get_by_id(session=session, task_id=input['task_id'])
            task_list.append(task)
        else:
            task_list = Task.list(
                session=session,
                task_ids=input['task_ids']
            )

        for task in task_list:
            Permission_Task.by_task_id_core(task.id)

            task_update = Task_Update(
                session=session,
                task=task,
                mode=input['mode'],
                status=input['status'])
            task_update.main()

            if len(task_update.log["error"].keys()) >= 1:
                return jsonify(log=task_update.log), 400

        return jsonify(log=task_update.log), 200
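
# A hedged, client-side sketch of calling the task update endpoint above.
# The URL and any auth headers are assumptions (the route is not shown in
# the source); only the payload shape (task_id / task_ids, mode, status)
# follows the spec_list in task_update_api().
import requests  # assumption: requests is available in the calling environment

def example_toggle_deferred(task_id: int):
    response = requests.post(
        'https://example-host/api/v1/task/update',  # hypothetical route
        json={'task_id': task_id,
              'mode': 'toggle_deferred',
              'status': 'archived'})
    if response.status_code == 400:
        # Validation or Task_Update errors come back under log['error']
        return response.json()['log']['error']
    return response.json()['log']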
def refresh_stat_count_tasks(self, session):
    task_count_available = Task.list(
        session,
        status='available',
        job_id=self.id,
        project_id=self.project_id,
        return_mode="count")

    task_count_complete = Task.list(
        session,
        status='complete',
        job_id=self.id,
        project_id=self.project_id,
        return_mode="count")

    self.stat_count_tasks = task_count_complete + task_count_available
    session.add(self)
    return
def __check_if_task_exists(self, job: Job, file: File):
    task = Task.get_by_job_and_file(session=self.session, job=job, file=file)
    if task:
        logger.debug('Task already exists.')
    return task
def task_next_core(session, job_id, project_string_id, task_id, input):
    task = Task.get_next_previous_task_by_task_id(
        session=session,
        task_id=task_id,
        job_id=job_id,
        direction=input['direction'])

    if not task:
        return False

    task_serialized = task.serialize_builder_view_by_id(session)
    return task_serialized
def valid_review_task_for_user(session, task, user):
    parent = Task.get_by_id(session, task.parent_id)  # task.parent not working for some reason
    if parent:
        if user == parent.assignee_user:
            return False
    return True
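
# A minimal pytest-style sketch of the self-review rule above, assuming
# Task.get_by_id can be patched at this module's import path; the mocks here
# are illustrative stand-ins, not the project's real test helpers.
from unittest.mock import MagicMock, patch

def test_reviewer_cannot_review_own_parent_task():
    session = MagicMock()
    reviewer = MagicMock(name='user')
    parent_draw_task = MagicMock(assignee_user=reviewer)
    review_task = MagicMock(parent_id=1)

    # The reviewer drew the parent task, so reviewing it is not allowed
    with patch.object(Task, 'get_by_id', return_value=parent_draw_task):
        assert valid_review_task_for_user(session, review_task, reviewer) is False

    # A different parent assignee means the review is allowed
    parent_draw_task.assignee_user = MagicMock(name='other_user')
    with patch.object(Task, 'get_by_id', return_value=parent_draw_task):
        assert valid_review_task_for_user(session, review_task, reviewer) is True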
def task_by_id_core(session, task_id, input):
    task = Task.get_by_id(session=session, task_id=task_id)

    if input['builder_or_trainer_mode'] == "builder":
        task_serialized = task.serialize_builder_view_by_id(session)

    if input['builder_or_trainer_mode'] == "trainer":
        # TODO
        task_serialized = task.serialize_trainer_annotate(session)

    return task_serialized
def __generate_payload_for_task(self, session, start_time=None):
    tasks = []
    task = Task.get_by_id(session=session, task_id=self.notification_relation.task_id)
    if task:
        tasks.append(task)

    time_column = Task.time_created
    if self.type == 'task_completed':
        time_column = Task.time_completed

    if start_time:
        tasks = session.query(Task).filter(
            Task.project_id == self.notification_relation.task.project_id,
            time_column <= datetime.datetime.utcnow(),
            time_column >= start_time).all()

    payload = [
        task.serialize_builder_view_by_id(session=session) for task in tasks
    ]
    return payload
def get_video_image_list(project_string_id, task_id, video_parent_file_id):
    spec_list = [{"frame_list": {'kind': list, 'required': True}}]
    log, input, untrusted_input = regular_input.master(
        request=request,
        spec_list=spec_list)
    if len(log["error"].keys()) >= 1:
        return jsonify(log=log), 400

    with sessionMaker.session_scope() as session:

        # A little security measure
        if len(input['frame_list']) > 100:
            return jsonify("Max 100 frames at a time"), 400

        if task_id != -1:
            task = Task.get_by_id(session=session, task_id=task_id)
            if task.file is None:
                return jsonify("Task has no video file associated."), 400
            else:
                video_file = task.file
        else:
            project = Project.get(session, project_string_id)
            video_file = File.get_by_id_and_project(
                session=session,
                project_id=project.id,
                file_id=video_parent_file_id,
                directory_id=project.directory_default_id  # migration
            )
            if video_file is None:
                return jsonify("bad video_parent_file_id"), 400

        return get_url_for_frame_list_response(
            session=session,
            video_file=video_file,
            frame_list=input['frame_list'])
def recursively_get_next_available(session, job, user):
    """
    Goal: give consideration to task types instead of assuming that the
    first result from shared.database matches the "business" logic.
    Example: a person can't review their own task.
    """
    ignore_task_IDS_list = []

    while True:
        task = Task.get_next_available_task_by_job_id(
            session=session,
            job_id=job.id,
            ignore_task_IDS_list=ignore_task_IDS_list)

        if task is None:
            return None

        if task.task_type == 'draw':
            return task

        if task.task_type == 'review':
            result = valid_review_task_for_user(
                session=session,
                task=task,
                user=user)
            if result is True:
                return task
            else:
                ignore_task_IDS_list.append(task.id)
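
# Hedged usage sketch: how a dispatch path might consume
# recursively_get_next_available(). The wrapper name is hypothetical;
# serialize_builder_view_by_id() is the same serializer used by the other
# task functions in this section.
def next_task_for_user_sketch(session, job, user):
    task = recursively_get_next_available(session=session, job=job, user=user)
    if task is None:
        # No draw task available, and every review task was authored by this user
        return None
    return task.serialize_builder_view_by_id(session)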
def create_task_from_file(self, file, job=None, incoming_directory=None):
    job_obj = self.job
    if job is not None:
        logger.debug('Creating task from file {} and job {}'.format(
            file.id, job.id))
        job_obj = job

    if job_obj.file_handling == "isolate":
        new_file = File.copy_file_from_existing(
            session=self.session,
            working_dir=job_obj.directory,
            existing_file=file,
            copy_instance_list=False,
            add_link=False,
            remove_link=False,
            orginal_directory_id=job_obj.completion_directory_id,
            deep_copy=True,
            ann_is_complete_reset=True)
    else:
        # assume use existing
        new_file = file
        new_file.ann_is_complete = False
        self.session.add(new_file)

    task = Task.new(
        self.session,
        job_obj,
        new_file.id,
        job_obj.guide_default_id,
        job_obj.label_dict,
        file_original_id=file.id,
        kind='human',
        task_type='draw',
        incoming_directory=incoming_directory)

    # Set job as not completed.
    job_obj.status = 'active'
    self.session.add(job_obj)
    self.session.add(task)
    return task
def create_task(task_data, session):
    task = Task()
    session.add(task)
    task.is_live = task_data.get('is_live', True)

    if 'job' not in task_data:
        job = create_job({'name': 'jobtest:{}'.format(task_data.get('name'))}, session)
    else:
        job = task_data.get('job')
    task.job_id = job.id
    task.job = job

    if 'file' not in task_data:
        # TODO: add file create mock.
        file_id = None
    else:
        file_id = task_data.get('file').id
    task.file_id = file_id
    task.file = task_data.get('file')

    # TODO: might need to create mock functions for the following relations
    task.guide_id = task_data.get('guide_id', None)
    task.label_dict = task_data.get('label_dict', {})
    task.file_original_id = task_data.get('file_original_id', None)
    task.file_original = task_data.get('file_original', None)
    task.completion_directory_id = task_data.get('completion_directory_id', None)
    task.incoming_directory_id = task_data.get('incoming_directory_id', None)
    task.task_type = task_data.get('task_type', 'draw')

    if task.task_type == 'draw':
        # Set draw tasks to be available instead of
        # default of created
        task.status = 'available'

    # Cache from job
    task.status = task_data.get('status', 'available')
    task.project_id = job.project_id
    task.job_type = job.type
    task.label_mode = job.label_mode

    # Have defaults
    task.kind = task_data.get('kind', 'human')

    regular_methods.commit_with_rollback(session)
    return task
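
# Hedged usage sketch for the create_task() test factory above, assuming a
# pytest-style `session` fixture; the test name and asserted defaults are
# read off the factory itself, not taken from the project's real test suite.
def test_create_task_defaults(session):
    task = create_task({'name': 'example'}, session)

    assert task.task_type == 'draw'    # default task_type
    assert task.status == 'available'  # default status
    assert task.kind == 'human'        # default kind
    assert task.job is not None        # a job is auto-created when none is passed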
def task_related_files_core(session, task_id):
    related_files = Task.get_related_files(session=session, task_id=task_id)
    files_data = []
    for file in related_files:
        files_data.append(file.serialize())
    return files_data
def task_next_issue_core(session, task_id):
    next_task_id = Task.get_next_task_with_issues(session=session, task_id=task_id)
    return next_task_id