def predictions_to_completions(project, params, items):
    """Convert the latest prediction of each selected task into a new completion.

    :param project: project instance exposing ``source_storage`` / ``target_storage``
    :param params: request parameters (unused, kept for the handler signature)
    :param items: iterable of task ids to process
    :return: dict with an HTTP-style response code
    """
    for task_id in items:
        task = project.source_storage.get(task_id)
        predictions = task.get('predictions', [])
        if not predictions:
            # nothing to convert for this task
            continue
        # take the most recent prediction
        prediction = predictions[-1]

        # prefer the task from target storage: it may already carry completions
        task_with_completions = project.target_storage.get(task_id)
        # deepcopy (not shallow copy) so appending a completion below cannot
        # mutate the 'completions' list of the storage-owned task object
        task = deepcopy(task if task_with_completions is None else task_with_completions)

        # when no completions exist yet, seed the id sequence at task_id * 9000
        completions = task.get('completions', None) or [{'id': task_id * 9000}]
        completion = {
            'id': max(c['id'] for c in completions) + 1,
            'created_at': timestamp_now(),
            'lead_time': 0,
            'result': prediction.get('result', [])
        }
        task.setdefault('completions', []).append(completion)
        project.target_storage.set(task_id, task)
    return {'response_code': 200}
def propagate_completions(project, params, items):
    """Copy the first completion of the first selected task onto every other selected task.

    :param project: project instance exposing ``source_storage`` / ``target_storage``
    :param params: request parameters (unused, kept for the handler signature)
    :param items: ordered collection of task ids; ``items[0]`` is the completion source
    :raises DataManagerException: if fewer than two tasks are selected, or the
        first selected task has no completion to propagate
    :return: dict with an HTTP-style response code
    """
    if len(items) < 2:
        # guard accepts exactly two tasks, so the message says "at least two"
        # (the old text "more than two" contradicted the condition above)
        raise DataManagerException('Select at least two tasks, the first task completion '
                                   'will be picked as source')

    # the first selected task must already have a completion to copy
    source_task_id = items[0]
    task = project.target_storage.get(source_task_id)
    if task is None or not task.get('completions'):
        raise DataManagerException('The first selected task with ID = ' + str(source_task_id) +
                                   ' should have at least one completion to propagate')

    # the first completion of the first task is the propagation source
    source_completion = task['completions'][0]

    # clone the source completion into each remaining task
    for task_id in items[1:]:
        task = project.target_storage.get(task_id)
        if task is None:
            task = project.source_storage.get(task_id)

        completion = deepcopy(source_completion)
        # when no completions exist yet, seed the id sequence at task_id * 9000
        completions = task.get('completions', None) or [{'id': task_id * 9000}]
        completion['id'] = max(c['id'] for c in completions) + 1
        completion['created_at'] = timestamp_now()

        task.setdefault('completions', []).append(completion)
        project.target_storage.set(task_id, task)
    return {'response_code': 200}
def save_completion(self, task_id, completion): """ Save completion :param task_id: task id :param completion: json data from label (editor) """ # try to get completions with task first task = self.get_task_with_completions(task_id) # init task if completions with task not exists if not task: task = deepcopy(self.source_storage.get(task_id)) task = self.resolve_undefined_task_data(task) task['completions'] = [] else: task = deepcopy(task) # remove possible stored predictions task.pop('predictions', None) # update old completion updated = False if 'id' in completion: for i, item in enumerate(task['completions']): if item['id'] == completion['id']: # we don't want to explicitly make "was_cancelled" key if it's not presented in storage if 'was_cancelled' in task['completions'][ i] and not completion.get('was_cancelled', False): task['completions'][i]['was_cancelled'] = False task['completions'][i].update(completion) updated = True # write a new completion if not updated: # start completion id from task_id * 1000 completions = task.get('completions', None) or [{ 'id': task_id * 1000 }] completion['id'] = max([c['id'] for c in completions]) + 1 task['completions'].append(completion) try: self._update_derived_output_schema(completion) except Exception as exc: logger.error(exc, exc_info=True) logger.debug(json.dumps(completion, indent=2)) # save completion time completion['created_at'] = timestamp_now() # write task + completions to file self.target_storage.set(task_id, task) logger.debug('Completion for task ' + str(task_id) + ' saved with id =' + str(completion['id'])) return completion['id']
def save_completion(self, task_id, completion):
    """Save completion

    Merge *completion* into an existing completion with the same id, or append
    it as a new one, then persist the whole task to target storage.

    :param task_id: task id
    :param completion: json data from label (editor)
    :return: id of the saved completion
    """
    # try to get completions with task first
    task = self.get_task_with_completions(task_id)

    # init task if completions with task not exists
    if not task:
        task = deepcopy(self.source_storage.get(task_id))
        task["completions"] = []
    else:
        # deepcopy so the edits below cannot mutate the stored/cached object
        task = deepcopy(task)

    # remove possible stored predictions
    task.pop("predictions", None)

    # update old completion
    updated = False
    if "id" in completion:
        for i, item in enumerate(task["completions"]):
            if item["id"] == completion["id"]:
                task["completions"][i].update(completion)
                updated = True

    # write new completion
    if not updated:
        # Derive the next id from the current maximum instead of
        # task["id"] * 1000 + len(...): the length-based scheme raised
        # KeyError when the stored task dict had no "id" key and could
        # produce duplicate ids after a completion was deleted. Ids still
        # start from task_id * 1000 for the first completion.
        completions = task.get("completions", None) or [{"id": task_id * 1000}]
        completion["id"] = max(c["id"] for c in completions) + 1
        task["completions"].append(completion)

    # keep the derived output schema in sync; best-effort, failures are only logged
    try:
        self._update_derived_output_schema(completion)
    except Exception as exc:
        logger.error(exc, exc_info=True)
        logger.debug(json.dumps(completion, indent=2))

    # save completion time
    completion["created_at"] = timestamp_now()

    # write task + completions to file
    self.target_storage.set(task_id, task)
    logger.debug("Completion for task " + str(task_id) + " saved with id =" + str(completion["id"]))
    return completion["id"]
def save_completion(self, task_id, completion):
    """ Save completion

    Either merges *completion* into an already stored completion carrying the
    same id, or appends it as a brand-new completion, then writes the whole
    task back to target storage.

    :param task_id: task id
    :param completion: json data from label (editor)
    :return: id of the saved completion
    """
    # work on a private copy of the stored task, or build a fresh one
    stored = self.get_task_with_completions(task_id)
    if stored:
        task = deepcopy(stored)
    else:
        task = deepcopy(self.source_storage.get(task_id))
        task['completions'] = []

    # try merging into an existing completion with a matching id
    merged = False
    if 'id' in completion:
        for existing in task['completions']:
            if existing['id'] == completion['id']:
                existing.update(completion)
                merged = True

    # no match found: register the payload as a new completion
    if not merged:
        # completion ids are namespaced per task: task id * 1000 + ordinal
        next_ordinal = len(task['completions']) + 1
        completion['id'] = task['id'] * 1000 + next_ordinal
        task['completions'].append(completion)

    # best-effort schema refresh; problems are logged, never raised
    try:
        self._update_derived_output_schema(completion)
    except Exception as exc:
        logger.error(exc, exc_info=True)
        logger.debug(json.dumps(completion, indent=2))

    # stamp the save time on the completion
    completion['created_at'] = timestamp_now()

    # persist task together with its completions
    self.target_storage.set(task_id, task)
    logger.debug('Completion ' + str(task_id) + ' saved:\n' + json.dumps(task, indent=2))
    return completion['id']