def update(call: APICall, company_id, req_model: UpdateRequest):
    """Partially update a single task and refresh the related caches.

    Loads the task for writing, applies the safe subset of the requested
    field changes, and on success invalidates/updates the cached project
    tags and the project's last-update time.
    """
    task_id = req_model.task

    with translate_errors_context():
        # Only id/project are needed here; the actual update goes through safe_update
        task = Task.get_for_writing(
            id=task_id, company=company_id, _only=["id", "project"]
        )
        if not task:
            raise errors.bad_request.InvalidTaskId(id=task_id)

        update_dict, valid_fields = prepare_update_fields(call, task, call.data)
        if not update_dict:
            # Nothing valid to change
            return UpdateResponse(updated=0)

        count, changed_fields = Task.safe_update(
            company_id=company_id,
            id=task_id,
            partial_update_dict=update_dict,
            injected_update=dict(last_change=datetime.utcnow()),
        )

        if count:
            target_project = changed_fields.get("project", task.project)
            if target_project == task.project:
                # Same project: refresh its tag cache from the changed fields
                _update_cached_tags(
                    company_id, project=task.project, fields=changed_fields
                )
            else:
                # Project changed: both old and new caches are stale
                _reset_cached_tags(
                    company_id, projects=[target_project, task.project]
                )
            update_project_time(changed_fields.get("project"))

        unprepare_from_saved(call, changed_fields)
        return UpdateResponse(updated=count, fields=changed_fields)
def delete_task(
    task_id: str,
    company_id: str,
    move_to_trash: bool,
    force: bool,
    return_file_urls: bool,
    delete_output_models: bool,
    status_message: str,
    status_reason: str,
) -> Tuple[int, Task, CleanupResult]:
    """Delete a single task, optionally preserving a copy in a trash collection.

    Requires write access. Unless ``force`` is set, the task must either be in
    ``created`` status or archived; otherwise the deletion is rejected.
    Returns a tuple of (number of deleted tasks, the task document, cleanup result).
    """
    task = TaskBLL.get_task_with_access(
        task_id, company_id=company_id, requires_write_access=True
    )

    if (
        task.status != TaskStatus.created
        and EntityVisibility.archived.value not in task.system_tags
        and not force
    ):
        raise errors.bad_request.TaskCannotBeDeleted(
            "due to status, use force=True",
            task=task.id,
            expected=TaskStatus.created,
            current=task.status,
        )

    try:
        # Best effort: take the task off its queue and move it to a final status
        TaskBLL.dequeue_and_change_status(
            task,
            company_id=company_id,
            status_message=status_message,
            status_reason=status_reason,
        )
    except APIError:
        # dequeue may fail if the task was not enqueued
        pass

    # Remove the task's associated artifacts/events/models per the flags
    cleanup_res = cleanup_task(
        task,
        force=force,
        return_file_urls=return_file_urls,
        delete_output_models=delete_output_models,
    )

    if move_to_trash:
        # Write a copy of the document into the "<collection>__trash" collection
        # before removing it from the live one
        collection_name = task._get_collection_name()
        archived_collection = "{}__trash".format(collection_name)
        task.switch_collection(archived_collection)
        try:
            # A simple save() won't do due to mongoengine caching (nothing will be saved), so we have to force
            # an insert. However, if for some reason such an ID exists, let's make sure we'll keep going.
            task.save(force_insert=True)
        except Exception:
            pass
        # Switch back so the delete below targets the live collection
        task.switch_collection(collection_name)

    task.delete()
    update_project_time(task.project)

    return 1, task, cleanup_res
def create(call: APICall, company_id, req_model: CreateRequest):
    """Create a new task from the validated call payload and return its id."""
    new_task, new_fields = _validate_and_get_task_from_call(call)

    with translate_errors_context(), TimingContext("mongo", "save_task"):
        new_task.save()
        # Keep the project's tag cache and last-update time in sync
        _update_cached_tags(company_id, project=new_task.project, fields=new_fields)
        update_project_time(new_task.project)

    call.result.data_model = IdResponse(id=new_task.id)
def update_batch(call: APICall, company_id, _):
    """Update multiple tasks in a single MongoDB bulk write.

    Each batched item must carry a "task" id plus the fields to change.
    All referenced tasks must exist and be writable for the company,
    otherwise the whole batch is rejected.
    """
    items = call.batched_data
    if items is None:
        raise errors.bad_request.BatchContainsNoItems()

    with translate_errors_context():
        # Index the incoming items by task id
        items = {i["task"]: i for i in items}
        tasks = {
            t.id: t
            for t in Task.get_many_for_writing(
                company=company_id, query=Q(id__in=list(items))
            )
        }
        if len(tasks) < len(items):
            # Reject the batch if any requested task is missing/not writable
            missing = tuple(set(items).difference(tasks))
            raise errors.bad_request.InvalidTaskId(ids=missing)

        now = datetime.utcnow()

        bulk_ops = []
        # Projects whose cached tags must be reset after the bulk write
        updated_projects = set()
        for id, data in items.items():
            task = tasks[id]
            fields, valid_fields = prepare_update_fields(call, data)
            partial_update_dict = Task.get_safe_update_dict(fields)
            if not partial_update_dict:
                # Nothing valid to change for this task
                continue
            partial_update_dict.update(last_change=now)
            update_op = UpdateOne(
                {"_id": id, "company": company_id}, {"$set": partial_update_dict}
            )
            bulk_ops.append(update_op)

            new_project = partial_update_dict.get("project", task.project)
            if new_project != task.project:
                # Task moved between projects: both tag caches are stale
                updated_projects.update({new_project, task.project})
            elif any(f in partial_update_dict for f in ("tags", "system_tags")):
                # Tags changed in place: only the current project's cache is stale
                updated_projects.add(task.project)

        updated = 0
        if bulk_ops:
            res = Task._get_collection().bulk_write(bulk_ops)
            updated = res.modified_count

        if updated and updated_projects:
            projects = list(updated_projects)
            _reset_cached_tags(company_id, projects=projects)
            update_project_time(project_ids=projects)

        call.result.data = {"updated": updated}
def cleanup_tasks(cls, threshold_sec):
    """Force-stop in_progress tasks that have been silent for too long.

    Finds tasks still marked in_progress whose last_update is older than
    ``threshold_sec`` seconds and transitions them to ``stopped``.

    :param threshold_sec: inactivity threshold in seconds
    :return: the number of tasks actually transitioned to stopped
    """
    relevant_status = (TaskStatus.in_progress,)
    threshold = timedelta(seconds=threshold_sec)
    ref_time = datetime.utcnow() - threshold
    log.info(
        f"Starting cleanup cycle for running tasks last updated before {ref_time}"
    )
    tasks = list(
        Task.objects(status__in=relevant_status, last_update__lt=ref_time).only(
            "id", "name", "status", "project", "last_update"
        )
    )
    log.info(f"{len(tasks)} non-responsive tasks found")
    if not tasks:
        return 0

    err_count = 0
    project_ids = set()
    now = datetime.utcnow()
    for task in tasks:
        log.info(
            f"Stopping {task.id} ({task.name}), last updated at {task.last_update}"
        )
        # noinspection PyBroadException
        try:
            # Guard on the original status so a task that changed state
            # concurrently is not clobbered (updated == 0 in that case)
            updated = Task.objects(id=task.id, status=task.status).update(
                status=TaskStatus.stopped,
                status_reason="Forced stop (non-responsive)",
                status_message="Forced stop (non-responsive)",
                status_changed=now,
                last_update=now,
                last_change=now,
            )
            if updated:
                project_ids.add(task.project)
            else:
                err_count += 1
        except Exception as ex:
            # Bug fix: a raised exception means the task was NOT stopped, so it
            # must count as an error too — otherwise the returned count would
            # include tasks that were never transitioned.
            err_count += 1
            log.error("Failed setting status: %s", str(ex))

    update_project_time(list(project_ids))
    return len(tasks) - err_count
def move(call: APICall, company_id: str, request: MoveRequest):
    """Move the requested tasks under a (possibly new) project.

    Either a project id or a project name must be supplied. Resets cached
    tags and refreshes the last-update time for every affected project.
    """
    if not (request.project or request.project_name):
        raise errors.bad_request.MissingRequiredFields(
            "project or project_name is required"
        )

    # Capture the source projects before the move so their caches get reset too.
    # NOTE(review): this lookup is not scoped by company, unlike sibling
    # handlers — presumably the ids were already access-checked; confirm.
    source_projects = {
        t.project
        for t in Task.objects(id__in=request.ids).only("project")
        if t.project
    }

    project_id = project_bll.move_under_project(
        entity_cls=Task,
        user=call.identity.user,
        company=company_id,
        ids=request.ids,
        project=request.project,
        project_name=request.project_name,
    )

    affected_projects = list(source_projects | {project_id})
    _reset_cached_tags(company_id, projects=affected_projects)
    update_project_time(affected_projects)

    return {"project_id": project_id}
def delete(call: APICall, company_id, req_model: DeleteRequest):
    """Delete a single task, optionally keeping a copy in the trash collection.

    Unless forced, only tasks in ``created`` status may be deleted.
    """
    task = TaskBLL.get_task_with_access(
        req_model.task, company_id=company_id, requires_write_access=True
    )
    force = req_model.force

    if not force and task.status != TaskStatus.created:
        raise errors.bad_request.TaskCannotBeDeleted(
            "due to status, use force=True",
            task=task.id,
            expected=TaskStatus.created,
            current=task.status,
        )

    with translate_errors_context():
        result = cleanup_task(task, force)

        if req_model.move_to_trash:
            # Preserve a copy in "<collection>__trash" before the real delete
            live_collection = task._get_collection_name()
            trash_collection = "{}__trash".format(live_collection)
            task.switch_collection(trash_collection)
            try:
                # A simple save() won't do due to mongoengine caching (nothing will be saved), so we have to force
                # an insert. However, if for some reason such an ID exists, let's make sure we'll keep going.
                with TimingContext("mongo", "save_task"):
                    task.save(force_insert=True)
            except Exception:
                pass
            task.switch_collection(live_collection)

        task.delete()
        _reset_cached_tags(company_id, projects=[task.project])
        update_project_time(task.project)

        call.result.data = dict(deleted=True, **attr.asdict(result))
def edit(call: APICall, company_id, req_model: UpdateRequest):
    """Replace editable fields of a task with the values from the call.

    Unlike ``update``, this validates the merged result as a full task.
    Unless forced, only tasks in ``created`` status may be edited.
    """
    task_id = req_model.task
    force = req_model.force

    with translate_errors_context():
        task = Task.get_for_writing(id=task_id, company=company_id)
        if not task:
            raise errors.bad_request.InvalidTaskId(id=task_id)

        if not force and task.status != TaskStatus.created:
            raise errors.bad_request.InvalidTaskStatus(
                expected=TaskStatus.created, status=task.status
            )

        # Editable fields are the creation fields plus "status"
        edit_fields = create_fields.copy()
        edit_fields.update(dict(status=None))

        with translate_errors_context(
            field_does_not_exist_cls=errors.bad_request.ValidationError
        ), TimingContext("code", "parse_and_validate"):
            fields = prepare_create_fields(
                call, valid_fields=edit_fields, output=task.output, previous_task=task
            )

        # Merge dict values into existing embedded documents instead of
        # replacing them wholesale, so unspecified sub-fields are retained
        for key in fields:
            field = getattr(task, key, None)
            value = fields[key]
            if (
                field
                and isinstance(value, dict)
                and isinstance(field, EmbeddedDocument)
            ):
                d = field.to_mongo(use_db_field=False).to_dict()
                d.update(value)
                fields[key] = d

        # Validate the merged result as if creating a new task
        task_bll.validate(task_bll.create(call, fields))

        # make sure field names do not end in mongoengine comparison operators
        fixed_fields = {
            (k if k not in COMPARISON_OPERATORS else "%s__" % k): v
            for k, v in fields.items()
        }
        if fixed_fields:
            now = datetime.utcnow()
            last_change = dict(last_change=now)
            # Touch last_update too when non-user-settable fields changed
            if not set(fields).issubset(Task.user_set_allowed()):
                last_change.update(last_update=now)
            fields.update(**last_change)
            fixed_fields.update(**last_change)
            updated = task.update(upsert=False, **fixed_fields)
            if updated:
                new_project = fixed_fields.get("project", task.project)
                if new_project != task.project:
                    # Project changed: both old and new tag caches are stale
                    _reset_cached_tags(
                        company_id, projects=[new_project, task.project]
                    )
                else:
                    _update_cached_tags(
                        company_id, project=task.project, fields=fixed_fields
                    )
                update_project_time(fields.get("project"))
            unprepare_from_saved(call, fields)
            call.result.data_model = UpdateResponse(updated=updated, fields=fields)
        else:
            call.result.data_model = UpdateResponse(updated=0)