def move(call: APICall, company: str, request: MoveRequest):
    moved, affected_projects = ProjectBLL.move_project(
        company=company,
        user=call.identity.user,
        project_id=request.project,
        new_location=request.new_location,
    )
    _reset_cached_tags(company, projects=list(affected_projects))
    call.result.data = {"moved": moved}
def merge(call: APICall, company: str, request: MergeRequest):
    moved_entities, moved_projects, affected_projects = ProjectBLL.merge_project(
        company,
        source_id=request.project,
        destination_id=request.destination_project,
    )
    _reset_cached_tags(company, projects=list(affected_projects))
    call.result.data = {
        "moved_entities": moved_entities,
        "moved_projects": moved_projects,
    }
def create(call: APICall):
    identity = call.identity

    with translate_errors_context():
        fields = parse_from_call(call.data, create_fields, Project.get_fields())
        conform_tag_fields(call, fields, validate=True)
        return IdResponse(
            id=ProjectBLL.create(
                user=identity.user,
                company=identity.company,
                **fields,
            )
        )
def update(call: APICall):
    """
    update

    :summary: Update project information.
        See `project.create` for parameters.
    :return: updated - `int` - number of projects updated
        fields - `[string]` - updated fields
    """
    fields = parse_from_call(
        call.data, create_fields, Project.get_fields(), discard_none_values=False
    )
    conform_tag_fields(call, fields, validate=True)
    updated = ProjectBLL.update(
        company=call.identity.company, project_id=call.data["project"], **fields
    )
    conform_output_tags(call, fields)
    call.result.data_model = UpdateResponse(updated=updated, fields=fields)
import re

from apiserver.apimodels.pipelines import StartPipelineResponse, StartPipelineRequest
from apiserver.bll.organization import OrgBLL
from apiserver.bll.project import ProjectBLL
from apiserver.bll.task import TaskBLL
from apiserver.bll.task.task_operations import enqueue_task
from apiserver.database.model.project import Project
from apiserver.database.model.task.task import Task
from apiserver.service_repo import APICall, endpoint

org_bll = OrgBLL()
project_bll = ProjectBLL()
task_bll = TaskBLL()


def _update_task_name(task: Task):
    if not task or not task.project:
        return

    project = Project.objects(id=task.project).only("name").first()
    if not project:
        return

    # The last path segment of the project name is used as the run name prefix
    _, _, name_prefix = project.name.rpartition("/")
    name_mask = re.compile(rf"{re.escape(name_prefix)}( #\d+)?$")
    # Count existing pipeline runs named "<prefix>" or "<prefix> #N" to pick the next index
    count = Task.objects(
        project=task.project, system_tags__in=["pipeline"], name=name_mask
    ).count()
    new_name = f"{name_prefix} #{count}" if count > 0 else name_prefix
    task.update(name=new_name)
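# A minimal standalone sketch of how the run-name mask in _update_task_name matches
# existing pipeline run names. "My Pipeline" is an assumed example prefix; in the
# server code the prefix comes from the last path segment of the project name.
import re

name_prefix = "My Pipeline"
name_mask = re.compile(rf"{re.escape(name_prefix)}( #\d+)?$")
for candidate in ("My Pipeline", "My Pipeline #7", "My Pipeline copy"):
    print(candidate, bool(name_mask.match(candidate)))
# My Pipeline True
# My Pipeline #7 True
# My Pipeline copy False
# Only the bare prefix or "<prefix> #<number>" count toward the existing-run total,
# so the next run is renamed "<prefix> #<count>".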
def clone_task(
    cls,
    company_id: str,
    user_id: str,
    task_id: str,
    name: Optional[str] = None,
    comment: Optional[str] = None,
    parent: Optional[str] = None,
    project: Optional[str] = None,
    tags: Optional[Sequence[str]] = None,
    system_tags: Optional[Sequence[str]] = None,
    hyperparams: Optional[dict] = None,
    configuration: Optional[dict] = None,
    container: Optional[dict] = None,
    execution_overrides: Optional[dict] = None,
    input_models: Optional[Sequence[TaskInputModel]] = None,
    validate_references: bool = False,
    new_project_name: str = None,
) -> Tuple[Task, dict]:
    validate_tags(tags, system_tags)
    params_dict = {
        field: value
        for field, value in (
            ("hyperparams", hyperparams),
            ("configuration", configuration),
        )
        if value is not None
    }

    task = cls.get_by_id(company_id=company_id, task_id=task_id, allow_public=True)
    now = datetime.utcnow()

    if input_models:
        input_models = [
            ModelItem(model=m.model, name=m.name, updated=now) for m in input_models
        ]

    execution_dict = task.execution.to_proper_dict() if task.execution else {}
    if execution_overrides:
        # Convert the legacy execution model override into an input model
        execution_model = execution_overrides.pop("model", None)
        if not input_models and execution_model:
            input_models = [
                ModelItem(
                    model=execution_model,
                    name=TaskModelNames[TaskModelTypes.input],
                    updated=now,
                )
            ]
        # Convert the legacy docker_cmd override into a container definition
        docker_cmd = execution_overrides.pop("docker_cmd", None)
        if not container and docker_cmd:
            image, _, arguments = docker_cmd.partition(" ")
            container = {"image": image, "arguments": arguments}

        artifacts_prepare_for_save({"execution": execution_overrides})

        params_dict["execution"] = {}
        for legacy_param in ("parameters", "configuration"):
            legacy_value = execution_overrides.pop(legacy_param, None)
            if legacy_value is not None:
                params_dict["execution"] = legacy_value

        escape_dict_field(execution_overrides, "model_labels")
        execution_dict.update(execution_overrides)

    params_prepare_for_save(params_dict, previous_task=task)

    # Output-mode artifacts are not copied to the cloned task
    artifacts = execution_dict.get("artifacts")
    if artifacts:
        execution_dict["artifacts"] = {
            k: a
            for k, a in artifacts.items()
            if a.get("mode", DEFAULT_ARTIFACT_MODE) != ArtifactModes.output
        }
    execution_dict.pop("queue", None)

    new_project_data = None
    if not project and new_project_name:
        # Use a project with the provided name, or create a new project
        project = ProjectBLL.find_or_create(
            project_name=new_project_name,
            user=user_id,
            company=company_id,
            description="",
        )
        new_project_data = {"id": project, "name": new_project_name}

    def clean_system_tags(input_tags: Sequence[str]) -> Sequence[str]:
        if not input_tags:
            return input_tags

        return [
            tag
            for tag in input_tags
            if tag not in [TaskSystemTags.development, EntityVisibility.archived.value]
        ]

    with TimingContext("mongo", "clone task"):
        # Do not inherit a parent task that was already deleted
        parent_task = (
            task.parent
            if task.parent and not task.parent.startswith(deleted_prefix)
            else None
        )
        new_task = Task(
            id=create_id(),
            user=user_id,
            company=company_id,
            created=now,
            last_update=now,
            last_change=now,
            name=name or task.name,
            comment=comment or task.comment,
            parent=parent or parent_task,
            project=project or task.project,
            tags=tags or task.tags,
            system_tags=system_tags or clean_system_tags(task.system_tags),
            type=task.type,
            script=task.script,
            output=Output(destination=task.output.destination)
            if task.output
            else None,
            models=Models(input=input_models or task.models.input),
            container=escape_dict(container) or task.container,
            execution=execution_dict,
            configuration=params_dict.get("configuration") or task.configuration,
            hyperparams=params_dict.get("hyperparams") or task.hyperparams,
        )
        cls.validate(
            new_task,
            validate_models=validate_references or input_models,
            validate_parent=validate_references or parent,
            validate_project=validate_references or project,
        )
        new_task.save()

    if task.project == new_task.project:
        updated_tags = tags
        updated_system_tags = system_tags
    else:
        updated_tags = new_task.tags
        updated_system_tags = new_task.system_tags
    org_bll.update_tags(
        company_id,
        Tags.Task,
        project=new_task.project,
        tags=updated_tags,
        system_tags=updated_system_tags,
    )
    update_project_time(new_task.project)
    return new_task, new_project_data
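# A minimal standalone sketch of the legacy docker_cmd handling in clone_task above:
# the command string is split on the first space into an image and an arguments
# string. The docker_cmd value here is an assumed example, not taken from the source.
docker_cmd = "nvidia/cuda:11.8.0-runtime-ubuntu22.04 --ipc=host -e FOO=bar"
image, _, arguments = docker_cmd.partition(" ")
container = {"image": image, "arguments": arguments}
print(container)
# {'image': 'nvidia/cuda:11.8.0-runtime-ubuntu22.04', 'arguments': '--ipc=host -e FOO=bar'}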
def clone_task(
    cls,
    company_id: str,
    user_id: str,
    task_id: str,
    name: Optional[str] = None,
    comment: Optional[str] = None,
    parent: Optional[str] = None,
    project: Optional[str] = None,
    tags: Optional[Sequence[str]] = None,
    system_tags: Optional[Sequence[str]] = None,
    hyperparams: Optional[dict] = None,
    configuration: Optional[dict] = None,
    execution_overrides: Optional[dict] = None,
    validate_references: bool = False,
    new_project_name: str = None,
) -> Tuple[Task, dict]:
    validate_tags(tags, system_tags)
    params_dict = {
        field: value
        for field, value in (
            ("hyperparams", hyperparams),
            ("configuration", configuration),
        )
        if value is not None
    }

    task = cls.get_by_id(company_id=company_id, task_id=task_id, allow_public=True)
    execution_dict = task.execution.to_proper_dict() if task.execution else {}
    execution_model_overridden = False
    if execution_overrides:
        execution_model_overridden = execution_overrides.get("model") is not None

        artifacts_prepare_for_save({"execution": execution_overrides})

        params_dict["execution"] = {}
        for legacy_param in ("parameters", "configuration"):
            legacy_value = execution_overrides.pop(legacy_param, None)
            if legacy_value is not None:
                params_dict["execution"] = legacy_value

        execution_dict.update(execution_overrides)

    params_prepare_for_save(params_dict, previous_task=task)

    # Output-mode artifacts are not copied to the cloned task
    artifacts = execution_dict.get("artifacts")
    if artifacts:
        execution_dict["artifacts"] = {
            k: a for k, a in artifacts.items() if a.get("mode") != ArtifactModes.output
        }

    new_project_data = None
    if not project and new_project_name:
        # Use a project with the provided name, or create a new project
        project = ProjectBLL.find_or_create(
            project_name=new_project_name,
            user=user_id,
            company=company_id,
            description="Auto-generated while cloning",
        )
        new_project_data = {"id": project, "name": new_project_name}

    now = datetime.utcnow()

    def clean_system_tags(input_tags: Sequence[str]) -> Sequence[str]:
        if not input_tags:
            return input_tags

        return [
            tag
            for tag in input_tags
            if tag not in [TaskSystemTags.development, EntityVisibility.archived.value]
        ]

    with TimingContext("mongo", "clone task"):
        new_task = Task(
            id=create_id(),
            user=user_id,
            company=company_id,
            created=now,
            last_update=now,
            last_change=now,
            name=name or task.name,
            comment=comment or task.comment,
            parent=parent or task.parent,
            project=project or task.project,
            tags=tags or task.tags,
            system_tags=system_tags or clean_system_tags(task.system_tags),
            type=task.type,
            script=task.script,
            output=Output(destination=task.output.destination)
            if task.output
            else None,
            execution=execution_dict,
            configuration=params_dict.get("configuration") or task.configuration,
            hyperparams=params_dict.get("hyperparams") or task.hyperparams,
        )
        cls.validate(
            new_task,
            validate_model=validate_references or execution_model_overridden,
            validate_parent=validate_references or parent,
            validate_project=validate_references or project,
        )
        new_task.save()

    if task.project == new_task.project:
        updated_tags = tags
        updated_system_tags = system_tags
    else:
        updated_tags = new_task.tags
        updated_system_tags = new_task.system_tags
    org_bll.update_tags(
        company_id,
        Tags.Task,
        project=new_task.project,
        tags=updated_tags,
        system_tags=updated_system_tags,
    )
    update_project_time(new_task.project)
    return new_task, new_project_data