def get_full_operation(self):
    """Build an operation spec payload touching every supported section.

    Every field is filled with the fixture sentinels
    (``DEFAULT_STR_VALUE`` / ``DEFAULT_INT_VALUE`` / ``DEFAULT_DT_VALUE``)
    so tests can detect exactly which sections a patch/merge touched.

    Returns:
        The operation parsed by ``OperationSpecification.read``.
    """
    return OperationSpecification.read(
        {
            "version": pkg.SCHEMA_VERSION,
            "name": self.DEFAULT_STR_VALUE,
            "description": self.DEFAULT_STR_VALUE,
            # Suffixes 1/2 distinguish list entries from the same sentinel.
            "tags": [
                "{}1".format(self.DEFAULT_STR_VALUE),
                "{}2".format(self.DEFAULT_STR_VALUE),
            ],
            "presets": [self.DEFAULT_STR_VALUE],
            "queue": "{}/{}".format(self.DEFAULT_STR_VALUE, self.DEFAULT_STR_VALUE),
            "cache": {
                "disable": False,
                "ttl": self.DEFAULT_INT_VALUE,
            },
            "termination": {
                "maxRetries": self.DEFAULT_INT_VALUE,
                "ttl": self.DEFAULT_INT_VALUE,
                "timeout": self.DEFAULT_INT_VALUE,
            },
            "plugins": {
                "auth": False,
                "shm": False,
                "collectLogs": False,
                "collectArtifacts": False,
                "collectResources": False,
            },
            "actions": [
                {"hubRef": "{}1".format(self.DEFAULT_STR_VALUE)},
                {
                    "hubRef": "{}2".format(self.DEFAULT_STR_VALUE),
                    "label": "customLabel",
                    "many": True,
                },
            ],
            "hooks": [
                {
                    "trigger": "succeeded",
                    "connection": "{}1".format(self.DEFAULT_STR_VALUE),
                },
                {
                    "connection": "{}1".format(self.DEFAULT_STR_VALUE),
                    "hubRef": "{}2".format(self.DEFAULT_STR_VALUE),
                },
            ],
            "params": {
                "patch-key1": {"value": "{}2".format(self.DEFAULT_STR_VALUE)},
                "patch-key2": {"value": "{}1".format(self.DEFAULT_STR_VALUE)},
            },
            "runPatch": {
                "init": [
                    {
                        "connection": self.DEFAULT_STR_VALUE,
                        "git": {"revision": self.DEFAULT_STR_VALUE},
                    }
                ],
                "connections": [
                    "{}1".format(self.DEFAULT_STR_VALUE),
                    "{}2".format(self.DEFAULT_STR_VALUE),
                ],
                "container": {
                    "resources": {"requests": {"cpu": self.DEFAULT_INT_VALUE}}
                },
                "environment": {
                    "nodeSelector": {"polyaxon": "core"},
                    "serviceAccountName": self.DEFAULT_STR_VALUE,
                    "imagePullSecrets": [
                        "{}1".format(self.DEFAULT_STR_VALUE),
                        "{}2".format(self.DEFAULT_STR_VALUE),
                    ],
                },
            },
            "schedule": {
                "kind": "cron",
                "cron": "0 0 * * *",
                "startAt": self.DEFAULT_DT_VALUE,
                "endAt": self.DEFAULT_DT_VALUE,
            },
            "events": None,
            "matrix": {
                "concurrency": self.DEFAULT_INT_VALUE,
                "kind": "mapping",
                "values": [
                    {"a": self.DEFAULT_INT_VALUE},
                    {"b": self.DEFAULT_INT_VALUE},
                ],
            },
            "dependencies": [
                "{}1".format(self.DEFAULT_STR_VALUE),
                "{}2".format(self.DEFAULT_STR_VALUE),
            ],
            "trigger": "all_succeeded",
            "conditions": self.DEFAULT_STR_VALUE,
            "skipOnUpstreamSkip": True,
            "hubRef": self.DEFAULT_STR_VALUE,
        }
    )
def test_patch_does_not_alter_with_no_preset(self):
    """Applying a ``None`` preset must leave the compiled operation untouched."""
    patched = OperationSpecification.apply_preset(
        config=self.compiled_operation,
        preset=None,
    )
    assert patched == self.compiled_operation
def run(
    ctx,
    project,
    polyaxonfile,
    python_module,
    url,
    hub,
    name,
    tags,
    description,
    log,
    watch,
    local,
    params,
    profile,
    queue,
    nocache,
    eager,
):
    """Run polyaxonfile specification.

    Examples:

    \b
    $ polyaxon run -f file -f file_override ...

    Run and set description and tags for this run

    \b
    $ polyaxon run -f file --description="Description of the current run" --tags="foo, bar, moo"

    Run and set a unique name for this run

    \b
    polyaxon run --name=foo

    Run for a specific project

    \b
    $ polyaxon run -p project1 -f file.yaml

    Run with updated params

    \b
    $ polyaxon run -p project1 -f file.yaml -P param1=234.2 -P param2=relu

    If a python file contains a component main, you can run that component

    \b
    polyaxon run -pm path/to/my-component.py

    If a python file contains more than one component, you can specify the component to run

    \b
    polyaxon run -pm path/to/my-component.py:componentA
    """
    # Validate and load the operation from any of the supported sources
    # (polyaxonfile(s), python module, url, or hub reference).
    op_spec = check_polyaxonfile(
        polyaxonfile=polyaxonfile,
        python_module=python_module,
        url=url,
        hub=hub,
        params=params,
        profile=profile,
        queue=queue,
        nocache=nocache,
        verbose=False,
        eager=eager,
    )
    owner, project_name = get_project_or_local(project, is_cli=True)
    tags = validate_tags(tags)
    if local:
        # Local docker execution needs a fully compiled operation with the
        # operation contexts applied; bail out if that can't be done offline.
        try:
            compiled_operation = OperationSpecification.compile_operation(op_spec)
            compiled_operation = CompiledOperationSpecification.apply_operation_contexts(
                compiled_operation
            )
        except (PolyaxonSchemaError, ValidationError):
            Printer.print_error(
                "Could not run this polyaxonfile locally, "
                "a context is required to resolve it dependencies."
            )
            sys.exit(1)
        docker_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            compiled_operation=compiled_operation,
            log=log,
        )
    elif settings.CLIENT_CONFIG.no_api:
        # No API configured: schedule directly on Kubernetes.
        k8s_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            op_spec=op_spec,
            log=log,
        )
    else:
        # Default path: submit the raw operation spec to the platform.
        platform_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            op_spec=op_spec,
            log=log,
            watch=watch,
            eager=eager,
        )
def get_empty_operation(self):
    """Return a minimal operation: only the schema version and a hub reference."""
    payload = {
        "version": pkg.SCHEMA_VERSION,
        "hubRef": "test",
    }
    return OperationSpecification.read(payload)
def init_run(
    self,
    project_id: int,
    user_id: int,
    op_spec: V1Operation = None,
    compiled_operation: V1CompiledOperation = None,
    name: str = None,
    description: str = None,
    tags: str = None,
    override: Union[str, Dict] = None,
    override_post: bool = True,
    params: Dict = None,
    readme: str = None,
    original_id: int = None,
    original_uuid: int = None,
    cloning_kind: str = None,
    is_managed: bool = True,
    supported_kinds: Set[str] = None,
    **kwargs,
) -> Tuple[V1CompiledOperation, BaseRun]:
    """Compile an operation and build the (unsaved) run model instance.

    Compiles ``op_spec`` (with optional ``override``) when no compiled
    operation is provided, derives name/description/tags/kind/meta info from
    the compiled operation, and returns the compiled operation together with
    a new run model instance carrying a CREATED status condition.

    Raises:
        ValueError: when ``cloning_kind`` is COPY and the resolved meta kind
            is not a job or a service.
    """
    content = None
    raw_content = None
    if op_spec:
        op_spec = self.set_spec(op_spec)
        raw_content = op_spec.to_dict(dump=True)
    if op_spec:
        if not compiled_operation or override:
            compiled_operation = OperationSpecification.compile_operation(
                op_spec, override=override, override_post=override_post
            )
        params = op_spec.params
    params = params or {}
    # Literal param values become run inputs; all params are stored as dicts.
    inputs = {p: pv.value for p, pv in params.items() if pv.is_literal}
    params = {p: pv.to_dict() for p, pv in params.items()}
    kind = None
    meta_info = {}
    if compiled_operation:
        name = name or compiled_operation.name
        description = description or compiled_operation.description
        tags = tags or compiled_operation.tags
        kind, meta_kind = self.get_kind(compiled_operation)
        kind, meta_info = self.get_meta_info(compiled_operation, kind, meta_kind)
        self.supports_kind(kind, meta_kind, supported_kinds, is_managed)
        if cloning_kind == V1CloningKind.COPY:
            if meta_kind not in {V1RunKind.JOB, V1RunKind.SERVICE}:
                # Fix: the `{}` placeholder was never filled; interpolate the
                # offending meta kind as the sibling implementation does.
                raise ValueError(
                    "Operation with kind `{}` does not support restart with copy mode.".format(
                        meta_kind
                    )
                )
            # COPY restarts mount the original run's artifacts directory.
            compiled_operation.run.add_init(
                V1Init(artifacts=V1ArtifactsType(dirs=[original_uuid]))
            )
        content = compiled_operation.to_dict(dump=True)
    instance = get_run_model()(
        project_id=project_id,
        user_id=user_id,
        name=name,
        description=description,
        tags=tags,
        readme=readme,
        raw_content=raw_content,
        content=content,
        params=params,
        inputs=inputs,
        kind=kind,
        meta_info=meta_info,
        original_id=original_id,
        cloning_kind=cloning_kind,
        is_managed=is_managed,
        status_conditions=[
            V1StatusCondition.get_condition(
                type=V1Statuses.CREATED,
                status="True",
                reason="PolyaxonRunCreated",
                message="Run is created",
            ).to_dict()
        ],
        **self.sanitize_kwargs(**kwargs),
    )
    return compiled_operation, instance
def init_run(
    self,
    project_id: int,
    user_id: int,
    op_spec: V1Operation = None,
    compiled_operation: V1CompiledOperation = None,
    name: str = None,
    description: str = None,
    tags: str = None,
    override: Union[str, Dict] = None,
    params: Dict = None,
    readme: str = None,
    original_id: int = None,
    original_uuid: int = None,
    cloning_kind: str = None,
    is_managed: bool = True,
    pending: str = None,
    meta_info: Dict = None,
    supported_kinds: Set[str] = None,
    **kwargs,
) -> Tuple[V1CompiledOperation, BaseRun]:
    """Compile an operation and build the (unsaved) run model instance.

    Variant that tracks a ``runtime`` in addition to ``kind``, and that can
    mark the run as pending approval when the compiled operation declares
    ``is_approved is False``.
    """
    if op_spec:
        # set_spec may also rewrite kwargs (e.g. inject spec-derived fields).
        op_spec, kwargs = self.set_spec(op_spec, **kwargs)
    if op_spec:
        if not compiled_operation or override:
            compiled_operation = OperationSpecification.compile_operation(
                op_spec, override=override
            )
        params = op_spec.params
    params = params or {}
    # Literal param values become run inputs; all params are stored as dicts.
    inputs = {p: pv.value for p, pv in params.items() if pv.is_literal}
    params = {p: pv.to_dict() for p, pv in params.items()}
    kind = None
    meta_info = meta_info or {}
    if compiled_operation:
        # Only an explicit `is_approved is False` forces the approval gate.
        if pending is None and compiled_operation.is_approved is False:
            pending = V1RunPending.APPROVAL
        name = name or compiled_operation.name
        description = description or compiled_operation.description
        tags = tags or compiled_operation.tags
        kind, runtime = self.get_kind(compiled_operation)
        kind, runtime, meta_info = self.get_meta_info(
            compiled_operation, kind, runtime, meta_info, **kwargs
        )
        self.supports_kind(kind, runtime, supported_kinds, is_managed)
        kwargs["content"] = compiled_operation.to_dict(dump=True)
    # NOTE(review): `runtime` is only bound inside the branch above; a call
    # reaching here without a compiled operation would raise NameError on
    # `runtime=runtime` — confirm callers always provide/compile an operation.
    instance = get_run_model()(
        project_id=project_id,
        user_id=user_id,
        name=name,
        description=description,
        tags=tags,
        readme=readme,
        params=params,
        inputs=inputs,
        kind=kind,
        runtime=runtime,
        meta_info=meta_info,
        original_id=original_id,
        cloning_kind=cloning_kind,
        is_managed=is_managed,
        pending=pending,
        # The pops below run before sanitize_kwargs(**kwargs) because keyword
        # arguments are evaluated left-to-right, so reason/message never leak
        # into the sanitized kwargs.
        status_conditions=[
            V1StatusCondition.get_condition(
                type=V1Statuses.CREATED,
                status="True",
                reason=kwargs.pop("reason", "OperationServiceInit"),
                message=kwargs.pop("message", "Run is created"),
            ).to_dict()
        ],
        **self.sanitize_kwargs(**kwargs),
    )
    return compiled_operation, instance
def resolve_presets(self):
    """Fold every meta-artifacts preset into the compiled operation, in order."""
    for current_preset in self._get_meta_artifacts_presets():
        self.compiled_operation = OperationSpecification.apply_preset(
            config=self.compiled_operation,
            preset=current_preset,
        )
def run(
    ctx,
    project,
    polyaxonfile,
    python_module,
    name,
    tags,
    description,
    upload,
    log,
    watch,
    local,
    conda_env,
    params,
    profile,
    queue,
    nocache,
):
    """Run polyaxonfile specification.

    Examples:

    \b
    ```bash
    $ polyaxon run -f file -f file_override ...
    ```

    Upload before running

    \b
    ```bash
    $ polyaxon run -f file -u
    ```

    Run and set description and tags for this run

    \b
    ```bash
    $ polyaxon run -f file -u --description="Description of the current run" --tags="foo, bar, moo"
    ```

    Run and set a unique name for this run

    \b
    ```bash
    polyaxon run --name=foo
    ```

    Run for a specific project

    \b
    ```bash
    $ polyaxon run -p project1 -f file.yaml
    ```

    Run with updated params

    \b
    ```bash
    $ polyaxon run -p project1 -f file.yaml -P param1=234.2 -P param2=relu
    ```
    """
    # NOTE(review): `conda_env` is accepted but never used in this body —
    # presumably a CLI option kept for compatibility; confirm before removing.
    op_spec = check_polyaxonfile(
        polyaxonfile=polyaxonfile,
        python_module=python_module,
        params=params,
        profile=profile,
        queue=queue,
        nocache=nocache,
        log=False,
    )
    owner, project_name = get_project_or_local(project, is_cli=True)
    tags = validate_tags(tags)
    if local:
        # Local docker execution needs a fully compiled operation with its
        # context applied; bail out if that can't be resolved offline.
        try:
            compiled_operation = OperationSpecification.compile_operation(op_spec)
            compiled_operation = CompiledOperationSpecification.apply_context(
                compiled_operation
            )
        except (PolyaxonSchemaError, ValidationError):
            Printer.print_error(
                "Could not run this polyaxonfile locally, "
                "a context is required to resolve it dependencies."
            )
            sys.exit(1)
        docker_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            compiled_operation=compiled_operation,
            log=log,
        )
    elif settings.CLIENT_CONFIG.no_api:
        # No API configured: schedule directly on Kubernetes.
        # Uploading requires an explicit project.
        k8s_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            op_spec=op_spec,
            upload=upload,
            log=log,
            can_upload=all([upload, project]),
        )
    else:
        # Default path: submit the raw operation spec to the platform.
        platform_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            op_spec=op_spec,
            upload=upload,
            log=log,
            watch=watch,
            can_upload=all([upload, project]),
        )
def init_run(
    self,
    project_id: int,
    user_id: int,
    op_spec: V1Operation = None,
    compiled_operation: V1CompiledOperation = None,
    name: str = None,
    description: str = None,
    tags: str = None,
    override: Union[str, Dict] = None,
    override_post: bool = True,
    params: Dict = None,
    readme: str = None,
    original_id: int = None,
    cloning_kind: str = None,
    supported_kinds: Set[str] = None,
    **kwargs,
) -> Tuple[V1CompiledOperation, BaseRun]:
    """Compile an operation and build the (unsaved) run model instance.

    Compiles ``op_spec`` (with optional ``override``) when no compiled
    operation is provided, derives name/description/tags/kind/meta info from
    the compiled operation, and returns the compiled operation together with
    a new run model instance carrying a CREATED status condition.
    """
    content = None
    raw_content = None
    if op_spec:
        op_spec = self.set_spec(op_spec)
        raw_content = op_spec.to_dict(dump=True)
    if op_spec:
        if not compiled_operation or override:
            compiled_operation = OperationSpecification.compile_operation(
                op_spec, override=override, override_post=override_post
            )
        params = op_spec.params
    params = params or {}
    # Literal param values become run inputs; all params are stored as dicts.
    inputs = {p: pv.value for p, pv in params.items() if pv.is_literal}
    params = {p: pv.to_dict() for p, pv in params.items()}
    kind = None
    meta_info = {}
    if compiled_operation:
        content = compiled_operation.to_dict(dump=True)
        name = name or compiled_operation.name
        description = description or compiled_operation.description
        tags = tags or compiled_operation.tags
        kind, meta_kind = self.get_kind(compiled_operation)
        kind, meta_info = self.get_meta_info(compiled_operation, kind, meta_kind)
        self.supports_kind(kind, meta_kind, supported_kinds)
    instance = get_run_model()(
        project_id=project_id,
        user_id=user_id,
        name=name,
        description=description,
        tags=tags,
        readme=readme,
        raw_content=raw_content,
        content=content,
        params=params,
        inputs=inputs,
        kind=kind,
        meta_info=meta_info,
        original_id=original_id,
        cloning_kind=cloning_kind,
        status_conditions=[
            V1StatusCondition.get_condition(
                type=V1Statuses.CREATED,
                status="True",
                reason="PolyaxonRunCreated",
                message="Run is created",
            ).to_dict()
        ],
        **self.sanitize_kwargs(**kwargs),
    )
    return compiled_operation, instance
def run(
    ctx,
    project,
    polyaxonfile,
    python_module,
    url,
    hub,
    name,
    tags,
    description,
    log,
    upload,
    upload_from,
    upload_to,
    watch,
    local,
    params,
    presets,
    queue,
    nocache,
    cache,
    eager,
    git_preset,
    git_revision,
    ignore_template,
):
    """Run polyaxonfile specification.

    Examples:

    \b
    $ polyaxon run -f file -f file_override ...

    Run and set description and tags for this run

    \b
    $ polyaxon run -f file --description="Description of the current run" --tags="foo, bar, moo"

    Run and set a unique name for this run

    \b
    polyaxon run --name=foo

    Run for a specific project

    \b
    $ polyaxon run -p project1 -f file.yaml

    Run with updated params

    \b
    $ polyaxon run -p project1 -f file.yaml -P param1=234.2 -P param2=relu

    If a python file contains a component main, you can run that component

    \b
    $ polyaxon run -pm path/to/my-component.py

    If a python file contains more than one component, you can specify the component to run

    \b
    $ polyaxon run -pm path/to/my-component.py:componentA

    Uploading from everything in the current folder to the default uploads path

    \b
    $ polyaxon run ... -u

    Uploading from everything in the current folder to a custom path, e.g. code

    \b
    $ polyaxon run ... -u-to code

    Uploading from everything from a sub-folder, e.g. ./code to the a custom path, e.g. new-code

    \b
    $ polyaxon run ... -u-from ./code -u-to new-code
    """
    # Mutually exclusive / implied flags.
    if cache and nocache:
        Printer.print_error(
            "You can't use `--cache` and `--nocache` at the same.", sys_exit=True
        )
    # A custom upload path implies uploading.
    if (upload_to or upload_from) and not upload:
        upload = True
    if upload and eager:
        Printer.print_error(
            "You can't use `--upload` and `--eager` at the same.", sys_exit=True
        )

    git_init = None
    if git_preset or git_revision:
        # Check that the current path was initialized
        if not GitConfigManager.is_initialized():
            Printer.print_error(
                "You can't use `--git-preset [--git-revision]`, "
                "the current path is not initialized with a valid git connection or a git url, "
                "please run `polyaxon init [--git-connection] [--git-url]` "
                "to set a valid git configuration.",
                sys_exit=True,
            )
        git_init = GitConfigManager.get_config()
        if git_init.git is None:
            # The stored config is unusable: drop it and ask to re-init.
            GitConfigManager.purge(visibility=GitConfigManager.VISIBILITY_LOCAL)
            Printer.print_error(
                "Polyaxon could not start a new run with the `[--git-preset] or [--git-revision]`. "
                "The current path is initialized with "
                "an invalid git connection or an invalid git url.\n"
                "please run `polyaxon init [--git-connection] [--git-url]` "
                "to properly initialize the current path.",
                sys_exit=True,
            )
        if git_revision:
            # Explicit revision wins over the local repo state.
            git_init.git.revision = git_revision
        elif code_reference.is_git_initialized(path="."):
            if code_reference.is_dirty(path="."):
                Printer.print_warning(
                    "Polyaxon detected uncommitted changes in the current git repo!"
                )
            commit_hash = code_reference.get_commit()
            git_init.git.revision = commit_hash
        else:
            Printer.print_warning(
                "Polyaxon could not find a valid git repo, "
                "and will not add the current commit to the git initializer."
            )

    presets = validate_tags(presets)
    # Validate and load the operation from any of the supported sources.
    op_spec = check_polyaxonfile(
        polyaxonfile=polyaxonfile,
        python_module=python_module,
        url=url,
        hub=hub,
        params=params,
        presets=presets,
        queue=queue,
        cache=cache,
        nocache=nocache,
        verbose=False,
        eager=eager,
        git_init=git_init,
        ignore_template=ignore_template,
    )
    if ignore_template:
        op_spec.disable_template()
    if op_spec.is_template():
        click.echo("Please customize the specification or disable the template.")
        sys.exit(1)

    owner, project_name = get_project_or_local(project, is_cli=True)
    tags = validate_tags(tags)

    if local:
        # Local docker execution needs a fully compiled operation with the
        # operation contexts applied; bail out if that can't be done offline.
        try:
            compiled_operation = OperationSpecification.compile_operation(op_spec)
            compiled_operation = (
                CompiledOperationSpecification.apply_operation_contexts(
                    compiled_operation
                )
            )
        except (PolyaxonSchemaError, ValidationError):
            Printer.print_error(
                "Could not run this polyaxonfile locally, "
                "a context is required to resolve it dependencies."
            )
            sys.exit(1)
        docker_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            compiled_operation=compiled_operation,
            log=log,
        )
    elif settings.CLIENT_CONFIG.no_api:
        # No API configured: schedule directly on Kubernetes.
        k8s_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            op_spec=op_spec,
            log=log,
        )
    else:
        # Default path: submit the raw operation spec to the platform.
        platform_run(
            ctx=ctx,
            name=name,
            owner=owner,
            project_name=project_name,
            description=description,
            tags=tags,
            op_spec=op_spec,
            log=log,
            upload=upload,
            upload_to=upload_to,
            upload_from=upload_from,
            watch=watch,
            eager=eager,
        )
async def notify_run(
    namespace: str,
    owner: str,
    project: str,
    run_uuid: str,
    run_name: str,
    condition: V1StatusCondition,
    connections: List[str],
):
    """Spawn one notifier operation per requested notification connection.

    For each connection name, looks up the agent's configured connection type,
    builds a notifier component parametrized with the run identity and the
    status condition, compiles it, and creates the resulting resource on
    Kubernetes. Unknown/misconfigured connections are logged and skipped.
    """
    spawner = AsyncSpawner(namespace=namespace)
    await spawner.k8s_manager.setup()
    for connection in connections:
        connection_type = settings.AGENT_CONFIG.notification_connections_by_names.get(
            connection
        )
        if not connection_type:
            # Fix: log the requested connection name; `connection_type` is
            # None/falsy in this branch, so the old message was useless.
            logger.warning(
                "Could not create notification using connection {}, "
                "the connection was not found or not set correctly.".format(
                    connection
                )
            )
            continue
        operation = V1Operation(
            params={
                "kind": connection_type.kind,
                "owner": owner,
                "project": project,
                "run_uuid": run_uuid,
                "run_name": run_name,
                # The condition is passed to the notifier as a JSON string.
                "condition": ujson.dumps(condition.to_dict()),
            },
            termination=V1Termination(max_retries=3),
            component=V1Component(
                name="slack-notification",
                # Notifier pods do not need auth or artifact/log collection.
                plugins=V1Plugins(
                    auth=False,
                    collect_logs=False,
                    collect_artifacts=False,
                    collect_resources=False,
                    sync_statuses=False,
                ),
                inputs=[
                    V1IO(name="kind", iotype=types.STR, is_optional=False),
                    V1IO(name="owner", iotype=types.STR, is_optional=False),
                    V1IO(name="project", iotype=types.STR, is_optional=False),
                    V1IO(name="run_uuid", iotype=types.STR, is_optional=False),
                    V1IO(name="run_name", iotype=types.STR, is_optional=True),
                    V1IO(name="condition", iotype=types.STR, is_optional=True),
                    V1IO(name="connection", iotype=types.STR, is_optional=True),
                ],
                run=V1Notifier(
                    connections=[connection],
                    container=get_default_notification_container(),
                ),
            ),
        )
        compiled_operation = OperationSpecification.compile_operation(operation)
        resource = compiler.make(
            owner_name=owner,
            project_name=project,
            project_uuid=project,
            run_uuid=run_uuid,
            run_name=run_name,
            run_path=run_uuid,
            compiled_operation=compiled_operation,
            params=operation.params,
        )
        await spawner.create(
            run_uuid=run_uuid,
            run_kind=compiled_operation.get_run_kind(),
            resource=resource,
        )
def init_run(
    self,
    project_id: int,
    user_id: int,
    op_spec: V1Operation = None,
    compiled_operation: V1CompiledOperation = None,
    name: str = None,
    description: str = None,
    tags: str = None,
    override: Union[str, Dict] = None,
    params: Dict = None,
    readme: str = None,
    original_id: int = None,
    original_uuid: int = None,
    cloning_kind: str = None,
    is_managed: bool = True,
    is_approved: bool = True,
    meta_info: Dict = None,
    supported_kinds: Set[str] = None,
    **kwargs,
) -> Tuple[V1CompiledOperation, BaseRun]:
    """Compile an operation and build the (unsaved) run model instance.

    Variant that tracks a ``runtime`` in addition to ``kind`` and lets the
    compiled operation downgrade ``is_approved``.

    Raises:
        ValueError: when ``cloning_kind`` is COPY and the resolved runtime is
            not a job or a service.
    """
    if op_spec:
        # set_spec may also rewrite kwargs (e.g. inject spec-derived fields).
        op_spec, kwargs = self.set_spec(op_spec, **kwargs)
    if op_spec:
        if not compiled_operation or override:
            compiled_operation = OperationSpecification.compile_operation(
                op_spec, override=override
            )
        params = op_spec.params
    params = params or {}
    # Literal param values become run inputs; all params are stored as dicts.
    inputs = {p: pv.value for p, pv in params.items() if pv.is_literal}
    params = {p: pv.to_dict() for p, pv in params.items()}
    kind = None
    meta_info = meta_info or {}
    if compiled_operation:
        # The spec can only downgrade approval, never force it on.
        if is_approved and compiled_operation.is_approved is not None:
            is_approved = compiled_operation.is_approved
        name = name or compiled_operation.name
        description = description or compiled_operation.description
        tags = tags or compiled_operation.tags
        kind, runtime = self.get_kind(compiled_operation)
        kind, runtime, meta_info = self.get_meta_info(
            compiled_operation, kind, runtime, meta_info, **kwargs
        )
        self.supports_kind(kind, runtime, supported_kinds, is_managed)
        if cloning_kind == V1CloningKind.COPY:
            if runtime not in {V1RunKind.JOB, V1RunKind.SERVICE}:
                raise ValueError(
                    "Operation with kind `{}` does not support restart with copy mode.".format(
                        runtime
                    )
                )
            # COPY restarts mount the original run's artifacts dir into the
            # new run's artifacts path.
            compiled_operation.run.add_init(
                V1Init(
                    artifacts=V1ArtifactsType(
                        dirs=[[original_uuid, "{{ globals.run_artifacts_path }}"]]
                    )
                )
            )
        kwargs["content"] = compiled_operation.to_dict(dump=True)
    # NOTE(review): `runtime` is only bound inside the branch above; a call
    # reaching here without a compiled operation would raise NameError on
    # `runtime=runtime` — confirm callers always provide/compile an operation.
    instance = get_run_model()(
        project_id=project_id,
        user_id=user_id,
        name=name,
        description=description,
        tags=tags,
        readme=readme,
        params=params,
        inputs=inputs,
        kind=kind,
        runtime=runtime,
        meta_info=meta_info,
        original_id=original_id,
        cloning_kind=cloning_kind,
        is_managed=is_managed,
        is_approved=is_approved,
        # The pops below run before sanitize_kwargs(**kwargs) because keyword
        # arguments are evaluated left-to-right, so reason/message never leak
        # into the sanitized kwargs.
        status_conditions=[
            V1StatusCondition.get_condition(
                type=V1Statuses.CREATED,
                status="True",
                reason=kwargs.pop("reason", "OperationServiceInit"),
                message=kwargs.pop("message", "Run is created"),
            ).to_dict()
        ],
        **self.sanitize_kwargs(**kwargs),
    )
    return compiled_operation, instance
def test_patch_environment_and_termination(self):
    """Preset patching of termination/environment/plugins, then REPLACE strategy.

    Section 1 applies a preset with the default strategy; sections 2 and 3
    switch to ``V1PatchStrategy.REPLACE`` and check replacement/merge results.
    """
    termination1 = {"maxRetries": 1, "timeout": 1, "ttl": 1}
    environment1 = {
        "labels": {"label1": "value1"},
        "annotations": {"anno1": "value1"},
        "nodeSelector": {"plx": "selector1"},
        "affinity": {"podAffinity": {}},
        "tolerations": [{"key": "key1", "operator": "Exists"}],
        "serviceAccountName": "sa1",
        "imagePullSecrets": ["ps1", "ps2"],
        "securityContext": {"runAsUser": 1000, "runAsGroup": 3000},
    }
    plugins1 = {
        "logLevel": "DEBUG",
        "auth": True,
        "docker": True,
        "shm": True,
    }
    self.preset["termination"] = termination1
    self.preset["runPatch"]["environment"] = environment1
    self.preset["plugins"] = plugins1
    assert self.compiled_operation.termination is None
    assert (
        OperationSpecification.apply_preset(
            config=self.compiled_operation,
            preset=self.preset,
        )
        == self.compiled_operation
    )
    assert self.compiled_operation.termination is not None
    assert self.compiled_operation.termination.to_dict() == termination1
    assert self.compiled_operation.run.environment is not None
    env = self.compiled_operation.run.environment.to_dict()
    assert env == environment1
    assert self.compiled_operation.plugins is not None
    assert self.compiled_operation.plugins.to_dict() == plugins1

    termination2 = {"maxRetries": 10, "timeout": 10, "ttl": 10}
    environment2 = {
        "labels": {"label1": "value12"},
        "annotations": {"anno1": "value12"},
        "nodeSelector": {"plx": "selector12"},
        "affinity": {"podAffinity": {"k": "v"}},
        "tolerations": [{"key": "key11", "operator": "NotExists"}],
        "serviceAccountName": "sa2",
        "imagePullSecrets": ["ps2", "ps22"],
        "securityContext": {"runAsUser": 100, "runAsGroup": 300},
    }
    plugins2 = {
        "logLevel": "INFO",
        "auth": False,
        "docker": False,
        "shm": False,
    }
    # Updating the preset
    self.preset["termination"] = termination2
    self.preset["runPatch"]["environment"] = environment2
    self.preset["plugins"] = plugins2
    self.preset["patchStrategy"] = V1PatchStrategy.REPLACE
    assert (
        OperationSpecification.apply_preset(
            config=self.compiled_operation,
            preset=self.preset,
        )
        == self.compiled_operation
    )
    assert self.compiled_operation.termination is not None
    assert self.compiled_operation.termination.to_dict() == termination2
    # Fix: this previously re-asserted `termination is not None` (copy-paste);
    # it should guard the environment access below, as in section 1.
    assert self.compiled_operation.run.environment is not None
    env = self.compiled_operation.run.environment.to_dict()
    assert env == environment2
    assert self.compiled_operation.plugins is not None
    assert self.compiled_operation.plugins.to_dict() == plugins2

    termination3 = {"maxRetries": 15}
    environment3 = {
        "labels": {},
        "annotations": {},
        "nodeSelector": {},
        "affinity": {"podAffinity": {"k": "v"}},
        "tolerations": [],
        "securityContext": {"runAsUser": 10, "runAsGroup": 30},
        "serviceAccountName": "sa2",
        "imagePullSecrets": ["ps2", "ps22"],
    }
    # Updating the preset
    self.preset["termination"] = termination3
    self.preset["runPatch"]["environment"] = environment3
    self.preset["patchStrategy"] = V1PatchStrategy.REPLACE
    assert (
        OperationSpecification.apply_preset(
            config=self.compiled_operation,
            preset=self.preset,
        )
        == self.compiled_operation
    )
    assert self.compiled_operation.termination is not None
    # Only maxRetries is replaced; timeout/ttl keep their section-2 values.
    assert self.compiled_operation.termination.to_dict() == {
        "maxRetries": 15,
        "timeout": 10,
        "ttl": 10,
    }
    # Fix: same copy-paste guard as above — check the environment, not
    # termination a second time.
    assert self.compiled_operation.run.environment is not None
    env = self.compiled_operation.run.environment.to_dict()
    assert env == environment3
    assert self.compiled_operation.plugins is not None
    assert self.compiled_operation.plugins.to_dict() == plugins2