class ProjectCommand(BaseCommand):
    """CLI commands operating on a datmo project: init, version, status, cleanup.

    NOTE(review): several spans of this class were corrupted in the reviewed
    copy (code between string literals replaced by "******", swallowing the
    echo calls for the snapshot objects, the end of ``status`` and the
    ``cleanup`` method header). Those spans were reconstructed below —
    verify against project history.
    """

    def __init__(self, cli_helper):
        super(ProjectCommand, self).__init__(cli_helper)
        self.project_controller = ProjectController()

    def init(self, name, description):
        """Initialize command

        Parameters
        ----------
        name : str
            name for the project
        description : str
            description of the project

        Returns
        -------
        datmo.core.entity.model.Model
        """
        # Check if project already exists
        is_new_model = False
        if not self.project_controller.model:
            is_new_model = True

        if is_new_model:
            # Initialize a new project
            self.cli_helper.echo(
                __("info", "cli.project.init.create",
                   {"path": self.project_controller.home}))
            # Prompt for any missing fields, defaulting the name to the
            # directory name of the project home
            if not name:
                _, default_name = os.path.split(self.project_controller.home)
                name = self.cli_helper.prompt(
                    __("prompt", "cli.project.init.name"),
                    default=default_name)
            if not description:
                description = self.cli_helper.prompt(
                    __("prompt", "cli.project.init.description"))
            try:
                success = self.project_controller.init(name, description)
                if success:
                    self.cli_helper.echo(
                        __("info", "cli.project.init.create.success", {
                            "name": name,
                            "path": self.project_controller.home
                        }))
            except Exception:
                # Controller failures are reported to the user, not raised
                self.cli_helper.echo(
                    __("info", "cli.project.init.create.failure", {
                        "name": name,
                        "path": self.project_controller.home
                    }))
                return None
        else:
            # Update the current project
            self.cli_helper.echo(
                __(
                    "info", "cli.project.init.update", {
                        "name": self.project_controller.model.name,
                        "path": self.project_controller.home
                    }))
            # Prompt for the name and description and add default if not given
            if not name:
                name = self.cli_helper.prompt(
                    __("prompt", "cli.project.init.name"),
                    default=self.project_controller.model.name)
            if not description:
                description = self.cli_helper.prompt(
                    __("prompt", "cli.project.init.description"),
                    default=self.project_controller.model.description)
            # Update the project with the values given
            try:
                success = self.project_controller.init(name, description)
                if success:
                    self.cli_helper.echo(
                        __("info", "cli.project.init.update.success", {
                            "name": name,
                            "path": self.project_controller.home
                        }))
            except Exception:
                self.cli_helper.echo(
                    __("info", "cli.project.init.update.failure", {
                        "name": name,
                        "path": self.project_controller.home
                    }))
                return None

        self.cli_helper.echo("")

        # Print out simple project meta data
        for k, v in self.project_controller.model.to_dictionary().items():
            if k != "config":
                self.cli_helper.echo(str(k) + ": " + str(v))

        # Ask question if the user would like to setup environment
        environment_setup = self.cli_helper.prompt_bool(
            __("prompt", "cli.project.environment.setup"))
        if environment_setup:
            # TODO: remove business logic from here and create common helper
            # Setting up the environment definition file
            self.environment_controller = EnvironmentController()
            environment_types = \
                self.environment_controller.get_environment_types()
            environment_type = self.cli_helper.prompt_available_options(
                environment_types, option_type="type")
            available_environment_frameworks = \
                self.environment_controller.get_supported_frameworks(
                    environment_type)
            environment_framework = self.cli_helper.prompt_available_options(
                available_environment_frameworks, option_type="framework")
            available_environment_languages = \
                self.environment_controller.get_supported_languages(
                    environment_type, environment_framework)
            environment_language = self.cli_helper.prompt_available_options(
                available_environment_languages, option_type="language")
            options = {
                "environment_type": environment_type,
                "environment_framework": environment_framework,
                "environment_language": environment_language
            }
            environment_obj = self.environment_controller.setup(
                options=options)
            self.cli_helper.echo(
                __("info", "cli.environment.setup.success",
                   (environment_obj.name, environment_obj.id)))
        else:
            # Reconstructed: the mangled copy split this literal mid-string
            self.cli_helper.echo(
                "there was no environment setup. you can get information"
                " here: https://datmo.readthedocs.io/en/latest/env-setup.html")

        return self.project_controller.model

    def version(self):
        """Echo the installed datmo version string."""
        return self.cli_helper.echo("datmo version: %s" % __version__)

    @Helper.notify_no_project_found
    def status(self):
        """Echo project metadata, unstaged changes and latest snapshots.

        Returns the tuple produced by ``ProjectController.status()``.
        """
        status_dict, current_snapshot, latest_snapshot_user_generated, latest_snapshot_auto_generated, unstaged_code, unstaged_environment, unstaged_files = \
            self.project_controller.status()

        # Print out simple project meta data
        for k, v in status_dict.items():
            if k != "config":
                self.cli_helper.echo(str(k) + ": " + str(v))
        self.cli_helper.echo("")

        # Print out any unstaged changes else print out the latest snapshot
        # state of the repository
        if not unstaged_code and not unstaged_environment and not unstaged_files:
            self.cli_helper.echo(
                "all changes have been saved, no unstaged changes")
            self.cli_helper.echo("")
            self.cli_helper.echo("current snapshot state of the repository: ")
            if current_snapshot:
                self.cli_helper.echo(current_snapshot)
        else:
            # Print out the unstaged components if unstaged
            self.cli_helper.echo("unstaged changes since latest snapshot:")
            if unstaged_code:
                self.cli_helper.echo("code has been changed")
            if unstaged_environment:
                self.cli_helper.echo("environment has been changed")
            if unstaged_files:
                self.cli_helper.echo("files have been changed")

        # Print out info for the latest snapshot (the most recent first, and
        # state if autogenerated or by user). The echo calls for the snapshot
        # objects below were reconstructed from the corrupted "******" spans.
        if latest_snapshot_user_generated and not latest_snapshot_auto_generated:
            self.cli_helper.echo("latest snapshot generated by the user: ")
            self.cli_helper.echo(latest_snapshot_user_generated)
            self.cli_helper.echo("no snapshot autogenerated by datmo")
        elif latest_snapshot_auto_generated and not latest_snapshot_user_generated:
            self.cli_helper.echo("latest snapshot autogenerated by datmo: ")
            self.cli_helper.echo(latest_snapshot_auto_generated)
            self.cli_helper.echo("no snapshot generated by the user")
        elif not latest_snapshot_user_generated and not latest_snapshot_auto_generated:
            self.cli_helper.echo("no snapshots created yet")
        elif latest_snapshot_user_generated.created_at > latest_snapshot_auto_generated.created_at:
            self.cli_helper.echo("latest snapshot generated by the user: ")
            self.cli_helper.echo(latest_snapshot_user_generated)
            self.cli_helper.echo("latest snapshot autogenerated by datmo: ")
            self.cli_helper.echo(latest_snapshot_auto_generated)
        elif latest_snapshot_user_generated.created_at < latest_snapshot_auto_generated.created_at:
            self.cli_helper.echo("latest snapshot autogenerated by datmo: ")
            self.cli_helper.echo(latest_snapshot_auto_generated)
            self.cli_helper.echo("latest snapshot generated by the user: ")
            self.cli_helper.echo(latest_snapshot_user_generated)
        # NOTE(review): return reconstructed — assumed to pass through the
        # controller's status tuple; confirm against callers.
        return status_dict, current_snapshot, latest_snapshot_user_generated, \
            latest_snapshot_auto_generated, unstaged_code, \
            unstaged_environment, unstaged_files

    @Helper.notify_no_project_found
    def cleanup(self):
        """Remove all datmo project state after user confirmation.

        Returns True on success, False if declined or the cleanup failed.
        """
        # Prompt user to ensure they want to remove the datmo project
        # (method header reconstructed from the corrupted "******" span)
        response = self.cli_helper.prompt_bool(
            __("prompt", "cli.project.cleanup.confirm"))
        # Cleanup datmo project if user specifies
        if response:
            self.cli_helper.echo(
                __(
                    "info", "cli.project.cleanup", {
                        "name": self.project_controller.model.name,
                        "path": self.project_controller.home
                    }))
            try:
                success = self.project_controller.cleanup()
                if success:
                    self.cli_helper.echo(
                        __(
                            "info", "cli.project.cleanup.success", {
                                "name": self.project_controller.model.name,
                                "path": self.project_controller.home
                            }))
                return success
            except Exception:
                self.cli_helper.echo(
                    __(
                        "info", "cli.project.cleanup.failure", {
                            "name": self.project_controller.model.name,
                            "path": self.project_controller.home
                        }))
        return False
class TestEnvironmentController():
    """Integration tests for EnvironmentController (docker-backed).

    Each test runs in a fresh temp datmo home; created environment ids are
    collected in ``self.environment_ids`` so teardown can delete them.
    """

    def setup_method(self):
        # Fresh temp home per test so state never leaks between tests
        self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
        Config().set_home(self.temp_dir)
        self.project_controller = ProjectController()
        self.environment_ids = []

    def teardown_method(self):
        # Only attempt docker cleanup when a docker daemon is reachable
        if not check_docker_inactive(test_datmo_dir):
            if self.project_controller.is_initialized:
                self.environment_controller = EnvironmentController()
                # set() dedupes ids appended more than once by a test
                for env_id in list(set(self.environment_ids)):
                    if not self.environment_controller.delete(env_id):
                        raise Exception

    def __setup(self):
        # Shared fixture: init a project and seed an environment directory
        # with a spare file ("test") and a minimal Dockerfile
        self.project_controller.init("test_setup", "test description")
        self.environment_controller = EnvironmentController()
        with open(os.path.join(self.temp_dir, "test.txt"), "wb") as f:
            f.write(to_bytes("hello"))
        self.random_filepath = os.path.join(
            self.environment_controller.file_driver.environment_directory,
            "test")
        with open(self.random_filepath, "wb") as f:
            f.write(to_bytes("cool"))
        self.definition_filepath = os.path.join(
            self.environment_controller.file_driver.environment_directory,
            "Dockerfile")
        with open(self.definition_filepath, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine"))

    def test_init_fail_project_not_init(self):
        """Constructing the controller without an initialized project raises."""
        Config().set_home(self.temp_dir)
        failed = False
        try:
            EnvironmentController()
        except ProjectNotInitialized:
            failed = True
        assert failed

    def test_get_environment_type(self):
        """Supported environment types list is non-empty."""
        self.__setup()
        result = self.environment_controller.get_environment_types()
        assert result

    def test_get_supported_environments(self):
        """Supported frameworks exist for the "cpu" environment type."""
        self.__setup()
        environment_type = "cpu"
        result = self.environment_controller.get_supported_frameworks(
            environment_type)
        assert result

    def test_get_supported_languages(self):
        """Supported languages exist for a given type/framework pair."""
        self.__setup()
        environment_type = "cpu"
        environment_name = "data-analytics"
        result = self.environment_controller.get_supported_languages(
            environment_type, environment_name)
        assert result

    def test_setup(self):
        """setup() writes a datmo-based Dockerfile; repeats are idempotent,
        bad options and unstaged changes raise."""
        # TODO: Run all environment options and test if success
        self.project_controller.init("test_setup", "test description")
        self.environment_controller = EnvironmentController()
        # Test success setup once (no files present)
        options = {
            "environment_framework": "data-analytics",
            "environment_type": "cpu",
            "environment_language": "py27"
        }
        result = self.environment_controller.setup(options=options)
        output_definition_filepath = os.path.join(
            self.environment_controller.file_driver.environment_directory,
            "Dockerfile")
        assert isinstance(result, Environment)
        assert result.name == "%s:%s-%s" % (options['environment_framework'],
                                            options['environment_type'],
                                            options['environment_language'])
        assert result.description == "supported environment created by datmo"
        assert os.path.isfile(output_definition_filepath)
        assert "FROM datmo/data-analytics:cpu-py27" in open(
            output_definition_filepath, "r").read()
        # Test success setup again (files present, but staged)
        options = {
            "environment_framework": "data-analytics",
            "environment_type": "cpu",
            "environment_language": "py27"
        }
        result = self.environment_controller.setup(options=options)
        output_definition_filepath = os.path.join(
            self.environment_controller.file_driver.environment_directory,
            "Dockerfile")
        assert isinstance(result, Environment)
        assert result.name == "%s:%s-%s" % (options['environment_framework'],
                                            options['environment_type'],
                                            options['environment_language'])
        assert result.description == "supported environment created by datmo"
        assert os.path.isfile(output_definition_filepath)
        assert "FROM datmo/data-analytics:cpu-py27" in open(
            output_definition_filepath, "r").read()
        # Test failure in downstream function (e.g. bad inputs, no name given)
        failed = False
        try:
            self.environment_controller.setup(options={})
        except EnvironmentDoesNotExist:
            failed = True
        assert failed
        # Change environment file
        with open(output_definition_filepath, "wb") as f:
            f.write(to_bytes("new content"))
        # Test failure setup (unstaged changes)
        failed = False
        try:
            self.environment_controller.setup(options=options)
        except UnstagedChanges:
            failed = True
        assert failed

    def test_current_environment(self):
        """current_environment() raises on unstaged changes, then matches the
        environment created from the staged files."""
        self.__setup()
        # Test failure with unstaged changes
        failed = False
        try:
            self.environment_controller.current_environment()
        except UnstagedChanges:
            failed = True
        assert failed
        # Test successful current environment
        input_dict = {"name": "test", "description": "test description"}
        environment_obj = self.environment_controller.create(input_dict)
        self.environment_ids.append(environment_obj.id)
        current_environment_obj = self.environment_controller.current_environment()
        assert current_environment_obj == environment_obj

    def test_create(self):
        """Exercise every create() input combination (see numbered options)."""
        # 0) Test SUCCESS create when definition path exists in project environment directory (no input, no root) -- with hardware file
        # 1) Test SUCCESS create when definition path exists in project environment directory (no input, no root)
        # 5) Test SUCCESS when definition path exists in project environment directory and passed from input dict (takes input)
        # 2) Test SUCCESS create when definition path exists in root project folder (no input, no project environment dir)
        # 3) Test SUCCESS create when definition path is passed into input dict (takes input, no project environment dir)
        # 4) Test SUCCESS create when definition path is passed into input dict along with expected filename to be saved
        # 6) Test FAIL when passing same filepath with same filename into input dict
        self.__setup()
        input_dict_0 = {"name": "test", "description": "test description"}
        # 0) Test option 0 (cannot test hash because hardware is machine-dependent)
        environment_obj_0 = self.environment_controller.create(input_dict_0)
        self.environment_ids.append(environment_obj_0.id)
        assert environment_obj_0
        assert isinstance(environment_obj_0, Environment)
        assert environment_obj_0.id
        assert environment_obj_0.driver_type == "docker"
        assert environment_obj_0.file_collection_id
        assert environment_obj_0.definition_filename
        assert environment_obj_0.hardware_info
        assert environment_obj_0.unique_hash
        assert environment_obj_0.name == "test"
        assert environment_obj_0.description == "test description"
        # Get file collection path
        file_collection_obj = self.environment_controller.dal.file_collection. \
            get_by_id(environment_obj_0.file_collection_id)
        file_collection_dir = self.environment_controller.file_driver. \
            get_collection_path(file_collection_obj.filehash)
        assert os.path.isfile(os.path.join(file_collection_dir, "test"))
        assert os.path.isfile(os.path.join(file_collection_dir, "Dockerfile"))
        output = open(os.path.join(file_collection_dir, "Dockerfile"),
                      "r").read()
        print(repr(output))
        assert os.path.isfile(
            os.path.join(file_collection_dir, "hardware_info"))
        output = open(os.path.join(file_collection_dir, "hardware_info"),
                      "r").read()
        print(repr(output))
        # 1) Test option 1
        environment_obj_0 = self.environment_controller.create(
            input_dict_0, save_hardware_file=False)
        self.environment_ids.append(environment_obj_0.id)
        assert environment_obj_0
        assert isinstance(environment_obj_0, Environment)
        assert environment_obj_0.id
        assert environment_obj_0.driver_type == "docker"
        assert environment_obj_0.file_collection_id
        assert environment_obj_0.definition_filename
        assert environment_obj_0.hardware_info
        # Get file collection path
        file_collection_obj = self.environment_controller.dal.file_collection. \
            get_by_id(environment_obj_0.file_collection_id)
        file_collection_dir = self.environment_controller.file_driver. \
            get_collection_path(file_collection_obj.filehash)
        assert os.path.isfile(os.path.join(file_collection_dir, "test"))
        assert os.path.isfile(os.path.join(file_collection_dir, "Dockerfile"))
        output = open(os.path.join(file_collection_dir, "Dockerfile"),
                      "r").read()
        print(repr(output))
        # Without the hardware file the hash is deterministic
        assert environment_obj_0.unique_hash == "1e32ff083520f792cbe4bafdc2de2a01"
        assert environment_obj_0.name == "test"
        assert environment_obj_0.description == "test description"
        # Files ["test", "Dockerfile"]
        # 5) Test option 5
        input_dict_1 = {
            "name": "test",
            "description": "test description",
            "paths": [self.definition_filepath],
        }
        environment_obj = self.environment_controller.create(
            input_dict_1, save_hardware_file=False)
        self.environment_ids.append(environment_obj.id)
        assert environment_obj
        assert isinstance(environment_obj, Environment)
        assert environment_obj.id
        assert environment_obj.driver_type == "docker"
        assert environment_obj.file_collection_id
        assert environment_obj.definition_filename
        assert environment_obj.hardware_info
        # Get file collection path
        file_collection_obj = self.environment_controller.dal.file_collection. \
            get_by_id(environment_obj.file_collection_id)
        file_collection_dir = self.environment_controller.file_driver. \
            get_collection_path(file_collection_obj.filehash)
        assert os.path.isfile(os.path.join(file_collection_dir, "Dockerfile"))
        output = open(os.path.join(file_collection_dir, "Dockerfile"),
                      "r").read()
        print(repr(output))
        assert not os.path.isfile(
            os.path.join(file_collection_dir, "datmoDockerfile"))
        print(repr(output))
        assert environment_obj.unique_hash == "fd725be022ce93f870c81e2ee170189c"
        assert environment_obj.name == "test"
        assert environment_obj.description == "test description"
        # Files ["Dockerfile"]
        # remove the project environment directory
        os.remove(
            os.path.join(
                self.environment_controller.file_driver.environment_directory,
                "Dockerfile"))
        os.remove(
            os.path.join(
                self.environment_controller.file_driver.environment_directory,
                "test"))
        # Create environment definition in root directory
        home_definition_filepath = os.path.join(
            self.environment_controller.home, "Dockerfile")
        with open(home_definition_filepath, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine"))
        # 2) Test option 2
        environment_obj_1 = self.environment_controller.create(
            input_dict_0, save_hardware_file=False)
        self.environment_ids.append(environment_obj_1.id)
        assert environment_obj_1
        assert isinstance(environment_obj_1, Environment)
        assert environment_obj_1.id
        assert environment_obj_1.driver_type == "docker"
        assert environment_obj_1.file_collection_id
        assert environment_obj_1.definition_filename
        assert environment_obj_1.hardware_info
        # Compared against the file_collection_obj fetched in option 5 above
        assert environment_obj_1.unique_hash == file_collection_obj.filehash
        # Get file collection path
        file_collection_obj = self.environment_controller.dal.file_collection. \
            get_by_id(environment_obj_1.file_collection_id)
        file_collection_dir = self.environment_controller.file_driver. \
            get_collection_path(file_collection_obj.filehash)
        assert environment_obj_1.name == "test"
        assert environment_obj_1.description == "test description"
        assert os.path.isfile(os.path.join(file_collection_dir, "Dockerfile"))
        assert environment_obj_1.unique_hash == "fd725be022ce93f870c81e2ee170189c"
        # 3) Test option 3
        input_dict_2 = {
            "name": "test",
            "description": "test description",
            "paths": [home_definition_filepath],
        }
        # Create environment in the project
        environment_obj_2 = self.environment_controller.create(
            input_dict_2, save_hardware_file=False)
        self.environment_ids.append(environment_obj_2.id)
        assert environment_obj_2
        assert isinstance(environment_obj_2, Environment)
        assert environment_obj_2.id
        assert environment_obj_2.driver_type == "docker"
        assert environment_obj_2.file_collection_id
        assert environment_obj_2.definition_filename
        assert environment_obj_2.hardware_info
        assert environment_obj_2.unique_hash == file_collection_obj.filehash
        # Get file collection path
        file_collection_obj = self.environment_controller.dal.file_collection. \
            get_by_id(environment_obj_2.file_collection_id)
        file_collection_dir = self.environment_controller.file_driver. \
            get_collection_path(file_collection_obj.filehash)
        assert environment_obj_2.name == "test"
        assert environment_obj_2.description == "test description"
        assert os.path.isfile(os.path.join(file_collection_dir, "Dockerfile"))
        assert environment_obj_2.unique_hash == "fd725be022ce93f870c81e2ee170189c"
        # 4) Test option 4
        input_dict_3 = {
            "paths": [home_definition_filepath + ">Dockerfile"],
        }
        # Create environment in the project
        environment_obj_3 = self.environment_controller.create(
            input_dict_3, save_hardware_file=False)
        self.environment_ids.append(environment_obj_3.id)
        assert environment_obj_3
        assert isinstance(environment_obj_3, Environment)
        assert environment_obj_3.id
        assert environment_obj_3.driver_type == "docker"
        assert environment_obj_3.file_collection_id
        assert environment_obj_3.definition_filename
        assert environment_obj_3.hardware_info
        assert environment_obj_3.unique_hash == file_collection_obj.filehash
        # Get file collection path
        file_collection_obj = self.environment_controller.dal.file_collection. \
            get_by_id(environment_obj_3.file_collection_id)
        file_collection_dir = self.environment_controller.file_driver. \
            get_collection_path(file_collection_obj.filehash)
        assert environment_obj_3.name == "test"
        assert environment_obj_3.description == "test description"
        assert os.path.isfile(os.path.join(file_collection_dir, "Dockerfile"))
        assert environment_obj_3.unique_hash == "fd725be022ce93f870c81e2ee170189c"
        # 6) Test option 6
        definition_filepath = os.path.join(self.environment_controller.home,
                                           "Dockerfile")
        input_dict = {
            "paths": [
                definition_filepath + ">Dockerfile",
                definition_filepath + ">Dockerfile"
            ],
        }
        # Create environment in the project
        failed = False
        try:
            _ = self.environment_controller.create(
                input_dict, save_hardware_file=False)
        except FileAlreadyExistsError:
            failed = True
        assert failed

    @pytest_docker_environment_failed_instantiation(test_datmo_dir)
    def test_build(self):
        """build() fails for unknown ids and succeeds for each definition
        source; identical definitions dedupe to the same environment."""
        # 1) Test build when no environment given
        # 2) Test build when definition path exists and given
        # 3) Test build when NO file exists and definition path exists
        # 4) Test build when file exists and definition path exists
        # 5) Test build when file exists but NO definition path exists
        self.project_controller.init("test5", "test description")
        self.environment_controller = EnvironmentController()
        # 1) Test option 1
        failed = False
        try:
            _ = self.environment_controller.build("does_not_exist")
        except EnvironmentDoesNotExist:
            failed = True
        assert failed
        # Create environment definition
        definition_filepath = os.path.join(self.environment_controller.home,
                                           "Dockerfile")
        random_text = str(uuid.uuid1())
        with open(definition_filepath, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine" + os.linesep))
            f.write(to_bytes(str("RUN echo " + random_text)))
        input_dict = {
            "paths": [definition_filepath],
        }
        # 2) Test option 2
        # Create environment in the project
        environment_obj_1 = self.environment_controller.create(input_dict)
        self.environment_ids.append(environment_obj_1.id)
        result = self.environment_controller.build(environment_obj_1.id)
        assert result
        # 3) Test option 3
        # Create environment in the project
        environment_obj_2 = self.environment_controller.create({})
        result = self.environment_controller.build(environment_obj_2.id)
        assert result
        # Create script to test
        test_filepath = os.path.join(self.environment_controller.home,
                                     "script.py")
        with open(test_filepath, "wb") as f:
            f.write(to_bytes("import numpy\n"))
            f.write(to_bytes("import sklearn\n"))
            f.write(to_bytes("print('hello')\n"))
        # 4) Test option 4
        environment_obj_3 = self.environment_controller.create({})
        result = self.environment_controller.build(environment_obj_3.id)
        assert result
        # test 2), 3), and 4) will result in the same environment
        assert environment_obj_1.id == environment_obj_2.id
        assert environment_obj_2.id == environment_obj_3.id
        # Remove the root Dockerfile so the next build must find the
        # definition elsewhere
        os.remove(definition_filepath)
        # 5) Test option 5
        # Create environment definition in project environment directory
        definition_filepath = os.path.join(
            self.environment_controller.file_driver.environment_directory,
            "Dockerfile")
        random_text = str(uuid.uuid1())
        with open(definition_filepath, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine" + os.linesep))
            f.write(to_bytes(str("RUN echo " + random_text)))
        environment_obj_4 = self.environment_controller.create({})
        self.environment_ids.append(environment_obj_4.id)
        result = self.environment_controller.build(environment_obj_4.id)
        assert result
        # 6) Test option 6
        # Create environment definition in project environment directory with datmo base image
        definition_filepath = os.path.join(
            self.environment_controller.file_driver.environment_directory,
            "Dockerfile")
        random_text = str(uuid.uuid1())
        with open(definition_filepath, "wb") as f:
            f.write(
                to_bytes("FROM datmo/data-analytics:cpu-py27%s" % os.linesep))
            f.write(to_bytes(str("RUN echo " + random_text)))
        environment_obj_4 = self.environment_controller.create({})
        self.environment_ids.append(environment_obj_4.id)
        result = self.environment_controller.build(
            environment_obj_4.id, workspace="notebook")
        assert result
    @pytest_docker_environment_failed_instantiation(test_datmo_dir)
    def test_extract_workspace_url(self):
        """No workspace URL is found when no container is running."""
        # Create environment definition
        self.project_controller.init("test5", "test description")
        self.environment_controller = EnvironmentController()
        definition_filepath = os.path.join(self.environment_controller.home,
                                           "Dockerfile")
        random_text = str(uuid.uuid1())
        with open(definition_filepath, "wb") as f:
            f.write(
                to_bytes("FROM datmo/python-base:cpu-py27-notebook" +
                         os.linesep))
            f.write(to_bytes(str("RUN echo " + random_text)))
        image_name = "test"
        input_dict = {"name": image_name, "description": "test description"}
        # Create environment in the project
        environment_obj = self.environment_controller.create(
            input_dict, save_hardware_file=False)
        self.environment_controller.build(environment_obj.id)
        # Test when there is no container being run
        workspace_url = self.environment_controller.extract_workspace_url(
            image_name, "notebook")
        assert workspace_url == None

    @pytest_docker_environment_failed_instantiation(test_datmo_dir)
    def test_run(self):
        """run() returns (0, run_id, logs) for definitions from the
        environment directory, the project root, and an implicit default."""
        # Test run simple command with simple Dockerfile
        self.project_controller.init("test5", "test description")
        self.environment_controller = EnvironmentController()
        # 0) Test option 0
        # Create environment definition in project environment directory
        definition_filepath = os.path.join(
            self.environment_controller.file_driver.environment_directory,
            "Dockerfile")
        random_text = str(uuid.uuid1())
        with open(definition_filepath, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine" + os.linesep))
            f.write(to_bytes(str("RUN echo " + random_text)))
        random_name = str(uuid.uuid1())
        run_options = {
            "command": ["sh", "-c", "echo yo"],
            "ports": ["8888:8888"],
            "name": random_name,
            "volumes": None,
            "detach": True,
            "stdin_open": False,
            "mem_limit": "4g",
            "tty": False,
            "api": False
        }
        # Create environment in the project
        environment_obj = self.environment_controller.create({})
        self.environment_ids.append(environment_obj.id)
        log_filepath = os.path.join(self.project_controller.home, "task.log")
        # Build environment in the project
        _ = self.environment_controller.build(environment_obj.id)
        # Run environment in the project
        return_code, run_id, logs = \
            self.environment_controller.run(environment_obj.id, run_options,
                                            log_filepath)
        assert return_code == 0
        assert run_id
        assert logs
        # remove Dockerfile
        os.remove(
            os.path.join(
                self.environment_controller.file_driver.environment_directory,
                "Dockerfile"))
        # 1) Test option 1
        # Create environment definition
        definition_filepath = os.path.join(self.environment_controller.home,
                                           "Dockerfile")
        random_text = str(uuid.uuid1())
        with open(definition_filepath, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine" + os.linesep))
            f.write(to_bytes(str("RUN echo " + random_text)))
        random_name = str(uuid.uuid1())
        run_options = {
            "command": ["sh", "-c", "echo yo"],
            "ports": ["8889:8889"],
            "name": random_name,
            "volumes": None,
            "mem_limit": "4g",
            "detach": True,
            "stdin_open": False,
            "tty": False,
            "api": False
        }
        input_dict = {
            "paths": [definition_filepath],
        }
        # Create environment in the project
        environment_obj = self.environment_controller.create(input_dict)
        self.environment_ids.append(environment_obj.id)
        log_filepath = os.path.join(self.project_controller.home, "task.log")
        # Build environment in the project
        _ = self.environment_controller.build(environment_obj.id)
        # Run environment in the project
        return_code, run_id, logs = \
            self.environment_controller.run(environment_obj.id, run_options,
                                            log_filepath)
        assert return_code == 0
        assert run_id
        assert logs
        # 2) Test option 2
        os.remove(definition_filepath)
        # Create script to test
        test_filepath = os.path.join(self.environment_controller.home,
                                     "script.py")
        with open(test_filepath, "wb") as f:
            f.write(to_bytes("import os\n"))
            f.write(to_bytes("import sys\n"))
            f.write(to_bytes("print('hello')\n"))
        # Create environment in the project
        environment_obj = self.environment_controller.create({})
        self.environment_ids.append(environment_obj.id)
        self.environment_controller.build(environment_obj.id)
        random_name = str(uuid.uuid1())
        run_options = {
            "command": ["sh", "-c", "echo yo"],
            "ports": ["8899:8899"],
            "name": random_name,
            "volumes": {
                self.environment_controller.home: {
                    'bind': '/home/',
                    'mode': 'rw'
                }
            },
            "mem_limit": "4g",
            "detach": False,
            "stdin_open": False,
            "tty": False,
            "api": False
        }
        # Run environment in the project (log_filepath reused from option 1)
        return_code, run_id, logs = \
            self.environment_controller.run(environment_obj.id, run_options,
                                            log_filepath)
        assert return_code == 0
        assert run_id
        assert logs

    @pytest_docker_environment_failed_instantiation(test_datmo_dir)
    def test_interactive_run(self):
        """Interactive terminal and jupyter runs keep the container alive
        long enough to trip a 10s timeout (timeout == success here)."""
        # 1) Test run interactive terminal in environment
        # 2) Test run jupyter notebook in environment
        # Create environment definition
        self.project_controller.init("test6", "test description")
        self.environment_controller = EnvironmentController()
        definition_filepath = os.path.join(self.environment_controller.home,
                                           "Dockerfile")
        random_text = str(uuid.uuid1())
        with open(definition_filepath, "wb") as f:
            f.write(
                to_bytes("FROM nbgallery/jupyter-alpine:latest" + os.linesep))
            f.write(to_bytes(str("RUN echo " + random_text)))
        input_dict = {
            "paths": [definition_filepath],
        }
        # Create environment in the project
        environment_obj = self.environment_controller.create(input_dict)
        self.environment_ids.append(environment_obj.id)

        # 1) Test option 1
        @timeout_decorator.timeout(10, use_signals=False)
        def timed_run(container_name, timed_run):
            # NOTE: second parameter shadows the function name; it is only
            # passed through and returned
            run_options = {
                "command": [],
                "ports": ["8889:8889"],
                "name": environment_obj.id + "-" + container_name,
                "volumes": None,
                "mem_limit": "4g",
                "detach": True,
                "stdin_open": True,
                "tty": True,
                "api": False
            }
            log_filepath = os.path.join(self.project_controller.home,
                                        "task.log")
            # Build environment in the project
            _ = self.environment_controller.build(environment_obj.id)
            # Run environment in the project
            self.environment_controller.run(environment_obj.id, run_options,
                                            log_filepath)
            return timed_run

        container_name = str(uuid.uuid1())
        timed_run_result = False
        try:
            timed_run_result = timed_run(container_name, timed_run_result)
        except timeout_decorator.timeout_decorator.TimeoutError:
            timed_run_result = True
        assert timed_run_result
        # 2) Test option 2
        environment_obj = self.environment_controller.create(input_dict)
        self.environment_ids.append(environment_obj.id)

        @timeout_decorator.timeout(10, use_signals=False)
        def timed_run(container_name, timed_run):
            run_options = {
                "command": ["jupyter", "notebook", "--allow-root"],
                "ports": ["8888:8888"],
                "name": environment_obj.id + "-" + container_name,
                "volumes": None,
                "mem_limit": "4g",
                "detach": True,
                "stdin_open": False,
                "tty": False,
                "api": False
            }
            log_filepath = os.path.join(self.project_controller.home,
                                        "task.log")
            # Build environment in the project
            _ = self.environment_controller.build(environment_obj.id)
            # Run environment in the project
            self.environment_controller.run(environment_obj.id, run_options,
                                            log_filepath)
            return timed_run

        container_name = str(uuid.uuid1())
        timed_run_result = False
        try:
            timed_run_result = timed_run(container_name, timed_run_result)
        except timeout_decorator.timeout_decorator.TimeoutError:
            timed_run_result = True
        assert timed_run_result

    def test_list(self):
        """list() returns every environment created in the project."""
        self.project_controller.init("test4", "test description")
        self.environment_controller = EnvironmentController()
        # Create environment definition for object 1
        definition_path_1 = os.path.join(self.environment_controller.home,
                                         "Dockerfile")
        with open(definition_path_1, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine"))
        input_dict_1 = {
            "paths": [definition_path_1],
        }
        # Create environment in the project
        environment_obj_1 = self.environment_controller.create(input_dict_1)
        self.environment_ids.append(environment_obj_1.id)
        # Create environment definition for object 2
        definition_path_2 = os.path.join(self.environment_controller.home,
                                         "Dockerfile2")
        with open(definition_path_2, "wb") as f:
            f.write(to_bytes("FROM python:3.4-alpine"))
        input_dict_2 = {
            "paths": [definition_path_2 + ">Dockerfile"],
        }
        # Create second environment in the project
        environment_obj_2 = self.environment_controller.create(input_dict_2)
        self.environment_ids.append(environment_obj_2.id)
        # List all environments and ensure they exist
        result = self.environment_controller.list()
        assert len(result) == 2 and \
            environment_obj_1 in result and \
            environment_obj_2 in result

    def test_update(self):
        """update() changes name/description; unknown ids raise."""
        self.project_controller.init("test5", "test description")
        self.environment_controller = EnvironmentController()
        # Create environment definition
        definition_filepath = os.path.join(self.environment_controller.home,
                                           "Dockerfile")
        with open(definition_filepath, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine"))
        input_dict = {
            "paths": [definition_filepath],
        }
        # Create environment in the project
        environment_obj = self.environment_controller.create(input_dict)
        self.environment_ids.append(environment_obj.id)
        # Test success update
        new_name = "test name"
        new_description = "test description"
        result = self.environment_controller.update(
            environment_obj.id, name=new_name, description=new_description)
        assert result
        assert isinstance(result, Environment)
        assert result.name == new_name
        assert result.description == new_description
        # Test failed update
        failed = False
        try:
            self.environment_controller.update(
                "random_id", name=new_name, description=new_description)
        except EnvironmentDoesNotExist:
            failed = True
        assert failed

    @pytest_docker_environment_failed_instantiation(test_datmo_dir)
    def test_delete(self):
        """delete() removes the environment so get_by_id raises."""
        self.project_controller.init("test5", "test description")
        self.environment_controller = EnvironmentController()
        # Create environment definition
        definition_filepath = os.path.join(self.environment_controller.home,
                                           "Dockerfile")
        with open(definition_filepath, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine"))
        input_dict = {
            "paths": [definition_filepath],
        }
        # Create environment in the project
        environment_obj = self.environment_controller.create(input_dict)
        # Delete environment in the project
        result = self.environment_controller.delete(environment_obj.id)
        # Check if environment retrieval throws error
        thrown = False
        try:
            self.environment_controller.dal.environment.get_by_id(
                environment_obj.id)
        except EntityNotFound:
            thrown = True
        assert result == True and \
            thrown == True

    @pytest_docker_environment_failed_instantiation(test_datmo_dir)
    def test_stop_failure(self):
        """stop() requires exactly one selector argument."""
        # 1) Test failure with RequiredArgumentMissing
        # 2) Test failure with TooManyArgumentsFound
        self.project_controller.init("test5", "test description")
        self.environment_controller = EnvironmentController()
        # 1) Test option 1
        failed = False
        try:
            self.environment_controller.stop()
        except RequiredArgumentMissing:
            failed = True
        assert failed
        # 2) Test option 2
        failed = False
        try:
            self.environment_controller.stop(
                run_id="hello", match_string="there")
        except TooManyArgumentsFound:
            failed = True
        assert failed

    @pytest_docker_environment_failed_instantiation(test_datmo_dir)
    def test_stop_success(self):
        """stop() works by run_id, by match_string, and with all=True."""
        # TODO: test more run options
        # 1) Test run_id input to stop
        # 2) Test match_string input to stop
        # 3) Test all input to stop
        # 4) Test if the image was removed by stop
        self.project_controller.init("test5", "test description")
        self.environment_controller = EnvironmentController()
        # Create environment definition
        definition_filepath = os.path.join(self.environment_controller.home,
                                           "Dockerfile")
        with open(definition_filepath, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine"))
        run_options = {
            "command": ["sh", "-c", "echo yo"],
            "ports": ["8888:8888"],
            "name": "datmo-task-" + self.environment_controller.model.id +
                    "-" + "test",
            "volumes": None,
            "mem_limit": "4g",
            "detach": False,
            "stdin_open": False,
            "tty": False,
            "api": False
        }
        # Create environment definition
        env_def_path = os.path.join(self.project_controller.home,
                                    "Dockerfile")
        random_text = str(uuid.uuid1())
        with open(env_def_path, "wb") as f:
            f.write(to_bytes("FROM python:3.5-alpine" + os.linesep))
            f.write(to_bytes(str("RUN echo " + random_text)))
        input_dict = {
            "paths": [definition_filepath],
        }
        # Create environment in the project
        environment_obj = self.environment_controller.create(input_dict)
        self.environment_ids.append(environment_obj.id)
        log_filepath = os.path.join(self.project_controller.home, "task.log")
        # Build environment in the project
        _ = self.environment_controller.build(environment_obj.id)
        # 1) Test option 1
        _, run_id, _ = \
            self.environment_controller.run(environment_obj.id, run_options,
                                            log_filepath)
        return_code = self.environment_controller.stop(
            run_id=run_id, environment_id=environment_obj.id)
        assert return_code
        # 2) Test option 2
        # Rebuild environment in the project
        _ = self.environment_controller.build(environment_obj.id)
        _, _, _ = \
            self.environment_controller.run(environment_obj.id, run_options,
                                            log_filepath)
        return_code = self.environment_controller.stop(
            match_string="datmo-task-" + self.environment_controller.model.id,
            environment_id=environment_obj.id)
        assert return_code
        # 3) Test option 3
        # Rebuild environment in the project
        _ = self.environment_controller.build(environment_obj.id)
        _, _, _ = \
            self.environment_controller.run(environment_obj.id, run_options,
                                            log_filepath)
        run_options_2 = {
            "command": ["sh", "-c", "echo yo"],
            "ports": ["8889:8889"],
            "name": "datmo-task-" + self.environment_controller.model.id +
                    "-" + "test2",
            "volumes": None,
            "mem_limit": "4g",
            "detach": False,
            "stdin_open": False,
            "tty": False,
            "api": False
        }
        _, _, _ = \
            self.environment_controller.run(environment_obj.id, run_options_2,
                                            log_filepath)
        return_code = self.environment_controller.stop(
            all=True, environment_id=environment_obj.id)
        assert return_code

    def test_exists(self):
        # NOTE(review): this method is truncated at the end of the reviewed
        # chunk; the body below ends exactly at the cut point.
        # Test failure, not initialized
        failed = False
        try:
            _ = self.environment_controller.create({})
        except:
            failed = True
        assert failed
        # Setup
        self.__setup()
        environment_obj = self.environment_controller.create({})
        self.environment_ids.append(environment_obj.id)
        # Check by environment id
        result = 
self.environment_controller.exists( environment_id=environment_obj.id) assert result # Check by unique hash result = self.environment_controller.exists( environment_unique_hash=environment_obj.unique_hash) assert result # Test with wrong environment id result = self.environment_controller.exists( environment_id='test_wrong_env_id') assert not result def test_calculate_project_environment_hash(self): # Setup self.__setup() # Test hashing the default (with hardware info) result = self.environment_controller._calculate_project_environment_hash( ) assert result # Test hashing the default Dockerfile result = self.environment_controller._calculate_project_environment_hash( save_hardware_file=False) assert result == "1e32ff083520f792cbe4bafdc2de2a01" # Test if hash is the same as that of create environment_obj = self.environment_controller.create( {}, save_hardware_file=False) self.environment_ids.append(environment_obj.id) result = self.environment_controller._calculate_project_environment_hash( save_hardware_file=False) assert result == "1e32ff083520f792cbe4bafdc2de2a01" assert result == environment_obj.unique_hash # Test if the hash is the same if the same file is passed in as an input input_dict = { "paths": [self.definition_filepath, self.random_filepath] } environment_obj_1 = self.environment_controller.create( input_dict, save_hardware_file=False) self.environment_ids.append(environment_obj_1.id) result = self.environment_controller._calculate_project_environment_hash( save_hardware_file=False) assert result == "1e32ff083520f792cbe4bafdc2de2a01" assert result == environment_obj_1.unique_hash def test_has_unstaged_changes(self): # Setup self.__setup() environment_obj = self.environment_controller.create({}) self.environment_ids.append(environment_obj.id) # Check for no unstaged changes result = self.environment_controller._has_unstaged_changes() assert not result # Make a change to the file (update python version) with open( os.path.join( 
self.environment_controller.file_driver. environment_directory, "Dockerfile"), "wb") as f: f.write(to_bytes("FROM python:3.6-alpine")) # Check again, should have unstaged changes result = self.environment_controller._has_unstaged_changes() assert result def test_check_unstaged_changes(self): # Setup self.__setup() environment_obj = self.environment_controller.create({}) self.environment_ids.append(environment_obj.id) # 1) After commiting the changes # Check for no unstaged changes because already committed result = self.environment_controller.check_unstaged_changes() assert not result # Add a new file with open( os.path.join( self.environment_controller.file_driver. environment_directory, "test2"), "wb") as f: f.write(to_bytes("cool")) # 2) Not commiting the changes, should error and raise UnstagedChanges failed = False try: self.environment_controller.check_unstaged_changes() except UnstagedChanges: failed = True assert failed # Remove new file os.remove( os.path.join( self.environment_controller.file_driver.environment_directory, "test2")) # 3) Files are the same as before but no new commit, should have no unstaged changes result = self.environment_controller.check_unstaged_changes() assert not result # 4) Remove another file, now it is different and should have unstaged changes os.remove( os.path.join( self.environment_controller.file_driver.environment_directory, "test")) failed = False try: self.environment_controller.check_unstaged_changes() except UnstagedChanges: failed = True assert failed # 5) Remove the rest of the files, now it is empty and should return as already staged os.remove( os.path.join( self.environment_controller.file_driver.environment_directory, "Dockerfile")) result = self.environment_controller.check_unstaged_changes() assert not result def test_checkout(self): # Setup and create all environment files self.__setup() # Create environment to checkout to with defaults environment_obj = self.environment_controller.create({}) 
self.environment_ids.append(environment_obj.id) # Checkout success with there are no unstaged changes result = self.environment_controller.checkout(environment_obj.id) assert result current_hash = self.environment_controller._calculate_project_environment_hash( ) assert environment_obj.unique_hash == current_hash # Check the filenames as well because the hash does not take this into account assert os.path.isfile( os.path.join( self.environment_controller.file_driver.environment_directory, "test")) assert os.path.isfile( os.path.join( self.environment_controller.file_driver.environment_directory, "Dockerfile")) assert not os.path.isfile( os.path.join( self.environment_controller.file_driver.environment_directory, "datmoDockerfile")) assert not os.path.isfile( os.path.join( self.environment_controller.file_driver.environment_directory, "hardware_info")) # Change file contents to make it unstaged with open(self.definition_filepath, "wb") as f: f.write(to_bytes("new content")) # Checkout failure with unstaged changes failed = False try: _ = self.environment_controller.checkout(environment_obj.id) except UnstagedChanges: failed = True assert failed # Create new environment to checkout to with defaults (no hardware) environment_obj_1 = self.environment_controller.create( {}, save_hardware_file=False) self.environment_ids.append(environment_obj_1.id) # Checkout success with there are no unstaged changes result = self.environment_controller.checkout(environment_obj.id) assert result current_hash = self.environment_controller._calculate_project_environment_hash( ) assert environment_obj.unique_hash == current_hash assert environment_obj_1.unique_hash != current_hash # Check the filenames as well because the hash does not take this into account assert os.path.isfile( os.path.join( self.environment_controller.file_driver.environment_directory, "test")) assert os.path.isfile( os.path.join( self.environment_controller.file_driver.environment_directory, "Dockerfile")) assert not 
os.path.isfile( os.path.join( self.environment_controller.file_driver.environment_directory, "datmoDockerfile")) assert not os.path.isfile( os.path.join( self.environment_controller.file_driver.environment_directory, "hardware_info"))
class ProjectCommand(BaseCommand):
    """CLI command group for datmo project operations.

    Provides `init`, `version`, `configure`, `status`, `cleanup` and
    `dashboard` subcommands, all delegating to ProjectController.
    """

    def __init__(self, cli_helper):
        super(ProjectCommand, self).__init__(cli_helper)
        self.project_controller = ProjectController()

    def init(self, name, description, force):
        """Initialize command

        Parameters
        ----------
        name : str
            name for the project
        description : str
            description of the project
        force : bool
            Boolean to force initialization without prompts

        Returns
        -------
        datmo.core.entity.model.Model
            the project model, or None on failure
        """
        # Check if project already exists
        is_new_model = False
        if not self.project_controller.model:
            is_new_model = True

        if is_new_model:
            # Initialize a new project
            self.cli_helper.echo(
                __("info", "cli.project.init.create",
                   {"path": self.project_controller.home}))
            if not name:
                # Default project name is the directory name
                _, default_name = os.path.split(self.project_controller.home)
                if not force:
                    name = self.cli_helper.prompt(
                        __("prompt", "cli.project.init.name"),
                        default=default_name)
                else:
                    name = default_name
            if not description:
                if not force:
                    description = self.cli_helper.prompt(
                        __("prompt", "cli.project.init.description"))
                else:
                    description = ""
            try:
                success = self.project_controller.init(name, description)
                if success:
                    self.cli_helper.echo(
                        __("info", "cli.project.init.create.success", {
                            "name": name,
                            "path": self.project_controller.home
                        }))
            except Exception:
                self.cli_helper.echo(
                    __("info", "cli.project.init.create.failure", {
                        "name": name,
                        "path": self.project_controller.home
                    }))
                return None
        else:
            # Update the current project
            self.cli_helper.echo(
                __("info", "cli.project.init.update", {
                    "name": self.project_controller.model.name,
                    "path": self.project_controller.home
                }))
            # Prompt for the name and description and add default if not given
            if not name and not force:
                name = self.cli_helper.prompt(
                    __("prompt", "cli.project.init.name"),
                    default=self.project_controller.model.name)
            elif force:
                # --force keeps the existing values without prompting
                name = self.project_controller.model.name
            if not description and not force:
                description = self.cli_helper.prompt(
                    __("prompt", "cli.project.init.description"),
                    default=self.project_controller.model.description)
            elif force:
                description = self.project_controller.model.description
            # Update the project with the values given
            try:
                success = self.project_controller.init(name, description)
                if success:
                    self.cli_helper.echo(
                        __("info", "cli.project.init.update.success", {
                            "name": name,
                            "path": self.project_controller.home
                        }))
            except Exception:
                self.cli_helper.echo(
                    __("info", "cli.project.init.update.failure", {
                        "name": name,
                        "path": self.project_controller.home
                    }))
                return None

        self.cli_helper.echo("")
        # Print out simple project meta data
        for k, v in self.project_controller.model.to_dictionary().items():
            if k != "config":
                self.cli_helper.echo(str(k) + ": " + str(v))

        # Ask question if the user would like to setup environment
        environment_setup = self.cli_helper.prompt_bool(
            __("prompt",
               "cli.project.environment.setup")) if not force else False
        if environment_setup:
            # TODO: remove business logic from here and create common helper
            # Setting up the environment definition file
            self.environment_controller = EnvironmentController()
            environment_types = self.environment_controller.get_environment_types(
            )
            environment_type = self.cli_helper.prompt_available_options(
                environment_types, option_type="type")
            available_environment_frameworks = self.environment_controller.get_supported_frameworks(
                environment_type)
            environment_framework = self.cli_helper.prompt_available_options(
                available_environment_frameworks, option_type="framework")
            available_environment_languages = self.environment_controller.get_supported_languages(
                environment_type, environment_framework)
            environment_language = self.cli_helper.prompt_available_options(
                available_environment_languages, option_type="language")
            options = {
                "environment_type": environment_type,
                "environment_framework": environment_framework,
                "environment_language": environment_language
            }
            environment_obj = self.environment_controller.setup(
                options=options)
            self.cli_helper.echo(
                __("info", "cli.environment.setup.success",
                   (environment_obj.name, environment_obj.id)))
        else:
            self.cli_helper.echo(
                "there was no environment setup. you can get information"
                " here: https://datmo.readthedocs.io/en/latest/env-setup.html")

        return self.project_controller.model

    def version(self):
        """Echo the installed datmo version string."""
        return self.cli_helper.echo("datmo version: %s" % __version__)

    def configure(self):
        """ Configure datmo installation """
        # General setup
        setup_remote_bool = self.cli_helper.prompt_bool(
            "Would you like to setup your remote credentials? [yN]")
        if setup_remote_bool:
            datmo_api_key = None
            while not datmo_api_key:
                datmo_api_key = self.cli_helper.prompt(
                    "Enter API key for datmo")
            # Initialize remote API to get master ip address
            remote_api = RemoteAPI(datmo_api_key)
            response = remote_api.get_deployment_info()
            master_system_info = response['body']['master_system_info']
            master_server_ip = str(master_system_info.get('datmo_master_ip')) \
                if isinstance(master_system_info, dict) else None
            # Create a config file
            self.datmo_config = JSONStore(
                os.path.join(os.path.expanduser("~"), ".datmo", "config"))
            config = dict()
            if master_server_ip and datmo_api_key:
                config["MASTER_SERVER_IP"] = master_server_ip
                config["DATMO_API_KEY"] = datmo_api_key
                self.datmo_config.to_file(config)
            else:
                self.cli_helper.echo(
                    "Datmo API key could not be saved. Please try again")
        # Setup project specific things
        if self.project_controller.model:
            pass
        else:
            self.cli_helper.echo(
                "No datmo project found. Skipping configuration for project.")

    @Helper.notify_no_project_found
    def status(self):
        """Print project metadata, unstaged changes and latest snapshots.

        Returns the tuple produced by ProjectController.status().
        """
        status_dict, current_snapshot, latest_snapshot_user_generated, latest_snapshot_auto_generated, unstaged_code, unstaged_environment, unstaged_files = \
            self.project_controller.status()

        # Print out simple project meta data
        for k, v in status_dict.items():
            if k != "config":
                self.cli_helper.echo(str(k) + ": " + str(v))
        self.cli_helper.echo("")
        # Print out any unstaged changes else print out the latest snapshot state of the repository
        if not unstaged_code and not unstaged_environment and not unstaged_files:
            self.cli_helper.echo(
                "all changes have been saved, no unstaged changes")
            self.cli_helper.echo("")
            self.cli_helper.echo("current snapshot state of the repository: ")
            if current_snapshot:
                self.cli_helper.echo(current_snapshot)
        else:
            # Print out the unstaged components if unstaged
            self.cli_helper.echo("unstaged changes since latest snapshot:")
            if unstaged_code:
                self.cli_helper.echo("code has been changed")
            if unstaged_environment:
                self.cli_helper.echo("environment has been changed")
            if unstaged_files:
                self.cli_helper.echo("files have been changed")
        # Print out info for the latest snapshot (the most recent first, and
        # state if autogenerated or by user)
        # NOTE(review): several echo calls below were corrupted in the source
        # ("******" scrubber residue); the echo of the snapshot objects has
        # been reconstructed from the surrounding branches — verify against
        # version-control history.
        if latest_snapshot_user_generated and not latest_snapshot_auto_generated:
            self.cli_helper.echo("latest snapshot generated by the user: ")
            self.cli_helper.echo(latest_snapshot_user_generated)
            self.cli_helper.echo("no snapshot autogenerated by datmo")
        elif latest_snapshot_auto_generated and not latest_snapshot_user_generated:
            self.cli_helper.echo("latest snapshot autogenerated by datmo: ")
            self.cli_helper.echo(latest_snapshot_auto_generated)
            self.cli_helper.echo("no snapshot generated by the user")
        elif not latest_snapshot_user_generated and not latest_snapshot_auto_generated:
            self.cli_helper.echo("no snapshots created yet")
        elif latest_snapshot_user_generated.created_at > latest_snapshot_auto_generated.created_at:
            self.cli_helper.echo("latest snapshot generated by the user: ")
            self.cli_helper.echo(latest_snapshot_user_generated)
            self.cli_helper.echo("latest snapshot autogenerated by datmo: ")
            self.cli_helper.echo(latest_snapshot_auto_generated)
        elif latest_snapshot_user_generated.created_at < latest_snapshot_auto_generated.created_at:
            self.cli_helper.echo("latest snapshot autogenerated by datmo: ")
            self.cli_helper.echo(latest_snapshot_auto_generated)
            self.cli_helper.echo("latest snapshot generated by the user: ")
            self.cli_helper.echo(latest_snapshot_user_generated)
        return status_dict, current_snapshot, latest_snapshot_user_generated, \
            latest_snapshot_auto_generated, unstaged_code, \
            unstaged_environment, unstaged_files

    @Helper.notify_no_project_found
    def cleanup(self):
        """Remove all datmo state for this project after user confirmation.

        Returns True on successful cleanup, False otherwise.
        """
        # NOTE(review): the decorator/def line and prompt call here were part
        # of a scrubbed span in the source; reconstructed — confirm signature.
        response = self.cli_helper.prompt_bool(
            __("prompt", "cli.project.cleanup.confirm"))
        # Cleanup datmo project if user specifies
        if response:
            name = self.project_controller.model.name if self.project_controller.model.name else ""
            path = self.project_controller.home if self.project_controller.home else ""
            self.cli_helper.echo(
                __("info", "cli.project.cleanup", {
                    "name": name,
                    "path": path
                }))
            try:
                success = self.project_controller.cleanup()
                if success:
                    self.cli_helper.echo(
                        __("info", "cli.project.cleanup.success", {
                            "name": name,
                            "path": path
                        }))
                return success
            except Exception:
                self.cli_helper.echo(
                    __("info", "cli.project.cleanup.failure", {
                        "name": name,
                        "path": path
                    }))
        return False

    def dashboard(self):
        """Launch the datmo dashboard web app; blocks until it exits."""
        if not self.project_controller.is_initialized:
            self.cli_helper.echo(
                "Please initialize datmo before using this command")
            return False
        dir_path = os.path.dirname(os.path.abspath(__file__))
        os.chdir(os.path.join(dir_path, "../../dashboard"))
        # NOTE: binds on all interfaces (0.0.0.0) — the dashboard is reachable
        # from the network, not just localhost.
        app.run(host='0.0.0.0')
        return True
class EnvironmentCommand(ProjectCommand):
    """CLI command group for datmo environment operations.

    Provides `setup`, `create`, `update`, `delete` and `ls` subcommands,
    delegating to EnvironmentController.
    """

    def __init__(self, cli_helper):
        super(EnvironmentCommand, self).__init__(cli_helper)

    def environment(self):
        """Show help for the `environment` command group."""
        self.parse(["environment", "--help"])
        return True

    @Helper.notify_no_project_found
    def setup(self, **kwargs):
        """Select (via kwargs or interactive prompt) and set up an environment.

        Recognized kwargs: ``type``, ``framework``, ``language`` — each
        optional; any value that is missing or not among the supported
        options triggers a prompt.

        Returns the created environment object, or None when the chosen
        combination does not exist.
        """
        self.environment_controller = EnvironmentController()
        environment_type = kwargs.get("type", None)
        environment_framework = kwargs.get("framework", None)
        environment_language = kwargs.get("language", None)
        # TODO: remove business logic from here and create common helper
        # environment types
        environment_types = self.environment_controller.get_environment_types()
        if not environment_type or environment_type not in environment_types:
            environment_type = self.cli_helper.prompt_available_options(
                environment_types, option_type="type")
        # environment frameworks
        available_framework_details = self.environment_controller.get_supported_frameworks(
            environment_type)
        # Each detail entry is a sequence whose first element is the name
        available_frameworks = [
            item[0] for item in available_framework_details
        ]
        if not environment_framework or environment_framework not in available_frameworks:
            environment_framework = self.cli_helper.prompt_available_options(
                available_framework_details, option_type="framework")
        # environment languages
        available_environment_languages = self.environment_controller.get_supported_languages(
            environment_type, environment_framework)
        # Bug fix: the original condition read
        #   a and not b or b not in a
        # which parses as `(a and not b) or (b not in a)`, so an empty/None
        # language list combined with a user-supplied language still reached
        # the prompt (with no options, or a TypeError on `in None`).
        # Parenthesized so we only prompt when options exist and the given
        # language is missing or unsupported.
        if available_environment_languages and (
                not environment_language or
                environment_language not in available_environment_languages):
            environment_language = self.cli_helper.prompt_available_options(
                available_environment_languages, option_type="language")
        try:
            options = {
                "environment_type": environment_type,
                "environment_framework": environment_framework,
                "environment_language": environment_language
            }
            environment_obj = self.environment_controller.setup(
                options=options)
            self.cli_helper.echo(
                __("info", "cli.environment.setup.success",
                   (environment_obj.name, environment_obj.id)))
            return environment_obj
        except EnvironmentDoesNotExist:
            self.cli_helper.echo(
                __("error", "cli.environment.setup.argument",
                   "%s:%s-%s" % (environment_framework, environment_type,
                                 environment_language)))

    @Helper.notify_no_project_found
    def create(self, **kwargs):
        """Create an environment from the given paths; reuse an identical one.

        Returns the created (or pre-existing identical) environment object.
        """
        self.environment_controller = EnvironmentController()
        self.cli_helper.echo(__("info", "cli.environment.create"))
        created_environment_obj = self.environment_controller.create(kwargs)
        environments = self.environment_controller.list()
        for environment_obj in environments:
            # Equality here means an identical environment already existed
            if created_environment_obj == environment_obj:
                self.cli_helper.echo(
                    __("info", "cli.environment.create.alreadyexist",
                       created_environment_obj.id))
                return created_environment_obj
        self.cli_helper.echo(
            __("info", "cli.environment.create.success",
               created_environment_obj.id))
        return created_environment_obj

    @Helper.notify_no_project_found
    def update(self, **kwargs):
        """Update name/description of the environment given by kwargs['id']."""
        self.environment_controller = EnvironmentController()
        environment_id = kwargs.get('id')
        name = kwargs.get('name', None)
        description = kwargs.get("description", None)
        result = self.environment_controller.update(
            environment_id, name=name, description=description)
        return result

    @Helper.notify_environment_active(EnvironmentController)
    @Helper.notify_no_project_found
    def delete(self, **kwargs):
        """Delete the environment given by kwargs['id']; echo on success."""
        self.environment_controller = EnvironmentController()
        environment_id = kwargs.get('id')
        if self.environment_controller.delete(environment_id):
            self.cli_helper.echo(
                __("info", "cli.environment.delete.success", environment_id))
            return True

    @Helper.notify_no_project_found
    def ls(self, **kwargs):
        """List environments, optionally writing the table to a file.

        Recognized kwargs: ``format`` (default "table"), ``download`` and
        ``download_path``. Returns the list of environment objects.
        """
        self.environment_controller = EnvironmentController()
        print_format = kwargs.get('format', "table")
        download = kwargs.get('download', None)
        download_path = kwargs.get('download_path', None)
        environment_objs = self.environment_controller.list()
        header_list = ["id", "created at", "name", "description"]
        item_dict_list = []
        for environment_obj in environment_objs:
            environment_obj_name = printable_object(environment_obj.name)
            environment_obj_description = printable_object(
                environment_obj.description)
            item_dict_list.append({
                "id": environment_obj.id,
                "created at": prettify_datetime(environment_obj.created_at),
                "name": environment_obj_name,
                "description": environment_obj_description
            })
        if download:
            if not download_path:
                # download to current working directory with timestamp
                # NOTE(review): despite the comment above, the default path is
                # rooted at the controller's home, not the process cwd —
                # confirm intended location.
                current_time = datetime.utcnow()
                epoch_time = datetime.utcfromtimestamp(0)
                current_time_unix_time_ms = (
                    current_time - epoch_time).total_seconds() * 1000.0
                download_path = os.path.join(
                    self.environment_controller.home,
                    "environment_ls_" + str(current_time_unix_time_ms))
            self.cli_helper.print_items(
                header_list,
                item_dict_list,
                print_format=print_format,
                output_path=download_path)
            return environment_objs
        self.cli_helper.print_items(
            header_list, item_dict_list, print_format=print_format)
        return environment_objs