def test_delete(self):
    """Verify delete() error paths and successful snapshot removal."""
    # An invalid home directory must surface InvalidProjectPath
    Config().set_home(os.path.join("does", "not", "exist"))
    raised = False
    try:
        delete()
    except InvalidProjectPath:
        raised = True
    assert raised
    # A nonexistent snapshot id must surface EntityNotFound
    Config().set_home(self.temp_dir)
    raised = False
    try:
        delete(snapshot_id="does_not_exist")
    except EntityNotFound:
        raised = True
    assert raised
    # Create a snapshot, delete it, and confirm it disappears from ls()
    script_path = os.path.join(self.temp_dir, "script.py")
    with open(script_path, "wb") as f:
        f.write(to_bytes("import numpy\n"))
        f.write(to_bytes("import sklearn\n"))
    snapshot_obj = create(message="delete_test")
    before_delete = ls(filter='delete_test')
    delete(snapshot_id=snapshot_obj.id)
    after_delete = ls(filter='delete_test')
    assert len(before_delete) == 1
    assert len(after_delete) == 0
def test_ls(self):
    """Verify ls() error paths, listing, and message filtering for snapshots."""
    # Project is not initialized when home points nowhere
    Config().set_home(os.path.join("does", "not", "exist"))
    raised = False
    try:
        ls()
    except InvalidProjectPath:
        raised = True
    assert raised
    # Unknown session id must raise SessionDoesNotExist
    Config().set_home(self.temp_dir)
    raised = False
    try:
        ls(session_id="does_not_exist")
    except SessionDoesNotExist:
        raised = True
    assert raised
    # First snapshot: default params with files to commit
    script_path = os.path.join(self.temp_dir, "script.py")
    with open(script_path, "wb") as f:
        f.write(to_bytes("import numpy\n"))
        f.write(to_bytes("import sklearn\n"))
    create(message="test1")
    # All snapshots, no filters
    listing = ls()
    assert listing
    assert len(list(listing)) == 1
    assert isinstance(listing[0], Snapshot)
    # Second snapshot adds an environment definition
    dockerfile_path = os.path.join(self.temp_dir, "Dockerfile")
    with open(dockerfile_path, "wb") as f:
        f.write(to_bytes("FROM python:3.5-alpine"))
    create(message="test2")
    # Listing still works with more than one snapshot
    listing = ls()
    assert listing
    assert len(list(listing)) == 2
    assert isinstance(listing[0], Snapshot)
    assert isinstance(listing[1], Snapshot)
    # Filter that matches exactly one snapshot
    filtered = ls(filter="test2")
    assert filtered
    assert len(list(filtered)) == 1
    assert isinstance(filtered[0], Snapshot)
    # Filter that matches nothing
    assert len(list(ls(filter="test3"))) == 0
def test_ls(self):
    """Verify ls() error paths, listing, and command filtering for tasks."""
    # Project is not initialized when home points nowhere
    Config().set_home(os.path.join("does", "not", "exist"))
    raised = False
    try:
        ls()
    except InvalidProjectPath:
        raised = True
    assert raised
    # Unknown session id must raise SessionDoesNotExist
    Config().set_home(self.temp_dir)
    raised = False
    try:
        ls(session_id="does_not_exist")
    except SessionDoesNotExist:
        raised = True
    assert raised
    # Run one task with default params
    self.__setup()
    listing = ls()
    assert listing
    assert len(list(listing)) == 1
    assert isinstance(listing[0], Task)
    # Run a second task with an explicit command
    self.__setup(command="test")
    listing = ls()
    assert listing
    assert len(list(listing)) == 2
    assert isinstance(listing[0], Task)
    assert isinstance(listing[1], Task)
    # Filter matching a single task
    filtered = ls(filter="script.py")
    assert filtered
    assert len(list(filtered)) == 1
    assert isinstance(filtered[0], Task)
    # Filter matching nothing
    assert len(list(ls(filter="random"))) == 0
def setup_method(self):
    """Create an isolated datmo project plus env/config/stats/test fixture files."""
    # Docker needs a mountable tmp directory on non-Windows hosts
    tempfile.tempdir = "/tmp" if not platform.system() == "Windows" else None
    test_datmo_dir = os.environ.get('TEST_DATMO_DIR', tempfile.gettempdir())
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    self.project = ProjectController()
    self.project.init("test", "test description")
    self.snapshot = SnapshotController()

    def write_file(path, content):
        # All fixture files are written in binary mode via to_bytes
        with open(path, "wb") as handle:
            handle.write(to_bytes(content))

    # Environment driver definition
    self.env_def_path = os.path.join(self.temp_dir, "Dockerfile")
    write_file(self.env_def_path, "FROM python:3.5-alpine")
    # Config file
    self.config_filepath = os.path.join(self.snapshot.home, "config.json")
    write_file(self.config_filepath, str('{"foo":1}'))
    # Stats file
    self.stats_filepath = os.path.join(self.snapshot.home, "stats.json")
    write_file(self.stats_filepath, str('{"bar":1}'))
    # Plain test file
    self.filepath = os.path.join(self.snapshot.home, "file.txt")
    write_file(self.filepath, str("test"))
def __init__(self):
    """Load configuration, grab the docker CLI handle, and set up logging."""
    # NOTE(review): imports are deferred to call time, presumably to avoid
    # module-level import cycles — confirm before moving them to file scope
    from datmo.core.util.logger import DatmoLogger
    from datmo.config import Config

    config = Config()
    self.config = config
    self.docker_cli = config.docker_cli
    logger = DatmoLogger.get_logger(__name__)
    self.log = logger
    logger.info("handling command %s", config.home)
def setup_method(self):
    """Create a temp datmo home plus canned snapshot/task fixture dicts."""
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    self.cli_helper = Helper()
    # Minimal snapshot record used as shared fixture data
    self.snapshot_dict = {
        "id": "test",
        "model_id": "my_model",
        "session_id": "my_session",
        "message": "my message",
        "code_id": "my_code_id",
        "environment_id": "my_environment_id",
        "file_collection_id": "my file collection",
        "config": {"test": 0.56},
        "stats": {"test": 0.34},
    }
    # Minimal task record used as shared fixture data
    self.task_dict = {
        "id": "test",
        "model_id": "my_model",
        "session_id": "my_session",
        "command": "python test.py",
    }
def model_deployment_detail(model_name, deployment_version_id, model_version_id):
    """Render the detail page for one (deployment version, model version) pair.

    Collects input/prediction/feedback keys from the most recent metadata
    datum that has feedback, ensures the graph directory exists, and passes
    prettified deployment info to the template.
    """
    model = base_controller.model.__dict__
    # Renamed from `filter` so the builtin is not shadowed
    meta_filter = {
        "model_id": model_name,
        "model_version_id": model_version_id,
        "deployment_version_id": deployment_version_id
    }
    input_keys, prediction_keys, feedback_keys = [], [], []
    data = datmo_monitoring.search_metadata(meta_filter)
    if data:
        # Prefer the last datum carrying feedback; fall back to the first
        max_index = 0
        for ind, datum in enumerate(data):
            if datum['feedback'] is not None:
                max_index = ind
        datum = data[max_index]
        input_keys = list(datum['input'].keys())
        prediction_keys = list(datum['prediction'].keys())
        feedback_keys = list(
            datum['feedback'].keys()) if datum['feedback'] is not None else []
    # Determine the graph directory path and create if not present
    graph_dirpath = os.path.join(base_controller.home,
                                 Config().datmo_directory_name, "deployments",
                                 deployment_version_id, model_version_id,
                                 "graphs")
    if not os.path.exists(graph_dirpath):
        os.makedirs(graph_dirpath)
    # Include deployment info
    deployment_info = datmo_monitoring.get_deployment_info(
        deployment_version_id=deployment_version_id)
    # Prettify dates
    deployment_info['created_at'] = prettify_datetime(
        deployment_info['created_at'])
    # TODO: replace with proper handling — endpoints/paths are matched by
    # substring of the underscore-stripped model version id
    deployment_info['endpoints'] = [
        endpoint for endpoint in deployment_info['endpoints']
        if "".join(model_version_id.split("_")) in endpoint
    ]
    deployment_info['service_paths'] = [
        path for path in deployment_info['service_paths']
        if "".join(model_version_id.split("_")) in path
    ]
    # TODO: END
    deployment_info['deployment_version_id'] = deployment_version_id
    deployment_info['model_version_id'] = model_version_id
    return render_template(
        "model_deployment_detail.html",
        user=user,
        model=model,
        deployment=deployment_info,
        graph_dirpath=graph_dirpath,
        input_keys=input_keys,
        prediction_keys=prediction_keys,
        feedback_keys=feedback_keys,
    )
def test_init_fail_project_not_init(self):
    """SnapshotController must refuse to start in an uninitialized project."""
    Config().set_home(self.temp_dir)
    raised = False
    try:
        SnapshotController()
    except ProjectNotInitialized:
        raised = True
    assert raised
def test_failed_controller_instantiation(self):
    """BaseController must reject a home path that does not exist."""
    raised = False
    try:
        Config().set_home("does_not_exists")
        BaseController()
    except InvalidProjectPath:
        raised = True
    assert raised
def teardown_method(self):
    """Delete every environment created by the test, when docker is usable."""
    # Skip cleanup entirely when docker is inactive on this host
    if not check_docker_inactive(test_datmo_dir,
                                 Config().datmo_directory_name):
        self.__setup()
        self.environment_controller = EnvironmentController()
        # De-duplicate ids before deleting; a falsy delete() is a failure
        for env_id in list(set(self.environment_ids)):
            if not self.environment_controller.delete(env_id):
                # was a bare `raise Exception` with no diagnostic
                raise Exception(
                    "teardown failed to delete environment: %s" % env_id)
def __init__(self, api_key=None):
    """Resolve the API key (from config when not given) and open a RemoteAPI."""
    if api_key is None:
        # remote_credentials is a 3-tuple; only the api key is needed here
        _, api_key, _ = Config().remote_credentials
    self._api_key = api_key
    self.remote_api = RemoteAPI(self._api_key)
    # Monitoring window / identity fields start unset
    self._start_time = None
    self._end_time = None
    self._model_id = None
    self._model_version_id = None
    self._deployment_version_id = None
def setup_method(self):
    """Create an isolated datmo home and a CLI helper."""
    # Docker needs a mountable tmp directory on non-Windows hosts
    tempfile.tempdir = None if platform.system() == "Windows" else "/tmp"
    test_datmo_dir = os.environ.get('TEST_DATMO_DIR', tempfile.gettempdir())
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    self.cli_helper = Helper()
def test_init_fail_invalid_path(self):
    """SnapshotController must reject a home path that does not exist."""
    Config().set_home("some_random_dir")
    raised = False
    try:
        SnapshotController()
    except InvalidProjectPath:
        raised = True
    assert raised
def setup_method(self):
    """Create an isolated datmo home with project and file-collection controllers."""
    # Docker needs a mountable tmp directory on non-Windows hosts
    tempfile.tempdir = None if platform.system() == "Windows" else "/tmp"
    test_datmo_dir = os.environ.get('TEST_DATMO_DIR', tempfile.gettempdir())
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    self.project_controller = ProjectController()
    self.file_collection_controller = FileCollectionController()
def __init__(self, service_container_management=False):
    """Initialize the Orchestrator service."""
    super(DeployController, self).__init__()
    self.commands = Commands()
    self.config = Config()
    # remote_credentials yields (master server ip, api key, end point)
    (self.master_server_ip, self.datmo_api_key,
     self.datmo_end_point) = self.config.remote_credentials
    self.service_container_management = service_container_management
    self.driver = DatmoMicroserviceDeployDriver(
        end_point=self.datmo_end_point, api_key=self.datmo_api_key)
    self.spinner = Spinner()
def setup_method(self):
    """Create an isolated datmo home backed by a LocalFileDriver."""
    # Docker needs a mountable tmp directory on non-Windows hosts
    tempfile.tempdir = None if platform.system() == "Windows" else "/tmp"
    test_datmo_dir = os.environ.get('TEST_DATMO_DIR', tempfile.gettempdir())
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    self.local_file_driver = LocalFileDriver(
        root=self.temp_dir, datmo_directory_name=".datmo")
def main():
    """CLI entry point: map the first token to a command class, parse, execute."""
    cli_helper = Helper()
    # Config must be constructed first so the datmo home directory (.datmo)
    # is resolved before logging writes anything.
    config = Config()
    log = DatmoLogger.get_logger(__name__)
    log.info("handling command %s", config.home)
    # Only the first CLI token is inspected here; full validation happens
    # inside the command's own parser (parse_args would otherwise fail on
    # the remaining argv entries).
    if len(sys.argv) > 1 and sys.argv[1] in cli_helper.get_command_choices():
        command_name = sys.argv[1]
        if command_name == "init":
            # `init` is handled by the project command
            command_name = "project"
        elif command_name in ("version", "--version", "-v"):
            command_name = "project"
            sys.argv[1] = "version"
        elif command_name in ("status", "cleanup"):
            # project subcommands keep their own name as argv[1]
            sys.argv[1] = command_name
            command_name = "project"
        command_class = cli_helper.get_command_class(command_name)
    else:
        command_class = BaseCommand
    # instantiate the command class
    try:
        command_instance = command_class(os.getcwd(), cli_helper)
    except TypeError as ex:
        cli_helper.echo(__("error", "cli.general", str(ex)))
        return 1
    # parse the command line arguments
    try:
        command_instance.parse(sys.argv[1:])
    except CLIArgumentException as ex:
        cli_helper.echo(__("error", "cli.general", str(ex)))
        return 1
    try:
        command_instance.execute()
        return 0
    except Exception as ex:
        cli_helper.echo(__("error", "cli.general", str(ex)))
        return 1
def get_config_defaults(self):
    """Return the default driver/storage wiring for this controller.

    Each entry maps a component key to the fully qualified class to
    construct ("class_constructor") plus its constructor options. All
    paths are rooted at ``self.home`` using the datmo directory name
    taken from ``Config()``.
    """
    return {
        "controller.code.driver": {
            "class_constructor":
                "datmo.core.controller.code.driver.file.FileCodeDriver",
            "options": {
                "root": self.home,
                "datmo_directory_name": Config().datmo_directory_name
            }
        },
        "controller.file.driver": {
            "class_constructor":
                "datmo.core.controller.file.driver.local.LocalFileDriver",
            "options": {
                "root": self.home,
                "datmo_directory_name": Config().datmo_directory_name
            }
        },
        "controller.environment.driver": {
            "class_constructor":
                "datmo.core.controller.environment.driver.dockerenv.DockerEnvironmentDriver",
            "options": {
                "root": self.home,
                "datmo_directory_name": Config().datmo_directory_name,
                # docker executable is assumed to be resolvable on PATH
                "docker_execpath": "docker"
            }
        },
        "storage.local": {
            "class_constructor": "datmo.core.storage.local.dal.LocalDAL",
            "options": {
                "driver_type": "blitzdb",
                "driver_options": {
                    # file-backed blitzdb database inside the datmo directory
                    "driver_type": "file",
                    "connection_string": os.path.join(
                        self.home, Config().datmo_directory_name, "database")
                }
            }
        },
    }
def setup_method(self):
    """Initialize a project in a temp home and prepare a task input dict."""
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    self.project_controller = ProjectController()
    self.project_controller.init("test", "test description")
    # Minimal task record used as shared fixture data
    self.input_dict = {
        "id": "test",
        "model_id": "my_model",
        "session_id": "my_session",
        "command": "python test.py",
    }
def __init__(self, home=None):
    """Anchor the controller at `home` (defaults to the configured home).

    Raises
    ------
    InvalidProjectPath
        If the resolved home is not an existing directory.
    """
    self.home = home if home else Config().home
    if not os.path.isdir(self.home):
        raise InvalidProjectPath(
            __("error", "controller.base.__init__", self.home))
    self.logger = DatmoLogger.get_logger(__name__)
    # Lazily-populated property caches and initial values
    self._is_initialized = False
    self._dal = None
    self._model = None
    self._code_driver = None
    self._file_driver = None
    self._environment_driver = None
def __init__(self):
    """Anchor the controller at the configured home and set up caches.

    Raises
    ------
    InvalidProjectPath
        If the configured home is not an existing directory.
    """
    self.home = Config().home
    if not os.path.isdir(self.home):
        raise InvalidProjectPath(
            __("error", "controller.base.__init__", self.home))
    # Project-level JSON config lives inside the .datmo directory
    self.config_store = JSONStore(
        os.path.join(self.home, ".datmo", ".config"))
    self.logger = DatmoLogger.get_logger(__name__)
    # Lazily-populated property caches and initial values
    self._dal = None
    self._model = None
    self._current_session = None
    self._code_driver = None
    self._file_driver = None
    self._environment_driver = None
    self._is_initialized = False
def setup_method(self):
    """Initialize a project in a temp home and prepare a snapshot input dict."""
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    ProjectController().init("test", "test description")
    # Minimal snapshot record used as shared fixture data
    self.input_dict = {
        "id": "test",
        "model_id": "my_model",
        "message": "my message",
        "code_id": "my_code_id",
        "environment_id": "my_environment_id",
        "file_collection_id": "my file collection",
        "config": {"test": 0.56},
        "stats": {"test": 0.34},
    }
def setup_method(self):
    """Create a temp datmo home and an empty environment-id registry."""
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    # Populated by tests; consumed during teardown cleanup
    self.environment_ids = []
def setup_method(self):
    """Create a temp datmo home and a CLI helper."""
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    self.cli_helper = Helper()
def init(self, name, description):
    """
    Initialize the project

    This function will initialize the project, or reinitialize it if the
    project is already initialized.

    Parameters
    ----------
    name : str
    description : str

    Returns
    -------
    bool
    """
    # Track whether a model record already exists so a failure can either
    # roll back to the old record or clean up the half-created project.
    is_new_model = False
    old_model = self.model
    if not self.model:
        is_new_model = True
    try:
        # Always validate inputs to the init function
        validate("create_project", {
            "name": name,
            "description": description
        })
        # Initialize File Driver if needed
        if not self.file_driver.is_initialized:
            self.file_driver.init()
        # Initialize the dal
        if not self.dal.is_initialized:
            self.dal.init()
        # Initialize Code Driver if needed
        if not self.code_driver.is_initialized:
            self.code_driver.init()
        # Initialize Environment Driver if needed
        if not self.environment_driver.is_initialized:
            self.environment_driver.init()
        # Initialize the config JSON store
        self.config_store = JSONStore(
            os.path.join(self.home,
                         Config().datmo_directory_name, ".config"))
        # Create model if new else update
        if is_new_model:
            _ = self.dal.model.create(
                Model({
                    "name": name,
                    "description": description
                }))
        else:
            self._model = self.dal.model.update({
                "id": self.model.id,
                "name": name,
                "description": description
            })
        # Connect Environment Driver if needed
        # (not required but will warn if not present)
        try:
            if not self.environment_driver.is_connected:
                self.environment_driver.connect()
        except EnvironmentConnectFailed:
            self.logger.warning(
                __("warn", "controller.general.environment.failed"))
        # Build the initial default Environment (NOT NECESSARY)
        # self.environment_driver.build_image(tag="datmo-" + \
        #     self.model.name)
        return True
    except Exception:
        # if any error occurred with new model, ensure no initialize occurs
        # and raise previous error; if any error occurred with existing
        # model, ensure no updates were made, raise previous error
        if is_new_model:
            self.cleanup()
        else:
            self._model = self.dal.model.update({
                "id": old_model.id,
                "name": old_model.name,
                "description": old_model.description
            })
        raise
def main():
    """CLI entry point: map argv[1] to a command class, parse args, execute.

    Returns 0 on success, 1 on any instantiation/parse/execution error.
    """
    cli_helper = Helper()
    # Config is required to run first so it can
    # initialize/find datmo home directory (.datmo)
    # This is required for logging to place the logs in a
    # place for the user.
    config = Config()
    config.set_home(os.getcwd())
    log = DatmoLogger.get_logger(__name__)
    log.info("handling command %s", config.home)
    # parse_args defaults to [1:] for args, but you need to
    # exclude the rest of the args too, or validation will fail
    # args = parser.parse_args(sys.argv[1:2])
    if len(sys.argv) > 1 and \
            sys.argv[1] in cli_helper.get_command_choices():
        command_name = sys.argv[1]
        # commands in project.py
        if command_name == "init":
            command_name = "project"
        elif command_name == "version" or \
                command_name == "--version" or \
                command_name == "-v":
            command_name = "project"
            sys.argv[1] = "version"
        elif command_name == "status":
            command_name = "project"
            sys.argv[1] = "status"
        elif command_name == "cleanup":
            command_name = "project"
            sys.argv[1] = "cleanup"
        # commands in workspace.py
        elif command_name in ["notebook", "jupyterlab", "terminal", "rstudio"]:
            sys.argv[1] = command_name
            command_name = "workspace"
        # commands in run.py
        elif command_name == "rerun":
            command_name = "run"
            sys.argv[1] = "rerun"
        elif command_name == "run":
            # bare `datmo run` gets help appended
            if len(sys.argv) == 2:
                command_name = "run"
                sys.argv.append("--help")
            else:
                command_name = "run"
        elif command_name == "stop":
            # stop command in run.py; bare `datmo stop` gets help appended
            if len(sys.argv) == 2:
                command_name = "run"
                sys.argv.append("--help")
            else:
                command_name = "run"
        elif command_name == "ls":
            # ls command in run.py
            command_name = "run"
        elif command_name == "delete":
            # delete command in run.py
            command_name = "run"
        command_class = cli_helper.get_command_class(command_name)
    elif len(sys.argv) == 1:
        # no arguments at all: fall back to the default datmo command
        command_name = "datmo_command"
        command_class = cli_helper.get_command_class(command_name)
    else:
        command_class = BaseCommand
    # instantiate the command class
    try:
        command_instance = command_class(cli_helper)
    except TypeError as ex:
        cli_helper.echo(__("error", "cli.general", "%s %s" % (type(ex), ex)))
        return 1
    # parse the command line arguments
    try:
        command_instance.parse(sys.argv[1:])
    except CLIArgumentError as ex:
        cli_helper.echo(__("error", "cli.general", "%s %s" % (type(ex), ex)))
        return 1
    try:
        command_instance.execute()
        return 0
    except Exception as ex:
        cli_helper.echo(__("error", "cli.general", "%s %s" % (type(ex), ex)))
        return 1
def test_create(self):
    """Verify create() error path and the three snapshot creation variants."""
    # Project is not initialized when home points nowhere
    Config().set_home(os.path.join("does", "not", "exist"))
    raised = False
    try:
        create(message="test")
    except InvalidProjectPath:
        raised = True
    assert raised
    # Default params, nothing to commit
    Config().set_home(self.temp_dir)
    assert create(message="test")

    def assert_snapshot(snapshot_obj):
        # Common checks shared by every successful snapshot below
        assert snapshot_obj
        assert isinstance(snapshot_obj, Snapshot)
        assert snapshot_obj.message == "test"
        assert snapshot_obj.code_id
        assert snapshot_obj.environment_id
        assert snapshot_obj.files == []
        assert snapshot_obj.config == {}
        assert snapshot_obj.stats == {}

    # Default params with files to commit
    script_path = os.path.join(self.temp_dir, "script.py")
    with open(script_path, "wb") as f:
        f.write(to_bytes("import numpy\n"))
        f.write(to_bytes("import sklearn\n"))
    snapshot_obj_1 = create(message="test")
    assert_snapshot(snapshot_obj_1)
    # Default params, files, and an environment definition on disk
    dockerfile_path = os.path.join(self.temp_dir, "Dockerfile")
    with open(dockerfile_path, "wb") as f:
        f.write(to_bytes("FROM python:3.5-alpine"))
    snapshot_obj_2 = create(message="test")
    assert_snapshot(snapshot_obj_2)
    assert snapshot_obj_2 != snapshot_obj_1
    # Environment passed in explicitly
    with open(dockerfile_path, "wb") as f:
        f.write(to_bytes("FROM python:3.5-alpine"))
    snapshot_obj_3 = create(message="test", env=dockerfile_path)
    assert_snapshot(snapshot_obj_3)
    assert snapshot_obj_3 != snapshot_obj_1
def setup_method(self):
    """Create a temp datmo home, a project controller, and an env-id registry."""
    self.temp_dir = tempfile.mkdtemp(dir=test_datmo_dir)
    Config().set_home(self.temp_dir)
    self.project_controller = ProjectController()
    # Populated by tests; consumed during teardown cleanup
    self.environment_ids = []
def __init__(self, cli_helper):
    """Capture the CLI helper plus shared home/logger/parser plumbing."""
    self.cli_helper = cli_helper
    self.home = Config().home
    self.logger = DatmoLogger.get_logger(__name__)
    self.parser = get_datmo_parser()
def __setup(self):
    """(Re)initialize a project in temp_dir and build the three controllers."""
    Config().set_home(self.temp_dir)
    self.project_controller = ProjectController()
    self.project_controller.init("test", "test description")
    self.task_controller = TaskController()
    self.snapshot_controller = SnapshotController()