def test_save(self):
    """Saving a key should persist it into the backing JSON file.

    Fix: the original used a bare ``open(...).read()`` which leaks the
    file handle, and a redundant ``found_it`` flag; use a context manager
    and assert membership directly.
    """
    storage = JSONStore(self.storage_file)
    storage.save('foobar', 'yep')
    with open(self.storage_file) as f:
        contents = f.read()
    assert 'foobar' in contents
def _config_setup(self, incoming_dictionary, create_dict):
    """Fill in the snapshot config from one of three sources.

    Precedence:
    1. ``config`` -- a JSON object passed directly
    2. ``config_filepath`` -- a path to an existing JSON file
    3. ``config_filename`` -- a bare file name looked up in the file
       collection (defaults to ``"config.json"`` when absent)

    Parameters
    ----------
    incoming_dictionary : dict
        dictionary for the create function defined above
    create_dict : dict
        dictionary for creating the Snapshot entity (mutated in place)

    Raises
    ------
    FileIOError
        if ``config_filepath`` is given but does not point to a file
    """
    if "config" in incoming_dictionary:
        create_dict['config'] = incoming_dictionary['config']
    elif "config_filepath" in incoming_dictionary:
        if not os.path.isfile(incoming_dictionary['config_filepath']):
            raise FileIOError(
                __("error", "controller.snapshot.create.file_config"))
        # If path exists transform file to config dict
        config_json_driver = JSONStore(
            incoming_dictionary['config_filepath'])
        create_dict['config'] = config_json_driver.to_dict()
    else:
        # The original duplicated this call in two branches that differed
        # only in the filename; dict.get with a default merges them.
        config_filename = incoming_dictionary.get('config_filename',
                                                  "config.json")
        create_dict['config'] = self._find_in_filecollection(
            config_filename, create_dict['file_collection_id'])
def test_get_obj(self):
    """A dict value saved under a key should round-trip through get()."""
    store = JSONStore(self.storage_file)
    saved_value = {"does this work": "noway"}
    store.save('foobar1', saved_value)
    assert store.get('foobar1') == saved_value
def _find_in_filecollection(self, file_to_find, file_collection_id):
    """Attempt to find a file within the file collection.

    Returns
    -------
    dict
        contents of the first matching JSON file, or {} if none exists
    """
    collection_obj = self.file_collection.dal.file_collection.get_by_id(
        file_collection_id)
    collection_path = \
        self.file_collection.file_driver.get_collection_path(
            collection_obj.filehash)
    # Candidate locations: the project home first, then every directory
    # under the collection tree (os.walk yields (dirpath, dirs, files)).
    candidates = [os.path.join(self.home, file_to_find)]
    for walk_entry in os.walk(collection_path):
        candidates.append(
            os.path.join(self.home, walk_entry[0], file_to_find))
    for candidate in candidates:
        if os.path.isfile(candidate):
            # First existing path wins: load it as a JSON dict
            return JSONStore(candidate).to_dict()
    # TODO: Add some info / warning that no file was found
    # create some default stats
    return {}
def remote_credentials(self):
    """Return remote credentials if present.

    Returns
    -------
    MASTER_SERVER_IP : str
        return if present else None
    DATMO_API_KEY : str
        return if present else None
    END_POINT : str
        return if present else None
    """
    # Environment variables take precedence over the global datmo config
    master_server_ip = os.environ.get('MASTER_SERVER_IP', None)
    datmo_api_key = os.environ.get('DATMO_API_KEY', None)
    # Fall back to ~/.datmo/config for anything still unset
    config_filepath = os.path.join(
        os.path.expanduser("~"), ".datmo", "config")
    if os.path.isfile(config_filepath):
        config_dict = JSONStore(config_filepath).to_dict()
        if master_server_ip is None:
            master_server_ip = config_dict.get('MASTER_SERVER_IP', None)
        if datmo_api_key is None:
            datmo_api_key = config_dict.get('DATMO_API_KEY', None)
    if master_server_ip:
        end_point = 'http://' + master_server_ip + ':2083/api/v1'
    else:
        end_point = None
    return master_server_ip, datmo_api_key, end_point
def test_get_string(self):
    """A string value saved under a key should round-trip through get()."""
    store = JSONStore(self.storage_file)
    store.save('foobar1', 'disco')
    assert store.get('foobar1') == 'disco'
def __init__(self):
    """Set up in-memory config defaults and the on-disk JSON cache."""
    # Home is resolved later via the property setter
    self._home = None
    self.datmo_directory_name = ".datmo"
    self.logging_level = logging.DEBUG
    DatmoLogger.get_logger(__name__).info("initializing")
    cache_filepath = os.path.join(
        os.path.expanduser("~"), ".datmo", "cache.json")
    self.data_cache = JSONStore(cache_filepath)
    self.docker_cli = '/usr/bin/docker'
def __init__(self):
    """Validate the project home and prepare lazily-built collaborators."""
    self.home = Config().home
    # Fail fast on a missing/invalid project directory
    if not os.path.isdir(self.home):
        raise InvalidProjectPath(
            __("error", "controller.base.__init__", self.home))
    config_path = os.path.join(self.home, ".datmo", ".config")
    self.config_store = JSONStore(config_path)
    self.logger = DatmoLogger.get_logger(__name__)
    # Property caches: each is populated on first access
    self._dal = None
    self._model = None
    self._current_session = None
    self._code_driver = None
    self._file_driver = None
    self._environment_driver = None
    self._is_initialized = False
def __init__(self, home):
    """Validate `home` as the project root and set up lazy caches."""
    self.home = home
    # Reject a path that is not an existing directory
    if not os.path.isdir(self.home):
        raise InvalidProjectPathException(
            __("error", "controller.base.__init__", home))
    config_path = os.path.join(self.home, ".datmo", ".config")
    self.config = JSONStore(config_path)
    # Property caches: filled in on first use by the matching properties
    self._dal = None
    self._model = None
    self._current_session = None
    self._code_driver = None
    self._file_driver = None
    self._environment_driver = None
    self._is_initialized = False
def _setup_compatible_environment(self, create_dict, paths, directory,
                                  save_hardware_file=True):
    """Setup compatible environment from user paths.

    Creates the necessary datmo files if they are not already present.

    Parameters
    ----------
    create_dict : dict
        dictionary for entity creation, this is mutated in the function
        (not returned)
    paths : list
        list of absolute or relative filepaths and/or dirpaths to collect
        with destination names
        (e.g. "/path/to/file>hello", "/path/to/file2", "/path/to/dir>newdir")
    directory : str
        path of directory to save additional files to
    save_hardware_file : bool
        boolean to save hardware file along with other files
        (default is True to save the file and create distinct hashes based
        on software and hardware)

    Returns
    -------
    paths : list
        returns the input paths with the paths of the new files created
        appended
    """
    # a. look for the default definition; if not present add it to the
    # directory and to paths, else remember where the user's copy lives
    original_definition_filepath = ""
    if all(create_dict['definition_filename'] not in path
           for path in paths):
        self.environment_driver.create_default_definition(directory)
        original_definition_filepath = os.path.join(
            directory, create_dict['definition_filename'])
        paths.append(original_definition_filepath)
    else:
        # substring match against each user-supplied path spec
        for idx, path in enumerate(paths):
            if create_dict['definition_filename'] in path:
                src_path, dest_path = parse_path(path)
                original_definition_filepath = src_path
    # b. use the default definition and create a datmo definition in the
    # directory, and add to paths
    datmo_definition_filepath = \
        os.path.join(directory,
                     "datmo" + create_dict['definition_filename'])
    if not os.path.isfile(datmo_definition_filepath):
        # NOTE(review): append assumed to be inside this branch (the
        # collapsed source is ambiguous) — only newly created datmo
        # definitions are added to paths; confirm against upstream.
        _, original_definition_filepath, datmo_definition_filepath = \
            self.environment_driver.create(
                path=original_definition_filepath,
                output_path=datmo_definition_filepath)
        paths.append(datmo_definition_filepath)
    # c. get the hardware info and save it to the entity; if
    # save_hardware_file is True then save it to file and add it to paths
    create_dict[
        'hardware_info'] = self.environment_driver.get_hardware_info()
    if save_hardware_file:
        hardware_info_filepath = os.path.join(directory, "hardware_info")
        # JSONStore writes initial_dict to disk on construction
        _ = JSONStore(hardware_info_filepath,
                      initial_dict=create_dict['hardware_info'])
        paths.append(hardware_info_filepath)
    return paths
def configure(self):
    """Configure datmo installation.

    Prompts for remote credentials, resolves the master server IP via the
    remote API, and persists both into ``~/.datmo/config``.
    """
    # General setup
    setup_remote_bool = self.cli_helper.prompt_bool(
        "Would you like to setup your remote credentials? [yN]")
    if setup_remote_bool:
        # Re-prompt until a non-empty API key is entered
        datmo_api_key = None
        while not datmo_api_key:
            datmo_api_key = self.cli_helper.prompt(
                "Enter API key for datmo")
        # Initialize remote API to get master ip address
        remote_api = RemoteAPI(datmo_api_key)
        response = remote_api.get_deployment_info()
        master_system_info = response['body']['master_system_info']
        master_server_ip = str(master_system_info.get('datmo_master_ip')) \
            if isinstance(master_system_info, dict) else None
        # Create a config file
        self.datmo_config = JSONStore(
            os.path.join(os.path.expanduser("~"), ".datmo", "config"))
        config = dict()
        if master_server_ip and datmo_api_key:
            config["MASTER_SERVER_IP"] = master_server_ip
            config["DATMO_API_KEY"] = datmo_api_key
            self.datmo_config.to_file(config)
        else:
            self.cli_helper.echo(
                "Datmo API key could not be saved. Please try again")
    # Setup project specific things.
    # Fix: the original used `if model: pass else: echo(...)`; inverted
    # into a direct guard with identical behavior.
    if not self.project_controller.model:
        self.cli_helper.echo(
            "No datmo project found. Skipping configuration for project.")
class __InternalConfig:
    """Process-wide configuration state with a TTL key/value cache.

    The cache is backed by a JSONStore at ``~/.datmo/cache.json``; each
    cached key is stored alongside a companion expiry-timestamp key.
    """

    def __init__(self):
        self._home = None
        self.logging_level = logging.DEBUG
        # Fix: corrected typo in log message ("initalizing")
        DatmoLogger.get_logger(__name__).info("initializing")
        self.data_cache = JSONStore(
            os.path.join(os.path.expanduser("~"), ".datmo", "cache.json"))

    @property
    def home(self):
        return self._home

    @home.setter
    def home(self, home_path):
        self._home = home_path

    def get_cache_item(self, key):
        """Return the cached value for `key`, or None if absent/expired.

        Expired entries are removed from the backing store as a side
        effect.
        """
        cache_expire_key = 'cache_key_expires.' + key
        cache_key = 'cache_key.' + key
        cache_expire_val = self.data_cache.get(cache_expire_key)
        # No expiry value means the item was never stored
        if cache_expire_val is None:  # fix: `is None`, not `== None`
            return None
        # Fix: strftime('%s') is a glibc extension (fails on Windows);
        # timestamp() yields the same epoch seconds portably.
        if int(cache_expire_val) > int(
                datetime.datetime.now().timestamp()):
            return self.data_cache.get(cache_key)
        # Expired: drop both the value and its expiry marker
        self.data_cache.remove(cache_expire_key)
        self.data_cache.remove(cache_key)
        return None

    def set_cache_item(self, key, value, duration=60):
        """Store `value` under `key`, expiring after `duration` minutes."""
        cache_expire_key = 'cache_key_expires.' + key
        cache_key = 'cache_key.' + key
        expire_val = (duration * 60) + int(
            datetime.datetime.now().timestamp())
        self.data_cache.save(cache_expire_key, expire_val)
        self.data_cache.save(cache_key, value)
def test_load_new_json_file(self):
    """Empty files load as {}; non-JSON content raises FileIOError."""
    json_path = os.path.join(self.temp_dir, "test.json")
    # An empty file should be treated as an empty dict
    with open(json_path, "wb") as f:
        f.write(to_bytes(""))
    assert JSONStore(json_path).to_dict() == {}
    # Undecodable content should raise FileIOError
    with open(json_path, "wb") as f:
        f.write(to_bytes("this is not json decodeable"))
    raised = False
    try:
        JSONStore(json_path).to_dict()
    except FileIOError:
        raised = True
    assert raised
def _store_hardware_info(self, dictionary, create_dict, definition_path):
    """Populate create_dict['hardware_info'] and persist it to a file.

    Returns
    -------
    str
        path of the "hardware_info" file written under definition_path
    """
    if "hardware_info" in dictionary:
        create_dict['hardware_info'] = dictionary['hardware_info']
    else:
        # Extract hardware info of the container (currently taking from
        # system platform)
        # TODO: extract hardware information directly from the container
        uname_result = platform.uname()
        create_dict['hardware_info'] = {
            'system': uname_result[0],
            'node': uname_result[1],
            'release': uname_result[2],
            'version': uname_result[3],
            'machine': uname_result[4],
            'processor': uname_result[5],
        }
    # Create hardware info file in definition path; JSONStore writes
    # initial_dict to disk on construction
    hardware_info_filepath = os.path.join(definition_path, "hardware_info")
    JSONStore(hardware_info_filepath,
              initial_dict=create_dict['hardware_info'])
    return hardware_info_filepath
def __save_dictionary(self, dictionary, path):
    """Merge `dictionary` into the JSON file at `path`.

    Returns
    -------
    dict
        the merged contents that were written back to the file
    """
    store = JSONStore(path)
    merged = store.to_dict()
    merged.update(dictionary)
    store.to_file(merged)
    return merged
class ProjectCommand(BaseCommand):
    # CLI command group for project-level operations: init, version,
    # configure, status, cleanup (header lost to corruption, see below),
    # and dashboard.
    #
    # NOTE(review): several string-echo sequences below were destroyed by
    # an automated credential scrubber (the `"******"` runs). The affected
    # statements are preserved byte-for-byte and flagged inline; they are
    # NOT valid Python and must be restored from upstream history.

    def __init__(self, cli_helper):
        super(ProjectCommand, self).__init__(cli_helper)
        self.project_controller = ProjectController()

    def init(self, name, description, force):
        """Initialize command

        Parameters
        ----------
        name : str
            name for the project
        description : str
            description of the project
        force : bool
            Boolean to force initialization without prompts

        Returns
        -------
        datmo.core.entity.model.Model
        """
        # Check if project already exists
        is_new_model = False
        if not self.project_controller.model:
            is_new_model = True
        if is_new_model:
            # Initialize a new project
            self.cli_helper.echo(
                __("info", "cli.project.init.create",
                   {"path": self.project_controller.home}))
            if not name:
                # Default the project name to the home directory's basename
                _, default_name = os.path.split(
                    self.project_controller.home)
                if not force:
                    name = self.cli_helper.prompt(
                        __("prompt", "cli.project.init.name"),
                        default=default_name)
                else:
                    name = default_name
            if not description:
                if not force:
                    description = self.cli_helper.prompt(
                        __("prompt", "cli.project.init.description"))
                else:
                    description = ""
            try:
                success = self.project_controller.init(name, description)
                if success:
                    self.cli_helper.echo(
                        __("info", "cli.project.init.create.success", {
                            "name": name,
                            "path": self.project_controller.home
                        }))
            except Exception:
                self.cli_helper.echo(
                    __("info", "cli.project.init.create.failure", {
                        "name": name,
                        "path": self.project_controller.home
                    }))
                return None
        else:
            # Update the current project
            self.cli_helper.echo(
                __(
                    "info", "cli.project.init.update", {
                        "name": self.project_controller.model.name,
                        "path": self.project_controller.home
                    }))
            # Prompt for the name and description and add default if not
            # given
            if not name and not force:
                name = self.cli_helper.prompt(
                    __("prompt", "cli.project.init.name"),
                    default=self.project_controller.model.name)
            elif force:
                name = self.project_controller.model.name
            if not description and not force:
                description = self.cli_helper.prompt(
                    __("prompt", "cli.project.init.description"),
                    default=self.project_controller.model.description)
            elif force:
                description = self.project_controller.model.description
            # Update the project with the values given
            try:
                success = self.project_controller.init(name, description)
                if success:
                    self.cli_helper.echo(
                        __("info", "cli.project.init.update.success", {
                            "name": name,
                            "path": self.project_controller.home
                        }))
            except Exception:
                self.cli_helper.echo(
                    __("info", "cli.project.init.update.failure", {
                        "name": name,
                        "path": self.project_controller.home
                    }))
                return None
        self.cli_helper.echo("")
        # Print out simple project meta data
        for k, v in self.project_controller.model.to_dictionary().items():
            if k != "config":
                self.cli_helper.echo(str(k) + ": " + str(v))
        # Ask question if the user would like to setup environment
        environment_setup = self.cli_helper.prompt_bool(
            __("prompt",
               "cli.project.environment.setup")) if not force else False
        if environment_setup:
            # TODO: remove business logic from here and create common
            # helper
            # Setting up the environment definition file
            self.environment_controller = EnvironmentController()
            environment_types = \
                self.environment_controller.get_environment_types()
            environment_type = self.cli_helper.prompt_available_options(
                environment_types, option_type="type")
            available_environment_frameworks = \
                self.environment_controller.get_supported_frameworks(
                    environment_type)
            environment_framework = \
                self.cli_helper.prompt_available_options(
                    available_environment_frameworks,
                    option_type="framework")
            available_environment_languages = \
                self.environment_controller.get_supported_languages(
                    environment_type, environment_framework)
            environment_language = \
                self.cli_helper.prompt_available_options(
                    available_environment_languages,
                    option_type="language")
            options = {
                "environment_type": environment_type,
                "environment_framework": environment_framework,
                "environment_language": environment_language
            }
            environment_obj = self.environment_controller.setup(
                options=options)
            self.cli_helper.echo(
                __("info", "cli.environment.setup.success",
                   (environment_obj.name, environment_obj.id)))
        else:
            self.cli_helper.echo(
                "there was no environment setup. you can get information"
                " here: https://datmo.readthedocs.io/en/latest/env-setup.html"
            )
        return self.project_controller.model

    def version(self):
        # Print the installed datmo version string
        return self.cli_helper.echo("datmo version: %s" % __version__)

    def configure(self):
        """Configure datmo installation"""
        # General setup
        setup_remote_bool = self.cli_helper.prompt_bool(
            "Would you like to setup your remote credentials? [yN]")
        if setup_remote_bool:
            # Re-prompt until a non-empty API key is entered
            datmo_api_key = None
            while not datmo_api_key:
                datmo_api_key = self.cli_helper.prompt(
                    "Enter API key for datmo")
            # Initialize remote API to get master ip address
            remote_api = RemoteAPI(datmo_api_key)
            response = remote_api.get_deployment_info()
            master_system_info = response['body']['master_system_info']
            master_server_ip = \
                str(master_system_info.get('datmo_master_ip')) \
                if isinstance(master_system_info, dict) else None
            # Create a config file
            self.datmo_config = JSONStore(
                os.path.join(os.path.expanduser("~"), ".datmo", "config"))
            config = dict()
            if master_server_ip and datmo_api_key:
                config["MASTER_SERVER_IP"] = master_server_ip
                config["DATMO_API_KEY"] = datmo_api_key
                self.datmo_config.to_file(config)
            else:
                self.cli_helper.echo(
                    "Datmo API key could not be saved. Please try again")
        # Setup project specific things
        if self.project_controller.model:
            pass
        else:
            self.cli_helper.echo(
                "No datmo project found. Skipping configuration for project."
            )

    @Helper.notify_no_project_found
    def status(self):
        # Unpack the full status tuple from the controller
        status_dict, current_snapshot, latest_snapshot_user_generated, \
            latest_snapshot_auto_generated, unstaged_code, \
            unstaged_environment, unstaged_files = \
            self.project_controller.status()
        # Print out simple project meta data
        for k, v in status_dict.items():
            if k != "config":
                self.cli_helper.echo(str(k) + ": " + str(v))
        self.cli_helper.echo("")
        # Print out any unstaged changes else print out the latest
        # snapshot state of the repository
        if not unstaged_code and not unstaged_environment \
                and not unstaged_files:
            self.cli_helper.echo(
                "all changes have been saved, no unstaged changes")
            self.cli_helper.echo("")
            self.cli_helper.echo(
                "current snapshot state of the repository: ")
            if current_snapshot:
                self.cli_helper.echo(current_snapshot)
        else:
            # Print out the unstaged components if unstaged
            self.cli_helper.echo("unstaged changes since latest snapshot:")
            if unstaged_code:
                self.cli_helper.echo("code has been changed")
            if unstaged_environment:
                self.cli_helper.echo("environment has been changed")
            if unstaged_files:
                self.cli_helper.echo("files have been changed")
        # Print out info for the latest snapshot (the most recent first,
        # and state if autogenerated or by user)
        if latest_snapshot_user_generated \
                and not latest_snapshot_auto_generated:
            # NOTE(review): corrupted by scrubber — original echoed the
            # user snapshot between these two strings; restore upstream.
            self.cli_helper.echo("latest snapshot generated by the user: "******"no snapshot autogenerated by datmo")
        elif latest_snapshot_auto_generated \
                and not latest_snapshot_user_generated:
            self.cli_helper.echo(
                "latest snapshot autogenerated by datmo: ")
            self.cli_helper.echo(latest_snapshot_auto_generated)
            self.cli_helper.echo("no snapshot generated by the user")
        elif not latest_snapshot_user_generated \
                and not latest_snapshot_auto_generated:
            self.cli_helper.echo("no snapshots created yet")
        elif latest_snapshot_user_generated.created_at > \
                latest_snapshot_auto_generated.created_at:
            # NOTE(review): corrupted by scrubber — see note above.
            self.cli_helper.echo("latest snapshot generated by the user: "******"latest snapshot autogenerated by datmo: ")
            self.cli_helper.echo(latest_snapshot_auto_generated)
        elif latest_snapshot_user_generated.created_at < \
                latest_snapshot_auto_generated.created_at:
            self.cli_helper.echo(
                "latest snapshot autogenerated by datmo: ")
            self.cli_helper.echo(latest_snapshot_auto_generated)
            # NOTE(review): corruption below swallowed the end of status()
            # AND the `def cleanup(self):` header plus its prompt_bool
            # assignment (`response = ...`); everything from here through
            # `return False` belongs to the destroyed cleanup() method.
            self.cli_helper.echo("latest snapshot generated by the user: "******"prompt", "cli.project.cleanup.confirm"))
        # Cleanup datmo project if user specifies
        if response:
            name = self.project_controller.model.name \
                if self.project_controller.model.name else ""
            path = self.project_controller.home \
                if self.project_controller.home else ""
            self.cli_helper.echo(
                __("info", "cli.project.cleanup", {
                    "name": name,
                    "path": path
                }))
            try:
                success = self.project_controller.cleanup()
                if success:
                    self.cli_helper.echo(
                        __("info", "cli.project.cleanup.success", {
                            "name": name,
                            "path": path
                        }))
                return success
            except Exception:
                self.cli_helper.echo(
                    __("info", "cli.project.cleanup.failure", {
                        "name": name,
                        "path": path
                    }))
        return False

    def dashboard(self):
        # Launch the local dashboard web app; requires an initialized
        # project
        if not self.project_controller.is_initialized:
            self.cli_helper.echo(
                "Please initialize datmo before using this command")
            return False
        dir_path = os.path.dirname(os.path.abspath(__file__))
        # The dashboard app lives two levels up from this module
        os.chdir(os.path.join(dir_path, "../../dashboard"))
        app.run(host='0.0.0.0')
        return True
def __init__(self):
    """Set up in-memory config defaults and the on-disk JSON cache."""
    self._home = None
    self.logging_level = logging.DEBUG
    # Fix: corrected typo in log message ("initalizing") to match the
    # sibling initializer elsewhere in this file.
    DatmoLogger.get_logger(__name__).info("initializing")
    self.data_cache = JSONStore(
        os.path.join(os.path.expanduser("~"), ".datmo", "cache.json"))
class ProjectController(BaseController):
    """ProjectController inherits from BaseController and manages business
    logic related to the project. One model is associated with each
    project currently.

    Methods
    -------
    init(name, description)
        Initialize the project repository as a new model or update the
        existing project
    cleanup()
        Remove all datmo references from the current repository.
        NOTE: THIS WILL DELETE ALL DATMO WORK
    status()
        Give the user a picture of the status of the project, snapshots,
        and tasks
    """

    def __init__(self):
        super(ProjectController, self).__init__()

    def init(self, name, description):
        """Initialize the project

        This function will initialize the project or reinitialize it the
        project is already initialized.

        Parameters
        ----------
        name : str
        description : str

        Returns
        -------
        bool

        Raises
        ------
        Exception
            re-raises whatever failed inside the try block, after rolling
            back (cleanup for a new model, restore for an existing one)
        """
        is_new_model = False
        # Keep the old model so a failed update can be rolled back
        old_model = self.model
        if not self.model:
            is_new_model = True
        try:
            # Always validate inputs to the init function
            validate("create_project", {
                "name": name,
                "description": description
            })
            # Initialize File Driver if needed
            if not self.file_driver.is_initialized:
                self.file_driver.init()
            # Initialize the dal
            if not self.dal.is_initialized:
                self.dal.init()
            # Initialize Code Driver if needed
            if not self.code_driver.is_initialized:
                self.code_driver.init()
            # Initialize Environment Driver if needed
            if not self.environment_driver.is_initialized:
                self.environment_driver.init()
            # Initialize the config JSON store
            self.config_store = JSONStore(
                os.path.join(self.home, Config().datmo_directory_name,
                             ".config"))
            # Create model if new else update
            if is_new_model:
                _ = self.dal.model.create(
                    Model({
                        "name": name,
                        "description": description
                    }))
            else:
                self._model = self.dal.model.update({
                    "id": self.model.id,
                    "name": name,
                    "description": description
                })
            # Connect Environment Driver if needed
            # (not required but will warn if not present)
            try:
                if not self.environment_driver.is_connected:
                    self.environment_driver.connect()
            except EnvironmentConnectFailed:
                self.logger.warning(
                    __("warn", "controller.general.environment.failed"))
            # Build the initial default Environment (NOT NECESSARY)
            # self.environment_driver.build_image(tag="datmo-" + \
            #     self.model.name)
            return True
        except Exception:
            # if any error occurred with new model, ensure no initialize
            # occurs and raise previous error
            # if any error occurred with existing model, ensure no updates
            # were made, raise previous error
            if is_new_model:
                self.cleanup()
            else:
                self._model = self.dal.model.update({
                    "id": old_model.id,
                    "name": old_model.name,
                    "description": old_model.description
                })
            raise

    def cleanup(self):
        """Cleans the project structure completely

        Notes
        -----
        This function will not error out but will gracefully exit, since
        it is used in cases where init fails as a check against
        mid-initialized projects

        Returns
        -------
        bool
        """
        if not self.is_initialized:
            self.logger.warning(
                __("warn", "controller.project.cleanup.not_init"))
        # Remove Datmo environment_driver references, give warning if
        # error
        try:
            # Obtain image id before cleaning up if exists
            images = self.environment_driver.list_images(
                name="datmo-" + self.model.name)
            image_id = images[0].id if images else None
        except Exception:
            self.logger.warning(
                __("warn", "controller.project.cleanup.environment"))
        # Remove Datmo code_driver references, give warning if error
        try:
            if self.code_driver.is_initialized:
                for ref in self.code_driver.list_refs():
                    self.code_driver.delete_ref(ref)
        except Exception:
            self.logger.warning(
                __("warn", "controller.project.cleanup.code"))
        try:
            # Remove Hidden Datmo file structure, give warning if error
            self.file_driver.delete_hidden_datmo_file_structure()
        except (FileIOError, PathDoesNotExist):
            self.logger.warning(
                __("warn", "controller.project.cleanup.files"))
        try:
            # NOTE(review): if the first try block raised before assigning
            # image_id, referencing it here raises NameError — which this
            # broad except also swallows with a warning. Intentional-looking
            # but fragile; confirm.
            if image_id:
                # Remove image created during init
                self.environment_driver.remove_image(
                    image_id_or_name=image_id, force=True)
                # Remove any dangling images (optional)
                # Stop and remove all running environments with image_id
                self.environment_driver.stop_remove_containers_by_term(
                    image_id, force=True)
        except Exception:
            self.logger.warning(
                __("warn", "controller.project.cleanup.environment"))
        return True

    def status(self):
        """Return the project status information if initialized

        Returns
        -------
        status_dict : dict
            dictionary with project metadata and config
        current_snapshot : datmo.core.entity.snapshot.Snapshot
            snapshot object of the current state of the repo if present
            else None
        latest_snapshot_user_generated : datmo.core.entity.snapshot.Snapshot
            snapshot object of the latest snapshot generated by the user
            if present else None
        latest_snapshot_auto_generated : datmo.core.entity.snapshot.Snapshot
            snapshot object of the latest snapshot generated automatically
            by datmo if present else None
        unstaged_code : bool
            True if code has unstaged changes
        unstaged_environment : bool
            True if environment has unstaged changes
        unstaged_files : bool
            True if files have unstaged changes

        Raises
        ------
        ProjectNotInitialized
            if the project has not been initialized
        """
        if not self.is_initialized:
            raise ProjectNotInitialized(
                __("error", "controller.project.status"))
        # TODO: Add in note when environment is not setup or intialized
        # Add in project metadata
        status_dict = self.model.to_dictionary().copy()
        # Find all project settings
        status_dict["config"] = self.config_store.to_dict()
        # Find the latest snapshot generated by the user
        descending_snapshots = self.dal.snapshot.query(
            {
                "visible": True
            }, sort_key="created_at", sort_order="descending")
        latest_snapshot_user_generated = descending_snapshots[
            0] if descending_snapshots else None
        # Show the latest snapshot generated automatically by datmo
        descending_snapshots = self.dal.snapshot.query(
            {
                "visible": False
            }, sort_key="created_at", sort_order="descending")
        latest_snapshot_auto_generated = descending_snapshots[
            0] if descending_snapshots else None
        # TODO: add in latest run
        # Each sub-controller reports unstaged changes; UnstagedChanges is
        # treated as "yes, there are unstaged changes"
        self.code_controller = CodeController()
        try:
            unstaged_code = self.code_controller.check_unstaged_changes()
        except UnstagedChanges:
            unstaged_code = True
        self.environment_controller = EnvironmentController()
        try:
            unstaged_environment = \
                self.environment_controller.check_unstaged_changes()
        except UnstagedChanges:
            unstaged_environment = True
        self.file_collection_controller = FileCollectionController()
        try:
            unstaged_files = \
                self.file_collection_controller.check_unstaged_changes()
        except UnstagedChanges:
            unstaged_files = True
        # If exists, obtain the current snapshot, if unstaged changes,
        # will be None
        self.snapshot_controller = SnapshotController()
        try:
            current_snapshot = self.snapshot_controller.current_snapshot()
        except UnstagedChanges:
            current_snapshot = None
        return status_dict, current_snapshot, \
            latest_snapshot_user_generated, \
            latest_snapshot_auto_generated, \
            unstaged_code, unstaged_environment, unstaged_files
def save_config(self, filepath):
    """Persist self.config as 'config.json' under `filepath`."""
    config_path = os.path.join(filepath, 'config.json')
    # JSONStore writes the supplied dict to disk on construction
    JSONStore(config_path, self.config)
def test_init(self):
    """JSONStore should remember the filepath it was constructed with."""
    store = JSONStore(self.storage_file)
    assert store.filepath == self.storage_file
def save_stats(self, filepath):
    """Persist self.stats as 'stats.json' under `filepath`."""
    stats_path = os.path.join(filepath, 'stats.json')
    # JSONStore writes the supplied dict to disk on construction
    JSONStore(stats_path, self.stats)
class BaseController(object):
    """BaseController is used to setup the repository. It serves as the
    basis for all other Controller objects

    Parameters
    ----------
    home : str
        home path of the project
    dal_driver : DALDriver
        an instance of a DALDriver to use while accessing the DAL

    Attributes
    ----------
    home : str
        Filepath for the location of the project
    dal_driver : DALDriver object
        This is an instance of a storage DAL driver
    config : JSONStore
        This is the set of configurations used to create a project
    dal
    model
    current_session
    code_driver
    file_driver
    environment_driver
    is_initialized

    Methods
    -------
    dal_instantiate()
        Instantiate a version of the DAL
    get_or_set_default(key, default_value)
        Returns value adn sets to default if no value present
    config_loader(key)
        Return the config dictionary based on key
    get_config_defaults()
        Return the configuration defaults
    """

    def __init__(self, home):
        self.home = home
        # Fail fast on a missing/invalid project directory
        if not os.path.isdir(self.home):
            raise InvalidProjectPathException(
                __("error", "controller.base.__init__", home))
        self.config = JSONStore(
            os.path.join(self.home, ".datmo", ".config"))
        # property caches and initial values
        self._dal = None
        self._model = None
        self._current_session = None
        self._code_driver = None
        self._file_driver = None
        self._environment_driver = None
        self._is_initialized = False

    @property
    # Controller objects are only in sync if the data drivers are the
    # same between objects
    # Currently pass dal_driver down from controller to controller to
    # ensure syncing dals
    # TODO: To fix dal from different controllers so they sync within one
    # session; they do NOT currently
    def dal(self):
        """Property that is maintained in memory

        Returns
        -------
        DAL
        """
        if self._dal == None:
            self._dal = self.dal_instantiate()
        return self._dal

    @property
    def model(self):
        """Property that is maintained in memory

        Returns
        -------
        Model
        """
        if self._model == None:
            # The first queried model is taken as THE project model
            models = self.dal.model.query({})
            self._model = models[0] if models else None
        return self._model

    @property
    def current_session(self):
        """Property that is maintained in memory

        Returns
        -------
        Session
        """
        if not self.model:
            raise DatmoModelNotInitializedException(
                __("error", "controller.base.current_session"))
        if self._current_session == None:
            # The session flagged "current" is the active one
            sessions = self.dal.session.query({"current": True})
            self._current_session = sessions[0] if sessions else None
        return self._current_session

    @property
    def code_driver(self):
        """Property that is maintained in memory

        Returns
        -------
        CodeDriver
        """
        if self._code_driver == None:
            module_details = self.config_loader("controller.code.driver")
            self._code_driver = module_details["constructor"](
                **module_details["options"])
        return self._code_driver

    @property
    def file_driver(self):
        """Property that is maintained in memory

        Returns
        -------
        FileDriver
        """
        if self._file_driver == None:
            module_details = self.config_loader("controller.file.driver")
            self._file_driver = module_details["constructor"](
                **module_details["options"])
        return self._file_driver

    @property
    def environment_driver(self):
        """Property that is maintained in memory

        Returns
        -------
        EnvironmentDriver
        """
        if self._environment_driver == None:
            module_details = self.config_loader(
                "controller.environment.driver")
            self._environment_driver = module_details["constructor"](
                **module_details["options"])
        return self._environment_driver

    @property
    def is_initialized(self):
        """Property that is maintained in memory

        Returns
        -------
        bool
            True if the project is property initialized else False
        """
        if not self._is_initialized:
            # All three drivers AND a model record must exist
            if self.code_driver.is_initialized and \
                self.environment_driver.is_initialized and \
                self.file_driver.is_initialized:
                if self.model:
                    self._is_initialized = True
        return self._is_initialized

    def dal_instantiate(self):
        # first load driver, then create DAL using driver
        dal_driver_dict = self.config_loader("storage.driver")
        dal_driver = dal_driver_dict["constructor"](
            **dal_driver_dict["options"])
        # Get DAL, set driver,
        dal_dict = self.config_loader("storage.local")
        dal_dict["options"]["driver"] = dal_driver
        return dal_dict["constructor"](**dal_dict["options"])

    def get_or_set_default(self, key, default_value):
        # Read `key` from the config store, persisting `default_value` on
        # first access
        value = self.config.get(key)
        if value is None:
            self.config.save(key, default_value)
            value = default_value
        return value

    def config_loader(self, key):
        # Resolve a config entry and attach its concrete class
        # constructor (note: get_class_contructor is the external helper's
        # actual (misspelled) name — do not "fix" it here)
        defaults = self.get_config_defaults()
        module_details = self.get_or_set_default(key, defaults[key])
        module_details["constructor"] = get_class_contructor(
            module_details["class_constructor"])
        return module_details

    def get_config_defaults(self):
        # Default driver/storage wiring, keyed by config-loader keys
        return {
            "controller.code.driver": {
                "class_constructor":
                    "datmo.core.controller.code.driver.git.GitCodeDriver",
                "options": {
                    "filepath": self.home,
                    "execpath": "git"
                }
            },
            "controller.file.driver": {
                "class_constructor":
                    "datmo.core.controller.file.driver.local.LocalFileDriver",
                "options": {
                    "filepath": self.home
                }
            },
            "controller.environment.driver": {
                "class_constructor":
                    "datmo.core.controller.environment.driver.dockerenv.DockerEnvironmentDriver",
                "options": {
                    "filepath": self.home,
                    "docker_execpath": "docker"
                }
            },
            "storage.local": {
                "class_constructor":
                    "datmo.core.storage.local.dal.LocalDAL",
                "options": {
                    "driver": "storage.driver"
                }
            },
            "storage.driver": {
                "class_constructor":
                    "datmo.core.storage.driver.blitzdb_dal_driver.BlitzDBDALDriver",
                "options": {
                    "driver_type": "file",
                    "connection_string": os.path.join(
                        self.home, ".datmo/database")
                }
            },
        }
    def init(self, name, description):
        """Initialize the project, or reinitialize it if already initialized

        Validates inputs, initializes the file/DAL/code/environment layers,
        sets up the config JSON store, then creates a new model record (or
        updates the existing one). On any failure, a brand-new project is
        cleaned up entirely, while an existing project's model record is
        rolled back to its prior name/description before re-raising.

        Parameters
        ----------
        name : str
            name for the model record
        description : str
            description for the model record

        Returns
        -------
        bool
            True on success

        Raises
        ------
        Exception
            re-raises whatever the failed step raised, after rollback/cleanup
        """
        is_new_model = False
        old_model = self.model  # snapshot for rollback on the update path
        if not self.model:
            is_new_model = True
        try:
            # Always validate inputs to the init function
            validate("create_project", {
                "name": name,
                "description": description
            })
            # Initialize File Driver if needed
            if not self.file_driver.is_initialized:
                self.file_driver.init()
            # Initialize the dal
            if not self.dal.is_initialized:
                self.dal.init()
            # Initialize Code Driver if needed
            if not self.code_driver.is_initialized:
                self.code_driver.init()
            # Initialize Environment Driver if needed
            if not self.environment_driver.is_initialized:
                self.environment_driver.init()
            # Initialize the config JSON store
            self.config_store = JSONStore(
                os.path.join(self.home,
                             Config().datmo_directory_name, ".config"))
            # Create model if new else update
            if is_new_model:
                _ = self.dal.model.create(
                    Model({
                        "name": name,
                        "description": description
                    }))
            else:
                self._model = self.dal.model.update({
                    "id": self.model.id,
                    "name": name,
                    "description": description
                })
            # Connect Environment Driver if needed
            # (not required but will warn if not present)
            try:
                if not self.environment_driver.is_connected:
                    self.environment_driver.connect()
            except EnvironmentConnectFailed:
                self.logger.warning(
                    __("warn", "controller.general.environment.failed"))

            # Build the initial default Environment (NOT NECESSARY)
            # self.environment_driver.build_image(tag="datmo-" + \
            #     self.model.name)

            return True
        except Exception:
            # if any error occurred with new model, ensure no initialize
            # occurs and raise previous error; if any error occurred with an
            # existing model, ensure no updates were made, raise previous
            # error
            if is_new_model:
                self.cleanup()
            else:
                self._model = self.dal.model.update({
                    "id": old_model.id,
                    "name": old_model.name,
                    "description": old_model.description
                })
            raise
class __InternalConfig:
    """Process-wide configuration payload

    Holds the project home path, logging settings, remote credentials
    lookup, and a small JSON-file-backed cache with per-key expiry.
    """

    def __init__(self):
        self._home = None
        self.datmo_directory_name = ".datmo"
        self.logging_level = logging.DEBUG
        DatmoLogger.get_logger(__name__).info("initializing")
        # JSON-file-backed cache shared by get/set_cache_item below
        self.data_cache = JSONStore(
            os.path.join(os.path.expanduser("~"), ".datmo", "cache.json"))
        self.docker_cli = '/usr/bin/docker'

    @property
    def home(self):
        # Project home directory; None until set_home() is called
        return self._home

    @property
    def remote_credentials(self):
        """ Returns credentials if present

        Returns
        -------
        MASTER_SERVER_IP : str
            return if present else None
        DATMO_API_KEY : str
            return if present else None
        END_POINT : str
            return if present else None
        """
        # 1) Load from the environment if datmo config not already saved
        #    globally
        MASTER_SERVER_IP = os.environ.get('MASTER_SERVER_IP', None)
        DATMO_API_KEY = os.environ.get('DATMO_API_KEY', None)
        # 2) loading the datmo config if present; env vars take precedence
        datmo_config_filepath = os.path.join(
            os.path.expanduser("~"), ".datmo", "config")
        if os.path.isfile(datmo_config_filepath):
            datmo_config = JSONStore(datmo_config_filepath)
            config_dict = datmo_config.to_dict()
            if MASTER_SERVER_IP is None:
                MASTER_SERVER_IP = config_dict.get('MASTER_SERVER_IP', None)
            if DATMO_API_KEY is None:
                DATMO_API_KEY = config_dict.get('DATMO_API_KEY', None)
        if MASTER_SERVER_IP:
            END_POINT = 'http://' + MASTER_SERVER_IP + ':2083/api/v1'
        else:
            END_POINT = None
        return MASTER_SERVER_IP, DATMO_API_KEY, END_POINT

    def set_home(self, home_path):
        # Record the project home path used by the rest of the config
        self._home = home_path

    @staticmethod
    def _epoch_seconds_now():
        """Current local time as integer seconds since the epoch

        Replaces strftime('%s'), which is a glibc extension that is
        undocumented in the C standard and unavailable on e.g. Windows;
        time.mktime gives the same local-time epoch seconds portably.
        """
        import time  # function-scope import keeps the class self-contained
        return int(time.mktime(datetime.datetime.now().timetuple()))

    def get_cache_item(self, key):
        """Return the cached value for key, or None if missing or expired

        Parameters
        ----------
        key : str
            cache key (namespaced internally)

        Returns
        -------
        object or None
            cached value, or None when never set or past its expiry;
            expired entries are removed as a side effect
        """
        cache_expire_key = 'cache_key_expires.' + key
        cache_key = 'cache_key.' + key
        cache_expire_val = self.data_cache.get(cache_expire_key)
        # no cache expire val, it's not stored
        if cache_expire_val is None:
            return None
        # return value if item has not expired
        elif int(cache_expire_val) > self._epoch_seconds_now():
            return self.data_cache.get(cache_key)
        # expire item and return None
        else:
            self.data_cache.remove(cache_expire_key)
            self.data_cache.remove(cache_key)
            return None

    def set_cache_item(self, key, value, duration=60):
        """Store value under key, expiring `duration` minutes from now

        Parameters
        ----------
        key : str
            cache key (namespaced internally)
        value : object
            JSON-serializable value to cache
        duration : int, optional
            lifetime in minutes (default 60)
        """
        cache_expire_key = 'cache_key_expires.' + key
        cache_key = 'cache_key.' + key
        expire_val = (duration * 60) + self._epoch_seconds_now()
        self.data_cache.save(cache_expire_key, expire_val)
        self.data_cache.save(cache_key, value)