class DataForTest:
    __serializable__ = Serializable(composite={'inner': DataForTestInner})

    def __init__(self, **kwargs):
        def kw_or_def(key, default=None):
            return kwargs[key] if key in kwargs else default

        self.inner = kw_or_def('inner', DataForTestInner())
        self.name = kw_or_def('name', "ooo")
class ResourcesData(PersistentDict):
    """ Child class for RES data. """

    DIR = "resources"
    FILE_NAME = "resources"

    __serializable__ = Serializable(
        composite={'__all__': ResPreset}
    )

    @staticmethod
    def open():
        return PersistentDict.open(ResourcesData)
class PbsData(PersistentDict):
    """ Child class for PBS data. """

    DIR = "pbs"
    FILE_NAME = "pbs"

    __serializable__ = Serializable(
        composite={'__all__': PbsPreset}
    )

    @staticmethod
    def open():
        return PersistentDict.open(PbsData)
class SshData(PersistentDict):
    """ Child class for SSH data. """

    DIR = "ssh"
    FILE_NAME = "ssh"

    __serializable__ = Serializable(
        composite={'__all__': SshPreset}
    )

    @staticmethod
    def open():
        return PersistentDict.open(SshData)
class EnvPresets(PersistentDict):
    """ Child class for ENV data. """

    DIR = "environments"
    FILE_NAME = "environments"

    __serializable__ = Serializable(
        composite={'__all__': EnvPreset}
    )

    @staticmethod
    def open():
        return PersistentDict.open(EnvPresets)
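# --- Illustrative usage sketch (editor addition, not part of the original
# sources): every PersistentDict subclass above exposes the same open()
# entry point, which delegates to PersistentDict.open() with the concrete
# class. Loading two of the preset stores might look like this:
def _example_open_presets():
    ssh_presets = SshData.open()       # dict-like store of SshPreset items
    env_presets = EnvPresets.open()    # dict-like store of EnvPreset items
    return ssh_presets, env_presets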
def save_config_file(name, config, directory=None, extension='yaml'):
    """Save config object to file name.extension in the config directory."""
    if directory is not None:
        directory = os.path.join(__config_dir__, directory)
    else:
        directory = __config_dir__

    try:
        os.makedirs(directory, exist_ok=True)
    except OSError:
        raise Exception('Cannot create config directory: ' + directory)

    file_name = os.path.join(directory, name + '.' + extension)
    data = Serializable.dump(config)
    # write the serialized data as YAML; the file is closed even on error
    with open(file_name, 'w') as yaml_file:
        yaml.dump(data, yaml_file)
class ConfigData:
    """ Child class for Config data. """

    DIR = "settings"
    FILE_NAME = "settings"

    __serializable__ = Serializable(
        excluded=['observers']
    )

    def __init__(self, **kwargs):
        def kw_or_def(key, default=None):
            return kwargs[key] if key in kwargs else default

        self.observers = []
        """List of observer objects to be notified on change."""
        self.analysis = kw_or_def('analysis', None)
        """Name of the active analysis"""
        self.selected_mj = kw_or_def('selected_mj', None)
        """Multijob selected in the UI"""
        self.local_env = kw_or_def('local_env', None)
        """Local environment selected in the UI"""
        self.report_dir = kw_or_def('report_dir', os.path.expanduser("~"))
        """Last folder where a report file was saved"""

    def __setattr__(self, key, value):
        self.__dict__[key] = value
        self.notify()

    def notify(self):
        for observer in self.observers:
            observer.notify()

    @staticmethod
    def open():
        directory = os.path.join(BASE_DIR, ConfigData.DIR)
        config = cfg.get_config_file(ConfigData.FILE_NAME, directory,
                                     cls=ConfigData)
        if config is None:
            config = ConfigData()
        return config

    def save(self):
        directory = os.path.join(BASE_DIR, ConfigData.DIR)
        cfg.save_config_file(ConfigData.FILE_NAME, self, directory)
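# --- Illustrative usage sketch (editor addition, not part of the original
# sources): ConfigData routes every attribute assignment through
# __setattr__(), which calls notify() on all registered observers.
# _PrintObserver is a hypothetical observer written only for this example;
# any object with a notify() method can be registered.
class _PrintObserver:
    def notify(self):
        print("settings changed")


def _example_config_observer():
    config = ConfigData.open()
    config.observers.append(_PrintObserver())
    config.analysis = "demo_analysis"   # assignment triggers notify()
    config.save()                       # 'observers' is excluded from serialization
    return config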
def get_config_file(name, directory=None, cls=None, extension='yaml'):
    """
    Get config object from file name in the config directory.

    return: Config object or None (if the file does not exist)
    """
    if directory is not None:
        directory = os.path.join(__config_dir__, directory)
        if not os.path.isdir(directory):
            return None
    else:
        directory = __config_dir__

    file_name = os.path.join(directory, name + '.' + extension)
    try:
        yaml_file = open(file_name, 'r')
    except (FileNotFoundError, IOError):
        return None
    # note: yaml.load() without an explicit Loader is deprecated in PyYAML >= 5.1
    config = yaml.load(yaml_file)
    yaml_file.close()
    config = Serializable.load(config, cls)
    return config
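# --- Illustrative usage sketch (editor addition, not part of the original
# sources): round-tripping a serializable object through save_config_file()
# and get_config_file(). DataForTest is the test class defined above; the
# 'examples' subdirectory and the 'demo' file name are arbitrary values
# chosen for this sketch.
def _example_config_round_trip():
    original = DataForTest(name="demo")
    # writes <config_dir>/examples/demo.yaml from Serializable.dump(original)
    save_config_file('demo', original, directory='examples')
    # reads it back and rebuilds a DataForTest via Serializable.load()
    restored = get_config_file('demo', directory='examples', cls=DataForTest)
    # get_config_file() returns None when the file or directory is missing
    return restored if restored is not None else DataForTest()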
class MultiJobData(PersistentDict):
    """ Child class for MJ data. """

    DIR = "mj"
    FILE_NAME = "mj"

    __serializable__ = Serializable(
        composite={'__all__': MultiJob}
    )

    @staticmethod
    def open(id, path):
        mjs = PersistentDict.open(MultiJobData, id)
        if path is not None:
            for key in mjs:
                dir = os.path.join(path, mjs[key].preset.analysis, 'mj',
                                   mjs[key].preset.name)
                if not os.path.isdir(dir):
                    mjs[key].valid = False
        return mjs

    def save(self, id):
        super(MultiJobData, self).save(id)
class MultiJobState:
    """
    Data for current state of MultiJob
    """

    __serializable__ = Serializable(composite={'status': TaskStatus})

    def __init__(self, name, **kwargs):
        """
        Default initialization.
        :param name: MultiJob name
        :return: None
        """
        def kw_or_def(key, default=None):
            return kwargs[key] if key in kwargs else default

        self.name = name
        """Name of multijob"""
        self.analysis = kw_or_def('analysis')
        """Name of the analysis"""
        self.insert_time = kw_or_def('insert_time', time.time())
        """When MultiJob was created"""
        self.queued_time = kw_or_def('queued_time')
        """When MultiJob was queued"""
        self.start_time = kw_or_def('start_time')
        """When MultiJob was started"""
        self.run_interval = kw_or_def('run_interval', 0)
        """MultiJob run time from start, in seconds"""
        self.status = kw_or_def('status', TaskStatus.none)
        """MultiJob current status"""
        self.known_jobs = kw_or_def('known_jobs', 0)
        """Count of known jobs (minimal amount of jobs)"""
        self.estimated_jobs = kw_or_def('estimated_jobs', 0)
        """Estimated count of jobs"""
        self.finished_jobs = kw_or_def('finished_jobs', 0)
        """Count of finished jobs"""
        self.running_jobs = kw_or_def('running_jobs', 0)
        """Count of running jobs"""
        self.update_time = kw_or_def('update_time')
        """When MultiJobState was last updated"""

    def copy(self, new_status=None):
        """
        Deep copy of the state.
        :param new_status: Set status if it is not None
        :return: new state
        """
        new_state = copy.deepcopy(self)
        if new_status is not None:
            new_state.status = new_status
        return new_state

    def update(self, new_state):
        """
        Update this state with data received in new_state.
        :param new_state: Communication state data
        :return: None
        """
        self.queued_time = new_state.queued_time
        self.start_time = new_state.start_time
        self.run_interval = new_state.run_interval
        self.status = new_state.status
        self.known_jobs = new_state.known_jobs
        self.estimated_jobs = new_state.estimated_jobs
        self.finished_jobs = new_state.finished_jobs
        self.running_jobs = new_state.running_jobs
        self.update_time = time.time()

    def get_status(self):
        """
        Return MultiJob status.
        :return: Current TaskStatus
        """
        return self.status

    def set_status(self, new_status):
        """
        Directly changes status of the MultiJob.
        :param new_status: TaskStatus to replace the current one.
        :return: None
        """
        self.status = new_status

    def __repr__(self):
        """
        Representation of object
        :return: String representation of object.
        """
        return "%s(%r)" % (self.__class__.__name__, self.__dict__)
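# --- Illustrative usage sketch (editor addition, not part of the original
# sources): the intended update flow for MultiJobState. A state object
# received from the backend is merged into the locally stored one via
# update(); copy() produces an independent deep-copied snapshot, optionally
# with a different status.
def _example_state_update(local_state, received_state):
    snapshot = local_state.copy()        # keep the pre-update state
    local_state.update(received_state)   # merge times, counters and status;
                                         # update_time is set inside update()
    return snapshot, local_state.get_status()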
class MultiJob:
    __serializable__ = Serializable(composite={
        'preset': MultiJobPreset,
        'state': MultiJobState
    }, excluded=['valid'])

    rdeleted_actions = {
        MultijobActions.delete_remote,
        MultijobActions.download_whole
    }

    def __init__(self, preset, **kwargs):
        def kw_or_def(key, default=None):
            return kwargs[key] if key in kwargs else default

        self.preset = preset
        """mj preset"""
        self.state = kw_or_def('state', MultiJobState(preset.name))
        """mj state"""
        self.error = kw_or_def('error', "")
        """mj error for error state"""
        self.last_status = kw_or_def('last_status', None)
        """State before deleting"""
        self.valid = True
        """actions dependent on internal state of mj"""

    @property
    def id(self):
        """Get multijob id = analysis_name"""
        return self.preset.analysis + "_" + self.preset.name

    def get_preset(self):
        """
        Get MultiJob preset.
        :return: MultiJobPreset object
        """
        return self.preset

    def get_state(self):
        """
        Return MultiJob state.
        :return: MultiJobState object
        """
        return self.state

    def get_jobs(self):
        """
        Return list of Jobs that belong to MultiJob.
        :return: List of Jobs
        """
        conf_path = Installation.get_config_dir_static(self.preset.name,
                                                       self.preset.analysis)
        states = JobsState()
        states.load_file(conf_path)
        return states.jobs

    def is_action_forbidden(self, action):
        """
        Return True if the specified action is forbidden for this MultiJob.
        :param action: MultiJob action, e.g. delete or stop
        :return: False if the action is permitted, True if it is forbidden
        """
        mj_local = self.preset.mj_ssh_preset is None
        return (
            self.state.status is None or
            (self.state.status, action) not in TASK_STATUS_PERMITTED_ACTIONS or
            (action in self.rdeleted_actions and self.preset.deleted_remote) or
            (action == MultijobActions.download_whole and
                (self.preset.downloaded or mj_local)) or
            (action == MultijobActions.reuse and not mj_local and
                self.preset.deleted_remote))

    def get_logs(self):
        """
        Scans log directory and returns log files.
        :return: List of MultiJobLog objects
        """
        logs = []
        mj_config_path = Installation.get_config_dir_static(
            self.preset.name, self.preset.analysis)
        mj_config_path_conf = os.path.join(mj_config_path,
                                           GEOMOP_INTERNAL_DIR_NAME)

        # MJ preparation log
        file = "mj_preparation.log"
        if os.path.isfile(os.path.join(mj_config_path_conf, file)):
            log = MultiJobLog(os.path.normpath(mj_config_path_conf), file)
            logs.append(log)

        # MJ log
        file = "mj_service.log"
        if os.path.isfile(os.path.join(mj_config_path_conf, file)):
            log = MultiJobLog(os.path.normpath(mj_config_path_conf), file)
            logs.append(log)

        # Jobs log
        for dir in os.listdir(mj_config_path):
            job_dir = os.path.join(mj_config_path, dir)
            if os.path.isdir(job_dir) and dir.startswith("action_"):
                file = "job_service.log"
                job_dir_conf = os.path.join(job_dir, GEOMOP_INTERNAL_DIR_NAME)
                if os.path.isfile(os.path.join(job_dir_conf, file)):
                    log = MultiJobLog(os.path.normpath(job_dir_conf), file)
                    logs.append(log)
        return logs

    def get_results(self):
        """
        Scans res directory and returns result files.
        :return: List of MultiJobRes objects
        """
        res_path = Installation.get_result_dir_static(self.preset.name,
                                                      self.preset.analysis)
        ress = []
        for file in os.listdir(res_path):
            if os.path.isfile(os.path.join(res_path, file)):
                res = MultiJobLog(res_path, file)
                ress.append(res)
        jobs = self.get_jobs()
        for job in jobs:
            dir = os.path.join(res_path, job.name)
            if os.path.isdir(dir):
                ress.extend(self._get_result_from_dir(dir))
        return ress

    def _get_result_from_dir(self, dir, recurs=True):
        """Return all files in the given directory as results."""
        ress = []
        for file in os.listdir(dir):
            new = os.path.join(dir, file)
            if os.path.isfile(new):
                res = MultiJobLog(dir, file)
                ress.append(res)
            elif recurs and os.path.isdir(new):
                ress.extend(self._get_result_from_dir(new))
        return ress

    def get_configs(self):
        """
        Scans config directory and returns config files.
        :return: List of MultiJobConf objects
        """
        conf_path = Installation.get_config_dir_static(self.preset.name,
                                                       self.preset.analysis)
        confs = []
        for file in os.listdir(conf_path):
            if os.path.isfile(os.path.join(conf_path, file)):
                conf = MultiJobLog(conf_path, file)
                confs.append(conf)
        return confs
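# --- Illustrative usage sketch (editor addition, not part of the original
# sources): querying a MultiJob for permitted actions and collecting its
# log files. 'mj' stands for an existing MultiJob instance, e.g. one taken
# from the dictionary returned by MultiJobData.open().
def _example_inspect_multijob(mj):
    can_download = not mj.is_action_forbidden(MultijobActions.download_whole)
    # get_logs() scans the multijob config directory for the preparation,
    # service and per-job log files
    logs = mj.get_logs()
    return can_download, logs, mj.get_state().get_status()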
class WorkspacesConf():
    """
    Known workspaces
    """

    DIR = "workspaces"
    FILE_NAME = "workspaces"

    __serializable__ = Serializable(
        composite={'workspaces': WorkspaceConf}
    )

    def __init__(self, **kwargs):
        def kw_or_def(key, default=None):
            return kwargs[key] if key in kwargs else default

        self.workspaces = kw_or_def("workspaces", [])
        """Workspaces array"""
        self.selected = kw_or_def("selected")
        """selected workspace"""
        if self.selected is not None and self.selected >= len(self.workspaces):
            if len(self.workspaces) == 0:
                self.selected = None
            else:
                self.selected = 0

    @property
    def workspace(self):
        """Path of the selected workspace."""
        return self.workspaces[self.selected].path

    @staticmethod
    def _get_compare_path(path):
        """return normalized real path"""
        if path is None:
            return None
        res = os.path.realpath(path)
        # return os.path.normcase(res)
        # Modified due to docker path problems.
        return os.path.normpath(res)

    def get_id(self):
        """get id of the selected workspace"""
        if self.selected is None:
            return 0
        return self.workspaces[self.selected].id

    def get_path(self):
        """get path of the selected workspace"""
        if self.selected is None:
            return None
        return self.workspaces[self.selected].path

    def save(self, selected_mj, selected_analysis):
        """serialize settings"""
        # explicit None check so that the workspace with index 0 is saved too
        if self.selected is not None:
            self.workspaces[self.selected].selected_mj = selected_mj
            self.workspaces[self.selected].analysis = selected_analysis
        directory = os.path.join(BASE_DIR, self.DIR)
        base_cfg.save_config_file(self.FILE_NAME, self, directory)

    @classmethod
    def open(cls):
        """deserialize settings"""
        directory = os.path.join(BASE_DIR, cls.DIR)
        config = base_cfg.get_config_file(cls.FILE_NAME, directory,
                                          cls=WorkspacesConf)
        if config is None:
            config = WorkspacesConf()
        return config

    def save_to_workspace(self, presets):
        """save selected workspace to workspace directory"""
        self.workspaces[self.selected].save_workspace(presets)

    def get_selected_mj(self):
        """get mj selected during closing"""
        if self.selected is not None:
            return self.workspaces[self.selected].selected_mj
        return 0

    def get_selected_analysis(self):
        """get analysis from selected mj"""
        if self.selected is not None:
            return self.workspaces[self.selected].selected_analysis
        return 0

    def select_workspace(self, path, mj_container):
        """
        Select a new workspace.

        If the workspace is already selected, return False.
        If the workspace is already in the array, switch to it and return True.
        Otherwise append it, import it, and return the import result
        (True on success, False to stay in the old workspace).
        """
        path_exist = False
        path = self._get_compare_path(path)
        if path == self.get_path():
            return False
        for i in range(0, len(self.workspaces)):
            if path == self.workspaces[i].path:
                self.selected = i
                path_exist = True
                break
        if not path_exist:
            i = len(self.workspaces)
            self.selected = i
            self.workspaces.append(WorkspaceConf(path=path, id=i))
            return self.workspaces[self.selected].import_workspace(mj_container)
        return True
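# --- Illustrative usage sketch (editor addition, not part of the original
# sources): switching workspaces with WorkspacesConf. The path is an
# example value and 'mj_container' stands for whatever multijob container
# the caller already holds.
def _example_switch_workspace(mj_container):
    workspaces = WorkspacesConf.open()
    changed = workspaces.select_workspace("/home/user/geomop_workspace",
                                          mj_container)
    if changed:
        # persist the selection; the selected multijob and analysis are
        # omitted here for brevity
        workspaces.save(selected_mj=None, selected_analysis=None)
    return workspaces.get_path()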
class _Config:
    """Class for ModelEditor serialization"""

    __serializable__ = Serializable(excluded=['observers'])

    DEBUG_MODE = False
    """debug mode changes the behaviour"""

    SERIAL_FILE = "ModelEditorData"
    """Serialize class file"""

    COUNT_RECENT_FILES = 5
    """Count of recent files"""

    CONFIG_DIR = os.path.join(base_cfg.__config_dir__, 'ModelEditor')

    LINE_ENDINGS_LF = 'unix'
    LINE_ENDINGS_CRLF = 'windows'

    def __init__(self, **kwargs):
        def kw_or_def(key, default=None):
            """Get keyword arg or default value."""
            return kwargs[key] if key in kwargs else default

        self.observers = []
        """objects to be notified of changes"""
        self.current_working_dir = os.getcwd()
        """directory of the most recently opened data file"""
        self.recent_files = kw_or_def('recent_files', [])
        """a list of recently opened files"""
        self.format_files = kw_or_def('format_files', [])
        """a list of format files"""
        self.display_autocompletion = kw_or_def('display_autocompletion', False)
        """whether to display autocompletion automatically"""
        self.symbol_completion = kw_or_def('symbol_completion', True)
        """whether to automatically complete brackets and array symbols"""
        self.shortcuts = kw_or_def(
            'shortcuts',
            deepcopy(shortcuts_definition.DEFAULT_USER_SHORTCUTS))
        """user customizable keyboard shortcuts"""
        if 'open_window' not in self.shortcuts:
            # added in version 1.0.0
            self.shortcuts['open_window'] = \
                shortcuts_definition.DEFAULT_USER_SHORTCUTS['open_window']
        self.font = kw_or_def('font', constants.DEFAULT_FONT)
        """text editor font"""
        self._line_endings = kw_or_def('_line_endings',
                                       _Config.LINE_ENDINGS_LF)
        # self._analysis = kw_or_def('_analysis')  # analysis no longer in use
        self._analysis = None
        self._workspace = kw_or_def('_workspace')

        # initialize project and workspace
        self.workspace = self._workspace
        self.analysis = self._analysis

    def update_current_working_dir(self, file_name):
        """Save dir from the last used file."""
        analysis_directory = None
        directory = os.path.dirname(os.path.realpath(file_name))
        if self.workspace is not None and self.analysis is not None:
            analysis_directory = os.path.join(self.workspace, self.analysis)
        if analysis_directory is None or directory != analysis_directory:
            self.current_working_dir = directory

    @staticmethod
    def open():
        """Open config from saved file (if it exists)."""
        config = base_cfg.get_config_file(_Config.SERIAL_FILE,
                                          _Config.CONFIG_DIR,
                                          cls=_Config)
        if config is None:
            config = _Config()
        return config

    def save(self):
        """Save config data"""
        base_cfg.save_config_file(self.__class__.SERIAL_FILE, self,
                                  self.CONFIG_DIR)

    def add_recent_file(self, file_name, format_file):
        """
        If the file is already in the list, move it to the top; otherwise add
        it to the top and drop the last file if needed. The corresponding
        format files are kept in sync.
        """
        # 0 files
        if len(self.recent_files) == 0:
            self.recent_files.append(file_name)
            self.format_files.append(format_file)
            self.save()
            return
        # first file == update file
        if file_name == self.recent_files[0]:
            # format file can be changed
            self.format_files[0] = format_file
            self.save()
            return
        # init for
        last_file = self.recent_files[0]
        last_format = self.format_files[0]
        self.recent_files[0] = file_name
        self.format_files[0] = format_file
        for i in range(1, len(self.recent_files)):
            if file_name == self.recent_files[i]:
                # added file is in list
                self.recent_files[i] = last_file
                self.format_files[i] = last_format
                self.save()
                return
            last_file_pom = self.recent_files[i]
            last_format_pom = self.format_files[i]
            self.recent_files[i] = last_file
            self.format_files[i] = last_format
            last_file = last_file_pom
            last_format = last_format_pom
            # recent files is max+1, but first is not displayed
            if self.__class__.COUNT_RECENT_FILES < i + 1:
                self.save()
                return
        # add last file
        self.recent_files.append(last_file)
        self.format_files.append(last_format)
        self.save()

    def get_format_file(self, file_name):
        """Get the format file stored at the same position as the file."""
        for i in range(0, len(self.recent_files)):
            if self.recent_files[i] == file_name:
                return self.format_files[i]
        return None

    @property
    def data_dir(self):
        """Data directory - either an analysis dir or the current working dir."""
        if self.workspace and self.analysis:
            return os.path.join(self.workspace, self.analysis)
        else:
            return self.current_working_dir

    @property
    def workspace(self):
        """path to workspace"""
        return self._workspace

    @workspace.setter
    def workspace(self, value):
        if value == '' or value is None:
            self._workspace = None
        if value != self._workspace:
            # close analysis if workspace is changed
            self.analysis = None
        self._workspace = value

    @property
    def analysis(self):
        """name of the analysis in the workspace"""
        return self._analysis

    @analysis.setter
    def analysis(self, value):
        if value == '' or value is None:
            self._analysis = None
            Analysis.current = None
        else:
            self._analysis = value
            try:
                analysis = Analysis.open(self._workspace, self._analysis)
            except InvalidAnalysis:
                self._analysis = None
            else:
                Analysis.current = analysis
        self.notify_all()

    @property
    def line_endings(self):
        """line endings used in the edited files"""
        return self._line_endings

    @line_endings.setter
    def line_endings(self, value):
        if value != self._line_endings:
            self._line_endings = value
            self.notify_all()

    def notify_all(self):
        """Notify all observers about changes."""
        for observer in self.observers:
            observer.config_changed()
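# --- Illustrative usage sketch (editor addition, not part of the original
# sources): how add_recent_file() maintains the most-recently-used list.
# Re-adding a file that is already in the list rotates it to the front and
# keeps its format file aligned; the list length stays around
# COUNT_RECENT_FILES. The file and format names are example values.
def _example_recent_files():
    config = _Config.open()
    config.add_recent_file('/tmp/model_a.yaml', 'format_a.json')
    config.add_recent_file('/tmp/model_b.yaml', 'format_b.json')
    config.add_recent_file('/tmp/model_a.yaml', 'format_a.json')  # back to front
    return config.get_format_file('/tmp/model_a.yaml')            # -> 'format_a.json'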
class Analysis:
    """Analysis settings and data."""

    __serializable__ = Serializable(
        excluded=['filename', 'workspace', 'name', 'analysis_dir',
                  '_analysis_dir'],
        composite={'params': Parameter,
                   'files': File,
                   'additional_files': File,
                   'layers_files': File,
                   'script_files': File})

    current = None
    """currently opened analysis"""

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.workspace = None
        self.name = None
        self.params = kwargs['params'] if 'params' in kwargs else []
        self.files = kwargs['files'] if 'files' in kwargs else []
        self.additional_files = (kwargs['additional_files']
                                 if 'additional_files' in kwargs else [])
        self.layers_files = (kwargs['layers_files']
                             if 'layers_files' in kwargs else [])
        self.script_files = (kwargs['script_files']
                             if 'script_files' in kwargs else [])
        self._analysis_dir = ''
        self.flow123d_version = (kwargs['flow123d_version']
                                 if 'flow123d_version' in kwargs else '')
        self.mj_counter = kwargs['mj_counter'] if 'mj_counter' in kwargs else 1

    @staticmethod
    def _get_compare_path(path):
        """return case-normalized real path"""
        if path is None:
            return None
        res = os.path.realpath(path)
        return os.path.normcase(res)

    @property
    def analysis_dir(self):
        return self._analysis_dir

    @analysis_dir.setter
    def analysis_dir(self, value):
        self._analysis_dir = self._get_compare_path(value)

    @property
    def selected_file_paths(self):
        return [f.file_path for f in (self.files + self.additional_files)
                if f.selected]

    @staticmethod
    def open(workspace, analysis_name, sync_files=False):
        """Retrieve analysis from settings by its name and workspace."""
        if analysis_name is None:
            raise InvalidAnalysis("No analysis specified.")
        if workspace is None:
            raise InvalidAnalysis("No workspace specified.")

        directory = os.path.join(workspace, analysis_name)
        analysis = config.get_config_file(ANALYSIS_MAIN_FILE_NAME,
                                          directory=directory,
                                          extension=ANALYSIS_MAIN_FILE_EXT,
                                          cls=Analysis)
        if analysis is None:
            raise InvalidAnalysis("Selected analysis is invalid.")

        analysis.analysis_dir = directory
        analysis.filename = os.path.join(directory, ANALYSIS_MAIN_FILE)
        analysis.workspace = workspace
        analysis.name = analysis_name

        # scan and update files
        if sync_files:
            analysis.sync_files()
        return analysis

    def sync_files(self):
        """Scan and update files."""
        current_configs = {file.file_path: file for file in self.files}
        current_additional_files = {file.file_path: file
                                    for file in self.additional_files}
        current_layers_files = {file.file_path: file
                                for file in self.layers_files}
        current_script_files = {file.file_path: file
                                for file in self.script_files}
        self.files = []
        self.additional_files = []
        self.layers_files = []
        self.script_files = []
        for root, dirs, files in os.walk(self.analysis_dir):
            # ignore multijobs folder
            if root.startswith(os.path.join(self.analysis_dir, MULTIJOBS_DIR)):
                continue
            for filename in files:
                file_path = self.make_relative_path(
                    os.path.join(root, filename))
                if file_path.endswith('.yaml'):
                    if file_path in current_configs:
                        self.files.append(current_configs[file_path])
                    else:
                        self.files.append(File(file_path))
                elif file_path.endswith('.json'):
                    if file_path in current_layers_files:
                        self.layers_files.append(
                            current_layers_files[file_path])
                    else:
                        self.layers_files.append(File(file_path))
                elif file_path.endswith('.py'):
                    if file_path in current_script_files:
                        self.script_files.append(
                            current_script_files[file_path])
                    else:
                        self.script_files.append(File(file_path))
                else:
                    if filename == ANALYSIS_MAIN_FILE:
                        continue
                    elif file_path in current_additional_files:
                        self.additional_files.append(
                            current_additional_files[file_path])
                    else:
                        self.additional_files.append(File(file_path))

    @staticmethod
    def open_from_mj(mj_dir, analysis_name='N/A'):
        """Retrieve analysis from a multijob directory."""
        analysis = config.get_config_file(ANALYSIS_MAIN_FILE_NAME,
                                          directory=os.path.join(
                                              mj_dir, MJ_CONFIG_DIR),
                                          extension=ANALYSIS_MAIN_FILE_EXT,
                                          cls=Analysis)
        if analysis is None:
            raise InvalidAnalysis("Selected analysis is invalid.")
        analysis.analysis_dir = mj_dir
        analysis.filename = os.path.join(mj_dir, ANALYSIS_MAIN_FILE)
        analysis.workspace = None
        analysis.name = analysis_name
        return analysis

    @staticmethod
    def exists(workspace, analysis_name):
        """Determine whether the analysis exists in a workspace."""
        if not workspace or not analysis_name:
            return False
        analysis_filename = os.path.join(workspace, analysis_name,
                                         ANALYSIS_MAIN_FILE)
        if not os.path.isfile(analysis_filename):
            return False
        return True

    @staticmethod
    def notify(data):
        """Observer method to update current analysis."""
        if data.workspace is None or data.analysis is None:
            Analysis.current = None
            return
        if (Analysis.current is None or
                data.workspace != Analysis.current.workspace or
                data.analysis != Analysis.current.name):
            if Analysis.exists(data.workspace, data.analysis):
                Analysis.current = Analysis.open(data.workspace, data.analysis)
            else:
                data.analysis = None
                Analysis.current = None

    @staticmethod
    def reload_current():
        """Re-read the current analysis file and update the analysis data."""
        if Analysis.current is None:
            return None
        analysis = Analysis.open(Analysis.current.workspace,
                                 Analysis.current.name)
        analysis._sync_analysis()
        Analysis.current = analysis
        return analysis

    def save(self):
        """Save the current analysis as a config file."""
        config.save_config_file(ANALYSIS_MAIN_FILE_NAME, self,
                                self._analysis_dir, ANALYSIS_MAIN_FILE_EXT)

    def make_relative_path(self, file_path):
        """Make the path relative to analysis_dir."""
        file_path = self._get_compare_path(file_path)
        if not self._analysis_dir:
            return file_path
        if not file_path.startswith(self._analysis_dir):
            # assume file_path is already relative to the analysis dir
            return file_path
        return file_path[len(self._analysis_dir) + 1:]

    def merge_params(self, params):
        """Merge another param collection into this one."""
        for new_param in params:
            # check if param doesn't exist, then add it
            exists = False
            for curr_param in self.params:
                if curr_param.name == new_param.name:
                    exists = True
                    break
            if not exists:
                self.params.append(new_param)

    def is_abs_path_in_analysis_dir(self, file_path):
        """Whether the file path lies in the analysis directory or its subdirectories."""
        file_path = self._get_compare_path(file_path)
        if not file_path or not self._analysis_dir:
            return False
        return file_path.startswith(self._analysis_dir)

    def add_file(self, file_path, params=None):
        """Add or sync a file. If the file already exists, update its parameters."""
        file_path = self.make_relative_path(file_path)
        for file in self.files:
            if file_path == file.file_path:
                if params is not None:
                    file.params = params
                return
        # file is not registered
        file = File(file_path, params)
        self.files.append(file)
        return

    def _sync_analysis(self):
        """Write current files and params to an analysis file."""
        if len(self._analysis_dir) == 0 or not os.path.isdir(self._analysis_dir):
            return
        for root, directories, filenames in os.walk(self._analysis_dir):
            for filename in filenames:
                if filename.endswith('.yaml') and \
                        "analysis_results" not in root:
                    self.add_file(os.path.join(root, filename))
        self.save()

    def copy_into_mj_folder(self, mj):
        """Copy this analysis into the multijob folder."""
        mj_dir = os.path.join(self.analysis_dir, MULTIJOBS_DIR,
                              mj.preset.name, MJ_CONFIG_DIR)
        if not os.path.isdir(mj_dir):
            os.makedirs(mj_dir)

        # get all files used by the analysis
        files = self.selected_file_paths

        # add analysis configuration file
        files.append(ANALYSIS_MAIN_FILE)

        # get parameters
        params = {param.name: param.value for param in self.params
                  if param.value}

        # copy the selected files (with filled in parameters)
        for file in set(files):
            src = os.path.join(self.analysis_dir, file)
            dst = os.path.join(mj_dir, file)
            # create directory structure if not present
            dst_dir = os.path.dirname(dst)
            if not os.path.isdir(dst_dir):
                os.makedirs(dst_dir)
            gm_base.flow_util.analysis.replace_params_in_file(src, dst, params)

    @staticmethod
    def is_analysis(path):
        """Determine if a path is an analysis directory."""
        if os.path.isdir(path):
            if path.endswith(CONFIG_DIR):
                return False
            for file_ in os.listdir(path):
                if file_.lower() == ANALYSIS_MAIN_FILE:
                    return True
        return False

    @staticmethod
    def list_analyses_in_workspace(workspace):
        """Get a list of all analyses in the workspace."""
        analyses = []
        if os.path.exists(workspace):
            for name in os.listdir(workspace):
                path = os.path.join(workspace, name)
                if Analysis.is_analysis(path):
                    analyses.append(name)
        return analyses

    @staticmethod
    def get_workspace_config_dir(workspace, module):
        """Get (and create if needed) the config directory of a module in the workspace."""
        if os.path.exists(workspace):
            settings = os.path.join(workspace, CONFIG_DIR)
            if not os.path.exists(settings):
                os.makedirs(settings)
            settings = os.path.join(settings, module)
            if not os.path.exists(settings):
                os.makedirs(settings)
            return settings
        return None
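# --- Illustrative usage sketch (editor addition, not part of the original
# sources): opening an analysis from a workspace, refreshing its file lists
# from disk and saving it back. The workspace path and analysis name are
# example values.
def _example_open_analysis():
    workspace = "/home/user/geomop_workspace"
    if not Analysis.exists(workspace, "demo_analysis"):
        return None
    # sync_files=True re-scans the analysis directory and sorts files into
    # .yaml configs, .json layers files, .py scripts and additional files
    analysis = Analysis.open(workspace, "demo_analysis", sync_files=True)
    analysis.save()
    return analysis.selected_file_paths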
class _Config:
    """Class for LayerEditor serialization"""

    __serializable__ = Serializable(excluded=['observers'])

    DEBUG_MODE = False
    """debug mode changes the behaviour"""

    SERIAL_FILE = "LayerEditorData"
    """Serialize class file"""

    COUNT_RECENT_FILES = 5
    """Count of recent files"""

    CONTEXT_NAME = 'LayerEditor'

    CONFIG_DIR = os.path.join(cfg.__config_dir__, 'LayerEditor')

    def __init__(self, **kwargs):
        def kw_or_def(key, default=None):
            """Get keyword arg or default value."""
            return kwargs[key] if key in kwargs else default

        from os.path import expanduser

        self.observers = []
        """objects to be notified of changes"""
        self._analysis = None
        self._workspace = None
        self._analysis = kw_or_def('_analysis')
        self._workspace = kw_or_def('_workspace')
        self.show_init_area = kw_or_def('show_init_area', True)

        self.current_workdir = os.getcwd()
        """directory of the most recently opened data file"""
        self.recent_files = kw_or_def('recent_files', [])
        """a list of recently opened files"""
        self.shortcuts = kw_or_def(
            'shortcuts',
            deepcopy(shortcuts_definition.DEFAULT_USER_SHORTCUTS))
        """user customizable keyboard shortcuts"""

    def save(self):
        """Save config data"""
        cfg.save_config_file(self.__class__.SERIAL_FILE, self, self.CONFIG_DIR)

    @staticmethod
    def open():
        """Open config from saved file (if it exists)."""
        config = cfg.get_config_file(_Config.SERIAL_FILE,
                                     _Config.CONFIG_DIR,
                                     cls=_Config)
        if config is None:
            config = _Config()
        return config

    def add_recent_file(self, file_name):
        """
        If the file is already in the list, move it to the top; otherwise add
        it to the top and drop the last file if needed.
        """
        # 0 files
        if len(self.recent_files) == 0:
            self.recent_files.append(file_name)
            self.save()
            return
        # first file == update file
        if self.recent_files[0] == file_name:
            self.save()
            return
        # init for
        last_file = self.recent_files[0]
        self.recent_files[0] = file_name
        for i in range(1, len(self.recent_files)):
            if file_name == self.recent_files[i]:
                # added file is in list
                self.recent_files[i] = last_file
                self.save()
                return
            last_file_pom = self.recent_files[i]
            self.recent_files[i] = last_file
            last_file = last_file_pom
            # recent files is max+1, but first is not displayed
            if self.__class__.COUNT_RECENT_FILES < i + 1:
                self.save()
                return
        # add last file
        self.recent_files.append(last_file)
        self.save()

    @property
    def data_dir(self):
        """Data directory - either an analysis dir or the last used dir."""
        if self.workspace and self.analysis:
            return os.path.join(self.workspace, self.analysis)
        else:
            return self.current_workdir

    def update_current_workdir(self, file_name):
        """Save dir from the last used file."""
        analysis_directory = None
        directory = os.path.dirname(os.path.realpath(file_name))
        if self.workspace is not None and self.analysis is not None:
            analysis_directory = os.path.join(self.workspace, self.analysis)
        if analysis_directory is None or directory != analysis_directory:
            self.current_workdir = directory

    @property
    def workspace(self):
        """path to workspace"""
        return self._workspace

    @workspace.setter
    def workspace(self, value):
        if value == '' or value is None:
            self._workspace = None
        if value != self._workspace:
            # close analysis if workspace is changed
            self.analysis = None
        self._workspace = value

    @property
    def analysis(self):
        """name of the analysis in the workspace"""
        return self._analysis

    @analysis.setter
    def analysis(self, value):
        if value == '' or value is None:
            self._analysis = None
            Analysis.current = None
        else:
            self._analysis = value
            try:
                analysis = Analysis.open(self._workspace, self._analysis)
            except InvalidAnalysis:
                self._analysis = None
            else:
                Analysis.current = analysis
        self.notify_all()

    def notify_all(self):
        """Notify all observers about changes."""
        for observer in self.observers:
            observer.config_changed()