def action_parameters(space, version=None, oformat='text'):
    '''
    Shows (oformat='text') or returns (any other oformat) the parameters
    of the given space and version.

    Returns a (success, result) tuple: on success, result is either the
    Parameters object (non-text oformat) or the string 'parameters listed';
    on failure it is an error message.
    '''
    if space is None:
        return False, 'Empty space label'

    from flame.parameters import Parameters

    param = Parameters()

    # BUGFIX: loadYaml returns a (success, results) tuple (see the model
    # variant of this function); the original ignored it and proceeded even
    # when loading failed
    success, results = param.loadYaml(space, version, isSpace=True)
    if not success:
        return False, results

    if oformat != 'text':
        # return the raw Parameters object so callers can inspect it
        return True, param

    for line in param.dumpYAML():
        print(line)

    return True, 'parameters listed'
def action_parameters(model, version=None, oformat='text'):
    '''
    Shows (oformat='text') or returns (any other oformat) the parameters
    of the given model and version.

    Returns a (success, result) tuple: on success, result is either the
    Parameters object (non-text oformat) or the string 'parameters listed';
    on failure it is an error message.
    '''
    if model is None:
        return False, 'Empty model label'

    from flame.parameters import Parameters

    param = Parameters()
    success, results = param.loadYaml(model, version)
    if not success:
        # BUGFIX: typo in the user-facing message ('parametes' -> 'parameters')
        print(f'error obtaining parameters for model {model} : {results}')
        return False, results

    if oformat != 'text':
        # return the raw Parameters object so callers can inspect it
        return True, param

    for line in param.dumpYAML():
        print(line)

    return True, 'parameters listed'
def action_refresh(model=None, version=None, GUI=False):
    '''
    Rebuild one or many models making use of existing parameter files and
    locally stored training series.

    Parameters
    ----------
    model : str or None
        Model name; when None, every model in the repository is refreshed.
    version : int or None
        Model version; when None, every version of each model is refreshed.
    GUI : bool
        When True, progress is mirrored to a token file in the temp dir so
        an external GUI can poll the refresh status.

    Returns a (success, message) tuple.
    '''
    import flame.context as context
    from flame.parameters import Parameters
    # from flame.documentation import Documentation
    import logging

    if GUI:
        token_file = os.path.join(tempfile.gettempdir(), 'refreshing_' + model)
        # update token file with content 'working'
        with open(token_file, 'w') as f:
            f.write('Analyzing and sorting models...')

    # list endpoints relevant for the arguments
    if model is not None:
        model_list = [model]
    else:
        model_root = pathlib.Path(utils.model_repository_path())
        model_list = [x.stem for x in model_root.iterdir() if x.is_dir()]

    # list versions relevant for the arguments
    task_list = []
    for imodel in model_list:
        if version is not None:
            task_list.append((imodel, version))
        else:
            model_root = pathlib.Path(utils.model_tree_path(imodel))
            itask_list = [(imodel, utils.modeldir2ver(x.stem))
                          for x in model_root.iterdir() if x.is_dir()]
            # use "+=" and not "append" to merge the new list with the old one
            task_list += itask_list

    # analyze task_list and add ensemble models at the end; this is needed to
    # have low models refreshed BEFORE refreshing the high models, eliminating
    # the need to refresh them recursively
    LOG.info("Analyzing and sorting models...")

    # make sure the lower models are in task_list and, if not, force the inclusion
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])
        if not success:
            continue
        if param.getVal('input_type') == 'model_ensemble':
            ens_nams = param.getVal('ensemble_names')
            ens_vers = param.getVal('ensemble_versions')
            for i in range(len(ens_nams)):
                # default to version 0 when no matching version is listed
                iver = 0
                inam = ens_nams[i]
                if i < len(ens_vers):
                    iver = ens_vers[i]
                if (inam, iver) not in task_list:
                    task_list.append((inam, iver))

    # create separate lists for regular and ensemble models and add ensemble
    # models at the end; this needs to be carried out after the previous step
    # because some of the lower level models could be an ensemble model itself
    mol_list = []
    ens_list = []
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])
        if not success:
            mol_list.append(itask)
            continue
        if param.getVal('input_type') == 'model_ensemble':
            ens_list.append(itask)
        else:
            mol_list.append(itask)
    task_list = mol_list + ens_list

    # show all models before starting
    LOG.info("Starting model refreshing task for the following models and versions")
    for itask in task_list:
        LOG.info(f' model: {itask[0]} version: {itask[1]}')
    LOG.info("This can take some time, please be patient...")

    # master (template) files used to re-seed each model before rebuilding
    source_dir = os.path.dirname(os.path.abspath(__file__))
    children_dir = os.path.join(source_dir, 'children')
    master_parameters = os.path.join(children_dir, 'parameters.yaml')
    master_documentation = os.path.join(children_dir, 'documentation.yaml')

    # now send the build command for each task
    # use enumerate instead of task_list.index(itask): O(1) per iteration and
    # correct even if a (model, version) pair were duplicated
    for itask_num, itask in enumerate(task_list):
        destinat_path = utils.model_path(itask[0], 0)  # dev
        if itask[1] != 0:
            # move version to /dev for building
            original_path = utils.model_path(itask[0], itask[1])  # veri
            security_path = destinat_path + '_security'           # dev_sec
            shutil.move(destinat_path, security_path)  # dev --> dev_sec
            shutil.move(original_path, destinat_path)  # veri --> dev

        LOG.info(
            f' refreshing model: {itask[0]} version: {itask[1]} ({itask_num+1} of {len(task_list)})...'
        )
        if GUI:
            with open(token_file, 'w') as f:
                f.write(
                    f'model: {itask[0]} version: {itask[1]} ({itask_num+1} of {len(task_list)})'
                )

        # disable LOG output while rebuilding
        logging.disable(logging.ERROR)

        # update parameters: merge the stored values with the master template
        dump_parameters = os.path.join(destinat_path, 'parameters_dump.yaml')
        success, param = action_parameters(itask[0], 0, oformat='bin')
        if success:
            param_yaml = param.dumpYAML()
            with open(dump_parameters, 'w') as f:
                for line in param_yaml:
                    f.write(line + '\n')
        else:
            # BUGFIX: the original message lacked the f-prefix, so the
            # {itask[0]}/{itask[1]} placeholders were printed literally
            LOG.info(f' ERROR: unable to merge parameters for model: {itask[0]} version: {itask[1]}')
            dump_parameters = None

        original_parameters = os.path.join(destinat_path, 'parameters.yaml')
        shutil.copy(master_parameters, original_parameters)

        # update documentation
        dump_documentation = os.path.join(destinat_path, 'documentation_dump.yaml')
        success, documentation = action_documentation(itask[0], 0,
                                                      doc_file=None,
                                                      oformat='bin')
        original_documentation = os.path.join(destinat_path, 'documentation.yaml')
        shutil.copy(master_documentation, original_documentation)
        if success:
            documentation_yaml = documentation.dumpYAML()
            with open(dump_documentation, 'w') as f:
                for line in documentation_yaml:
                    # strip non-ASCII characters from the dumped lines
                    line = line.encode("ascii", "ignore")
                    line = line.decode("ascii", "ignore")
                    f.write(line + '\n')
            # reload the (template-based) documentation and merge the dumped
            # values back into it
            s2, documentation = action_documentation(itask[0], 0,
                                                     doc_file=None,
                                                     oformat='bin')
            s3, r3 = documentation.delta(itask[0], 0, dump_documentation)
        else:
            # BUGFIX: missing f-prefix, as above
            LOG.info(f' ERROR: unable to merge documentation for model: {itask[0]} version: {itask[1]}')

        # rebuild the model
        command_build = {
            'endpoint': itask[0],
            'infile': None,
            'param_file': dump_parameters,
            'incremental': False
        }
        success, results = context.build_cmd(command_build)

        # re-enable LOG output
        logging.disable(logging.NOTSET)

        if itask[1] != 0:
            # restore the directories moved before building
            shutil.move(destinat_path, original_path)  # dev --> veri
            shutil.move(security_path, destinat_path)  # dev_sec --> dev

        if not success:
            LOG.error(results)

    LOG.info("Model refreshing task finished")

    if GUI:
        # update token file with status 'ready'
        with open(token_file, 'w') as f:
            f.write('ready')

    return True, 'OK'
def action_parameters(model, version=None, oformat='text'):
    '''
    Shows (oformat='text') or returns (any other oformat) the parameters
    of the given model and version.

    Returns a (success, result) tuple: on success, result is either the
    Parameters object (non-text oformat) or the string 'parameters listed';
    on failure it is an error message.

    NOTE: a large block of commented-out legacy pretty-printing code (an
    ordered, per-key listing) was removed here; dumpYAML() now provides the
    textual representation.
    '''
    if model is None:
        return False, 'Empty model label'

    from flame.parameters import Parameters

    param = Parameters()
    success, results = param.loadYaml(model, version)
    if not success:
        # BUGFIX: typo in the user-facing message ('parametes' -> 'parameters')
        print(f'error obtaining parameters for model {model} : {results}')
        return False, results

    if oformat != 'text':
        # return the raw Parameters object so callers can inspect it
        return True, param

    for line in param.dumpYAML():
        print(line)

    return True, 'parameters listed'