def action_dir():
    ''' Returns a JSON with the list of models and versions '''
    # get the model repo path
    models_path = pathlib.Path(utils.model_repository_path())

    # get directories in model repo path
    dirs = [x for x in models_path.iterdir() if x.is_dir()]

    # a directory is a model when it contains a 'dev' child DIRECTORY.
    # The previous glob-based test also matched a plain file named 'dev';
    # check explicitly for a directory instead.
    model_dirs = [d.parts[-1] for d in dirs if (d / 'dev').is_dir()]

    results = []
    for imodel in model_dirs:
        # the 'dev' version is always present
        versions = [{'text': 'dev'}]

        # published versions are stored in 'ver<num>' directories
        for iversion in os.listdir(utils.model_tree_path(imodel)):
            if iversion.startswith('ver'):
                versions.append({'text': iversion})

        results.append({'text': imodel, 'nodes': versions})

    return True, json.dumps(results)
def action_dir():
    ''' Returns a JSON with the list of models and versions '''
    # get the model repo path
    models_path = pathlib.Path(utils.model_repository_path())

    # get directories in model repo path
    dirs = [x for x in models_path.iterdir() if x.is_dir()]

    # a directory is a model when it contains a 'dev' child DIRECTORY.
    # The previous glob-based test also matched a plain file named 'dev';
    # check explicitly for a directory instead.
    model_dirs = [d.parts[-1] for d in dirs if (d / 'dev').is_dir()]

    results = []
    for imodel in model_dirs:
        idict = {}
        idict["modelname"] = imodel

        # version 0 always refers to the 'dev' version
        versions = [0]
        for iversion in os.listdir(utils.model_tree_path(imodel)):
            if iversion.startswith('ver'):
                versions.append(utils.modeldir2ver(iversion))

        idict["versions"] = versions
        results.append(idict)

    return True, json.dumps(results)
def action_list(model):
    '''
    Lists available models (if no argument is provided)
    and model versions (if "model" is provided as argument)
    '''
    # TODO: if no argument is provided, also list all models
    if not model:
        rdir = utils.model_repository_path()
        num_models = 0
        models = []
        print(' Models found in repository:')
        for x in os.listdir(rdir):
            # only directories can be models; skip plain files
            # (the original counted every entry, including files)
            if not os.path.isdir(os.path.join(rdir, x)):
                continue
            num_models += 1
            models.append(x)
            print('\t- ', x)
        LOG.debug(f'Retrieved list of models from {rdir}')
        return True, ''

    # a model name was given: list its published versions
    bdir = utils.model_tree_path(model)
    num_versions = 0
    for x in os.listdir(bdir):
        if x.startswith("ver"):
            num_versions += 1
            print(model, ':', x)

    LOG.info(f'model {model} has {num_versions} published versions')
    return True, 'model '+model+' has '+str(num_versions)+' published versions'
def post(self, request):
    '''
    Imports a model tarball uploaded in the 'model' multipart field.

    Returns 400 when no file was provided, 409 when a model with the
    same name already exists, and 200 with the model name on success.
    '''
    try:
        file_obj = request.FILES['model']
    except MultiValueDictKeyError:
        response = {"error": "Model to import not provided"}
        return JsonResponse(response, status=status.HTTP_400_BAD_REQUEST)

    # split only on the FIRST dot so compound extensions survive:
    # 'mymodel.tar.gz' -> ('mymodel', 'tar.gz'); the previous
    # split('.')[1] truncated it to 'tar'
    model_name, _, extension = file_obj.name.partition('.')

    models_path = utils.model_repository_path()
    model_dir = os.path.join(models_path, model_name)

    # refuse to overwrite an existing model
    if os.path.isdir(model_dir):
        return JsonResponse({'error': "Model already exist"}, status=status.HTTP_409_CONFLICT)

    fs = FileSystemStorage(location=models_path)  # defaults to MEDIA_ROOT
    tarFile = fs.save(model_name + '.' + extension, file_obj)
    tarPath = os.path.join(models_path, tarFile)

    try:
        os.mkdir(model_dir)
        # NOTE(review): extractall on an untrusted tarball is vulnerable to
        # path traversal; consider validating member names before extracting
        with tarfile.open(tarPath) as tar:
            tar.extractall(path=model_dir)
    finally:
        # always remove the uploaded tarball, even if extraction fails
        os.remove(tarPath)

    return JsonResponse({'Model': model_name}, status=status.HTTP_200_OK)
def action_list(model):
    '''
    If no argument is provided, lists all models present at the repository;
    otherwise lists all versions for the model provided as argument.
    '''
    if not model:
        # no model name: enumerate the repository
        repo_dir = utils.model_repository_path()
        count = 0
        LOG.info('Models found in repository:')
        for entry in os.listdir(repo_dir):
            entry_path = os.path.join(repo_dir, entry)
            # a model is a directory containing a 'dev' subdirectory;
            # anything else is skipped
            if not os.path.isdir(entry_path):
                continue
            if not os.path.isdir(os.path.join(entry_path, 'dev')):
                continue
            count += 1
            LOG.info('\t' + entry)
        LOG.debug(f'Retrieved list of models from {repo_dir}')
        return True, f'{count} models found'

    # a model name was given: enumerate its published versions
    base_path = utils.model_tree_path(model)
    count = 0
    for entry in os.listdir(base_path):
        if entry.startswith("ver"):
            count += 1
            LOG.info(f'\t{model} : {entry}')
    return True, f'Model {model} has {count} published versions'
def predict_cmd(arguments, output_format=None):
    '''
    Instantiates a Predict object to run a prediction using the given
    input file and model.

    This method must be self-contained and suitable for being called in
    cascade, by models which use the output of other models as input.
    '''
    from flame.predict import Predict

    # make sure the requested endpoint exists in the repository
    repo_path = pathlib.Path(utils.model_repository_path())
    if arguments['endpoint'] not in os.listdir(repo_path):
        LOG.error('Endpoint name not found in model repository.')
        return False, 'Endpoint name not found in model repository.'

    # ** DEPRECATE **
    # back-compatibility trick for older API versions which do not
    # support the label argument
    arguments.setdefault('label', 'temp')

    predict = Predict(arguments['endpoint'],
                      version=arguments['version'],
                      output_format=output_format,
                      label=arguments['label'])

    ensemble = predict.get_ensemble()
    # ensemble[0]: True for ensemble models, False otherwise
    # ensemble[1]: list of ensemble member model names
    # ensemble[2]: list of ensemble member model versions
    is_ensemble = ensemble[0]

    if not is_ensemble:
        # plain model: run directly on the input file
        success, results = predict.run(arguments['infile'])
        LOG.info('Prediction completed...')
        return success, results

    # ensemble models always need an input file to feed the members
    if arguments['infile'] is None:
        return False, 'ensemble models require allways an input file'

    success, model_res = get_ensemble_input(predict, ensemble[1],
                                            ensemble[2],
                                            arguments['infile'])
    if not success:
        return False, model_res

    # run the model using the data gathered from the member models
    success, results = predict.run(model_res)
    LOG.info('Prediction completed...')
    return success, results
def action_dir():
    ''' Returns a list of models and versions '''
    # get the model repo path
    models_path = pathlib.Path(utils.model_repository_path())
    if models_path.is_dir() is False:
        return False, 'the model repository path does not exist. Please run "flame -c config".'

    # get directories in model repo path
    dirs = [x for x in models_path.iterdir() if x.is_dir()]

    # if dir contains dev/ -> is model (NAIVE APPROACH)
    # get last dir name [-1]: model name
    model_dirs = [d.parts[-1] for d in dirs if list(d.glob('dev'))]

    # fallback label used whenever no label info can be retrieved
    label_defaults = {'maturity': 'dev',
                      'type': 'unk',
                      'subtype': 'unk',
                      'endpoint': 'unk',
                      'species': 'unk'}

    def _entry(model_name, ver):
        # build the description dict for a single (model, version) pair
        item = {}
        item["modelname"] = model_name
        item["version"] = ver
        item["info"] = action_info(model_name, ver, output=None)[1]
        ok, label = action_label(model_name, ver, oformat='object')
        item["label"] = label if ok else label_defaults
        return item

    results = []
    for imodel in model_dirs:
        # version 0 is the 'dev' version, always present
        results.append(_entry(imodel, 0))

        # then every published 'ver<num>' version
        for iversion in os.listdir(utils.model_tree_path(imodel)):
            if iversion.startswith('ver'):
                results.append(_entry(imodel, utils.modeldir2ver(iversion)))

    return True, results
def build_cmd(model, output_format=None):
    '''
    Instantiates a Build object to build a model using the given input
    file and model.

    This method must be self-contained and suitable for being called in
    cascade, by models which use the output of other models as input.
    '''
    # safety check: the endpoint must exist in the model repository
    repo_path = pathlib.Path(utils.model_repository_path())
    if model['endpoint'] not in os.listdir(repo_path):
        LOG.error('endpoint name not found in model repository.')
        raise ValueError('Wrong endpoint name. '
                         f"{model['endpoint']} does not exist")

    build = Build(model['endpoint'], output_format)
    ext_input, model_set = build.get_model_set()

    if ext_input:
        # gather the input produced by the lower-level models first
        success, model_res = get_external_input(build, model_set,
                                                model['infile'])
        if not success:
            return False, model_res
        # run the model using the data from the external sources
        success, results = build.run(model_res)
        return success, results

    # plain model: validate the training series file
    ifile = model['infile']
    if not os.path.isfile(ifile):
        return False, 'wrong training series file'

    # keep a local copy of the training series inside the model dir
    epd = utils.model_path(model['endpoint'], 0)
    lfile = os.path.join(epd, os.path.basename(ifile))
    try:
        shutil.copy(ifile, lfile)
    except shutil.SameFileError:
        # the input already lives in the model folder; build in place
        LOG.warning('Building model with the input SDF'
                    f' present in model folder {lfile}')

    # run the model with the input file
    success, results = build.run(lfile)
    return success, results
def action_report():
    ''' Returns a list of models and the results of each one '''
    # get the model repo path
    models_path = pathlib.Path(utils.model_repository_path())

    # get directories in model repo path
    dirs = [x for x in models_path.iterdir() if x.is_dir()]

    results = []

    # iterate models
    for d in dirs:
        imodel_name = d.parts[-1]
        imodel_vers = [x.parts[-1] for x in d.iterdir() if x.is_dir()]

        # make sure the model contains 'dev' to recognize models
        if 'dev' not in imodel_vers:
            continue

        imodel_vers_info = []
        for ivtag in imodel_vers:
            iver = utils.modeldir2ver(ivtag)

            # now we have the model name and version, try to get the info
            try:
                isuccess, iresult = action_info(imodel_name, iver, output='bin')
            except Exception:
                # a bare 'except' would also swallow KeyboardInterrupt and
                # SystemExit; catch regular errors only and skip the version
                continue
            if not isuccess:
                continue

            # build a tuple (version, object) for each version and append
            imodel_vers_info.append((iver, iresult))

        # build a tuple (model_name, [version_info]) for each model and append
        results.append((imodel_name, imodel_vers_info))

    return True, results
def post(self, request):
    '''
    Imports an uploaded model tarball by delegating the actual work to
    manage.action_import, and reports the outcome as a JSON response.
    '''
    file_object = request.FILES['model']

    # the model name is the upload's file name without extension(s)
    model_name = file_object.name.split('.')[0]

    base_path = utils.model_repository_path()
    fs = FileSystemStorage(location=base_path)  # defaults to models root
    tarname = fs.save(file_object.name, file_object)
    tarpath = os.path.join(base_path, tarname)

    # delegate the import, then drop the uploaded tarball
    flame_status = manage.action_import(tarpath)
    os.remove(tarpath)

    if flame_status[0]:
        return JsonResponse({'Model': model_name}, status=status.HTTP_200_OK)

    # warnings are still reported as 200; real failures as 409
    if 'WARNING' in flame_status[1]:
        return JsonResponse({'error': flame_status[1]}, status=status.HTTP_200_OK)
    return JsonResponse({'error': flame_status[1]}, status=status.HTTP_409_CONFLICT)
def build_cmd(arguments, output_format=None):
    '''
    Instantiates a Build object to build a model using the given input
    file and model.

    This method must be self-contained and suitable for being called in
    cascade, by models which use the output of other models as input.
    '''
    from flame.build import Build

    # safety check if model exists
    repo_path = pathlib.Path(utils.model_repository_path())
    model_list = os.listdir(repo_path)
    if arguments['endpoint'] not in model_list:
        LOG.error('Endpoint name not found in model repository.')
        return False, 'Endpoint name not found in model repository.'

    # parameters can be provided as a file, as a string, or not at all
    if 'param_file' in arguments:
        build = Build(arguments['endpoint'],
                      param_file=arguments['param_file'],
                      output_format=output_format)
    elif 'param_string' in arguments:
        build = Build(arguments['endpoint'],
                      param_string=arguments['param_string'],
                      output_format=output_format)
    else:
        build = Build(arguments['endpoint'], output_format=output_format)

    ensemble = build.get_ensemble()
    # ensemble[0] Boolean with True for ensemble models and False otherwise
    # ensemble[1] List of ensemble member model names
    # ensemble[2] List of ensemble member model versions
    if ensemble[0]:
        # ensemble models always need an input file to feed the members
        if arguments['infile'] is None:
            return False, 'ensemble models require allways an input file'

        success, model_res = get_ensemble_input(build, ensemble[1],
                                                ensemble[2],
                                                arguments['infile'])
        if not success:
            return False, model_res

        # now run the model using the data from the external sources
        success, results = build.run(model_res)
    else:
        ifile = arguments['infile']
        epd = utils.model_path(arguments['endpoint'], 0)
        lfile = os.path.join(epd, 'training_series')

        # when a new training series is provided in the command line
        # try to copy it to the model directory
        if ifile is not None:
            if not os.path.isfile(ifile):
                LOG.error(f'Wrong training series file {ifile}')
                return False, f'Wrong training series file {ifile}'
            try:
                shutil.copy(ifile, lfile)
            except OSError:
                # catch only copy errors; the previous bare 'except' also
                # swallowed KeyboardInterrupt/SystemExit
                LOG.error('Unable to copy input file to model directory')
                return False, 'Unable to copy input file to model directory'

        # check that the local copy of the input file exists
        if not os.path.isfile(lfile):
            LOG.error('No training series found')
            return False, 'No training series found'

        # remove pre-existing results file
        rfile = os.path.join(epd, 'results.pkl')
        if os.path.isfile(rfile):
            os.remove(rfile)

        # run the model with the input file
        success, results = build.run(lfile)

    return success, results
def test_custom_model_repository_path():
    '''Setting the repository to '.' must resolve to the current directory.'''
    utils.set_model_repository('.')
    expected = str(pathlib.Path('.').resolve())
    assert utils.model_repository_path() == expected
def config(path: str = None) -> bool:
    """Configures model, space and predictions repositories.

    Loads config.yaml and writes correct repository paths, using the path
    provided by the user or defaults from appdirs when no path is provided.

    Returns True when the repositories were set, False otherwise.
    """
    # ---- CLI interface -----
    if path is None:
        # no path given: offer the appdirs default locations
        default_models_path = Path(appdirs.user_data_dir('models', 'flame'))
        default_spaces_path = Path(appdirs.user_data_dir('spaces', 'flame'))
        default_predictions_path = Path(appdirs.user_data_dir('predictions', 'flame'))

        print(f'Setting model, space and predictions repositories (default) to {default_models_path}, {default_spaces_path} and {default_predictions_path}'
              '\nWould you like to continue?(y/n)')
        if not ask_user():
            print('aborting...')
            return False

        if (default_models_path.exists() or default_spaces_path.exists()
                or default_predictions_path.exists()):
            print(f'These paths already exists. '
                  'Would you like to set them anyway?(y/n)')
            if not ask_user():
                print('aborting...')
                return False

        # create whichever default directories are still missing: the
        # previous code skipped creation entirely when ANY of the three
        # existed, leaving the others absent, and also fell off the end
        # (returning None) instead of returning True on that path
        for repo_path in (default_models_path, default_spaces_path,
                          default_predictions_path):
            repo_path.mkdir(parents=True, exist_ok=True)

        utils.set_repositories(default_models_path, default_spaces_path,
                               default_predictions_path)
        print(f'model repository set to {default_models_path}')
        print(f'space repository set to {default_spaces_path}')
        print(f'predictions repository set to {default_predictions_path}')
        return True

    # ---- path input by user -----
    in_path = Path(path).expanduser()
    in_path_models = Path.joinpath(in_path, 'models')
    in_path_spaces = Path.joinpath(in_path, 'spaces')
    in_path_predictions = Path.joinpath(in_path, 'predictions')

    current_models_path = Path(utils.model_repository_path())
    current_spaces_path = Path(utils.space_repository_path())
    current_predictions_path = Path(utils.predictions_repository_path())

    # nothing to do when the requested paths are already configured
    if (in_path_models == current_models_path
            and in_path_spaces == current_spaces_path
            and in_path_predictions == current_predictions_path):
        print(f'{in_path_models} already is model repository path')
        print(f'{in_path_spaces} already is space repository path')
        print(f'{in_path_predictions} already is predictions repository path')
        return False

    if not (in_path_models.exists() and in_path_spaces.exists()
            and in_path_predictions.exists()):
        print("paths doesn't exists. Would you like to create it?(y/n)")
        if not ask_user():
            print('aborting...')
            return False
        # create only the missing directories
        if not in_path_models.exists():
            in_path_models.mkdir(parents=True)
        if not in_path_spaces.exists():
            in_path_spaces.mkdir(parents=True)
        if not in_path_predictions.exists():
            in_path_predictions.mkdir(parents=True)

    utils.set_repositories(in_path_models, in_path_spaces,
                           in_path_predictions)
    print(f'space repository set to {in_path_spaces}')
    print(f'model repository set to {in_path_models}')
    print(f'predictions repository set to {in_path_predictions}')
    return True
def action_refresh(model=None, version=None, GUI=False):
    '''
    Rebuild one or many models making use of existing parameter files and
    locally stored training series.

    When GUI is True, progress is reported through a token file in the
    system temp directory so a front-end can poll the task status.
    '''
    import flame.context as context
    from flame.parameters import Parameters
    # from flame.documentation import Documentation
    import logging

    if GUI:
        token_file = os.path.join(tempfile.gettempdir(), 'refreshing_' + model)
        # update token file with content 'working'
        with open(token_file, 'w') as f:
            f.write('Analyzing and sorting models...')

    # list endpoints relevant for the arguments
    if model is not None:
        model_list = [model]
    else:
        model_root = pathlib.Path(utils.model_repository_path())
        model_list = [x.stem for x in model_root.iterdir() if x.is_dir()]

    # list versions relevant for the arguments
    task_list = []
    for imodel in model_list:
        if version is not None:
            task_list.append((imodel, version))
        else:
            model_root = pathlib.Path(utils.model_tree_path(imodel))
            itask_list = [(imodel, utils.modeldir2ver(x.stem))
                          for x in model_root.iterdir() if x.is_dir()]
            # use "+=" and not "append" to merge the new list with the old one
            task_list += itask_list

    # analize task_list and add at the end ensemble models
    # this is needed to have low models refreshed BEFORE refreshing the
    # high models, eliminating the need to refresh them recursively
    LOG.info("Analyzing and sorting models...")

    # make sure the lower models are in task_list and, if not, force the inclussion
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])
        if not success:
            continue
        if param.getVal('input_type') == 'model_ensemble':
            ens_nams = param.getVal('ensemble_names')
            ens_vers = param.getVal('ensemble_versions')
            for i in range(len(ens_nams)):
                iver = 0
                inam = ens_nams[i]
                if (i < len(ens_vers)):
                    iver = ens_vers[i]
                if ((inam, iver)) not in task_list:
                    task_list.append((inam, iver))

    # create separate lists for regular and ensemble models and add
    # ensemble models at the end; this needs to be carried out after the
    # previous step because some of the lower level models could be an
    # ensemble model itself
    mol_list = []
    ens_list = []
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])
        if not success:
            mol_list.append(itask)
            continue
        if param.getVal('input_type') == 'model_ensemble':
            ens_list.append(itask)
        else:
            mol_list.append(itask)
    task_list = mol_list + ens_list

    # show all models before starting
    LOG.info("Starting model refreshing task for the following models and versions")
    for itask in task_list:
        LOG.info(f' model: {itask[0]} version: {itask[1]}')
    LOG.info("This can take some time, please be patient...")

    source_dir = os.path.dirname(os.path.abspath(__file__))
    children_dir = os.path.join(source_dir, 'children')
    master_parameters = os.path.join(children_dir, 'parameters.yaml')
    master_documentation = os.path.join(children_dir, 'documentation.yaml')

    # now send the build command for each task
    for itask in task_list:
        destinat_path = utils.model_path(itask[0], 0)  # dev

        if itask[1] != 0:
            # move version to /dev for building
            original_path = utils.model_path(itask[0], itask[1])  # veri
            security_path = destinat_path + '_security'           # dev_sec
            shutil.move(destinat_path, security_path)  # dev --> dev_sec
            shutil.move(original_path, destinat_path)  # veri --> dev

        LOG.info(f' refreshing model: {itask[0]} version: {itask[1]} ({task_list.index(itask)+1} of {len(task_list)})...')

        if GUI:
            with open(token_file, 'w') as f:
                f.write(f'model: {itask[0]} version: {itask[1]} ({task_list.index(itask)+1} of {len(task_list)})')

        # dissable LOG output
        logging.disable(logging.ERROR)

        # update parameters
        dump_parameters = os.path.join(destinat_path, 'parameters_dump.yaml')
        success, param = action_parameters(itask[0], 0, oformat='bin')
        if success:
            param_yaml = param.dumpYAML()
            with open(dump_parameters, 'w') as f:
                for line in param_yaml:
                    f.write(line + '\n')
        else:
            # BUG FIX: the 'f' prefix was missing, so the {itask[...]}
            # placeholders were logged literally
            LOG.info(f' ERROR: unable to merge parameters for model: {itask[0]} version: {itask[1]}')
            dump_parameters = None

        original_parameters = os.path.join(destinat_path, 'parameters.yaml')
        shutil.copy(master_parameters, original_parameters)

        # update documentation
        dump_documentation = os.path.join(destinat_path, 'documentation_dump.yaml')
        success, documentation = action_documentation(itask[0], 0,
                                                      doc_file=None,
                                                      oformat='bin')
        original_documentation = os.path.join(destinat_path, 'documentation.yaml')
        shutil.copy(master_documentation, original_documentation)

        if success:
            documentation_yaml = documentation.dumpYAML()
            with open(dump_documentation, 'w') as f:
                for line in documentation_yaml:
                    # strip any non-ASCII characters before writing
                    line = line.encode("ascii", "ignore")
                    line = line.decode("ascii", "ignore")
                    f.write(line + '\n')
            s2, documentation = action_documentation(itask[0], 0,
                                                     doc_file=None,
                                                     oformat='bin')
            s3, r3 = documentation.delta(itask[0], 0, dump_documentation)
        else:
            # BUG FIX: same missing 'f' prefix as above
            LOG.info(f' ERROR: unable to merge documentation for model: {itask[0]} version: {itask[1]}')

        # rebuild the model
        command_build = {'endpoint': itask[0],
                         'infile': None,
                         'param_file': dump_parameters,
                         'incremental': False}
        success, results = context.build_cmd(command_build)

        # enable LOG output
        logging.disable(logging.NOTSET)

        if itask[1] != 0:
            # restore the original directory layout
            shutil.move(destinat_path, original_path)  # dev --> veri
            shutil.move(security_path, destinat_path)  # dev_sec --> dev

        if not success:
            LOG.error(results)

    LOG.info("Model refreshing task finished")

    if GUI:
        # update token file with status 'ready'
        with open(token_file, 'w') as f:
            f.write('ready')

    return True, 'OK'
def action_refresh(model=None, version=None):
    '''
    Rebuild one or many models making use of existing parameter files and
    locally stored training series.

    model   : model name, or None to refresh every model in the repository
    version : version number, or None to refresh every version of each model
    Returns a (success, message) tuple.
    '''
    import flame.context as context
    from flame.parameters import Parameters
    import logging

    # list endpoints relevant for the arguments
    if model is not None:
        model_list = [model]
    else:
        model_root = pathlib.Path(utils.model_repository_path())
        model_list = [x.stem for x in model_root.iterdir() if x.is_dir()]

    # list versions relevant for the arguments
    task_list = []
    for imodel in model_list:
        if version is not None:
            task_list.append((imodel, version))
        else:
            model_root = pathlib.Path(utils.model_tree_path(imodel))
            itask_list = [(imodel, utils.modeldir2ver(x.stem))
                          for x in model_root.iterdir() if x.is_dir()]
            # use "+=" and not "append" to merge the new list with the old one
            task_list += itask_list

    # analize task_list and add at the end ensemble models
    # this is needed to have low models refreshed BEFORE refreshing the high models
    # eliminating the need to refresh them recursively
    LOG.info("Analyzing and sorting models...")

    # make sure the lower models are in task_list and, if not, force the inclussion
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])
        if not success:
            # models without readable parameters are skipped here
            continue
        if param.getVal('input_type') == 'model_ensemble':
            ens_nams = param.getVal('ensemble_names')
            ens_vers = param.getVal('ensemble_versions')
            for i in range(len(ens_nams)):
                # default to version 0 when no matching version is listed
                iver = 0
                inam = ens_nams[i]
                if (i < len(ens_vers)):
                    iver = ens_vers[i]
                if ((inam, iver)) not in task_list:
                    task_list.append((inam, iver))

    # create separate lists for regular and ensemble models
    # and add ensemble models at the end
    # this needs to be carried out after the previos step because
    # some of the lower level models could be an ensemble model
    # itself
    mol_list = []
    ens_list = []
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])
        if not success:
            mol_list.append(itask)
            continue
        if param.getVal('input_type') == 'model_ensemble':
            ens_list.append(itask)
        else:
            mol_list.append(itask)
    # regular models first, ensembles last
    task_list = mol_list + ens_list

    # show all models before stating
    LOG.info(
        "Starting model refreshing task for the following models and versions")
    for itask in task_list:
        LOG.info(f' model: {itask[0]} version: {itask[1]}')
    LOG.info("This can take some time, please be patient...")

    # now send the build command for each task
    for itask in task_list:
        if itask[1] != 0:
            # move version to /dev for building; the existing dev content
            # is parked in a '_security' directory and restored afterwards,
            # so the order of these moves is critical
            original_path = utils.model_path(itask[0], itask[1])  # veri
            destinat_path = utils.model_path(itask[0], 0)  # dev
            security_path = destinat_path + '_security'  # dev_sec
            shutil.move(destinat_path, security_path)  # dev --> dev_sec
            shutil.move(original_path, destinat_path)  # veri --> dev

        LOG.info(
            f' refreshing model: {itask[0]} version: {itask[1]} ({task_list.index(itask)+1} of {len(task_list)})...'
        )

        # dissable LOG output
        logging.disable(logging.ERROR)

        command_build = {
            'endpoint': itask[0],
            'infile': None,
            'param_file': None,
            'incremental': False
        }
        success, results = context.build_cmd(command_build)

        # enable LOG output
        logging.disable(logging.NOTSET)

        if itask[1] != 0:
            # restore the original directory layout (reverse of the swap above)
            shutil.move(destinat_path, original_path)  # dev --> veri
            shutil.move(security_path, destinat_path)  # dev_sec --> dev

        if not success:
            LOG.error(results)

    LOG.info("Model refreshing task finished")
    return True, 'OK'
def build_cmd(arguments, output_format=None):
    '''
    Instantiates a Build object to build a model using the given input
    file and model.

    This method must be self-contained and suitable for being called in
    cascade, by models which use the output of other models as input.
    '''
    from flame.build import Build

    # safety check if model exists
    repo_path = pathlib.Path(utils.model_repository_path())
    model_list = os.listdir(repo_path)
    if arguments['endpoint'] not in model_list:
        LOG.error('Endpoint name not found in model repository.')
        return False, 'Endpoint name not found in model repository.'

    build = Build(arguments['endpoint'], arguments['parameters'], output_format)

    ext_input, model_set = build.get_model_set()
    if ext_input:
        # gather the input produced by the lower-level models first
        success, model_res = get_external_input(build, model_set,
                                                arguments['infile'])
        if not success:
            return False, model_res

        # now run the model using the data from the external sources
        success, results = build.run(model_res)
    else:
        ifile = arguments['infile']
        epd = utils.model_path(arguments['endpoint'], 0)
        lfile = os.path.join(epd, 'training_series')

        # when a new training series is provided in the command line
        # try to copy it to the model directory
        if ifile is not None:
            if not os.path.isfile(ifile):
                LOG.error(f'Wrong training series file {ifile}')
                return False, f'Wrong training series file {ifile}'
            try:
                shutil.copy(ifile, lfile)
            except OSError:
                # catch only copy errors; the previous bare 'except' also
                # swallowed KeyboardInterrupt/SystemExit
                LOG.error('Unable to copy input file to model directory')
                return False, 'Unable to copy input file to model directory'

        # check that the local copy of the input file exists
        if not os.path.isfile(lfile):
            LOG.error('No training series found')
            return False, 'No training series found'

        # run the model with the input file
        success, results = build.run(lfile)

    return success, results
def config(path: str = None) -> None:
    """Configures model repository.

    Loads config.yaml and writes a correct model repository path
    with the path provided by the user or a default from appdirs
    if the path is not provided.
    """

    def _confirm():
        # normalize a y/n answer; None means the input was not understood
        answer = input().lower()
        if answer in ('yes', 'y'):
            return True
        if answer in ('no', 'n'):
            return False
        print('Please write "yes", "no", "y" or "n"')
        return None

    # ---- CLI interface -----
    if path is None:
        # no path given: offer the appdirs default location
        default_models_path = Path(
            appdirs.user_data_dir('flame_models', 'flame'))
        print(f'Setting model repository (default) to {default_models_path}'
              '\nWould you like to continue?(y/n)')
        answer = _confirm()
        if answer is None:
            return
        if not answer:
            print('aborting...')
            return

        if default_models_path.exists():
            # the location already exists: require an extra confirmation
            print(
                f'{default_models_path} already exists. '
                'Would you like to set is as model repository anyway?(y/n)'
            )
            answer = _confirm()
            if answer is None:
                return
            if not answer:
                print('aborting...')
                return
            utils.set_model_repository(default_models_path)
        else:
            # models_path doesn't exist yet: create it first
            default_models_path.mkdir(parents=True)
            utils.set_model_repository(default_models_path)
        print(f'model repository set to {default_models_path}')
        return

    # path input by user
    in_path = Path(path).expanduser()
    current_models_path = Path(utils.model_repository_path())

    if in_path == current_models_path:
        # nothing to do: already configured
        print(f'{in_path} already is model repository path')
        return

    if not in_path.exists():
        print(
            f"{in_path} doesn't exists. Would you like to create it?(y/n)")
        answer = _confirm()
        if answer is None:
            return
        if not answer:
            print('aborting...')
            return
        in_path.mkdir(parents=True)
        utils.set_model_repository(in_path)
        return

    # in_path exists
    utils.set_model_repository(in_path)
    print(f'model repository set to {in_path}')