Code Example #1
File: build.py  Project: eTRANSAFE/flame
    def __init__(self, model, param_file=None, output_format=None):
        LOG.debug('Starting build...')
        self.model = model
        self.param = Parameters()
        self.conveyor = Conveyor()

        # load parameters
        if param_file is not None:
            # use the param_file to update existing parameters at the model
            # directory and save changes to make them persistent
            success, message = self.param.delta(model, 0, param_file)
        else:
            # load parameter file at the model directory
            success, message = self.param.loadYaml(model, 0)

        # being unable to load parameters is a critical error
        if not success:
            LOG.critical(
                f'Unable to load model parameters. "{message}" Aborting...')
            sys.exit(1)

        # add additional output formats included in the constructor
        # this is required to add JSON format as output when the object is
        # instantiated from a web service call that requires this output
        if output_format is not None:
            if output_format not in self.param.getVal('output_format'):
                self.param.appVal('output_format', output_format)
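
A minimal usage sketch for the constructor above, assuming it belongs to a class named Build importable from flame.build; the class name, the import path and the endpoint name 'MyModel' are assumptions, since only the __init__ signature is shown here.

from flame.build import Build   # assumed import path

# Build the development version of the hypothetical endpoint 'MyModel',
# reading the parameters stored in its model directory and adding JSON
# output because the caller is a web service.
builder = Build('MyModel', output_format='JSON')
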
Code Example #2
    def __init__(self,
                 space,
                 param_file=None,
                 param_string=None,
                 output_format=None):
        LOG.debug('Starting sbuild...')
        self.space = space
        self.param = Parameters()
        self.conveyor = Conveyor()

        # identify the workflow type
        self.conveyor.setOrigin('slearn')

        # generate a unique modelID
        self.conveyor.addMeta('modelID', utils.id_generator())
        LOG.debug(
            f'Generated new space with modelID: {self.conveyor.getMeta("modelID")}'
        )

        # load parameters
        if param_file is not None:
            # use the param_file to update existing parameters at the space
            # directory and save changes to make them persistent
            success, message = self.param.delta(space,
                                                0,
                                                param_file,
                                                iformat='YAML',
                                                isSpace=True)

        elif param_string is not None:
            success, message = self.param.delta(space,
                                                0,
                                                param_string,
                                                iformat='JSONS',
                                                isSpace=True)

        else:
            # load parameter file at the space directory
            success, message = self.param.loadYaml(space, 0, isSpace=True)

        # being unable to load parameters is a critical error
        if not success:
            LOG.critical(
                f'Unable to load space parameters. {message}. Aborting...')
            sys.exit(1)

        # add additional output formats included in the constructor
        # this is required to add JSON format as output when the object is
        # instantiated from a web service call that requires this output
        if output_format is not None:
            if output_format not in self.param.getVal('output_format'):
                self.param.appVal('output_format', output_format)
Code Example #3
    def __init__(self, space, version, output_format=None, label=None):
        LOG.debug('Starting search...')
        self.space = space
        self.version = version
        self.label = label
        self.param = Parameters()
        self.conveyor = Conveyor()

        # identify the workflow type
        self.conveyor.setOrigin('sapply')

        # load modelID
        path = utils.space_path(space, version)
        meta = os.path.join(path, 'space-meta.pkl')
        try:
            with open(meta, 'rb') as handle:
                modelID = pickle.load(handle)
        except Exception:
            LOG.critical(f'Unable to load modelID from {meta}. Aborting...')
            sys.exit()

        self.conveyor.addMeta('modelID', modelID)
        LOG.debug(f'Loaded space with modelID: {modelID}')

        # assign prediction (search) label
        self.conveyor.addVal(label, 'prediction_label', 'prediction label',
                             'method', 'single',
                             'Label used to identify the prediction')

        success, results = self.param.loadYaml(space, version, isSpace=True)
        if not success:
            LOG.critical(
                f'Unable to load space parameters. {results}. Aborting...')
            sys.exit()

        # add additional output formats included in the constructor
        # this is required to add JSON format as output when the object is
        # instantiated from a web service call that requires this output
        if output_format is not None:
            if output_format not in self.param.getVal('output_format'):
                self.param.appVal('output_format', output_format)

        return
Code Example #4
File: predict.py  Project: eTRANSAFE/flame
    def __init__(self, model, version, output_format=None):
        LOG.debug('Starting predict...')
        self.model = model
        self.version = version
        self.param = Parameters()
        self.conveyor = Conveyor()

        if not self.param.loadYaml(model, version):
            LOG.critical('Unable to load model parameters. Aborting...')
            sys.exit()

        # add additional output formats included in the constructor
        # this is required to add JSON format as output when the object is
        # instantiated from a web service call that requires this output
        if output_format is not None:
            if output_format not in self.param.getVal('output_format'):
                self.param.appVal('output_format', output_format)

        return
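
The same kind of hedged sketch applies to this constructor; the class name Predict and the import path flame.predict are guesses based on the file name, and 'MyModel' with version 0 are hypothetical arguments.

from flame.predict import Predict   # assumed import path

# Predict with the development version (0) of the hypothetical model
# 'MyModel', requesting JSON output on top of the configured formats.
predictor = Predict('MyModel', 0, output_format='JSON')
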
Code Example #5
File: smanage.py  Project: EMVGaron/flame
def action_parameters(space, version=None, oformat='text'):
    ''' Returns the parameters of a given space and version, either as an object or printed as text '''

    if space is None:
        return False, 'Empty space label'

    from flame.parameters import Parameters

    param = Parameters()
    param.loadYaml(space, version, isSpace=True)

    if oformat != 'text':
        return True, param

    else:
        yaml = param.dumpYAML()
        for line in yaml:
            print (line)

        return True, 'parameters listed'
Code Example #6
File: documentation.py  Project: phi-grib/flame
    def __init__(self, model, version=0, context='model'):
        ''' Load the fields from the documentation file'''

        self.model = model
        self.version = version
        self.fields = None
        self.parameters = Parameters()
        self.conveyor = None

        # obtain the path and the default name of the model documents
        documentation_file_path = utils.model_path(self.model, self.version)
        documentation_file_name = os.path.join(documentation_file_path,
                                               'documentation.yaml')

        # load the main class dictionary (p) from this yaml file
        if not os.path.isfile(documentation_file_name):
            raise Exception('Documentation file not found')

        try:
            with open(documentation_file_name, 'r') as documentation_file:
                self.fields = yaml.safe_load(documentation_file)
        except Exception as e:
            # LOG.error(f'Error loading documentation file with exception: {e}')
            raise e

        success, message = self.parameters.loadYaml(model, version)

        if not success:
            print(
                f'Parameters could not be loaded. {message}. Please make sure endpoint and version are correct'
            )
            return

        # Remove this after acc
        #self.load_parameters()
        if context == 'model':
            self.load_results()
            self.assign_parameters()
            self.assign_results()
            self.autocomplete_documentation()
            self.setVal('md5', self.idataHash())
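
A hedged usage sketch for this constructor, assuming it belongs to a class named Documentation importable from flame.documentation; the class name and import path are assumptions and 'MyModel' is a hypothetical endpoint.

from flame.documentation import Documentation   # assumed import path

# Load documentation.yaml and the parameters of version 1 of 'MyModel';
# with the default context='model' the results are also loaded and the
# documentation fields are autocompleted.
doc = Documentation('MyModel', version=1)
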
Code Example #7
    def __init__(self, model, version=0, output_format=None, label=None):
        LOG.debug('Starting predict...')
        self.model = model
        self.version = version
        self.param = Parameters()
        self.conveyor = Conveyor()

        # identify the workflow type
        self.conveyor.setOrigin('apply')

        # load modelID
        success, result = utils.getModelID(model, version, 'model')
        if not success:
            LOG.critical(f'{result}. Aborting...')
            sys.exit()

        self.conveyor.addMeta('modelID', result)
        LOG.debug(f'Loaded model with modelID: {result}')

        # assign prediction label
        self.conveyor.addVal(label, 'prediction_label', 'prediction label',
                             'method', 'single',
                             'Label used to identify the prediction')

        success, results = self.param.loadYaml(model, version)
        if not success:
            LOG.critical(
                f'Unable to load model parameters. {results}. Aborting...')
            sys.exit()

        # add additional output formats included in the constructor
        # this is required to add JSON format as output when the object is
        # instantiated from a web service call that requires this output
        if output_format is not None:
            if output_format not in self.param.getVal('output_format'):
                self.param.appVal('output_format', output_format)

            if 'ghost' in output_format:
                self.param.setVal('output_similar', False)

        return
Code Example #8
File: manage.py  Project: phi-grib/flame
def action_parameters(model, version=None, oformat='text'):
    ''' Returns the parameters of a given model and version, either as an object or printed as text '''

    if model is None:
        return False, 'Empty model label'

    from flame.parameters import Parameters

    param = Parameters()
    success, results = param.loadYaml(model, version)

    if not success:
        print(f'error obtaining parameters for model {model}: {results}')
        return False, results

    if oformat != 'text':
        return True, param

    else:
        yaml = param.dumpYAML()
        for line in yaml:
            print(line)

        return True, 'parameters listed'
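
A usage sketch for this helper; the import path is assumed from the file name manage.py and 'MyModel' is a hypothetical endpoint. Any oformat other than 'text' returns the Parameters object itself, which is how Code Example #11 calls it (oformat='bin').

from flame.manage import action_parameters   # assumed import path

# print the parameters of the development version (0) of 'MyModel' as YAML
success, message = action_parameters('MyModel', version=0, oformat='text')

# keep the Parameters object for programmatic use instead of printing it
success, param = action_parameters('MyModel', version=0, oformat='bin')
if success:
    print(param.getVal('model'))
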
Code Example #9
File: search.py  Project: ismaelresp/flame
    def __init__(self, space, version, output_format=None, label=None):
        LOG.debug('Starting search...')
        self.space = space
        self.version = version
        self.label = label
        self.param = Parameters()
        self.conveyor = Conveyor()

        self.conveyor.addVal(label, 'prediction_label', 'prediction label',
                             'method', 'single',
                             'Label used to identify the prediction')

        if not self.param.loadYaml(space, version, isSpace=True):
            LOG.critical('Unable to load space parameters. Aborting...')
            sys.exit()

        # add additional output formats included in the constructor
        # this is required to add JSON format as output when the object is
        # instantiated from a web service call that requires this output
        if output_format is not None:
            if output_format not in self.param.getVal('output_format'):
                self.param.appVal('output_format', output_format)

        return
Code Example #10
File: smanage.py  Project: ismaelresp/flame
def action_parameters(space, version=None, oformat='text'):
    ''' Returns the parameters of a given space and version, either as an object or printed as text '''

    if space is None:
        return False, 'Empty space label'

    from flame.parameters import Parameters

    param = Parameters()
    param.loadYaml(space, version, isSpace=True)

    if oformat == 'JSON':
        return True, param.dumpJSON()

    else:

        order = [
            'input_type',
            'quantitative',
            'SDFile_activity',
            'SDFile_name',
            'SDFile_id',
            'SDFile_experimental',
            'normalize_method',
            'ionize_method',
            'convert3D_method',
            'computeMD_method',
            'model',
            'modelAutoscaling',
            'tune',
            'conformal',
            'conformalSignificance',
            'ModelValidationCV',
            'ModelValidationLC',
            'ModelValidationN',
            'ModelValidationP',
            'output_format',
            'output_md',
            'TSV_activity',
            'TSV_objnames',
            'TSV_varnames',
            'imbalance',
            'feature_selection',
            'feature_number',
            'mol_batch',
            'ensemble_models',
            'ensemble_versions',
            'numCPUs',
            'verbose_error',
            'modelingToolkit',
            'endpoint',
            'model_path',
            #'md5',
            'version'
        ]

        order += [
            'MD_settings', 'RF_parameters', 'RF_optimize', 'SVM_parameters',
            'SVM_optimize', 'PLSDA_parameters', 'PLSDA_optimize',
            'PLSR_parameters', 'PLSR_optimize', 'GNB_parameters'
        ]

        for ik in order:
            if ik in param.p:
                k = ik
                v = param.p[k]

                ivalue = ''
                idescr = ''
                ioptio = ''

                ## newest parameter formats are extended and contain
                ## rich metainformation for each entry
                if param.extended:
                    if 'value' in v:
                        if not isinstance(v['value'], dict):
                            ivalue = v['value']
                        else:
                            # print header of dictionary
                            print(f'{k} :')

                            # iterate keys assuming existence of value and description
                            for intk in v['value']:
                                intv = v['value'][intk]

                                iivalue = ''
                                if "value" in intv:
                                    iivalue = intv["value"]

                                iidescr = ''
                                if "description" in intv and intv[
                                        "description"] is not None:
                                    iidescr = intv["description"]

                                iioptio = ''
                                if 'options' in intv:
                                    toptio = intv['options']

                                    if isinstance(toptio, list):
                                        if toptio != [None]:
                                            iioptio = f' {toptio}'

                                if isinstance(iivalue, float):
                                    iivalue = f'{iivalue:f}'
                                elif iivalue is None:
                                    iivalue = ''

                                print(
                                    f'   {intk:27} : {str(iivalue):30} #{iioptio} {iidescr}'
                                )

                            continue

                    if 'description' in v:
                        idescr = v['description']

                    if 'options' in v:
                        toptio = v['options']

                        if isinstance(toptio, list):
                            ioptio = f' {toptio}'

                ### compatibility: old style parameters
                else:
                    if not isinstance(v, dict):
                        ivalue = v
                    else:
                        ivalue = '*dictionary*'
                ### end compatibility

                if isinstance(ivalue, float):
                    ivalue = f'{ivalue:f}'
                elif ivalue is None:
                    ivalue = ''

                print(f'{k:30} : {str(ivalue):30} #{ioptio} {idescr}')

        return True, 'parameters listed'
Code Example #11
File: manage.py  Project: phi-grib/flame
def action_refresh(model=None, version=None, GUI=False):
    '''
    Rebuild one or many models making use of existing parameter files and
    locally stored training series. 
    '''

    import flame.context as context
    from flame.parameters import Parameters
    # from flame.documentation import Documentation
    import logging

    if GUI:
        token_file = os.path.join(tempfile.gettempdir(), 'refreshing_' + model)
        # update token file with content 'working'
        with open(token_file, 'w') as f:
            f.write('Analyzing and sorting models...')

    # list endpoints relevant for the arguments
    if model is not None:
        model_list = [model]
    else:
        model_root = pathlib.Path(utils.model_repository_path())
        model_list = [x.stem for x in model_root.iterdir() if x.is_dir()]

    # list versions relevant for the arguments
    task_list = []
    for imodel in model_list:

        if version is not None:
            task_list.append((imodel, version))
        else:
            model_root = pathlib.Path(utils.model_tree_path(imodel))
            itask_list = [(imodel, utils.modeldir2ver(x.stem))
                          for x in model_root.iterdir() if x.is_dir()]
            task_list += itask_list  # use "+=" and not "append" to merge the new list with the old one

    # analyze task_list and add ensemble models at the end
    # this is needed to have low-level models refreshed BEFORE the high-level models,
    # eliminating the need to refresh them recursively
    LOG.info("Analyzing and sorting models...")

    # make sure the lower models are in task_list and, if not, force their inclusion
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])

        if not success:
            continue

        if param.getVal('input_type') == 'model_ensemble':
            ens_nams = param.getVal('ensemble_names')
            ens_vers = param.getVal('ensemble_versions')
            for i in range(len(ens_nams)):
                iver = 0
                inam = ens_nams[i]
                if (i < len(ens_vers)):
                    iver = ens_vers[i]
                if ((inam, iver)) not in task_list:
                    task_list.append((inam, iver))

    # create separate lists for regular and ensemble models
    # and add ensemble models at the end
    # this needs to be carried out after the previous step because
    # some of the lower level models could be an ensemble model
    # itself
    mol_list = []
    ens_list = []
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])

        if not success:
            mol_list.append(itask)
            continue

        if param.getVal('input_type') == 'model_ensemble':
            ens_list.append(itask)
        else:
            mol_list.append(itask)

    task_list = mol_list + ens_list

    # show all models before starting
    LOG.info(
        "Starting model refreshing task for the following models and versions")
    for itask in task_list:
        LOG.info(f'   model: {itask[0]}   version: {itask[1]}')

    LOG.info("This can take some time, please be patient...")

    source_dir = os.path.dirname(os.path.abspath(__file__))
    children_dir = os.path.join(source_dir, 'children')
    master_parameters = os.path.join(children_dir, 'parameters.yaml')
    master_documentation = os.path.join(children_dir, 'documentation.yaml')

    # now send the build command for each task
    for itask in task_list:

        destinat_path = utils.model_path(itask[0], 0)  # dev

        if itask[1] != 0:
            # move version to /dev for building
            original_path = utils.model_path(itask[0], itask[1])  # veri
            security_path = destinat_path + '_security'  # dev_sec
            shutil.move(destinat_path, security_path)  # dev --> dev_sec
            shutil.move(original_path, destinat_path)  # veri --> dev

        LOG.info(
            f'   refreshing model: {itask[0]}   version: {itask[1]} ({task_list.index(itask)+1} of {len(task_list)})...'
        )
        if GUI:
            with open(token_file, 'w') as f:
                f.write(
                    f'model: {itask[0]} version: {itask[1]} ({task_list.index(itask)+1} of {len(task_list)})'
                )

        # disable LOG output
        logging.disable(logging.ERROR)

        # update parameters
        dump_parameters = os.path.join(destinat_path, 'parameters_dump.yaml')
        success, param = action_parameters(itask[0], 0, oformat='bin')
        if success:
            param_yaml = param.dumpYAML()
            with open(dump_parameters, 'w') as f:
                for line in param_yaml:
                    f.write(line + '\n')
        else:
            LOG.info(
                f'   ERROR: unable to merge parameters for model: {itask[0]}   version: {itask[1]}'
            )
            dump_parameters = None

        original_parameters = os.path.join(destinat_path, 'parameters.yaml')
        shutil.copy(master_parameters, original_parameters)

        # update documentation
        dump_documentation = os.path.join(destinat_path,
                                          'documentation_dump.yaml')
        success, documentation = action_documentation(itask[0],
                                                      0,
                                                      doc_file=None,
                                                      oformat='bin')

        original_documentation = os.path.join(destinat_path,
                                              'documentation.yaml')
        shutil.copy(master_documentation, original_documentation)

        if success:
            documentation_yaml = documentation.dumpYAML()
            with open(dump_documentation, 'w') as f:
                for line in documentation_yaml:
                    line = line.encode("ascii", "ignore")
                    line = line.decode("ascii", "ignore")
                    f.write(line + '\n')
            s2, documentation = action_documentation(itask[0],
                                                     0,
                                                     doc_file=None,
                                                     oformat='bin')
            s3, r3 = documentation.delta(itask[0], 0, dump_documentation)
        else:
            LOG.info(
                f'   ERROR: unable to merge documentation for model: {itask[0]}   version: {itask[1]}'
            )

        # rebuild the model
        command_build = {
            'endpoint': itask[0],
            'infile': None,
            'param_file': dump_parameters,
            'incremental': False
        }

        success, results = context.build_cmd(command_build)

        # enable LOG output
        logging.disable(logging.NOTSET)

        if itask[1] != 0:
            shutil.move(destinat_path, original_path)  # dev --> veri
            shutil.move(security_path, destinat_path)  # dev_sec --> dev

        if not success:
            LOG.error(results)

    LOG.info("Model refreshing task finished")

    if GUI:
        # update token file with status 'ready'
        with open(token_file, 'w') as f:
            f.write('ready')

    return True, 'OK'
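
A usage sketch for the refresh helper; the import path is assumed from the file name manage.py and 'MyModel' is a hypothetical endpoint.

from flame.manage import action_refresh   # assumed import path

# rebuild every stored version of the hypothetical model 'MyModel'
success, message = action_refresh(model='MyModel')

# rebuild the whole model repository instead
# success, message = action_refresh()
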
Code Example #12
def action_refresh(model=None, version=None):
    '''
    Rebuild one or many models making use of existing parameter files and
    locally stored training series. 
    '''

    import flame.context as context
    from flame.parameters import Parameters
    import logging

    # list endpoints relevant for the arguments
    if model is not None:
        model_list = [model]
    else:
        model_root = pathlib.Path(utils.model_repository_path())
        model_list = [x.stem for x in model_root.iterdir() if x.is_dir()]

    # list versions relevant for the arguments
    task_list = []
    for imodel in model_list:
        if version is not None:
            task_list.append((imodel, version))
        else:
            model_root = pathlib.Path(utils.model_tree_path(imodel))
            itask_list = [(imodel, utils.modeldir2ver(x.stem))
                          for x in model_root.iterdir() if x.is_dir()]
            task_list += itask_list  # use "+=" and not "append" to merge the new list with the old one

    # analyze task_list and add ensemble models at the end
    # this is needed to have low-level models refreshed BEFORE the high-level models,
    # eliminating the need to refresh them recursively
    LOG.info("Analyzing and sorting models...")

    # make sure the lower models are in task_list and, if not, force their inclusion
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])

        if not success:
            continue

        if param.getVal('input_type') == 'model_ensemble':
            ens_nams = param.getVal('ensemble_names')
            ens_vers = param.getVal('ensemble_versions')
            for i in range(len(ens_nams)):
                iver = 0
                inam = ens_nams[i]
                if (i < len(ens_vers)):
                    iver = ens_vers[i]
                if ((inam, iver)) not in task_list:
                    task_list.append((inam, iver))

    # create separate lists for regular and ensemble models
    # and add ensemble models at the end
    # this needs to be carried out after the previous step because
    # some of the lower level models could be an ensemble model
    # itself
    mol_list = []
    ens_list = []
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])

        if not success:
            mol_list.append(itask)
            continue

        if param.getVal('input_type') == 'model_ensemble':
            ens_list.append(itask)
        else:
            mol_list.append(itask)

    task_list = mol_list + ens_list

    # show all models before starting
    LOG.info(
        "Starting model refreshing task for the following models and versions")
    for itask in task_list:
        LOG.info(f'   model: {itask[0]}   version: {itask[1]}')

    LOG.info("This can take some time, please be patient...")

    # now send the build command for each task
    for itask in task_list:

        if itask[1] != 0:
            # move version to /dev for building
            original_path = utils.model_path(itask[0], itask[1])  # veri
            destinat_path = utils.model_path(itask[0], 0)  # dev
            security_path = destinat_path + '_security'  # dev_sec
            shutil.move(destinat_path, security_path)  # dev --> dev_sec
            shutil.move(original_path, destinat_path)  # veri --> dev

        LOG.info(
            f'   refreshing model: {itask[0]}   version: {itask[1]} ({task_list.index(itask)+1} of {len(task_list)})...'
        )

        # disable LOG output
        logging.disable(logging.ERROR)

        command_build = {
            'endpoint': itask[0],
            'infile': None,
            'param_file': None,
            'incremental': False
        }

        success, results = context.build_cmd(command_build)

        # enable LOG output
        logging.disable(logging.NOTSET)

        if itask[1] != 0:
            shutil.move(destinat_path, original_path)  # dev --> veri
            shutil.move(security_path, destinat_path)  # dev_sec --> dev

        if not success:
            LOG.error(results)

    LOG.info("Model refreshing task finished")

    return True, 'OK'
Code Example #13
def action_parameters(model, version=None, oformat='text'):
    ''' Returns the parameters of a given model and version, either as an object or printed as text '''

    if model is None:
        return False, 'Empty model label'

    from flame.parameters import Parameters

    param = Parameters()
    success, results = param.loadYaml(model, version)

    if not success:
        print(f'error obtaining parameters for model {model}: {results}')
        return False, results

    if oformat != 'text':
        return True, param

    else:
        yaml = param.dumpYAML()
        for line in yaml:
            print(line)

        # order = ['input_type', 'quantitative', 'SDFile_activity', 'SDFile_name', 'SDFile_id',
        # 'SDFile_experimental', 'SDFile_complementary', 'normalize_method', 'ionize_method', 'convert3D_method',
        # 'computeMD_method', 'model', 'modelAutoscaling', 'tune', 'conformal',
        # 'conformalConfidence', 'ModelValidationCV', 'ModelValidationLC',
        # 'ModelValidationN', 'ModelValidationP', 'output_format', 'output_md', 'output_similar',
        # 'TSV_activity', 'TSV_objnames', 'TSV_varnames', 'imbalance',
        # 'feature_selection', 'feature_number', 'mol_batch',
        # 'ensemble_names','ensemble_versions',
        # 'similarity_metric', 'similarity_cutoff_num', 'similarity_cutoff_distance',
        # 'numCPUs', 'verbose_error', 'modelingToolkit',
        # 'endpoint', 'model_path',
        # #'md5',
        # 'version']

        # order += ['MD_settings', 'RF_parameters','RF_optimize',
        # 'SVM_parameters','SVM_optimize',
        # 'PLSDA_parameters','PLSDA_optimize',
        # 'PLSR_parameters','PLSR_optimize',
        # 'GNB_parameters']

        # for ik in order:
        #     if ik in param.p:
        #         k = ik
        #         v = param.p[k]

        #         ivalue = ''
        #         idescr = ''
        #         ioptio = ''

        #         ## newest parameter formats are extended and contain
        #         ## rich metainformation for each entry
        #         if param.extended:
        #             if 'value' in v:
        #                 if not isinstance(v['value'] ,dict):
        #                     ivalue = v['value']
        #                 else:
        #                     # print header of dictionaty
        #                     print (f'{k} :')

        #                     # iterate keys assuming existence of value and description
        #                     for intk in v['value']:
        #                         intv = v['value'][intk]

        #                         iivalue = ''
        #                         if "value" in intv:
        #                             iivalue = intv["value"]

        #                         iidescr = ''
        #                         if "description" in intv and intv["description"] is not None:
        #                             iidescr = intv["description"]

        #                         iioptio = ''
        #                         if 'options' in intv:
        #                             toptio = intv['options']

        #                             if isinstance(toptio, list):
        #                                 if toptio != [None]:
        #                                     iioptio = f' {toptio}'

        #                         if isinstance (iivalue, float):
        #                             iivalue =  f'{iivalue:f}'
        #                         elif iivalue is None:
        #                             iivalue = ''

        #                         print (f'   {intk:27} : {str(iivalue):30} #{iioptio} {iidescr}')

        #                     continue

        #             if 'description' in v:
        #                 idescr = v['description']

        #             if 'options' in v:
        #                 toptio = v['options']

        #                 if isinstance(toptio, list):
        #                     ioptio = f' {toptio}'

        #         ### compatibility: old stile parameters
        #         else:
        #             if not isinstance(v ,dict):
        #                 ivalue = v
        #             else:
        #                 ivalue = '*dictionary*'
        #         ### end compatibility

        #         if isinstance (ivalue, float):
        #             ivalue =  f'{ivalue:f}'
        #         elif ivalue is None:
        #             ivalue = ''

        #         print (f'{k:30} : {str(ivalue):30} #{ioptio} {idescr}')

        return True, 'parameters listed'