Example #1
def action_dir():
    '''
    Returns a JSON string with the list of spaces and versions
    '''
    # get the space repo path
    spaces_path = pathlib.Path(utils.space_repository_path())

    # get directories in space repo path
    dirs = [x for x in spaces_path.iterdir() if x.is_dir()]

    # if dir contains dev/ -> is space (NAIVE APPROACH)
    # get last dir name [-1]: space name
    space_dirs = [d.parts[-1] for d in dirs if list(d.glob('dev'))]

    results = []
    for ispace in space_dirs:
        idict = {}
        idict["spacename"] = ispace
        versions = [0]

        for iversion in os.listdir(utils.space_tree_path(ispace)):
            if iversion.startswith('ver'):
                versions.append(utils.modeldir2ver(iversion))

        idict["versions"] = versions
        results.append(idict)

    # print (json.dumps(results))
    return True, json.dumps(results)
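
A minimal usage sketch for this variant, based only on the (success, JSON string) return value shown above; the pretty-printing loop is illustrative and not part of the original module:

import json

success, payload = action_dir()
if success:
    # payload is a JSON string: a list of {"spacename": ..., "versions": [...]} dicts
    for entry in json.loads(payload):
        print(f"space {entry['spacename']}: versions {entry['versions']}")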
Example #2
def action_dir():
    '''
    Returns a list of models and versions
    '''
    # get the model repo path
    models_path = pathlib.Path(utils.model_repository_path())
    if not models_path.is_dir():
        return False, 'the model repository path does not exist. Please run "flame -c config".'

    # get directories in model repo path
    dirs = [x for x in models_path.iterdir() if x.is_dir()]

    # if dir contains dev/ -> is model (NAIVE APPROACH)
    # get last dir name [-1]: model name
    model_dirs = [d.parts[-1] for d in dirs if list(d.glob('dev'))]

    label_defaults = {
        'maturity': 'dev',
        'type': 'unk',
        'subtype': 'unk',
        'endpoint': 'unk',
        'species': 'unk'
    }
    results = []
    for imodel in model_dirs:
        idict = {}
        idict["modelname"] = imodel
        idict["version"] = 0
        idict["info"] = action_info(imodel, 0, output=None)[1]
        success, label = action_label(imodel, 0, oformat='object')
        if success:
            idict["label"] = label
        else:
            idict["label"] = label_defaults

        results.append(idict)

        for iversion in os.listdir(utils.model_tree_path(imodel)):
            if iversion.startswith('ver'):
                idict = {}
                idict["modelname"] = imodel
                idict["version"] = utils.modeldir2ver(iversion)
                idict["info"] = action_info(imodel,
                                            idict["version"],
                                            output=None)[1]
                success, label = action_label(imodel,
                                              idict["version"],
                                              oformat='object')
                if success:
                    idict["label"] = label
                else:
                    idict["label"] = label_defaults
                results.append(idict)

    # print (results)
    return True, results
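
A hypothetical caller-side sketch that regroups the flat result list of this variant per model; the dictionary keys match those built above, while the grouping helper itself is an assumption of this note:

from collections import defaultdict

success, results = action_dir()
if success:
    versions_by_model = defaultdict(list)
    for entry in results:
        versions_by_model[entry["modelname"]].append(entry["version"])
    for name, versions in versions_by_model.items():
        print(f"model {name}: versions {sorted(versions)}")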
Example #3
def action_dir():
    '''
    Returns the list of spaces and versions
    '''
    # get the space repo path
    spaces_path = pathlib.Path(utils.space_repository_path())
    if not spaces_path.is_dir():
        return False, 'the spaces repository path does not exist. Please run "flame -c config".'

    # get directories in space repo path
    dirs = [x for x in spaces_path.iterdir() if x.is_dir()]

    # if dir contains dev/ -> is space (NAIVE APPROACH)
    # get last dir name [-1]: space name
    space_dirs = [d.parts[-1] for d in dirs if list(d.glob('dev'))]

    results = []
    for ispace in space_dirs:
        idict = {}
        idict["spacename"] = ispace
        idict["version"] = 0
        idict["info"] = action_info(ispace, 0, output=None)[1]

        results.append(idict)

        for iversion in os.listdir(utils.space_tree_path(ispace)):
            if iversion.startswith('ver'):
                idict = {}
                idict["spacename"] = ispace
                idict["version"] = utils.modeldir2ver(iversion)
                idict["info"] = action_info(ispace,
                                            idict["version"],
                                            output=None)[1]

                results.append(idict)

    # print (json.dumps(results))
    return True, results
Example #4
def action_report():
    '''
    Returns a list of models and the results of each one
    '''
    # get the model repo path
    models_path = pathlib.Path(utils.model_repository_path())

    # get directories in model repo path
    dirs = [x for x in models_path.iterdir() if x.is_dir()]

    results = []

    # iterate models
    for d in dirs:
        imodel_name = d.parts[-1]
        imodel_vers = [x.parts[-1] for x in d.iterdir() if x.is_dir()]

        # make sure the model contains 'dev' to recognize models
        if 'dev' not in imodel_vers:
            continue

        imodel_vers_info = []
        for ivtag in imodel_vers:

            iver = utils.modeldir2ver(ivtag)

            # now we have the model name and version, try to get the info
            try:
                isuccess, iresult = action_info(imodel_name,
                                                iver,
                                                output='bin')
            except Exception:
                # skip versions whose info cannot be retrieved
                continue

            if not isuccess:
                continue

            # build a tuple (version, object) for each version and append
            imodel_vers_info.append((iver, iresult))

        # build a tuple (model_name, [version_info]) for each model and append
        results.append((imodel_name, imodel_vers_info))

    return True, results
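
A short, purely illustrative sketch of traversing the nested (model_name, [(version, info), ...]) structure returned above; the content of each info object depends on action_info(..., output='bin') and is not assumed here:

success, report = action_report()
if success:
    for model_name, version_info in report:
        for version, info in version_info:
            # 'info' is whatever action_info returned for this model/version
            print(f"{model_name} ver {version}: {type(info).__name__}")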
Example #5
def action_refresh(model=None, version=None, GUI=False):
    '''
    Rebuild one or many models making use of existing parameter files and
    locally stored training series. 
    '''

    import flame.context as context
    from flame.parameters import Parameters
    # from flame.documentation import Documentation
    import logging

    if GUI:
        token_file = os.path.join(tempfile.gettempdir(), 'refreshing_' + model)
        # update token file with content 'working'
        with open(token_file, 'w') as f:
            f.write('Analyzing and sorting models...')

    # list endpoints relevant for the arguments
    if model is not None:
        model_list = [model]
    else:
        model_root = pathlib.Path(utils.model_repository_path())
        model_list = [x.stem for x in model_root.iterdir() if x.is_dir()]

    # list versions relevant for the arguments
    task_list = []
    for imodel in model_list:

        if version is not None:
            task_list.append((imodel, version))
        else:
            model_root = pathlib.Path(utils.model_tree_path(imodel))
            itask_list = [(imodel, utils.modeldir2ver(x.stem))
                          for x in model_root.iterdir() if x.is_dir()]
            task_list += itask_list  # use "+=" and not "append" to merge the new list with the old one

    # analyze task_list and append ensemble models at the end
    # this is needed so that lower-level models are refreshed BEFORE the higher-level models,
    # eliminating the need to refresh them recursively
    LOG.info("Analyzing and sorting models...")

    # make sure the lower models are in task_list and, if not, force their inclusion
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])

        if not success:
            continue

        if param.getVal('input_type') == 'model_ensemble':
            ens_nams = param.getVal('ensemble_names')
            ens_vers = param.getVal('ensemble_versions')
            for i in range(len(ens_nams)):
                iver = 0
                inam = ens_nams[i]
                if i < len(ens_vers):
                    iver = ens_vers[i]
                if (inam, iver) not in task_list:
                    task_list.append((inam, iver))

    # create separate lists for regular and ensemble models
    # and add ensemble models at the end
    # this needs to be carried out after the previous step because
    # some of the lower-level models could be ensemble models themselves
    mol_list = []
    ens_list = []
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])

        if not success:
            mol_list.append(itask)
            continue

        if param.getVal('input_type') == 'model_ensemble':
            ens_list.append(itask)
        else:
            mol_list.append(itask)

    task_list = mol_list + ens_list

    # show all models before starting
    LOG.info(
        "Starting model refreshing task for the following models and versions")
    for itask in task_list:
        LOG.info(f'   model: {itask[0]}   version: {itask[1]}')

    LOG.info("This can take some time, please be patient...")

    source_dir = os.path.dirname(os.path.abspath(__file__))
    children_dir = os.path.join(source_dir, 'children')
    master_parameters = os.path.join(children_dir, 'parameters.yaml')
    master_documentation = os.path.join(children_dir, 'documentation.yaml')

    # now send the build command for each task
    for itask in task_list:

        destinat_path = utils.model_path(itask[0], 0)  # dev

        if itask[1] != 0:
            # move version to /dev for building
            original_path = utils.model_path(itask[0], itask[1])  # veri
            security_path = destinat_path + '_security'  # dev_sec
            shutil.move(destinat_path, security_path)  # dev --> dev_sec
            shutil.move(original_path, destinat_path)  # veri --> dev

        LOG.info(
            f'   refreshing model: {itask[0]}   version: {itask[1]} ({task_list.index(itask)+1} of {len(task_list)})...'
        )
        if GUI:
            with open(token_file, 'w') as f:
                f.write(
                    f'model: {itask[0]} version: {itask[1]} ({task_list.index(itask)+1} of {len(task_list)})'
                )

        # disable LOG output
        logging.disable(logging.ERROR)

        # update parameters
        dump_parameters = os.path.join(destinat_path, 'parameters_dump.yaml')
        success, param = action_parameters(itask[0], 0, oformat='bin')
        if success:
            param_yaml = param.dumpYAML()
            with open(dump_parameters, 'w') as f:
                for line in param_yaml:
                    f.write(line + '\n')
        else:
            LOG.info(
                f'   ERROR: unable to merge parameters for model: {itask[0]}   version: {itask[1]}'
            )
            dump_parameters = None

        original_parameters = os.path.join(destinat_path, 'parameters.yaml')
        shutil.copy(master_parameters, original_parameters)

        # update documentation
        dump_documentation = os.path.join(destinat_path,
                                          'documentation_dump.yaml')
        success, documentation = action_documentation(itask[0],
                                                      0,
                                                      doc_file=None,
                                                      oformat='bin')

        original_documentation = os.path.join(destinat_path,
                                              'documentation.yaml')
        shutil.copy(master_documentation, original_documentation)

        if success:
            documentation_yaml = documentation.dumpYAML()
            with open(dump_documentation, 'w') as f:
                for line in documentation_yaml:
                    # drop any non-ASCII characters before writing
                    line = line.encode("ascii", "ignore")
                    line = line.decode("ascii", "ignore")
                    f.write(line + '\n')
            s2, documentation = action_documentation(itask[0],
                                                     0,
                                                     doc_file=None,
                                                     oformat='bin')
            s3, r3 = documentation.delta(itask[0], 0, dump_documentation)
        else:
            LOG.info(
                f'   ERROR: unable to merge documentation for model: {itask[0]}   version: {itask[1]}'
            )

        # rebuild the model
        command_build = {
            'endpoint': itask[0],
            'infile': None,
            'param_file': dump_parameters,
            'incremental': False
        }

        success, results = context.build_cmd(command_build)

        # enable LOG output
        logging.disable(logging.NOTSET)

        if itask[1] != 0:
            shutil.move(destinat_path, original_path)  # dev --> veri
            shutil.move(security_path, destinat_path)  # dev_sec --> dev

        if not success:
            LOG.error(results)

    LOG.info("Model refreshing task finished")

    if GUI:
        # update token file with status 'ready'
        with open(token_file, 'w') as f:
            f.write('ready')

    return True, 'OK'
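
The veri --> dev swap used above when building non-dev versions can be isolated into a small helper. The sketch below is an assumption of this note and not part of flame: it reuses shutil and the same utils.model_path() call seen in the example, and adds a try/finally so the directories are restored even if the build raises:

import shutil
from contextlib import contextmanager

@contextmanager
def version_in_dev(model, version):
    '''Temporarily move a model version into the dev slot and restore it afterwards.'''
    dev_path = utils.model_path(model, 0)        # dev
    if version == 0:
        # the dev version is already in place, nothing to move
        yield dev_path
        return
    ver_path = utils.model_path(model, version)  # veri
    sec_path = dev_path + '_security'            # dev_sec
    shutil.move(dev_path, sec_path)              # dev --> dev_sec
    shutil.move(ver_path, dev_path)              # veri --> dev
    try:
        yield dev_path
    finally:
        shutil.move(dev_path, ver_path)          # dev --> veri
        shutil.move(sec_path, dev_path)          # dev_sec --> dev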
Example #6
def action_refresh(model=None, version=None):
    '''
    Rebuild one or many models making use of existing parameter files and
    locally stored training series. 
    '''

    import flame.context as context
    from flame.parameters import Parameters
    import logging

    # list endpoints relevant for the arguments
    if model is not None:
        model_list = [model]
    else:
        model_root = pathlib.Path(utils.model_repository_path())
        model_list = [x.stem for x in model_root.iterdir() if x.is_dir()]

    # list versions relevant for the arguments
    task_list = []
    for imodel in model_list:
        if version is not None:
            task_list.append((imodel, version))
        else:
            model_root = pathlib.Path(utils.model_tree_path(imodel))
            itask_list = [(imodel, utils.modeldir2ver(x.stem))
                          for x in model_root.iterdir() if x.is_dir()]
            task_list += itask_list  # use "+=" and not "append" to merge the new list with the old one

    # analyze task_list and append ensemble models at the end
    # this is needed so that lower-level models are refreshed BEFORE the higher-level models,
    # eliminating the need to refresh them recursively
    LOG.info("Analyzing and sorting models...")

    # make sure the lower models are in task_list and, if not, force their inclusion
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])

        if not success:
            continue

        if param.getVal('input_type') == 'model_ensemble':
            ens_nams = param.getVal('ensemble_names')
            ens_vers = param.getVal('ensemble_versions')
            for i in range(len(ens_nams)):
                iver = 0
                inam = ens_nams[i]
                if i < len(ens_vers):
                    iver = ens_vers[i]
                if (inam, iver) not in task_list:
                    task_list.append((inam, iver))

    # create separate lists for regular and ensemble models
    # and add ensemble models at the end
    # this needs to be carried out after the previous step because
    # some of the lower-level models could be ensemble models themselves
    mol_list = []
    ens_list = []
    for itask in task_list:
        param = Parameters()
        success, results = param.loadYaml(itask[0], itask[1])

        if not success:
            mol_list.append(itask)
            continue

        if param.getVal('input_type') == 'model_ensemble':
            ens_list.append(itask)
        else:
            mol_list.append(itask)

    task_list = mol_list + ens_list

    # show all models before starting
    LOG.info(
        "Starting model refreshing task for the following models and versions")
    for itask in task_list:
        LOG.info(f'   model: {itask[0]}   version: {itask[1]}')

    LOG.info("This can take some time, please be patient...")

    # now send the build command for each task
    for itask in task_list:

        if itask[1] != 0:
            # move version to /dev for building
            original_path = utils.model_path(itask[0], itask[1])  # veri
            destinat_path = utils.model_path(itask[0], 0)  # dev
            security_path = destinat_path + '_security'  # dev_sec
            shutil.move(destinat_path, security_path)  # dev --> dev_sec
            shutil.move(original_path, destinat_path)  # veri --> dev

        LOG.info(
            f'   refreshing model: {itask[0]}   version: {itask[1]} ({task_list.index(itask)+1} of {len(task_list)})...'
        )

        # disable LOG output
        logging.disable(logging.ERROR)

        command_build = {
            'endpoint': itask[0],
            'infile': None,
            'param_file': None,
            'incremental': False
        }

        success, results = context.build_cmd(command_build)

        # enable LOG output
        logging.disable(logging.NOTSET)

        if itask[1] != 0:
            shutil.move(destinat_path, original_path)  # dev --> veri
            shutil.move(security_path, destinat_path)  # dev_sec --> dev

        if not success:
            LOG.error(results)

    LOG.info("Model refreshing task finished")

    return True, 'OK'
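
A brief usage sketch based only on the signature and return value shown above; the model name 'MyModel' is hypothetical:

# refresh a single version of one model
success, message = action_refresh(model='MyModel', version=1)

# refresh every version of every model in the repository
success, message = action_refresh()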