Example #1
def evaluate_model(args):
    for f in args.files:
        try:
            path = f if os.path.isabs(f) else os.path.abspath(f)
            description = load_description_file(path)

            _print_heading("TESTING MODEL: {0}/{1}".format(description['name'], description['session']))
            module_logger.info(f"file: {path}")
            module_logger.info("test data source: ")
            for k, v in description['source'].items():
                # Non-string iterables (e.g. lists) are logged as nested entries.
                if not isinstance(v, str) and hasattr(v, "__iter__"):
                    module_logger.info(f"   {k}: ")
                    for x in v:
                        module_logger.info(f"   - {x}")
                else:
                    module_logger.info(f"   {k}: {v}")
            module_logger.info("")

            manager = create_workflow_from_file(path, overrides={"@mode": "evaluate"})
            result = manager.run()

            # NOTE:
            #   'score' is the value of the configured loss function
            #   'accuracy' is the accuracy metric of the evaluated model

            for k, v in result.__dict__.items():
                module_logger.info(f"{k}: {v}")

        except Exception as e:
            module_logger.error(f"evaluation of {f} failed: {e}")
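
All of these handlers receive an argparse-style `args` object, but the CLI wiring itself is not part of the snippets. A minimal sketch of how evaluate_model could be hooked up, assuming a hypothetical subcommand name and a repeatable files argument (neither is confirmed by the source):

import argparse

def build_parser():
    # Hypothetical CLI wiring; the subcommand and argument names are assumptions.
    parser = argparse.ArgumentParser(prog="mlpipe")
    sub = parser.add_subparsers(dest="command", required=True)

    p_eval = sub.add_parser("evaluate", help="evaluate trained models")
    p_eval.add_argument("files", nargs="+", help="workflow description files")
    p_eval.set_defaults(func=evaluate_model)
    return parser

if __name__ == "__main__":
    cli_args = build_parser().parse_args()
    cli_args.func(cli_args)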
Example #2
def export_data(args):
    import os
    import pandas as pd
    from pathlib import Path

    # args.full enables both pipelines; args.pipelinePrimary enables only the primary one.
    overrides = {
        "@mode": "export",
        "@pipelinePrimary": bool(args.pipelinePrimary or args.full),
        "@pipelineSecondary": bool(args.full)
    }

    suffix = "source"

    if overrides['@pipelinePrimary']:
        suffix = "primary"

    if overrides['@pipelineSecondary']:
        suffix = "full"

    for f in args.files:
        _print_heading("EXPORT")
        desc_file = Path(f if os.path.isabs(f) else os.path.abspath(f))
        result: pd.DataFrame = create_workflow_from_file(desc_file, overrides=overrides).run()
        output_path = os.path.splitext(desc_file)[0] + f"__{suffix}.csv"
        module_logger.info(f"writing csv: {output_path}")
        result.to_csv(output_path)
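
Because export_data only reads attributes from args, it can also be driven directly without a CLI, e.g. in a test. A small usage sketch, with a hypothetical description file path:

from types import SimpleNamespace

# Attribute names mirror the flags read by export_data above.
args = SimpleNamespace(
    files=["workflows/example.training.yml"],  # hypothetical path
    pipelinePrimary=True,
    full=False,
)
export_data(args)  # expected output: .../example.training__primary.csv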
Example #3
def integrate_model(args):
    f = args.file
    path = f if os.path.isabs(f) else os.path.abspath(f)
    description = load_description_file(path)
    _print_heading(f"INTEGRATE MODEL: {description['name']}/{description['session']}")
    module_logger.info(f"file: {path}")
    manager = create_workflow_from_file(path, overrides={"@mode": "integrate"})
    manager.run()
Example #4
def test_analyze(self):
    path_to_file = dir_mlpipe / "test" / "workflows" / "example.training.yml"
    manager = create_workflow_from_file(path_to_file,
                                        overrides={"@mode": "analyze"})
    result = manager.run()
    # Serializing the result verifies that it is JSON-compatible (NaN-safe).
    data = simplejson.dumps(result,
                            ignore_nan=True,
                            default=lambda o: o.__dict__)
Example #5
def test_training(self):
    path_to_file = dir_mlpipe / "test" / "workflows" / "example.training.yml"
    manager = create_workflow_from_file(path_to_file,
                                        overrides={"@mode": "train"})
    train_dir, _model = manager.run()
    print(train_dir, _model)
    # Derive (model name, session id) from the training directory, as train_model does.
    global _name_session
    p = Path(train_dir)
    _name_session = p.parent.name, p.name
Example #6
def train_model(args):
    from mlpipe.dsl_interpreter.interpreter import create_workflow_from_file

    for f in args.files:
        path = f if os.path.isabs(f) else os.path.abspath(f)
        description = load_description_file(path)
        _print_heading(f"TRAINING MODEL: {description['name']}")
        module_logger.info(f"file: {path}")
        manager = create_workflow_from_file(path, overrides={"@mode": "train"})
        path_training_dir, model = manager.run()
        p = Path(path_training_dir)
        model_name, session_id = p.parent.name, p.name
        module_logger.info(f"model/session_id: {model_name}/{session_id}")
        module_logger.info(f"trained model: {path_training_dir}")
Example #7
def analyze_data(args):
    import simplejson
    import os

    for f in args.files:
        _print_heading("ANALYZE")
        desc_file = f if os.path.isabs(f) else os.path.abspath(f)
        result: AnalyticsResult = create_workflow_from_file(desc_file, overrides={"@mode": "analyze"}).run()
        data = simplejson.dumps(result, ignore_nan=True, default=lambda o: o.__dict__)
        file_basename = '.'.join(os.path.basename(desc_file).split('.')[:-1])
        output_folder = Path(os.path.dirname(desc_file))
        output_file = output_folder / f"report_{file_basename}.html"
        output_file_json = output_folder / f"report_{file_basename}.json"
        print(f"report written: {output_file}")
        generate_html_report(json_str=data, output_path=output_file)

        if args.json:
            print(f"json file written: {output_file_json}")
            write_text_file(output_file_json, data)
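
generate_html_report and write_text_file are project helpers that are not shown in these snippets. For orientation, a minimal write_text_file could look like the following sketch (not the project's actual implementation):

from pathlib import Path

def write_text_file(path, content: str) -> None:
    # Sketch: write UTF-8 text, creating parent directories if needed.
    path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content, encoding="utf-8")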