Example #1
def run_model(hyper_parameter_map):
    instance_directory = hyper_parameter_map['instance_directory']
    os.chdir(instance_directory)
    global logger
    logger = log_tools.get_logger(logger, "MODEL RUNNER")
    obj_return = get_obj_return()
    result = run_pre(hyper_parameter_map)
    if result == ModelResult.ERROR:
        print("run_pre() returned ERROR!")
        sys.exit(1)
    elif result == ModelResult.SKIP:
        log("run_pre() returned SKIP ...")
        sys.stdout.flush()
        return ("SKIP", "HISTORY_EMPTY")
    else:
        assert result == ModelResult.SUCCESS  # proceed...

    result, history = run(hyper_parameter_map, obj_return)
    runner_utils.write_output(result, instance_directory)
    runner_utils.write_output(
        json.dumps(history, cls=runner_utils.FromNPEncoder),
        instance_directory, 'history.txt')

    run_post(hyper_parameter_map, {})
    log("RUN STOP")
    return (result, history)
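
For reference, a minimal sketch of the hyper_parameter_map that run_model() consumes. The keys shown are the ones read in these examples; every value is a placeholder, and the full key set depends on run_pre() and run(), which are not shown in full.

# Illustrative only: placeholder values for the keys read in these examples.
hyper_parameter_map = {
    'instance_directory': '/tmp/experiment/run-0001',  # per-run working directory
    'framework': 'keras',
    'model_name': 'my_model',    # hypothetical model name
    'run_id': 'run-0001',
}
result, history = run_model(hyper_parameter_map)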
Example #2
def run(hyper_parameter_map, callbacks):

    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is a Python dictionary
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        #if not k in params:
        #    raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
        params[k] = v

    runner_utils.write_params(params, hyper_parameter_map)
    history = pkg.run(params, callbacks)

    runner_utils.keras_clear_session(framework)

    # use the last validation_loss as the value to minimize
    val_loss = history.history['val_loss']
    result = val_loss[-1]
    print("result: ", result)
    return result
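
For context, history here is the object returned by Keras fit(): its history attribute maps metric names to per-epoch values. A minimal stand-in, where FakeHistory and its loss values are invented for illustration:

# Illustrative only: mimics the .history attribute of a Keras History object.
class FakeHistory:
    def __init__(self):
        self.history = {'val_loss': [0.92, 0.71, 0.65]}

history = FakeHistory()
result = history.history['val_loss'][-1]  # final-epoch validation loss
print("result: ", result)                 # result:  0.65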
Example #3
def run(hyper_parameter_map, obj_return):

    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is a Python dictionary
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        #if not k in params:
        #    raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
        if k == "dense":
            if not isinstance(v, list):
                v = v.split(" ")
            v = [int(i) for i in v]
        if k == "dense_feature_layers":
            if not isinstance(v, list):
                v = v.split(" ")
            v = [int(i) for i in v]
        if k == "cell_features":
            # wrap the single feature string in a list
            v = [v]
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    if history is not None:
        # Return the history entry that the user requested.
        val_loss = history.history[obj_return]
        # Return a large number for nan and flip sign for val_corr
        if obj_return == "val_loss":
            if math.isnan(val_loss[-1]):
                result = 999999999
            else:
                result = val_loss[-1]
        elif obj_return == "val_corr":
            if math.isnan(val_loss[-1]):
                result = 999999999
            else:
                result = -val_loss[-1]  # note the negative sign: correlation is maximized
        else:
            raise ValueError("Unsupported objective function (use obj_param to specify val_corr or val_loss): {}".format(obj_return))

        print("result: " + str(result))
    return result
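
The 'dense' and 'dense_feature_layers' branches above normalize a space-separated string of layer sizes into a list of ints, and 'cell_features' wraps a single string in a list. A standalone sketch on made-up values:

# Illustrative only: the normalization applied above, on invented inputs.
v = '1000 500 100'          # layer sizes passed as one string
if not isinstance(v, list):
    v = v.split(' ')
v = [int(i) for i in v]     # -> [1000, 500, 100]

cf = 'expression'           # hypothetical cell_features value
cf = [cf]                   # -> ['expression']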
Example #4
def run(hyper_parameter_map, obj_return):
    start = time.time()
    global logger
    logger = log_tools.get_logger(logger, 'MODEL RUNNER')

    log("START:")
    sys.stdout.flush()

    directory = hyper_parameter_map['instance_directory']
    os.chdir(directory)

    with open(directory + '/rank.txt', 'w') as fp:
        fp.write(str(os.getenv('ADLB_RANK_SELF')) + '\n')

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    params_arg = {}
    if 'config_file' in hyper_parameter_map:
        config_file = hyper_parameter_map['config_file']
        logger.info('specified config_file: "%s"' % config_file)
        params_arg = {'default_model': config_file}

    # params is a Python dictionary
    params = setup_params(pkg, hyper_parameter_map, params_arg)

    Ps = setup_perf(params)

    # Run the model!
    history = pkg.run(params)

    if framework == 'keras':
        runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    history_result = {}
    if history is not None:
        result, history_result = get_results(history, obj_return)

    stop_perf(Ps)

    finish = time.time()
    duration = finish - start
    log(" DONE: run_id %s in %0.2f seconds." %
        (hyper_parameter_map["run_id"], duration))
    return (result, history_result)
Example #5
def run(hyper_parameter_map, obj_return):

    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is a Python dictionary
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        #if not k in params:
        #    raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
        if k == "dense":
            if not isinstance(v, list):
                v = v.split(" ")
            v = [int(i) for i in v]
        if k == "dense_feature_layers":
            if not isinstance(v, list):
                v = v.split(" ")
            v = [int(i) for i in v]
        if k == "cell_features":
            # wrap the single feature string in a list
            v = [v]
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    if history is not None:
        # Return the history entry that the user requested.
        val_loss = history.history[obj_return]
        result = val_loss[-1]
    print("result: " + result)
    return result
Example #6
def run(hyper_parameter_map, obj_param):

    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is a Python dictionary
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        #if not k in params:
        #    raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
        if k == "dense":
            if not isinstance(v, list):
                v = v.split(" ")
            v = [int(i) for i in v]
        if k == "dense_feature_layers":
            if not isinstance(v, list):
                v = v.split(" ")
            v = [int(i) for i in v]
        if k == "cell_features":
            # wrap the single feature string in a list
            v = [v]
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # use the last validation_loss as the value to minimize
    # val_loss = history.history['val_loss']
    # result = val_loss[-1]
    result = history
    print("result: ", result)
    return result
Example #7
    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # use the last validation_loss as the value to minimize
    # val_loss = history.history['val_loss']
    # result = val_loss[-1]
    result = history
    print("result: ", result)
    return result


# Usage: see how sys.argv is unpacked below:
if __name__ == '__main__':
    logger = log_tools.get_logger(logger, __name__)
    logger.debug("RUN START")

    (
        _,  # The Python program name (unused)
        param_string,
        instance_directory,
        framework,
        runid,
        obj_param,
        benchmark_timeout) = sys.argv

    hyper_parameter_map = runner_utils.init(param_string, instance_directory,
                                            framework, 'save')
    hyper_parameter_map['model_name'] = os.getenv("MODEL_NAME")
    hyper_parameter_map['experiment_id'] = os.getenv("EXPID")
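
Given the unpacking above, the script expects exactly six positional arguments after the program name. A sketch of an equivalent argument vector, with placeholder values; the exact param_string format is whatever runner_utils.init() parses, which is not shown here:

# Illustrative only: placeholder argv matching the unpacking above.
sys.argv = [
    'model_runner.py',            # program name (unused)
    '{"epochs": 3}',              # param_string (format assumed)
    '/tmp/experiment/run-0001',   # instance_directory
    'keras',                      # framework
    'run-0001',                   # runid
    'val_loss',                   # obj_param
    '3600',                       # benchmark_timeout (units assumed)
]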
Example #8
    if math.isnan(result):
        if obj_return == "val_corr" or obj_return == "val_dice_coef":
            # Return the negative result
            result = -result
        else:
            # Just return a large number
            result = 999999999

    print("result: " + obj_return + ": " + str(result))
    history_result = history.history.copy()
    return result, history_result


# Usage: see how sys.argv is unpacked below:
if __name__ == '__main__':
    logger = log_tools.get_logger(logger, "MODEL_RUNNER")
    log("RUN START")

    (
        _,  # The Python program name (unused)
        param_string,
        instance_directory,
        framework,
        runid,
        benchmark_timeout) = sys.argv

    hyper_parameter_map = runner_utils.init(param_string,
                                            instance_directory,
                                            framework,
                                            out_dir_key='save')
    hyper_parameter_map['model_name'] = os.getenv("MODEL_NAME")