Ejemplo n.º 1
0
def run(hyper_parameter_map):
    """Run the p3b1 benchmark described by hyper_parameter_map.

    Only the 'keras' framework is supported. Returns the average loss
    from the benchmark's n-fold run.
    """
    framework = hyper_parameter_map['framework']
    if framework != 'keras':
        raise ValueError("Unsupported framework: {}".format(framework))
    import p3b1_baseline_keras2
    pkg = p3b1_baseline_keras2

    # Benchmark defaults first, then overlay the requested hyperparameters.
    params = pkg.initialize_parameters()
    runner_utils.format_params(hyper_parameter_map)
    params.update(hyper_parameter_map)

    runner_utils.write_params(params, hyper_parameter_map)
    avg_loss = pkg.do_n_fold(params)

    if framework == 'keras':
        # works around this error:
        # https://github.com/tensorflow/tensorflow/issues/3388
        try:
            from keras import backend as K
            K.clear_session()
        except AttributeError:  # theano does not have this function
            pass

    return avg_loss
Ejemplo n.º 2
0
def run(hyper_parameter_map):
    """Run one benchmark configured by hyper_parameter_map.

    Returns the final validation loss, or a huge sentinel value
    (~float max) if the benchmark fails, so a minimizing optimizer
    treats failures as very poor results.
    """
    logger = get_logger()
    logger.debug("run()...")

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary; overlay the requested hyperparameters
    # onto the benchmark defaults.
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        params[k] = v

    # BUG FIX: the write/run/clear-session sequence was previously indented
    # inside the loop above, re-running the whole benchmark once per
    # hyperparameter. It must execute exactly once, after the overlay.
    try:
        runner_utils.write_params(params, hyper_parameter_map)
        history = pkg.run(params)
        runner_utils.keras_clear_session(framework)
        # use the last validation_loss as the value to minimize
        val_loss = history.history['val_loss']
        result = val_loss[-1]
    except Exception:  # was a bare `except:`, which also swallowed SystemExit
        logger.exception('benchmark failed')
        print('benchmark failed')
        result = 1.797693e+308

    print("result: ", result)
    return result
Ejemplo n.º 3
0
def run(hyper_parameter_map):
    """Run the model named by hyper_parameter_map and return its final val_loss."""
    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # Start from the benchmark defaults, then overlay the requested
    # hyperparameters (params is a plain python dictionary).
    params = pkg.initialize_parameters()
    params.update(hyper_parameter_map)

    runner_utils.write_params(params, hyper_parameter_map)
    history = pkg.run(params)

    if framework == 'keras':
        # works around this error:
        # https://github.com/tensorflow/tensorflow/issues/3388
        try:
            from keras import backend as K
            K.clear_session()
        except AttributeError:  # theano does not have this function
            pass

    # The last validation loss is the value being minimized.
    return history.history['val_loss'][-1]
Ejemplo n.º 4
0
def run(hyper_parameter_map, callbacks):
    """Run the selected model with the given callbacks; return final val_loss."""
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # Benchmark defaults first, then overlay the requested hyperparameters
    # (params is a plain python dictionary).
    params = pkg.initialize_parameters()
    params.update(hyper_parameter_map)

    runner_utils.write_params(params, hyper_parameter_map)
    history = pkg.run(params, callbacks)

    runner_utils.keras_clear_session(framework)

    # The last validation loss is the value being minimized.
    result = history.history['val_loss'][-1]
    print("result: ", result)
    return result
Ejemplo n.º 5
0
def _normalize_hyper_value(key, value):
    """Normalize one raw hyperparameter value for the benchmark's params dict.

    'dense' / 'dense_feature_layers' accept a space-separated string or list
    and become lists of ints; 'cell_features' is wrapped in a one-element
    list. Everything else passes through unchanged.
    """
    if key in ("dense", "dense_feature_layers"):
        if not isinstance(value, list):
            value = value.split(" ")
        return [int(i) for i in value]
    if key == "cell_features":
        # The benchmark expects a single-element list here.
        return [value]
    return value


def run(hyper_parameter_map, obj_return):
    """Run the selected model and return the requested objective value.

    obj_return must be 'val_loss' or 'val_corr'. NaN results map to a
    large sentinel (999999999); 'val_corr' is negated so a minimizing
    optimizer maximizes correlation. Returns 0 when the model produced
    no history (as in infer.py).
    """
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        params[k] = _normalize_hyper_value(k, v)

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    if history is not None:  # was `!= None`
        # Return the history entry that the user requested.
        last = history.history[obj_return][-1]
        if obj_return == "val_loss":
            # Large sentinel for NaN so the optimizer sees a very bad point.
            result = 999999999 if math.isnan(last) else last
        elif obj_return == "val_corr":
            # Negative sign: we minimize, but want to maximize correlation.
            result = 999999999 if math.isnan(last) else -last
        else:
            # BUG FIX: this message previously interpolated `framework`
            # instead of the offending objective name.
            raise ValueError(
                "Unsupported objective function (use obj_param to specify "
                "val_corr or val_loss): {}".format(obj_return))

        print("result: " + str(result))
    return result
Ejemplo n.º 6
0
def run(hyper_parameter_map, obj_param):
    """Run the p1b1 Keras benchmark and return the requested objective.

    obj_param must be 'val_loss' or 'val_corr'. If either final metric is
    NaN, a large sentinel (999999999) is returned; 'val_corr' is negated
    so a minimizing optimizer maximizes correlation.
    """
    logger = get_logger()

    framework = hyper_parameter_map['framework']
    logger.debug("IMPORT START " + str(time.time()))
    if framework == 'keras':
        import p1b1_baseline_keras2
        pkg = p1b1_baseline_keras2
    else:
        raise ValueError("Unsupported framework: {}".format(framework))
    logger.debug("IMPORT STOP")

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary; overlay requested hyperparameters
    # onto the benchmark defaults.
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    if framework == 'keras':
        # works around this error:
        # https://github.com/tensorflow/tensorflow/issues/3388
        try:
            from keras import backend as K
            K.clear_session()
        except AttributeError:  # theano does not have this function
            pass

    obj_corr = history.history['val_corr']
    obj_loss = history.history['val_loss']
    # Either metric being NaN marks the run as failed -> large sentinel.
    failed = math.isnan(obj_loss[-1]) or math.isnan(obj_corr[-1])
    if obj_param == "val_loss":
        last_val = 999999999 if failed else obj_loss[-1]
    elif obj_param == "val_corr":
        # Negative sign: we minimize, but want to maximize correlation.
        last_val = 999999999 if failed else -obj_corr[-1]
    else:
        # BUG FIX: this message previously interpolated `framework`
        # instead of the offending objective name.
        raise ValueError(
            "Unsupported objective function (use obj_param to specify "
            "val_corr or val_loss): {}".format(obj_param))

    return last_val
Ejemplo n.º 7
0
def run(hyper_parameter_map, obj_return):
    """Run one model instance inside its own working directory, timed.

    hyper_parameter_map must contain 'instance_directory', 'framework',
    'model_name' and 'run_id'; it may contain 'config_file'. obj_return
    names the history entry handed to get_results(). Returns a tuple
    (result, history_result); (0, {}) when the model produced no history.
    Side effects: chdir into the instance directory and write a rank.txt
    file there recording this process's ADLB rank.
    """
    start = time.time()
    global logger
    logger = log_tools.get_logger(logger, 'MODEL RUNNER')

    log("START:")
    sys.stdout.flush()

    # All model output lands in the per-instance directory.
    directory = hyper_parameter_map['instance_directory']
    os.chdir(directory)

    # Record which ADLB/Swift rank ran this instance (for debugging).
    with open(directory + '/rank.txt', 'w') as fp:
        fp.write(str(os.getenv('ADLB_RANK_SELF')) + '\n')

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # When a config file is specified, hand it to the benchmark as its
    # default model description.
    params_arg = {}
    if 'config_file' in hyper_parameter_map:
        config_file = hyper_parameter_map['config_file']
        logger.info('specified config_file: "%s"' % config_file)
        params_arg = {'default_model': config_file}

    # params is a python dictionary
    params = setup_params(pkg, hyper_parameter_map, params_arg)

    # Start performance monitoring (opaque handle consumed by stop_perf).
    Ps = setup_perf(params)

    # Run the model!
    history = pkg.run(params)

    if framework == 'keras':
        runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    history_result = {}
    if history != None:
        result, history_result = get_results(history, obj_return)

    stop_perf(Ps)

    finish = time.time()
    duration = finish - start
    log(" DONE: run_id %s in %0.2f seconds." %
        (hyper_parameter_map["run_id"], duration))
    return (result, history_result)
Ejemplo n.º 8
0
def run(hyper_parameter_map, obj_return):
    """Run the selected model and return the final value of the requested
    history entry (obj_return), or 0 when no history was produced
    (as in infer.py).
    """
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary; overlay requested hyperparameters
    # onto the benchmark defaults, normalizing list-valued keys.
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        if k in ("dense", "dense_feature_layers"):
            # Space-separated strings become lists of layer sizes.
            if not isinstance(v, list):
                v = v.split(" ")
            v = [int(i) for i in v]
        elif k == "cell_features":
            # The benchmark expects a single-element list here.
            v = [v]
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    if history is not None:  # was `!= None`
        # Return the history entry that the user requested.
        result = history.history[obj_return][-1]
    # BUG FIX: was `"result: " + result`, a TypeError for numeric results.
    print("result: " + str(result))
    return result
Ejemplo n.º 9
0
def run(hyper_parameter_map, obj_param):
    """Run the selected model and return the raw history object itself
    (not a single objective value)."""
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # Overlay the requested hyperparameters onto the benchmark defaults,
    # normalizing list-valued keys along the way.
    params = pkg.initialize_parameters()
    for key, value in hyper_parameter_map.items():
        if key in ("dense", "dense_feature_layers"):
            # Space-separated strings become lists of layer sizes.
            if type(value) != list:
                value = value.split(" ")
            value = [int(item) for item in value]
        elif key == "cell_features":
            # The benchmark expects a single-element list here.
            value = [value]
        params[key] = value

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # use the last validation_loss as the value to minimize
    # val_loss = history.history['val_loss']
    # result = val_loss[-1]
    result = history
    print("result: ", result)
    return result
Ejemplo n.º 10
0
def run(hyper_parameter_map):
    """Run the p1b1 benchmark under the requested framework and return
    the final validation loss.

    Supported frameworks: 'keras', 'mxnet', 'neon'.
    """
    framework = hyper_parameter_map['framework']
    # BUG FIX: framework was compared with `is`, which tests object
    # identity and is unreliable for strings (dependent on interning);
    # use `==` for value equality.
    if framework == 'keras':
        import p1b1_baseline_keras2
        pkg = p1b1_baseline_keras2
    elif framework == 'mxnet':
        import p1b1_baseline_mxnet
        pkg = p1b1_baseline_mxnet
    elif framework == 'neon':
        import p1b1_baseline_neon
        pkg = p1b1_baseline_neon
    else:
        raise ValueError("Invalid framework: {}".format(framework))

    # params is python dictionary; overlay requested hyperparameters
    # onto the benchmark defaults.
    params = pkg.initialize_parameters()
    runner_utils.format_params(hyper_parameter_map)

    for k, v in hyper_parameter_map.items():
        params[k] = v

    print(params)
    history = pkg.run(params)

    if framework == 'keras':
        # works around this error:
        # https://github.com/tensorflow/tensorflow/issues/3388
        try:
            from keras import backend as K
            K.clear_session()
        except AttributeError:  # theano does not have this function
            pass

    # use the last validation_loss as the value to minimize
    return history.history['val_loss'][-1]
Ejemplo n.º 11
0
def run(hyper_parameter_map):
    """Run the p3b1 Keras benchmark n-fold and return the average loss.

    Only the 'keras' framework is supported.
    """
    logger = get_logger()
    framework = hyper_parameter_map['framework']
    logger.debug("IMPORT START " + str(time.time()))
    if framework != 'keras':
        raise ValueError("Unsupported framework: {}".format(framework))
    import p3b1_baseline_keras2
    pkg = p3b1_baseline_keras2
    logger.debug("IMPORT STOP")

    # Benchmark defaults first, then overlay the requested hyperparameters
    # (params is a plain python dictionary).
    params = pkg.initialize_parameters()
    runner_utils.format_params(hyper_parameter_map)
    params.update(hyper_parameter_map)

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")
    logger.debug("DO_N_FOLD START " + str(time.time()))
    avg_loss = pkg.do_n_fold(params, verbose=0)
    logger.debug("DO_N_FOLD STOP")

    if framework == 'keras':
        # works around this error:
        # https://github.com/tensorflow/tensorflow/issues/3388
        try:
            from keras import backend as K
            K.clear_session()
        except AttributeError:  # theano does not have this function
            pass

    return avg_loss