Exemplo n.º 1
0
def run(hyper_parameter_map):
    """Run one benchmark with the given hyperparameters.

    Returns the last epoch's validation loss, or a max-double sentinel
    when the benchmark fails, so an optimizer treats failures as worst.
    """
    logger = get_logger()
    logger.debug("run()...")

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary; overlay the supplied hyperparameters
    # on the benchmark's defaults.
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        params[k] = v

    # BUG FIX: the try block was inside the loop above, so write_params()
    # and pkg.run() executed once per hyperparameter. Run the benchmark
    # exactly once, after all overrides are applied. Also narrow the bare
    # `except:` (which would even swallow KeyboardInterrupt).
    try:
        runner_utils.write_params(params, hyper_parameter_map)
        history = pkg.run(params)
        runner_utils.keras_clear_session(framework)
        # use the last validation_loss as the value to minimize
        val_loss = history.history['val_loss']
        result = val_loss[-1]
    except Exception:
        # Best-effort: a failed benchmark reports the largest double so the
        # search treats this configuration as maximally bad.
        print('benchmark failed')
        result = 1.797693e+308

    print("result: ", result)
    return result
Exemplo n.º 2
0
def run(hyper_parameter_map):
    """Run the p3b1 Keras benchmark and return its average n-fold loss."""
    framework = hyper_parameter_map['framework']
    # Guard clause: only the keras backend is supported here.
    if framework != 'keras':
        raise ValueError("Unsupported framework: {}".format(framework))
    import p3b1_baseline_keras2 as pkg

    # Start from the benchmark defaults, then overlay the overrides.
    params = pkg.initialize_parameters()
    runner_utils.format_params(hyper_parameter_map)
    for key, value in hyper_parameter_map.items():
        params[key] = value

    runner_utils.write_params(params, hyper_parameter_map)
    avg_loss = pkg.do_n_fold(params)

    # Works around https://github.com/tensorflow/tensorflow/issues/3388
    # (framework is guaranteed to be 'keras' past the guard above).
    try:
        from keras import backend as K
        K.clear_session()
    except AttributeError:
        pass  # theano does not have this function

    return avg_loss
Exemplo n.º 3
0
def run(hyper_parameter_map):
    """Run one benchmark model and return its final validation loss."""
    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # Overlay the supplied hyperparameters on the benchmark defaults.
    params = pkg.initialize_parameters()
    for key, value in hyper_parameter_map.items():
        params[key] = value

    runner_utils.write_params(params, hyper_parameter_map)
    history = pkg.run(params)

    if framework == 'keras':
        # Works around https://github.com/tensorflow/tensorflow/issues/3388
        try:
            from keras import backend as K
            K.clear_session()
        except AttributeError:
            pass  # theano does not have this function

    # The last epoch's validation loss is the value to minimize.
    return history.history['val_loss'][-1]
Exemplo n.º 4
0
def run(hyper_parameter_map, callbacks):
    """Run the model with the given callbacks; return the final val_loss."""
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # Overlay the supplied hyperparameters on the benchmark defaults.
    params = pkg.initialize_parameters()
    for key, value in hyper_parameter_map.items():
        params[key] = value

    runner_utils.write_params(params, hyper_parameter_map)
    history = pkg.run(params, callbacks)

    runner_utils.keras_clear_session(framework)

    # Minimize the last epoch's validation loss.
    result = history.history['val_loss'][-1]
    print("result: ", result)
    return result
Exemplo n.º 5
0
def run(hyper_parameter_map, obj_return):
    """Run the benchmark and return the requested objective value.

    obj_return selects 'val_loss' (returned as-is) or 'val_corr' (negated so
    that a larger correlation yields a smaller objective). NaN maps to the
    sentinel 999999999. Returns 0 when pkg.run() produces no history
    (as in infer.py).
    """
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary; overlay hyperparameters on the defaults,
    # coercing the keys that need list values.
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        if k in ("dense", "dense_feature_layers"):
            # Accept either a ready list or a space-separated string of ints.
            # (These two keys had duplicated parsing branches before.)
            if not isinstance(v, list):
                v = v.split(" ")
            v = [int(i) for i in v]
        elif k == "cell_features":
            # Wrap the single feature string in a one-element list.
            v = [v]
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    if history is not None:
        # Return the history entry that the user requested.
        last = history.history[obj_return][-1]
        # Return a large number for nan and flip sign for val_corr.
        if obj_return == "val_loss":
            result = 999999999 if math.isnan(last) else last
        elif obj_return == "val_corr":
            result = 999999999 if math.isnan(last) else -last  # note negative sign
        else:
            # BUG FIX: the original interpolated `framework` here, hiding the
            # actual unsupported objective name.
            raise ValueError(
                "Unsupported objective function (use obj_param to specify "
                "val_corr or val_loss): {}".format(obj_return))

        print("result: " + str(result))
    return result
Exemplo n.º 6
0
def run(hyper_parameter_map, obj_param):
    """Run the p1b1 Keras benchmark and return the requested objective.

    obj_param selects 'val_loss' (returned as-is) or 'val_corr' (negated so
    that a larger correlation yields a smaller objective). If either metric's
    last value is NaN, the sentinel 999999999 is returned instead.
    """
    logger = get_logger()

    framework = hyper_parameter_map['framework']
    logger.debug("IMPORT START " + str(time.time()))
    if framework == 'keras':
        import p1b1_baseline_keras2
        pkg = p1b1_baseline_keras2
    else:
        raise ValueError("Unsupported framework: {}".format(framework))
    logger.debug("IMPORT STOP")

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary; overlay the supplied hyperparameters.
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    if framework == 'keras':
        # works around this error:
        # https://github.com/tensorflow/tensorflow/issues/3388
        try:
            from keras import backend as K
            K.clear_session()
        except AttributeError:  # theano does not have this function
            pass

    # Use the last entry of the requested history metric as the objective.
    obj_corr = history.history['val_corr']
    obj_loss = history.history['val_loss']
    # Either metric being NaN invalidates the run.
    is_nan = math.isnan(obj_loss[-1]) or math.isnan(obj_corr[-1])
    if obj_param == "val_loss":
        last_val = 999999999 if is_nan else obj_loss[-1]
    elif obj_param == "val_corr":
        last_val = 999999999 if is_nan else -obj_corr[-1]  # note negative sign
    else:
        # BUG FIX: the original interpolated `framework` here, so the error
        # message never showed the actual unsupported objective name.
        raise ValueError(
            "Unsupported objective function (use obj_param to specify "
            "val_corr or val_loss): {}".format(obj_param))

    return last_val
Exemplo n.º 7
0
def run(hyper_parameter_map, obj_return):
    """Run the benchmark and return history.history[obj_return][-1].

    Returns 0 when pkg.run() yields no history (e.g. inference-only runs).
    """
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary; overlay hyperparameters on the defaults,
    # coercing the keys that need list values.
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        if k in ("dense", "dense_feature_layers"):
            # Accept either a ready list or a space-separated string of ints.
            if not isinstance(v, list):
                v = v.split(" ")
            v = [int(i) for i in v]
        elif k == "cell_features":
            # Wrap the single feature string in a one-element list.
            v = [v]
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    if history is not None:
        # Return the history entry that the user requested.
        result = history.history[obj_return][-1]
    # BUG FIX: the original concatenated a str with a number
    # ("result: " + result), which raises TypeError; convert explicitly.
    print("result: " + str(result))
    return result
Exemplo n.º 8
0
def run(hyper_parameter_map, obj_param):
    """Run the benchmark and return the raw Keras history object."""
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # Overlay hyperparameters on the benchmark's default parameters,
    # coercing the keys that must carry list values.
    params = pkg.initialize_parameters()
    for key, value in hyper_parameter_map.items():
        if key in ("dense", "dense_feature_layers"):
            # Either an actual list or a space-separated string of ints.
            if type(value) is not list:
                value = value.split(" ")
            value = [int(item) for item in value]
        elif key == "cell_features":
            # A single feature name is wrapped in a one-element list.
            value = [value]
        params[key] = value

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)

    runner_utils.keras_clear_session(framework)

    # This variant deliberately returns the whole history object rather
    # than extracting the last validation loss.
    result = history
    print("result: ", result)
    return result
Exemplo n.º 9
0
def setup_params(pkg, hyper_parameter_map, params_arg):
    """Initialize benchmark params, apply overrides, write them out, return them."""
    params = pkg.initialize_parameters(**params_arg)
    log("PARAM UPDATE START")
    for key, value in hyper_parameter_map.items():
        if key in ("dense", "dense_feature_layers"):
            # Either a ready list or a space-separated string of ints.
            if type(value) is not list:
                value = value.split(" ")
            value = [int(item) for item in value]
        if key == "cell_features":
            # Wrap the single feature string in a one-element list.
            value = [value]
        log(str(key) + " = " + str(value))
        params[key] = value
    log("PARAM UPDATE STOP")

    log("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    log("WRITE_PARAMS STOP")
    return params
Exemplo n.º 10
0
def run(hyper_parameter_map):
    """Run the p3b1 Keras benchmark and return its average n-fold loss."""
    logger = get_logger()
    framework = hyper_parameter_map['framework']
    logger.debug("IMPORT START " + str(time.time()))
    # Guard clause: only the keras backend is supported here.
    if framework != 'keras':
        raise ValueError("Unsupported framework: {}".format(framework))
    import p3b1_baseline_keras2 as pkg
    logger.debug("IMPORT STOP")

    # Overlay the supplied hyperparameters on the benchmark defaults.
    params = pkg.initialize_parameters()
    runner_utils.format_params(hyper_parameter_map)
    for key, value in hyper_parameter_map.items():
        params[key] = value

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")
    logger.debug("DO_N_FOLD START " + str(time.time()))
    avg_loss = pkg.do_n_fold(params, verbose=0)
    logger.debug("DO_N_FOLD STOP")

    # Works around https://github.com/tensorflow/tensorflow/issues/3388
    # (framework is guaranteed to be 'keras' past the guard above).
    try:
        from keras import backend as K
        K.clear_session()
    except AttributeError:
        pass  # theano does not have this function

    return avg_loss