def run(hyper_parameter_map):
    """Run the model named in *hyper_parameter_map* and return its final val_loss.

    The map must contain 'framework' and 'model_name'; every entry is
    overlaid onto the package's default parameters.  On any failure a
    near-DBL_MAX sentinel is returned so a minimizing optimizer treats
    the run as a very bad point instead of aborting the sweep.
    """
    logger = get_logger()
    logger.debug("run()...")
    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        # if not k in params:
        #     raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
        params[k] = v

    try:
        runner_utils.write_params(params, hyper_parameter_map)
        history = pkg.run(params)
        runner_utils.keras_clear_session(framework)
        # use the last validation_loss as the value to minimize
        val_loss = history.history['val_loss']
        result = val_loss[-1]
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; narrowed to Exception and the traceback is logged so
        # failures are diagnosable.  The sentinel result is unchanged.
        logger.exception('benchmark failed')
        print('benchmark failed')
        result = 1.797693e+308

    print("result: ", result)
    return result
def run(hyper_parameter_map, callbacks):
    """Train the configured model with caller-supplied callbacks.

    Looks up the benchmark package from 'framework'/'model_name' in
    *hyper_parameter_map*, overlays the map onto the package defaults,
    runs it, and returns the last epoch's validation loss.
    """
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary; sweep values override the defaults.
    params = pkg.initialize_parameters()
    for key, value in hyper_parameter_map.items():
        # if not k in params:
        #     raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
        params[key] = value

    runner_utils.write_params(params, hyper_parameter_map)
    history = pkg.run(params, callbacks)
    runner_utils.keras_clear_session(framework)

    # use the last validation_loss as the value to minimize
    result = history.history['val_loss'][-1]
    print("result: ", result)
    return result
def _parse_int_list(v):
    # "dense"-style hyperparameters may arrive as a space-separated string
    # such as "1000 500"; normalize to a list of ints.  Already-list values
    # are passed through with their elements coerced to int.
    if type(v) != list:
        v = v.split(" ")
    return [int(i) for i in v]


def run(hyper_parameter_map, obj_return):
    """Run the configured model and return the objective value to minimize.

    obj_return selects the history entry: 'val_loss' is returned as-is,
    'val_corr' is negated (so maximizing correlation becomes minimizing).
    NaN objectives map to a large penalty value.  Returns 0 when the model
    produces no history (as in infer.py).
    """
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        # if not k in params:
        #     raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
        if k == "dense" or k == "dense_feature_layers":
            v = _parse_int_list(v)
        if k == "cell_features":
            # downstream code expects a list containing the raw string
            v = [v]
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)
    runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    if history != None:
        # Return the history entry that the user requested.
        val_loss = history.history[obj_return]
        # Return a large number for nan and flip sign for val_corr
        if obj_return == "val_loss":
            if math.isnan(val_loss[-1]):
                result = 999999999
            else:
                result = val_loss[-1]
        elif obj_return == "val_corr":
            if math.isnan(val_loss[-1]):
                result = 999999999
            else:
                result = -val_loss[-1]  # Note negative sign
        else:
            # BUG FIX: the message previously interpolated `framework`,
            # which hid the actual offending value; report obj_return.
            raise ValueError("Unsupported objective function (use obj_param to specify val_corr or val_loss): {}".format(obj_return))

    print("result: " + str(result))
    return result
def run(hyper_parameter_map, obj_return):
    """Run one model instance inside its own working directory.

    Side effects: chdirs into hyper_parameter_map['instance_directory'],
    records the ADLB rank in rank.txt there, and starts/stops a perf
    monitor around the model run.  Returns (result, history_result) where
    result defaults to 0 when the model yields no history (as in infer.py).
    """
    start = time.time()  # wall-clock start for the DONE log line
    global logger
    logger = log_tools.get_logger(logger, 'MODEL RUNNER')
    log("START:")
    sys.stdout.flush()
    # Each run executes inside its own instance directory.
    directory = hyper_parameter_map['instance_directory']
    os.chdir(directory)
    # Record which ADLB/Swift rank ran this instance for later debugging.
    with open(directory + '/rank.txt', 'w') as fp:
        fp.write(str(os.getenv('ADLB_RANK_SELF')) + '\n')
    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)
    runner_utils.format_params(hyper_parameter_map)
    params_arg = {}
    if 'config_file' in hyper_parameter_map:
        # A user-specified config file overrides the package's default model.
        config_file = hyper_parameter_map['config_file']
        logger.info('specified config_file: "%s"' % config_file)
        params_arg = {'default_model': config_file}
    # params is a python dictionary
    params = setup_params(pkg, hyper_parameter_map, params_arg)
    # Ps: perf-monitor handle; stopped after results are collected.
    Ps = setup_perf(params)
    # Run the model!
    history = pkg.run(params)
    if framework == 'keras':
        runner_utils.keras_clear_session(framework)
    # Default result if there is no val_loss (as in infer.py)
    result = 0
    history_result = {}
    if history != None:
        result, history_result = get_results(history, obj_return)
    stop_perf(Ps)
    finish = time.time()
    duration = finish - start
    log(" DONE: run_id %s in %0.2f seconds." %
        (hyper_parameter_map["run_id"], duration))
    return (result, history_result)
def run(hyper_parameter_map, obj_return):
    """Run the configured model and return the last value of the requested
    history entry (obj_return), or 0 when the model produces no history
    (as in infer.py)."""
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary
    params = pkg.initialize_parameters()
    for k, v in hyper_parameter_map.items():
        # if not k in params:
        #     raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
        if k == "dense":
            if type(v) != list:
                # space-separated string such as "1000 500" -> [1000, 500]
                v = v.split(" ")
                v = [int(i) for i in v]
        if k == "dense_feature_layers":
            if type(v) != list:
                v = v.split(" ")
                v = [int(i) for i in v]
        if k == "cell_features":
            # downstream code expects a list containing the raw string
            cp_str = v
            v = list()
            v.append(cp_str)
        params[k] = v

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)
    runner_utils.keras_clear_session(framework)

    # Default result if there is no val_loss (as in infer.py)
    result = 0
    if history != None:
        # Return the history entry that the user requested.
        val_loss = history.history[obj_return]
        result = val_loss[-1]
    # BUG FIX: result is numeric (0 or a float loss); the original
    # `"result: " + result` raised TypeError on every successful run.
    print("result: " + str(result))
    return result
def run(hyper_parameter_map, obj_param):
    """Run the configured model and return its raw history object.

    Unlike the scalar-returning variants, this one hands the full Keras
    History back to the caller; obj_param is accepted for interface
    compatibility but not consulted here.
    """
    global logger
    logger = log_tools.get_logger(logger, __name__)

    framework = hyper_parameter_map['framework']
    model_name = hyper_parameter_map['model_name']
    pkg = import_pkg(framework, model_name)

    runner_utils.format_params(hyper_parameter_map)

    # params is python dictionary; sweep values override the defaults.
    params = pkg.initialize_parameters()
    for key, value in hyper_parameter_map.items():
        # if not k in params:
        #     raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
        if key in ("dense", "dense_feature_layers"):
            # space-separated string such as "1000 500" -> [1000, 500]
            if type(value) != list:
                value = [int(tok) for tok in value.split(" ")]
        if key == "cell_features":
            # downstream code expects a list containing the raw string
            value = [value]
        params[key] = value

    logger.debug("WRITE_PARAMS START")
    runner_utils.write_params(params, hyper_parameter_map)
    logger.debug("WRITE_PARAMS STOP")

    history = pkg.run(params)
    runner_utils.keras_clear_session(framework)

    # use the last validation_loss as the value to minimize
    # val_loss = history.history['val_loss']
    # result = val_loss[-1]
    result = history
    print("result: ", result)
    return result