def dummy(input, i):
    """Smoke-test task for the MPI worker pool.

    Increments the module-level ``counter``, logs the call arguments and the
    executing rank, and returns the new counter value twice.

    :param input: opaque payload; only logged
    :param i: task index; only logged
    :return: two copies of the incremented counter
    :rtype: list(int)
    """
    global counter
    # per-call logger tagged with this process's MPI rank
    log = generate_logger_MPI(LOGFILE, LOGLEVEL, rank)
    counter += 1
    log.info(f"input={input},i= {i}, rank={MPI.COMM_WORLD.Get_rank()}")
    log.info(f"counter value = {counter}")
    return [counter, counter]
def evaluate_model_MPI(*args, **kwargs):
    """MPI-aware wrapper around the model evaluation.

    Builds a logger tagged with this process's MPI rank, injects it as the
    ``logger`` keyword argument, and delegates everything else unchanged.

    :return: evaluate_boosting(*args, **kwargs)
    :rtype: list(float)
    """
    kwargs["logger"] = generate_logger_MPI(LOGFILE, LOGLEVEL, rank)
    return evaluate_boosting(*args, **kwargs)
def train_and_evaluate_wrapper(model, train_indices, alpha, m, i):
    """Wrapper around train_and_evaluate for execution on an MPI worker.

    Creates an MPI-rank-aware logger, selects the rows of the shared
    ``DataContainer`` training data given by ``train_indices``, and runs one
    train/evaluate pass.  (The previous docstring was copy-pasted from
    evaluate_model_MPI and described behavior this function does not have.)

    :param model: estimator passed through to train_and_evaluate
    :param train_indices: indices selecting this task's rows from
        DataContainer.X_train / DataContainer.y_train
        (presumably a bootstrap/subsample draw — confirm against caller)
    :param alpha: subsample parameter; only logged here
    :param m: ensemble-size parameter M; only logged here
    :param i: task index; only logged here
    :return: train_and_evaluate(model, X_train, y_train, DataContainer.X_test_list)
    """
    logger = generate_logger_MPI(LOGFILE, LOGLEVEL, rank)
    logger.info(f"executing{i}-th train_evaluate for M,alpha= {m},{alpha} ")
    # slice this task's training subset out of the shared container
    X_train = DataContainer.X_train[train_indices]
    y_train = DataContainer.y_train[train_indices]
    return train_and_evaluate(model, X_train, y_train, DataContainer.X_test_list)
adds logger to the kwargs and calls the evaluate model function :return: evaluate_model(*args,**kwargs) :rtype: list(float) """ logger = generate_logger_MPI(LOGFILE, LOGLEVEL, rank) kwargs["logger"] = logger return evaluate_boosting(*args, **kwargs) comm = MPI.COMM_WORLD rank = comm.Get_rank() ## create logger logger = generate_logger_MPI(LOGFILE, LOGLEVEL, rank) logger.info(f"node with rank {rank} started") if rank == 0: """ executed by main MPI process mpiexec -n <num_nodes> python -m mpi4py.futures mpi\mpi_bagging.py will create 1 dispatcher node with rank 0 and num_node-1 workers for the pool """ ## generate Training set, Test set & V_0s X_train, y_train = generate_train_set(Config.N_train, Config.Delta, Config.d) X_test, y_test = generate_test_set(Config.N_test, Config.Delta, Config.d)