def submit_jobs(self, parameters, model_params, dimensions, save_bool, generate_bool, local):
    """Two-step schedule: pretrain once on corpus B, then train every A fold.

    The step-B worker is launched first and this method blocks (polling for a
    DONE sentinel file) until it finishes; each A-fold job is then submitted
    with `parameters['pretrained_model']` pointing at the step-B checkpoint.
    """
    # --- Step B: single shared pretraining run ---
    pretrain_dir = f"{self.config_folder}/stepB"
    if os.path.isdir(pretrain_dir):
        shutil.rmtree(pretrain_dir)
    os.mkdir(pretrain_dir)
    # Submit worker (save model, no generation)
    submit_job(pretrain_dir, parameters, model_params, dimensions,
               self.K_folds_B, self.train_names_B, self.valid_names_B,
               self.test_names_B, True, False, local, self.logger)
    # Poll until the pretraining worker drops its DONE sentinel.
    while not os.path.isfile(pretrain_dir + '/DONE'):
        time.sleep(60)
    ############################
    # In case of multi-thread, create a lock
    ############################
    parameters['pretrained_model'] = os.path.join(pretrain_dir, 'model_accuracy')
    # --- Step A: one training job per fold, warm-started from step B ---
    for fold_index, fold in enumerate(self.K_folds_A):
        fold_dir = f"{self.config_folder}/{fold_index}"
        if os.path.isdir(fold_dir):
            shutil.rmtree(fold_dir)
        os.mkdir(fold_dir)
        # Submit worker
        submit_job(fold_dir, parameters, model_params, dimensions, fold,
                   self.train_names_A[fold_index],
                   self.valid_names_A[fold_index],
                   self.test_names_A[fold_index],
                   save_bool, generate_bool, local, self.logger)
    return
def submit_jobs(self, parameters, model_params, dimensions, save_bool, generate_bool, local):
    """Submit one training worker per K-fold.

    For each fold, any stale per-fold config folder is wiped, recreated,
    and a `submit_job` worker is dispatched with that fold's
    train/valid/test name lists.
    """
    for fold_index, fold in enumerate(self.K_folds):
        # Fresh per-fold folder (remove leftovers from a previous run).
        fold_dir = f"{self.config_folder}/{fold_index}"
        if os.path.isdir(fold_dir):
            shutil.rmtree(fold_dir)
        os.mkdir(fold_dir)
        # Submit worker
        submit_job(fold_dir, parameters, model_params, dimensions, fold,
                   self.train_names[fold_index],
                   self.valid_names[fold_index],
                   self.test_names[fold_index],
                   save_bool, generate_bool, local, self.logger)
    return
def submit_jobs(self, parameters, model_params, dimensions, save_bool, generate_bool, local):
    """Per-fold two-step schedule: pretrain on corpus B (step 0), then train on A (step 1).

    Each fold gets its own pretraining run (folder "<i>_0") whose saved
    checkpoint seeds that fold's final training run (folder "<i>").
    """
    for fold_index, (fold_0, fold_1) in enumerate(zip(self.K_folds_0, self.K_folds_1)):
        # Start each fold from scratch — no checkpoint carried over.
        parameters['pretrained_model'] = None
        ############################
        # Step 0 : pretraining on B
        pretrain_dir = f"{self.config_folder}/{fold_index}_0"
        if os.path.isdir(pretrain_dir):
            shutil.rmtree(pretrain_dir)
        os.mkdir(pretrain_dir)
        # Submit worker (save model, no generation)
        submit_job(pretrain_dir, parameters, model_params, dimensions, fold_0,
                   self.train_names_0[fold_index],
                   self.valid_names_0[fold_index],
                   self.test_names_0[fold_index],
                   True, False, local, self.logger)
        ############################
        parameters['pretrained_model'] = os.path.join(pretrain_dir, 'model_accuracy')
        ############################
        # Step 1 : trainig on A. Test on B
        fold_dir = f"{self.config_folder}/{fold_index}"
        if os.path.isdir(fold_dir):
            shutil.rmtree(fold_dir)
        os.mkdir(fold_dir)
        # Submit worker
        submit_job(fold_dir, parameters, model_params, dimensions, fold_1,
                   self.train_names_1[fold_index],
                   self.valid_names_1[fold_index],
                   self.test_names_1[fold_index],
                   save_bool, generate_bool, local, self.logger)
        ############################
    return
def submit_jobs(self, parameters, model_params, dimensions, save_bool, generate_bool, local):
    """Three-step schedule: one shared pretraining on corpus 0, then per fold
    a pretraining on corpus 1 followed by the final training on corpus 2.

    Each stage writes a 'model_accuracy' checkpoint that seeds the next stage
    via parameters['pretrained_model'].
    """
    parameters['pretrained_model'] = None
    # Step 0: single pretraining run shared by every fold.
    config_folder_0 = self.config_folder + "/step0"
    if os.path.isdir(config_folder_0):
        shutil.rmtree(config_folder_0)
    os.mkdir(config_folder_0)
    # Submit worker (save model, no generation)
    submit_job(config_folder_0, parameters, model_params, dimensions,
               self.K_folds_0, self.train_names_0, self.valid_names_0,
               self.test_names_0, True, False, local, self.logger)
    for K_fold_ind, (K_fold_1, K_fold_2) in enumerate(
            zip(self.K_folds_1, self.K_folds_2)):
        # Each fold warm-starts from the shared step-0 checkpoint.
        parameters['pretrained_model'] = os.path.join(
            config_folder_0, 'model_accuracy')
        # Step 1: per-fold pretraining on corpus 1 (folder "<i>_1").
        config_folder_1 = self.config_folder + "/" + str(K_fold_ind) + "_1"
        if os.path.isdir(config_folder_1):
            shutil.rmtree(config_folder_1)
        os.mkdir(config_folder_1)
        # BUG FIX: previously passed the whole self.K_folds_1 /
        # self.train_names_1 collections, leaving the unpacked K_fold_1
        # unused — pass the current fold's data, matching the sibling
        # per-fold implementation.
        submit_job(config_folder_1, parameters, model_params, dimensions,
                   K_fold_1, self.train_names_1[K_fold_ind],
                   self.valid_names_1[K_fold_ind],
                   self.test_names_1[K_fold_ind],
                   True, False, local, self.logger)
        parameters['pretrained_model'] = os.path.join(
            config_folder_1, 'model_accuracy')
        # Step 2: final training on corpus 2 for this fold (folder "<i>").
        config_folder_2 = self.config_folder + "/" + str(K_fold_ind)
        if os.path.isdir(config_folder_2):
            shutil.rmtree(config_folder_2)
        os.mkdir(config_folder_2)
        # BUG FIX: original referenced undefined `config_folder_fold`
        # (NameError at runtime) — the freshly created folder is
        # config_folder_2.
        submit_job(config_folder_2, parameters, model_params, dimensions,
                   K_fold_2, self.train_names_2[K_fold_ind],
                   self.valid_names_2[K_fold_ind],
                   self.test_names_2[K_fold_ind],
                   save_bool, generate_bool, local, self.logger)
    return
def submit_jobs(self, parameters, model_params, dimensions, track_paths_generation, save_bool, generate_bool, local):
    """Chained three-stage schedule: pretrain on C, then on B, then train every A fold.

    Each stage's 'model_accuracy' checkpoint becomes the next stage's
    `parameters['pretrained_model']`.
    """
    # Pretraning C
    step_c_dir = f"{self.config_folder}/stepC"
    if os.path.isdir(step_c_dir):
        shutil.rmtree(step_c_dir)
    os.mkdir(step_c_dir)
    # Submit worker (save model, no generation)
    submit_job(step_c_dir, parameters, model_params, dimensions,
               self.K_folds_C, self.train_names_C, self.valid_names_C,
               self.test_names_C, track_paths_generation,
               True, False, local, self.logger)
    # Define new pre-trained model
    parameters['pretrained_model'] = os.path.join(step_c_dir, 'model_accuracy')
    # Pretraning B
    step_b_dir = f"{self.config_folder}/stepB"
    if os.path.isdir(step_b_dir):
        shutil.rmtree(step_b_dir)
    os.mkdir(step_b_dir)
    # Submit worker (save model, no generation)
    submit_job(step_b_dir, parameters, model_params, dimensions,
               self.K_folds_B, self.train_names_B, self.valid_names_B,
               self.test_names_B, track_paths_generation,
               True, False, local, self.logger)
    # Define new pre-trained model
    parameters['pretrained_model'] = os.path.join(step_b_dir, 'model_accuracy')
    # Final stage: one training job per A fold.
    for fold_index, fold in enumerate(self.K_folds_A):
        # Fresh per-fold folder
        fold_dir = f"{self.config_folder}/{fold_index}"
        if os.path.isdir(fold_dir):
            shutil.rmtree(fold_dir)
        os.mkdir(fold_dir)
        # Submit worker
        submit_job(fold_dir, parameters, model_params, dimensions, fold,
                   self.train_names_A[fold_index],
                   self.valid_names_A[fold_index],
                   self.test_names_A[fold_index],
                   track_paths_generation, save_bool, generate_bool,
                   local, self.logger)
    return