def main():
    model_settings = [Baseline2, Baseline1]
    BNs = [True, False]
    dropouts = [True, False]
    augments = [True, False]

    idx = 0
    for BN in BNs[::-1]:
        for aug in augments:
            for dropout in dropouts:
                for config in model_settings:
                    print("BN", BN)
                    print("Augmentation", aug)
                    print("dropout", dropout)
                    print("config", config)

                    settings = config()
                    settings.BATCH_NORM = BN
                    settings.DROPOUT = dropout
                    settings.AUGMENTATION = aug

                    expdir = "experiments/train/baseline/" + str(idx) + "/"

                    trainer = TrainAE(settings, expdir, calc_DA_MAE)
                    expdir = trainer.expdir  # get full path

                    model = trainer.train(EPOCHS, test_every=test_every,
                                          num_epochs_cv=num_epochs_cv,
                                          print_every=print_every,
                                          small_debug=SMALL_DEBUG_DOM)
                    idx += 1
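# Illustrative alternative (not part of the original script): the same grid of
# baseline runs can be written with itertools.product instead of four nested
# loops. This is only a sketch; Baseline1/Baseline2, TrainAE, calc_DA_MAE and
# the module-level training constants (EPOCHS, test_every, ...) are assumed to
# be defined exactly as in the function above.
from itertools import product

def main_grid():
    grid = product([True, False][::-1],       # BATCH_NORM
                   [True, False],             # AUGMENTATION
                   [True, False],             # DROPOUT
                   [Baseline2, Baseline1])    # settings class
    for idx, (BN, aug, dropout, config) in enumerate(grid):
        settings = config()
        settings.BATCH_NORM = BN
        settings.DROPOUT = dropout
        settings.AUGMENTATION = aug

        expdir = "experiments/train/baseline/" + str(idx) + "/"
        trainer = TrainAE(settings, expdir, calc_DA_MAE)
        trainer.train(EPOCHS, test_every=test_every,
                      num_epochs_cv=num_epochs_cv,
                      print_every=print_every,
                      small_debug=SMALL_DEBUG_DOM)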
def main():
    res_layers = [3, 9, 27]
    cardinalities = [1, 8, 32]

    idx = 0
    for layer in res_layers:
        for cardinality in cardinalities:
            print("Layers", layer)
            print("Cardinality", cardinality)

            kwargs = {"layers": layer, "cardinality": cardinality}

            settings = ResNeXt(**kwargs)
            settings.AUGMENTATION = True
            settings.DEBUG = False

            expdir = exp_base + str(idx) + "/"

            trainer = TrainAE(settings, expdir, calc_DA_MAE)
            expdir = trainer.expdir  # get full path

            model = trainer.train(EPOCHS, test_every=test_every,
                                  num_epochs_cv=num_epochs_cv,
                                  learning_rate=LR,
                                  print_every=print_every,
                                  small_debug=SMALL_DEBUG_DOM)
            idx += 1
def main():
    layer = 3
    cardinality = 1
    print("Layers", layer)
    print("Cardinality", cardinality)

    # settings (and the starting model weights) are restored from a previous
    # experiment directory rather than built from scratch
    _, settings = ML_utils.load_model_and_settings_from_dir(exp_load)
    settings.AE_MODEL_FP = model_fp
    settings.GPU_DEVICE = GPU_DEVICE
    settings.export_env_vars()

    idx = 0
    expdir = exp_base + str(idx) + "/"

    trainer = TrainAE(settings, expdir, calc_DA_MAE)
    expdir = trainer.expdir  # get full path

    model = trainer.train(EPOCHS, test_every=test_every,
                          num_epochs_cv=num_epochs_cv,
                          learning_rate=LR,
                          print_every=print_every,
                          small_debug=SMALL_DEBUG_DOM)
def test_AE_train_3d(self, tmpdir):
    """Test no exception thrown"""
    epochs = 1
    settings = self.__settings(tmpdir)
    expdir = tmpdir.mkdir("experiments/")

    trainer = TrainAE(settings, str(expdir))
    model = trainer.train(epochs, num_workers=0)
def main():
    layer = 6
    cardinality = 4
    print("Layers", layer)
    print("Cardinality", cardinality)

    kwargs = {"layers": layer, "cardinality": cardinality}

    settings = ResNeXt(**kwargs)
    settings.AUGMENTATION = True
    settings.DEBUG = False
    settings.GPU_DEVICE = GPU_DEVICE
    settings.SEED = 19
    settings.export_env_vars()

    expdir = exp_base

    trainer = TrainAE(settings, expdir, calc_DA_MAE)
    expdir = trainer.expdir  # get full path

    model = trainer.train(EPOCHS, test_every=test_every,
                          num_epochs_cv=num_epochs_cv,
                          learning_rate=LR,
                          print_every=print_every,
                          small_debug=SMALL_DEBUG_DOM)
def main(): print("updated") model_settings = [BaselineBlock, Baseline2, Baseline1,] augments = [True, False] idx = 0 for aug in augments: for config in model_settings: print("Augmentation", aug) print("model", config) settings = config() settings.AUGMENTATION = aug settings.DEBUG = False expdir = exp_base + str(idx) + "/" trainer = TrainAE(settings, expdir, calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(EPOCHS, test_every=test_every, num_epochs_cv=num_epochs_cv, learning_rate = LR, print_every=print_every, small_debug=SMALL_DEBUG_DOM) idx += 1
def check_train_load_DA(config, config_kwargs, small_debug=True, all_data=False,
                        activation=None, params={"var": VAR, "tol": TOL}):
    expdir = EXPDIR
    try:
        if not config_kwargs:
            config_kwargs = {}
        assert isinstance(config_kwargs, dict)

        settings = config(**config_kwargs)
        settings.DEBUG = False
        if activation:
            settings.ACTIVATION = activation

        calc_DA_MAE = RUN_DA_IN_TRAINING
        num_epochs_cv = 0
        print_every = 1
        test_every = 1
        lr = 0.0002

        print(settings.__class__.__name__)
        if config_kwargs:
            print(list(config_kwargs.items()))

        trainer = TrainAE(settings, expdir, calc_DA_MAE)
        expdir = trainer.expdir  # get full path
        model = trainer.train(EPOCHS, learning_rate=lr, test_every=test_every,
                              num_epochs_cv=num_epochs_cv,
                              print_every=print_every, small_debug=small_debug)
        if PRINT_MODEL:
            print(model.layers_encode)

        # test loading the trained model back from the experiment directory
        model, settings = ML_utils.load_model_and_settings_from_dir(expdir)
        model.to(ML_utils.get_device())  # TODO

        x_fp = settings.get_X_fp(True)  # force init X_FP

        res_AE = run_DA_batch(settings, model, all_data, expdir, params)
        print(res_AE.head(10))

        shutil.rmtree(expdir, ignore_errors=False, onerror=None)
    except Exception:
        # clean up the experiment directory before re-raising the original error
        shutil.rmtree(expdir, ignore_errors=True)
        raise
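# Example invocation of check_train_load_DA (illustrative only): the config
# class and kwargs below are one of the combinations used elsewhere in these
# scripts, and EXPDIR, VAR, TOL, EPOCHS etc. are assumed to be set at module
# level as in the helper above.
if __name__ == "__main__":
    check_train_load_DA(ResStack3,
                        {"layers": 3, "cardinality": 1,
                         "block_type": "NeXt", "module_type": "ResNeXt3"},
                        small_debug=True,
                        activation="prelu")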
def test_AE_train_3D_DA(self, tmpdir):
    """Test no exception thrown"""
    epochs = 1
    settings = self.__settings(tmpdir, force_init=True)
    expdir = tmpdir.mkdir("experiments/")
    calc_DA_MAE = True

    trainer = TrainAE(settings, str(expdir))
    model = trainer.train(epochs, num_workers=0, calc_DA_MAE=calc_DA_MAE)
def main(): blocks = [ "NeXt", "vanilla", "CBAM_vanilla", "CBAM_NeXt", ] Cstd = 64 sigmoid = False activations = ["GDN", "prelu"] if TEST: expt = ExptConfigTest() else: expt = ExptConfig() idx = 0 for activation in activations: for block in blocks: kwargs = { "model_name": "Tucodec", "block_type": block, "Cstd": Cstd, "sigmoid": sigmoid, "activation": activation } idx_ = idx idx += 1 if idx_ % NUM_GPU != GPU_DEVICE: continue if activation == "prelu": if block == "NeXt" or block == "vanilla": continue for k, v in kwargs.items(): print("{}={}, ".format(k, v), end="") print() settings = CLIC(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx - 1) + "/" trainer = TrainAE(settings, expdir, expt.calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate=expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)
def main():
    structures = [(8, 3), (1, 27)]  # (cardinality, layers)
    substructures = ["ResNeXt3", "RDB3"]
    blocks = ["NeXt", "vanilla"]

    if TEST:
        expt = ExptConfigTest()
    else:
        expt = ExptConfig()

    idx = 0
    for block in blocks:
        for substruct in substructures:
            for struct in structures:
                idx += 1
                # split work across 2 gpus:
                # (note: of the 8 configurations generated here, indices 4 and 5
                #  fall to neither device under this split - presumably they were
                #  run separately)
                if idx - 1 < 6 and GPU_DEVICE == 1:
                    continue
                elif idx - 1 >= 4 and GPU_DEVICE == 0:
                    continue

                (cardinality, layers) = struct
                kwargs = {"layers": layers, "cardinality": cardinality,
                          "block_type": block, "module_type": substruct}

                for k, v in kwargs.items():
                    print("{}={}, ".format(k, v), end="")
                print()

                settings = ResStack3(**kwargs)
                settings.GPU_DEVICE = GPU_DEVICE
                settings.export_env_vars()

                expdir = exp_base + str(idx - 1) + "/"

                trainer = TrainAE(settings, expdir, expt.calc_DA_MAE)
                expdir = trainer.expdir  # get full path

                model = trainer.train(expt.EPOCHS, test_every=expt.test_every,
                                      num_epochs_cv=expt.num_epochs_cv,
                                      learning_rate=expt.LR,
                                      print_every=expt.print_every,
                                      small_debug=expt.SMALL_DEBUG_DOM)
def main(): kwargs = { "cardinality": 1, "block_type": "RNAB", "sigmoid": True, "module_type": "Bespoke", "attenuation": False } layers = [ 1, 2, 4, 8, ] if TEST: expt = ExptConfigTest() else: expt = ExptConfig() idx = 0 for layer in layers: kwargs[ "subBlock"] = "NeXt" #this performed slightly better on first case kwargs["layers"] = layer idx += 1 for k, v in kwargs.items(): print("{}={}, ".format(k, v), end="") print() settings = ResStack3(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx - 1) + "/" trainer = TrainAE(settings, expdir, expt.calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate=expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)
def main(): blocks = [ "NeXt", "vanilla", "CBAM_vanilla", "CBAM_NeXt", ] if TEST: expt = ExptConfigTest() else: expt = ExptConfig() idx = 0 for block in blocks: kwargs = { "block_type": block, "Cstd": 32, "aug_scheme": 0, "activation": "prelu" } idx_ = idx idx += 1 if idx_ % NUM_GPU != GPU_DEVICE: continue for k, v in kwargs.items(): print("{}={}, ".format(k, v), end="") print() settings = GRDNBaseline(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx - 1) + "/" trainer = TrainAE(settings, expdir, expt.calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate=expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)
def main():
    layers = [3, 6, 9, 18, 27]
    cardinalities = [1, 4, 8, 16, 32]
    substructure = "ResNeXt3"
    block = "NeXt"

    if TEST:
        expt = ExptConfigTest()
    else:
        expt = ExptConfig()

    idx = 0
    for layer in layers:
        for cardinality in cardinalities:
            idx_ = idx
            idx += 1
            # split work across 4 gpus:
            if idx_ % 4 != GPU_DEVICE:
                continue

            kwargs = {"layers": layer, "cardinality": cardinality,
                      "block_type": block, "module_type": substructure}

            for k, v in kwargs.items():
                print("{}={}, ".format(k, v), end="")
            print()

            settings = ResStack3(**kwargs)
            settings.GPU_DEVICE = GPU_DEVICE
            settings.export_env_vars()

            expdir = exp_base + str(idx_) + "/"

            trainer = TrainAE(settings, expdir, expt.calc_DA_MAE)
            expdir = trainer.expdir  # get full path

            model = trainer.train(expt.EPOCHS, test_every=expt.test_every,
                                  num_epochs_cv=expt.num_epochs_cv,
                                  learning_rate=expt.LR,
                                  print_every=expt.print_every,
                                  small_debug=expt.SMALL_DEBUG_DOM)
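# The `idx_ % 4 != GPU_DEVICE` guard above round-robins the 25 (layer, cardinality)
# combinations across four processes, each launched with a different GPU_DEVICE.
# A minimal standalone sketch (illustrative only, not part of the original script)
# of which experiment indices each device would take:
def show_gpu_split(num_experiments=25, num_gpu=4):
    for gpu_device in range(num_gpu):
        assigned = [idx for idx in range(num_experiments)
                    if idx % num_gpu == gpu_device]
        print("GPU", gpu_device, "->", assigned)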
def main(): kwargs = { "model_name": "Tucodec", "block_type": "NeXt", "Cstd": 64, "sigmoid": False, "activation": "prelu" } aug_schemes = list(range(1, 5)) if TEST: expt = ExptConfigTest() else: expt = ExptConfig() idx = 0 for aug_scheme in aug_schemes: kwargs["aug_scheme"] = aug_scheme idx_ = idx idx += 1 if idx_ % NUM_GPU != GPU_DEVICE: continue for k, v in kwargs.items(): print("{}={}, ".format(k, v), end="") print() settings = CLIC(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx - 1) + "/" trainer = TrainAE(settings, expdir, expt.calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate=expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)
def main(): activations = ["relu"] if TEST: expt = ExptConfigTest() else: expt = ExptConfig() expt.LR *= 0.1 idx = 0 for act in activations: kwargs = { "layers": 0, "cardinality": 2, "aug_scheme": 4, "activation": act } idx_ = idx idx += 1 if idx_ % NUM_GPU != GPU_DEVICE: continue for k, v in kwargs.items(): print("{}={}, ".format(k, v), end="") print() settings = ResStack3(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx - 1) + "/" trainer = TrainAE(settings, expdir, expt.calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate=expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)
def main(): blocks = ["NeXt", "vanilla"] channels = [32, 64] if TEST: expt = ExptConfigTest() else: expt = ExptConfig() idx = 0 for block in blocks: for Cstd in channels: kwargs = { "model_name": "Tucodec", "block_type": block, "Cstd": Cstd } idx += 1 for k, v in kwargs.items(): print("{}={}, ".format(k, v), end="") print() settings = CLIC(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx - 1) + "/" trainer = TrainAE(settings, expdir, expt.calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate=expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)
def main(): blocks = ["CBAM_vanilla", "vanilla", "CBAM_NeXt"] kwargs = {"cardinality": 1, "block_type": "RAB", "sigmoid": True, "module_type": "Bespoke", "attenuation": False, "layers": 4, "aug_scheme": 0} if TEST: expt = ExptConfigTest() else: expt = ExptConfig() idx = 0 for block in blocks: kwargs["subBlock"] = block idx_ = idx idx += 1 if idx_ % NUM_GPU != GPU_DEVICE: continue settings = ResStack3(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx_) + "/" trainer = TrainAE(settings, expdir, expt.calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate = expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)
def main(): blocks = ["NeXt", "vanilla"] kwargs = { "layers": 1, "cardinality": 1, "block_type": "RNAB", "module_type": "Bespoke" } if TEST: expt = ExptConfigTest() else: expt = ExptConfig() idx = 0 for block in blocks: kwargs["subBlock"] = block idx += 1 for k, v in kwargs.items(): print("{}={}, ".format(k, v), end="") print() settings = ResStack3(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx - 1) + "/" trainer = TrainAE(settings, expdir, expt.calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate=expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)
def main():
    # of form (layers, cardinality)
    param_vals = [(3, 1), (3, 8), (3, 32), (27, 32)]
    param_vals_2 = [(9, 1), (9, 8), (9, 32), (27, 1), (27, 8)]
    param_options = [param_vals, param_vals_2]

    idx = 0
    params = param_options[PARAM_IDX]
    for param in params:
        layer, cardinality = param
        print("Layers", layer)
        print("Cardinality", cardinality)

        kwargs = {"layers": layer, "cardinality": cardinality}

        settings = ResNeXt(**kwargs)
        settings.AUGMENTATION = True
        settings.DEBUG = False

        expdir = exp_base + str(idx) + "/"

        trainer = TrainAE(settings, expdir, calc_DA_MAE)
        expdir = trainer.expdir  # get full path

        model = trainer.train(EPOCHS, test_every=test_every,
                              num_epochs_cv=num_epochs_cv,
                              learning_rate=LR,
                              print_every=print_every,
                              small_debug=SMALL_DEBUG_DOM)
        idx += 1
def main(): activations = ["GDN", "relu"] lr_factors = [1, 0.2] resNextk3 = { "layers": 3, "cardinality": 8, "block_type": "vanilla", "module_type": "RDB3", "aug_scheme": 0 } rabkwargs = { "cardinality": 1, "block_type": "RAB", "sigmoid": True, "module_type": "Bespoke", "attenuation": False, "layers": 4, "aug_scheme": 0, "subBlock": "NeXt", } kwarg_lst = ( resNextk3, rabkwargs, ) models = ( ResStack3, ResStack3, ) assert len(models) == len(kwarg_lst) idx = 0 for index, kwargs in enumerate(kwarg_lst): for idx2, act in enumerate(activations): if TEST: expt = ExptConfigTest() else: expt = ExptConfig() expt.LR = expt.LR * lr_factors[idx2] Model = models[index] kwargs["activation"] = act if act == "relu": kwargs["aug_scheme"] = 4 idx_ = idx idx += 1 if idx_ % NUM_GPU != GPU_DEVICE: continue for k, v in kwargs.items(): print("{}={}, ".format(k, v), end="") print() settings = Model(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx - 1) + "/" trainer = TrainAE(settings, expdir, expt.calc_DA_MAE) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate=expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)
def main():
    idx = 0

    ##################### 02c
    structure = (4, 27)  # (cardinality, layers)
    substructures = ["ResNeXt3", "RDB3"]
    blocks = ["CBAM_vanilla", "vanilla", "NeXt", "CBAM_NeXt"]

    if TEST:
        expt = ExptConfigTest()
    else:
        expt = ExptConfig()
    expt.EPOCHS = 150

    for substruct in substructures:
        for block in blocks:
            (cardinality, layers) = structure
            idx_ = idx
            idx += 1
            if idx_ % NUM_GPU != GPU_DEVICE:
                continue

            kwargs = {"layers": layers, "cardinality": cardinality,
                      "block_type": block, "module_type": substruct,
                      "aug_scheme": 0}

            for k, v in kwargs.items():
                print("{}={}, ".format(k, v), end="")
            print()

            settings = ResStack3(**kwargs)
            settings.GPU_DEVICE = GPU_DEVICE
            settings.export_env_vars()

            expdir = exp_base + str(idx_) + "/"

            trainer = TrainAE(settings, expdir, expt.calc_DA_MAE)
            expdir = trainer.expdir  # get full path

            model = trainer.train(expt.EPOCHS, test_every=expt.test_every,
                                  num_epochs_cv=expt.num_epochs_cv,
                                  learning_rate=expt.LR,
                                  print_every=expt.print_every,
                                  small_debug=expt.SMALL_DEBUG_DOM)

    ################# 03c
    blocks = ["CBAM_vanilla", "vanilla", "CBAM_NeXt"]
    kwargs = {"cardinality": 1, "block_type": "RAB", "sigmoid": True,
              "module_type": "Bespoke", "attenuation": False,
              "layers": 4, "aug_scheme": 0}

    if TEST:
        expt = ExptConfigTest()
    else:
        expt = ExptConfig()
    expt.EPOCHS = 150

    for block in blocks:
        kwargs["subBlock"] = block
        idx_ = idx
        idx += 1
        if idx_ % NUM_GPU != GPU_DEVICE:
            continue

        for k, v in kwargs.items():
            print("{}={}, ".format(k, v), end="")
        print()

        settings = ResStack3(**kwargs)
        settings.GPU_DEVICE = GPU_DEVICE
        settings.export_env_vars()

        expdir = exp_base + str(idx_) + "/"

        trainer = TrainAE(settings, expdir, expt.calc_DA_MAE)
        expdir = trainer.expdir  # get full path

        model = trainer.train(expt.EPOCHS, test_every=expt.test_every,
                              num_epochs_cv=expt.num_epochs_cv,
                              learning_rate=expt.LR,
                              print_every=expt.print_every,
                              small_debug=expt.SMALL_DEBUG_DOM)

    ################################ 06a5
    blocks = ["NeXt", "vanilla", "CBAM_vanilla", "CBAM_NeXt"]
    Cstd = 64
    sigmoid = False
    activations = ["relu"]

    if TEST:
        expt = ExptConfigTest()
    else:
        expt = ExptConfig()
    expt.EPOCHS = 300

    for activation in activations:
        for block in blocks:
            kwargs = {"model_name": "Tucodec", "block_type": block,
                      "Cstd": Cstd, "sigmoid": sigmoid,
                      "activation": activation, "aug_scheme": 0}
            idx_ = idx
            idx += 1
            if idx_ % NUM_GPU != GPU_DEVICE:
                continue

            for k, v in kwargs.items():
                print("{}={}, ".format(k, v), end="")
            print()

            settings = CLIC(**kwargs)
            settings.GPU_DEVICE = GPU_DEVICE
            settings.export_env_vars()

            expdir = exp_base + str(idx - 1) + "/"

            trainer = TrainAE(settings, expdir, expt.calc_DA_MAE)
            expdir = trainer.expdir  # get full path

            model = trainer.train(expt.EPOCHS, test_every=expt.test_every,
                                  num_epochs_cv=expt.num_epochs_cv,
                                  learning_rate=expt.LR,
                                  print_every=expt.print_every,
                                  small_debug=expt.SMALL_DEBUG_DOM)
def main(): activations = ["GDN", "relu"] lr_factors = [1, 0.10] resNextk1 = { "layers": 27, "cardinality": 4, "block_type": "CBAM_vanilla", "module_type": "RDB3", "aug_scheme": 0 } resNextk2 = { "layers": 27, "cardinality": 1, "block_type": "CBAM_vanilla", "module_type": "ResNeXt3", "aug_scheme": 0 } kwarg_lst = ( resNextk1, resNextk2, ) models = ( ResStack3, ResStack3, ) assert len(models) == len(kwarg_lst) idx = 0 for index, kwargs in enumerate(kwarg_lst): for idx2, act in enumerate(activations): if TEST: expt = ExptConfigTest() else: expt = ExptConfig() expt.LR = expt.LR * lr_factors[idx2] batch_sz = 16 # if act == "GDN" and "module_type" == "RDB3": # batch_sz = 8 if act == "relu": kwargs["aug_scheme"] = 4 Model = models[index] kwargs["activation"] = act idx_ = idx idx += 1 if idx_ % NUM_GPU != GPU_DEVICE - GPU_OFFSET: continue for k, v in kwargs.items(): print("{}={}, ".format(k, v), end="") print() settings = Model(**kwargs) settings.GPU_DEVICE = GPU_DEVICE settings.export_env_vars() expdir = exp_base + str(idx - 1) + "/" print(expdir) trainer = TrainAE(settings, expdir, expt.calc_DA_MAE, batch_sz=batch_sz) expdir = trainer.expdir #get full path model = trainer.train(expt.EPOCHS, test_every=expt.test_every, num_epochs_cv=expt.num_epochs_cv, learning_rate=expt.LR, print_every=expt.print_every, small_debug=expt.SMALL_DEBUG_DOM)