def execute(self):
    """
    Executes the experiment with the given config.

    The experiment tests each pre-trained CSNN model for each given dataset by
    learning the defined reconstructor on the learned representation.
    If xFoldCrossValidation is set this will be repeated x times.
    """
    for csnn_config in self.__config["csnnConfigs"]:
        csnn_name = csnn_config["modelName"]
        try:
            for dataset_config in self.__config["datasetConfigs"]:
                provider = getDatasetProvider(dataset_config)
                # Skip datasets for which no batch size was configured for this model.
                if dataset_config["nameOfDataset"] not in csnn_config["batchSizes"]:
                    continue
                for i in range(csnn_config["xFoldCrossValidation"]):
                    model_dir = "/" + csnn_name + "/" + dataset_config["nameOfDataset"] + "/xFoldCrossVal" + str(i)
                    self.__logger.info("Starting to test: " + model_dir,
                                       "CsnnReconstructionExperiment:execute")
                    # Only shuffle with a per-fold seed when actually cross-validating.
                    xseed = None if csnn_config["xFoldCrossValidation"] <= 1 else 42 + i
                    dataset, dataset_generator = prepareDataset(provider, dataset_config, xfold_seed=xseed,
                                                                augment_data=csnn_config["augmentData"])
                    # Second copy of the dataset, normalized via max division; returned
                    # alongside the encoding as the reconstruction target.
                    dataset_max_div, _ = prepareDataset(provider, dataset_config, xfold_seed=xseed,
                                                        augment_data=csnn_config["augmentData"],
                                                        normalize_data="maxDiv")
                    self.__logger.info("Starting to create dataset encoding with: " + model_dir,
                                       "CsnnReconstructionExperiment:execute")
                    # Encode the dataset with the (pre-trained) CSNN.
                    encoding_provider, encoding = prepareEncoding(
                        csnn_config, dataset_generator, dataset, dataset_config, csnn_name,
                        self.__num_gpus, model_dir + "/Csnn",
                        zero_mean_unit_variance=csnn_config["zeroMeanUnitVarianceEncoding"],
                        return_with_encoding=dataset_max_div)
                    self.__logger.info("Finished to create dataset encoding with: " + model_dir,
                                       "CsnnReconstructionExperiment:execute")
                    self.__logger.info("Starting to train reconstructor for: " + model_dir,
                                       "CsnnReconstructionExperiment:execute")
                    try:
                        rec_config = self.__config["reconstructionConfig"]
                        trainAndValReconstructionModel(
                            Cifar10Reconstruction(rec_config), rec_config, self.__num_gpus,
                            model_dir + "/" + rec_config["modelName"] + "/" + dataset_config["nameOfDataset"],
                            encoding, dataset_config)
                    except Exception:
                        # Best effort: log and keep testing the remaining folds/datasets.
                        print(traceback.format_exc())
                    self.__logger.info("Finished to train reconstructor for: " + model_dir,
                                       "CsnnReconstructionExperiment:execute")
        except Exception:
            print(traceback.format_exc())
def execute(self):
    """
    Executes the experiment with the given config.

    The experiment trains each model for each given dataset like a CNN.
    If xFoldCrossValidation is set this will be repeated x times.
    """
    for hybrid_config in self.__config["hybridConfigs"]:
        hybrid_name = hybrid_config["modelName"]
        try:
            for dataset_config in self.__config["datasetConfigs"]:
                provider = getDatasetProvider(dataset_config)
                # Skip datasets for which no batch size was configured for this model.
                if dataset_config["nameOfDataset"] not in hybrid_config["batchSizes"]:
                    continue
                for i in range(hybrid_config["xFoldCrossValidation"]):
                    model_dir = "/" + hybrid_name + "/" + dataset_config["nameOfDataset"] + "/xFoldCrossVal" + str(i)
                    self.__logger.info("Starting to train: " + model_dir,
                                       "CsnnPerformancesExperiment:execute")
                    # Only shuffle with a per-fold seed when actually cross-validating.
                    xseed = None if hybrid_config["xFoldCrossValidation"] <= 1 else 42 + i
                    dataset, dataset_generator = prepareDataset(provider, dataset_config, xfold_seed=xseed,
                                                                augment_data=hybrid_config["augmentData"])
                    trainAndValHybrid(hybrid_config, dataset_generator, dataset, dataset_config,
                                      self.__num_gpus, model_dir + "/Csnn")
                    self.__logger.info("Finished to train: " + model_dir,
                                       "CsnnPerformancesExperiment:execute")
        except Exception:
            # Log and continue with the next hybrid configuration.
            print(traceback.format_exc())
def execute(self):
    """
    Executes the experiment with the given config.

    The experiment trains each CSNN model for each given dataset for all
    defined training steps per layer and learns the defined classifier to test
    the learned representation. If xFoldCrossValidation is set this will be
    repeated x times.
    """
    encoding_config = self.__config["encodingConfig"]
    # For each model to train and val
    for csnn_config in self.__config["csnnConfigs"]:
        csnn_name = csnn_config["modelName"]
        try:
            # For each dataset
            for dataset_config in self.__config["datasetConfigs"]:
                # Get the provider of the dataset
                provider = getDatasetProvider(dataset_config)
                # Skip datasets for which no batch size was configured for this model.
                if dataset_config["nameOfDataset"] not in csnn_config["batchSizes"]:
                    continue
                # and for each xFold iteration
                for i in range(csnn_config["xFoldCrossValidation"]):
                    # Construct the model name
                    model_dir = "/" + csnn_name + "/" + dataset_config["nameOfDataset"] + "/xFoldCrossVal" + str(i)
                    self.__logger.info("Starting to train: " + model_dir,
                                       "CsnnOfmExperiment:execute")
                    # Reset the training of the model
                    csnn_config["trainingSteps"] = 0
                    # Train model for training_step_per_layer steps
                    prev_training_steps = 0
                    prev_training_steps_per_layer = 0
                    for training_step_per_layer in self.__config["ofmSteps"]:
                        # Based on the current training_step_per_layer we calculate the
                        # new training steps for non-frozen layers.
                        csnn_config["trainingSteps"] = training_step_per_layer * len(csnn_config["layers"])
                        # Calculate the training intervals based on the old training
                        # intervals: each layer trains for the additional delta of steps.
                        for l in range(len(csnn_config["layers"])):
                            csnn_config["layers"][l]["trainInterval"][0] = prev_training_steps
                            prev_training_steps += (training_step_per_layer - prev_training_steps_per_layer)
                            csnn_config["layers"][l]["trainInterval"][1] = prev_training_steps
                        prev_training_steps_per_layer = training_step_per_layer
                        prev_training_steps = csnn_config["trainingSteps"]
                        # If xFoldCrossValidation is set, create a seed to shuffle the dataset.
                        xseed = None if csnn_config["xFoldCrossValidation"] <= 1 else 42 + i
                        dataset, dataset_generator = prepareDataset(provider, dataset_config, xfold_seed=xseed,
                                                                    augment_data=csnn_config["augmentData"])
                        # Train the CSNN
                        trainAndValCsnn(csnn_config, dataset_generator, dataset, dataset_config,
                                        self.__num_gpus, model_dir + "/Csnn")
                        # NOTE(review): the original passed the tag argument twice here;
                        # dropped the duplicate to match every other logger call.
                        self.__logger.info("Finished to train: " + model_dir,
                                           "CsnnOfmExperiment:execute")
                        self.__logger.info("Starting to create dataset encoding with: " + model_dir,
                                           "CsnnOfmExperiment:execute")
                        # Create the encoding of the dataset via the trained CSNN.
                        encoding_provider = prepareEncoding(
                            csnn_config, dataset_generator, dataset, dataset_config, csnn_name,
                            self.__num_gpus, model_dir + "/Csnn",
                            zero_mean_unit_variance=csnn_config["zeroMeanUnitVarianceEncoding"])
                        self.__logger.info("Finished to create dataset encoding with: " + model_dir,
                                           "CsnnOfmExperiment:execute")
                        self.__logger.info("Starting to train classifiers for: " + model_dir,
                                           "CsnnOfmExperiment:execute")
                        # Test the trained model by training the classifier(s)
                        for classifier in self.__config["classifiers"]:
                            try:
                                classifier["numClasses"] = dataset_config["numClasses"]
                                if classifier["type"] in ("nonlinear", "linear"):
                                    trainAndValClassifier(
                                        Mlp(classifier), classifier, encoding_provider, encoding_config,
                                        self.__num_gpus,
                                        model_dir + "/" + classifier["modelName"] + str(training_step_per_layer))
                                elif classifier["type"] == "fewShot":
                                    trainAndValFewShotClassifier(
                                        Mlp(classifier), classifier, encoding_provider, encoding_config,
                                        self.__num_gpus,
                                        model_dir + "/" + classifier["modelName"] + str(training_step_per_layer))
                            except Exception:
                                # Best effort: log and continue with the next classifier.
                                print(traceback.format_exc())
                        self.__logger.info("Finished to train classifiers for: " + model_dir,
                                           "CsnnOfmExperiment:execute")
        except Exception:
            print(traceback.format_exc())
def execute(self):
    """
    Executes the experiment with the given config.

    The experiment trains each CSNN model for each given dataset for the
    defined training steps per layer and learns the defined classifier to test
    the learned representation. If xFoldCrossValidation is set this will be
    repeated x times.
    """
    encoding_config = self.__config["encodingConfig"]
    for csnn_config in self.__config["csnnConfigs"]:
        csnn_name = csnn_config["modelName"]
        try:
            for dataset_config in self.__config["datasetConfigs"]:
                provider = getDatasetProvider(dataset_config)
                # Skip datasets for which no batch size was configured for this model.
                if dataset_config["nameOfDataset"] not in csnn_config["batchSizes"]:
                    continue
                for i in range(csnn_config["xFoldCrossValidation"]):
                    model_dir = "/" + csnn_name + "/" + dataset_config["nameOfDataset"] + "/xFoldCrossVal" + str(i)
                    self.__logger.info("Starting to train: " + model_dir,
                                       "CsnnPerformancesExperiment:execute")
                    # Only shuffle with a per-fold seed when actually cross-validating.
                    xseed = None if csnn_config["xFoldCrossValidation"] <= 1 else 42 + i
                    dataset, dataset_generator = prepareDataset(provider, dataset_config, xfold_seed=xseed,
                                                                augment_data=csnn_config["augmentData"])
                    trainAndValCsnn(csnn_config, dataset_generator, dataset, dataset_config,
                                    self.__num_gpus, model_dir + "/Csnn")
                    self.__logger.info("Finished to train: " + model_dir,
                                       "CsnnPerformancesExperiment:execute")
                    self.__logger.info("Starting to create dataset encoding with: " + model_dir,
                                       "CsnnPerformancesExperiment:execute")
                    # Encode the dataset via the freshly trained CSNN.
                    encoding_provider = prepareEncoding(
                        csnn_config, dataset_generator, dataset, dataset_config, csnn_name,
                        self.__num_gpus, model_dir + "/Csnn",
                        zero_mean_unit_variance=csnn_config["zeroMeanUnitVarianceEncoding"])
                    self.__logger.info("Finished to create dataset encoding with: " + model_dir,
                                       "CsnnPerformancesExperiment:execute")
                    self.__logger.info("Starting to train classifiers for: " + model_dir,
                                       "CsnnPerformancesExperiment:execute")
                    # Test the learned representation with the configured classifier(s).
                    for classifier in self.__config["classifiers"]:
                        try:
                            classifier["numClasses"] = dataset_config["numClasses"]
                            if classifier["type"] in ("nonlinear", "linear"):
                                trainAndValClassifier(
                                    Mlp(classifier), classifier, encoding_provider, encoding_config,
                                    self.__num_gpus, model_dir + "/" + classifier["modelName"])
                            elif classifier["type"] == "fewShot":
                                trainAndValFewShotClassifier(
                                    Mlp(classifier), classifier, encoding_provider, encoding_config,
                                    self.__num_gpus, model_dir + "/" + classifier["modelName"])
                        except Exception:
                            # Best effort: log and continue with the next classifier.
                            print(traceback.format_exc())
                    self.__logger.info("Finished to train classifiers for: " + model_dir,
                                       "CsnnPerformancesExperiment:execute")
        except Exception:
            print(traceback.format_exc())