def final_fit(self, x_train, y_train, x_test, y_test, trainer_args=None, retrain=False):
    """Run one last round of training on the best architecture found.

    Args:
        x_train: A numpy.ndarray of training data.
        y_train: A numpy.ndarray of training targets.
        x_test: A numpy.ndarray of testing data.
        y_test: A numpy.ndarray of testing targets.
        trainer_args: A dictionary of ModelTrainer constructor parameters.
        retrain: A boolean; when True the model weights are reinitialized.
    """
    if trainer_args is None:
        # Default budget: stop after 30 epochs without improvement.
        trainer_args = {'max_no_improvement_num': 30}

    # Encode the targets, then wrap the raw arrays into DataLoaders.
    y_train = self.transform_y(y_train)
    y_test = self.transform_y(y_test)
    loader_train = self.data_transformer.transform_train(x_train, y_train)
    loader_test = self.data_transformer.transform_test(x_test, y_test)

    best_graph = self.load_searcher().load_best_model()
    if retrain:
        # Clearing the weighted flag discards the searched weights so
        # training starts from a fresh initialization.
        best_graph.weighted = False
    _, _1, best_graph = train((best_graph, loader_train, loader_test, trainer_args,
                               None, self.metric, self.loss, self.verbose))
def fit(self, x, y, trainer_args=None):
    """Train the model on the given dataset.

    Args:
        x: A numpy.ndarray instance containing the training data or the
            training data combined with the validation data.
        y: A numpy.ndarray instance containing the labels for `x`.
        trainer_args: A dictionary containing the parameters of the
            ModelTrainer constructor.
    """
    validate_xy(x, y)
    self.y_encoder.fit(y)
    y = self.y_encoder.transform(y)

    if trainer_args is None:
        trainer_args = {'max_no_improvement_num': 30}

    # Hold out part of the data for validation, clamped to [1, 500] samples.
    holdout = max(1, min(500, int(len(y) * Constant.VALIDATION_SET_SIZE)))
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=holdout, random_state=42)

    # The data transformer is fitted on the training split only.
    self.data_transformer = self.data_transformer_class(x_train)
    loader_train = self.data_transformer.transform_train(x_train, y_train)
    loader_test = self.data_transformer.transform_test(x_test, y_test)

    # Build an initial architecture matching the data shape and label count.
    self.generator = self._init_generator(self.y_encoder.n_classes,
                                          x_train.shape[1:])
    _, _1, self.graph = train(None, self.generator.generate(), loader_train,
                              loader_test, trainer_args, self.metric,
                              self.loss, self.verbose, self.path)
def fit(self, x, y, trainer_args=None, retrain=False):
    """Trains the model on the given dataset.

    Args:
        x: A numpy.ndarray instance containing the training data or the
            training data combined with the validation data.
        y: A numpy.ndarray instance containing the labels for `x`.
        trainer_args: A dictionary containing the parameters of the
            ModelTrainer constructor.
        retrain: A boolean of whether to reinitialize the model weights.
    """
    x = self.preprocess(x)

    # Reserve a validation split, clamped to [1, 500] samples.
    holdout = max(1, min(500, int(len(y) * Constant.VALIDATION_SET_SIZE)))
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=holdout, random_state=42)

    if trainer_args is None:
        trainer_args = {'max_no_improvement_num': 30}

    # Encode the targets and wrap the splits into DataLoaders.
    y_train = self.transform_y(y_train)
    y_test = self.transform_y(y_test)
    loader_train = self.data_transformer.transform_train(x_train, y_train)
    loader_test = self.data_transformer.transform_test(x_test, y_test)

    if retrain:
        # Discard existing weights so training starts from scratch.
        self.graph.weighted = False
    _, _1, self.graph = train(None, self.graph, loader_train, loader_test,
                              trainer_args, self.metric, self.loss,
                              self.verbose, self.path)
def fit(self, x, y, trainer_args=None):
    """Trains the model on the dataset given.

    Args:
        x: A numpy.ndarray instance containing the training data or the
            training data combined with the validation data.
        y: A numpy.ndarray instance containing the labels for `x`.
        trainer_args: A dictionary containing the parameters of the
            ModelTrainer constructor.
    """
    validate_xy(x, y)
    self.y_encoder.fit(y)
    y = self.y_encoder.transform(y)

    # Size of the validation split: a fraction of the data, within [1, 500].
    n_valid = int(len(y) * Constant.VALIDATION_SET_SIZE)
    n_valid = min(n_valid, 500)
    n_valid = max(n_valid, 1)
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=n_valid, random_state=42)

    # Fit the data transformer on the training split and build DataLoaders.
    self.data_transformer = self.data_transformer_class(x_train)
    train_loader = self.data_transformer.transform_train(x_train, y_train)
    test_loader = self.data_transformer.transform_test(x_test, y_test)

    # Generate an initial architecture for this input shape / class count.
    self.generator = self._init_generator(self.y_encoder.n_classes,
                                          x_train.shape[1:])
    initial_graph = self.generator.generate()

    if trainer_args is None:
        trainer_args = {'max_no_improvement_num': 30}
    _, _1, self.graph = train(None, initial_graph, train_loader, test_loader,
                              trainer_args, self.metric, self.loss,
                              self.verbose, self.path)
def fit(self, x_train, y_train, x_test, y_test, trainer_args=None, retrain=False):
    """Continue training the current model (graph) on the supplied data.

    Args:
        x_train: A numpy.ndarray of training data.
        y_train: A numpy.ndarray of training targets.
        x_test: A numpy.ndarray of testing data.
        y_test: A numpy.ndarray of testing targets.
        trainer_args: A dictionary containing the parameters of the
            ModelTrainer constructor.
        retrain: A boolean of whether to reinitialize the model weights.
    """
    # Preprocess the inputs and encode the targets.
    x_train, x_test = self.preprocess(x_train), self.preprocess(x_test)
    y_train, y_test = self.transform_y(y_train), self.transform_y(y_test)

    if trainer_args is None:
        trainer_args = {'max_no_improvement_num': 30}

    loader_train = self.data_transformer.transform_train(x_train, y_train)
    loader_test = self.data_transformer.transform_test(x_test, y_test)

    if retrain:
        # Drop the stored weights so training restarts from scratch.
        self.graph.weighted = False
    _, _1, self.graph = train(None, self.graph, loader_train, loader_test,
                              trainer_args, self.metric, self.loss,
                              self.verbose, self.path)
def final_fit(self, x_train, y_train, x_test, y_test, trainer_args=None, retrain=False):
    """Final training after the best architecture has been found.

    Args:
        x_train: A numpy.ndarray of training data.
        y_train: A numpy.ndarray of training targets.
        x_test: A numpy.ndarray of testing data.
        y_test: A numpy.ndarray of testing targets.
        trainer_args: A dictionary containing the parameters of the
            ModelTrainer constructor.
        retrain: A boolean of whether to reinitialize the model weights.
    """
    # An empty dict lets the trainer fall back to its own defaults.
    if trainer_args is None:
        trainer_args = {}

    y_train = self.y_encoder.transform(y_train)
    y_test = self.y_encoder.transform(y_test)

    best_graph = self.load_searcher().load_best_model()
    if retrain:
        # Reset the weights so training starts from a fresh initialization.
        best_graph.weighted = False
    _, _1, best_graph = train(
        (best_graph, x_train, y_train, x_test, y_test, trainer_args, None))
def final_fit(self, train_data, test_data, trainer_args=None, retrain=False):
    """Final training after the best architecture has been found.

    Args:
        train_data: A DataLoader instance representing the training data.
        test_data: A DataLoader instance representing the testing data.
        trainer_args: A dictionary containing the parameters of the
            ModelTrainer constructor.
        retrain: A boolean of whether to reinitialize the model weights.
    """
    best_graph = self._load_searcher().load_best_model()
    if retrain:
        # Discard the searched weights to train from a fresh start.
        best_graph.weighted = False
    _, _1, best_graph = train((best_graph, train_data, test_data, trainer_args,
                               None, self.metric, self.loss, self.verbose))
def final_fit(self, train_data, test_data, trainer_args=None, retrain=False):
    """Final training after the best architecture has been found.

    Args:
        train_data: A DataLoader instance representing the training data.
        test_data: A DataLoader instance representing the testing data.
        trainer_args: A dictionary containing the parameters of the
            ModelTrainer constructor.
        retrain: A boolean of whether to reinitialize the model weights.
    """
    best_graph = self.searcher.load_best_model()
    if retrain:
        # Drop the weights so training restarts from a fresh initialization.
        best_graph.weighted = False
    _, _1, best_graph = train(None, best_graph, train_data, test_data,
                              trainer_args, self.metric, self.loss,
                              self.verbose, self.path)
    # Persist the retrained graph and snapshot the whole module to disk.
    self.searcher.replace_model(best_graph, self.searcher.get_best_model_id())
    pickle_to_file(self, os.path.join(self.path, 'module'))
def fit(self, x_train, y_train, time_limit=None):
    """Trains the model on the dataset given.

    Args:
        x_train: A numpy.ndarray instance containing the training data, or
            the training data combined with the validation data.
        y_train: A numpy.ndarray instance containing the labels for
            `x_train`.
        time_limit: Despite its name, a dictionary of ModelTrainer
            constructor parameters, passed through as the trainer args.
    """
    validate_xy(x_train, y_train)
    self.resize_shape = compute_image_resize_params(x_train)
    x_train = self.preprocess(x_train)
    self.y_encoder.fit(y_train)
    y_train = self.transform_y(y_train)

    # Carve out a validation split, clamped to [1, 500] samples.
    holdout = max(1, min(500, int(len(y_train) * Constant.VALIDATION_SET_SIZE)))
    x_train_new, x_test, y_train_new, y_test = train_test_split(
        x_train, y_train, test_size=holdout, random_state=42)

    # Fit the image transformer on the training split and wrap the data.
    self.data_transformer = ImageDataTransformer(x_train_new)
    loader_train = self.data_transformer.transform_train(x_train_new,
                                                         y_train_new)
    loader_test = self.data_transformer.transform_test(x_test, y_test)

    # Generate an initial architecture for this input shape / class count.
    self.generator = self._init_generator(self.y_encoder.n_classes,
                                          x_train_new.shape[1:])
    if time_limit is None:
        time_limit = {'max_no_improvement_num': 30}
    _, _1, self.graph = train(None, self.generator.generate(), loader_train,
                              loader_test, time_limit, self.metric, self.loss,
                              self.verbose, self.path)
def final_fit(self, train_data, test_data, trainer_args=None, retrain=False):
    """Final training after the best architecture has been found.

    Args:
        train_data: A DataLoader instance representing the training data.
        test_data: A DataLoader instance representing the testing data.
        trainer_args: A dictionary containing the parameters of the
            ModelTrainer constructor.
        retrain: A boolean of whether to reinitialize the model weights.
    """
    model_graph = self.searcher.load_best_model()
    if retrain:
        # Reset the weights for a fresh training run.
        model_graph.weighted = False
    _, _1, model_graph = train(None, model_graph, train_data, test_data,
                               trainer_args, self.metric, self.loss,
                               self.verbose, self.path)
    self.searcher.replace_model(model_graph, self.searcher.get_best_model_id())
    # Snapshot the whole module so it can be reloaded later.
    pickle_to_file(self, os.path.join(self.path, 'module'))