def do_evaluation(self, modeleval: ModelEvaluation, round_value=False, make_nonnegative=False):
    """Produce a forecast for the evaluation's test window and score it.

    Args:
        modeleval: Evaluation context supplying the test series (``ts_test``)
            and the ``evaluate`` scorer.
        round_value: When True, round the forecast to the nearest integer.
        make_nonnegative: When True, clamp negative forecast values to zero
            (in place, matching the original masked assignment).
    """
    forecast = self.fit_and_forecast(modeleval)
    if round_value:
        forecast = np.around(forecast)
    if make_nonnegative:
        # Equivalent to forecast[forecast < 0] = 0: clamps in place, NaNs pass through.
        np.clip(forecast, 0, None, out=forecast)
    modeleval.evaluate(modeleval.ts_test, forecast)
def do_evaluation(self, modeleval: ModelEvaluation, round_value=False, make_nonnegative=False):
    """Fit the model via the evaluation context and score the resulting forecast.

    Args:
        modeleval: Evaluation context supplying the test series (``ts_test``)
            and the ``evaluate`` scorer.
        round_value: When True, round the forecast to the nearest integer.
        make_nonnegative: When True, clamp negative forecast values to zero
            (in place, matching the original masked assignment).
    """
    forecast = self.fit(modeleval)
    if round_value:
        forecast = np.around(forecast)
    if make_nonnegative:
        # Equivalent to forecast[forecast < 0] = 0: clamps in place, NaNs pass through.
        np.clip(forecast, 0, None, out=forecast)
    modeleval.evaluate(modeleval.ts_test, forecast)
def run(self):
    """Train and evaluate semi-supervised DNNs over a small label/aux-task grid.

    Loads the configured dataset, min-max normalises the inputs, then for each
    combination of (num_labels in {12, 25}) x (num_auxiliary_tasks in {0, 5})
    builds, trains, checkpoints and evaluates a model. Results are reported by
    ``ModelEvaluation.evaluate_semisupervised_dnn`` (val set picks the decision
    threshold, test set is scored with it).
    """
    num_epochs = int(np.rint(self.args["num_epochs"]))
    learning_rate = float(self.args["learning_rate"])
    output_directory = self.args["output_directory"]
    num_units = int(np.rint(self.args["num_units"]))
    n_jobs = int(np.rint(self.args["n_jobs"]))
    samples_per_segment = int(np.rint(self.args["samples_per_segment"]))

    (x_train, y_train), (x_val, y_val), (x_test, y_test) = Dataset.load_dataset(
        self.args["dataset"], samples_per_segment=samples_per_segment)

    # Standardise dataset: per-feature min-max scaling fitted on the training split.
    collapsed = np.vstack(x_train)
    min_val, max_val = np.min(collapsed, axis=0), np.max(collapsed, axis=0)
    # NOTE(review): a constant feature makes (max_val - min_val) zero and this
    # divide produce inf/nan — assumes the dataset has no constant columns; verify.
    # BUGFIX: these were `map(...)` calls, which in Python 3 return one-shot,
    # non-subscriptable iterators — `x_train[:num_labels]` below raised TypeError
    # and x_val/x_test would be exhausted after the first grid iteration.
    # Materialise as lists instead.
    x_train = [(xx - min_val) / (max_val - min_val) for xx in x_train]
    x_val = [(xx - min_val) / (max_val - min_val) for xx in x_val]
    x_test = [(xx - min_val) / (max_val - min_val) for xx in x_test]

    use_random_selection = True
    # Hoisted: invariant across the grid; each configuration overwrites the
    # same checkpoint and reloads it right after training.
    checkpoint_path = join(output_directory, "model.npy")
    for num_labels in [12, 25]:
        # Labelled subset is a prefix; the "unlabelled" pool is the full
        # training set (labels of the unlabelled pool are kept for the
        # generator interface but not used as supervision targets).
        x_train_labelled, x_train_unlabelled = x_train[:num_labels], x_train
        y_train_labelled, y_train_unlabelled = y_train[:num_labels], y_train
        potential_aux_tasks = get_potential_auxiliary_tasks(
            x_train_labelled, y_train_labelled, n_jobs)
        for num_auxiliary_tasks in [0, 5]:
            use_aux = num_auxiliary_tasks != 0
            if use_aux:
                task_provider = AuxiliaryTaskProvider(
                    use_random_selection, potential_aux_tasks,
                    num_auxiliary_tasks)
                aux_train_labelled = task_provider.calculate_auxiliary_tasks(
                    x_train_labelled)
                aux_train_unlabelled = task_provider.calculate_auxiliary_tasks(
                    x_train_unlabelled)
                # Standardise auxiliary tasks on the union of labelled and
                # unlabelled training targets.
                scaler = StandardScaler()
                scaler.fit(
                    np.concatenate(
                        [aux_train_labelled, aux_train_unlabelled], axis=0))
                print("scale:", scaler.scale_)
                aux_train_labelled = scaler.transform(aux_train_labelled)
                aux_train_unlabelled = scaler.transform(aux_train_unlabelled)
                aux_val = scaler.transform(
                    task_provider.calculate_auxiliary_tasks(x_val))
                aux_test = scaler.transform(
                    task_provider.calculate_auxiliary_tasks(x_test))

                model = ModelBuilder.build_semisupervised_dnn(
                    signal_names=TrainApplication.get_signal_names(),
                    signal_lengths=self.get_signal_lengths(),
                    num_units=num_units,
                    learning_rate=learning_rate,
                    num_auxiliary_tasks=num_auxiliary_tasks,
                    use_extra_auxiliary=True,
                    use_highway=True,
                    with_bn=False,
                    print_summary=False)
                labelled_train_set, labelled_train_steps = \
                    TrainApplication.get_generator(x_train_labelled,
                                                   y_train_labelled,
                                                   aux_train_labelled)
                unlabelled_train_set, unlabelled_train_steps = \
                    TrainApplication.get_generator(x_train_unlabelled,
                                                   y_train_unlabelled,
                                                   aux_train_unlabelled)
                val_set, val_steps = TrainApplication.get_generator(
                    x_val, y_val, aux_val)
                test_set, test_steps = TrainApplication.get_generator(
                    x_test, y_test, aux_test)
                ModelTrainer.train_dnn_w_auxiliary(
                    model, unlabelled_train_set, labelled_train_set, val_set,
                    num_epochs, labelled_train_steps, val_steps,
                    checkpoint_path=checkpoint_path)
            else:
                # Supervised baseline: no auxiliary heads, labelled data only.
                model = ModelBuilder.build_semisupervised_dnn(
                    signal_names=TrainApplication.get_signal_names(),
                    signal_lengths=self.get_signal_lengths(),
                    num_units=num_units,
                    learning_rate=learning_rate,
                    num_auxiliary_tasks=num_auxiliary_tasks,
                    use_extra_auxiliary=False,
                    return_intermediary=False,
                    use_highway=True,
                    with_bn=False,
                    print_summary=False)
                labelled_train_set, labelled_train_steps = \
                    TrainApplication.get_generator(x_train_labelled,
                                                   y_train_labelled)
                val_set, val_steps = TrainApplication.get_generator(
                    x_val, y_val)
                test_set, test_steps = TrainApplication.get_generator(
                    x_test, y_test)
                ModelTrainer.train_dnn(model, labelled_train_set, val_set,
                                       num_epochs, labelled_train_steps,
                                       val_steps,
                                       checkpoint_path=checkpoint_path)

            # The builder may return (model, ...) tuples; keep only the model.
            # (Removed redundant `else: model = model` no-op branch.)
            if isinstance(model, tuple):
                model = model[0]

            print("Evaluation with", num_labels, "labels and",
                  num_auxiliary_tasks, "aux tasks.", file=sys.stderr)
            # Restore the best checkpointed weights before evaluating.
            model.set_weights(np.load(checkpoint_path))
            # Pick the decision threshold on validation, then apply it to test.
            _, threshold = ModelEvaluation.evaluate_semisupervised_dnn(
                model, val_set, val_steps,
                with_auxiliary_tasks=use_aux,
                use_extra_auxiliary=use_aux)
            ModelEvaluation.evaluate_semisupervised_dnn(
                model, test_set, test_steps,
                with_auxiliary_tasks=use_aux,
                use_extra_auxiliary=use_aux,
                threshold=threshold)
def do_evaluation(self, modeleval: ModelEvaluation):
    """Fit the model via the evaluation context and score the raw forecast
    against the evaluation's held-out test series."""
    forecast = self.fit(modeleval)
    modeleval.evaluate(modeleval.ts_test, forecast)