Example #1
def load_optimizer(optimizer_selection: str) -> Type[Optimizer]:
    # The registry stores optimizer classes, hence the Type annotation;
    # the caller instantiates the class with model parameters.
    if optimizer_selection in optimizers:
        return optimizers[optimizer_selection]
    get_logger().error(
        "The {} optimizer is not available".format(optimizer_selection))
    raise ValueError(
        "The {} optimizer is not available".format(optimizer_selection))
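load_optimizer looks names up in a module-level optimizers registry that is not shown in this listing. A minimal sketch of what it might contain, mapping names to torch.optim classes (the entries are assumptions, not the project's actual registry):

from typing import Dict, Type

import torch as to
from torch.optim import Optimizer

# Hypothetical registry assumed by load_optimizer: names map to
# optimizer classes (not instances), so callers construct the optimizer
# themselves with the model parameters and a learning rate.
optimizers: Dict[str, Type[Optimizer]] = {
    "SGD": to.optim.SGD,
    "Adam": to.optim.Adam,
    "RMSprop": to.optim.RMSprop,
}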
def start_training(parameters_path: str) -> None:
    get_logger().info("Starting grid search")
    grid_search_parameters = init_environment_variables(parameters_path)
    use_case = UseCaseFactory()
    grid_search = use_case.build(use_case_name="grid-search",
                                 grid_search_parameters=grid_search_parameters)
    grid_search.start()
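A minimal way to invoke this entry point (the parameters path is a placeholder, not a file from the original project):

if __name__ == "__main__":
    # Hypothetical parameters file, for illustration only.
    start_training("parameters/grid_search.json")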
 def _load_data(self) -> List[Tuple[to.Tensor, to.Tensor, to.Tensor, str]]:
     get_logger().info("Loading dataset")
     self.postgres_connector.open_connection()
     try:
         dataset = self.postgres_connector.query_dataset()
     finally:
         # Close the connection even if the query raises.
         self.postgres_connector.close_connection()
     get_logger().info("Loaded {} entries. Size: {} MB".format(
         len(dataset), self._get_size_in_memory(dataset)))
     return dataset
 def start(self) -> None:
     get_logger().info('Starting Inference')
     inference_dataset, data_dimensions = self._prepare_dataset()
     model = self.loader.load_model(data_dimensions,
                                    self.saver.model_directory)
     outputs_labels_pairs = self.inferencer.do_inference(
         model, inference_dataset)
     self.saver.save_distance_maps(outputs_labels_pairs)
     get_logger().info('Finished Inference')
Example #5
 def save_model(self, epoch: int, configuration_id: str,
                model: to.nn.Module) -> None:
     current_folder = os.path.join(self.model_directory, configuration_id)
     os.makedirs(current_folder, exist_ok=True)
     path_and_filename = os.path.join(
         current_folder,
         "_".join([EPOCH, str(epoch), MODEL_STATE_DICTIONARY]))
     to.save(model.state_dict(), path_and_filename)
     get_logger().info("Saved model checkpoint in " + path_and_filename)
Example #6
 def save_results(self, results: Dict, configuration_id: str = '') -> None:
     current_folder = os.path.join(self.results_directory, configuration_id)
     os.makedirs(current_folder, exist_ok=True)
     results_dataframe = self._construct_dataframe_from_nested_dictionary(
         results)
     path_and_filename = os.path.join(
         current_folder,
         "_".join([datetime.now().strftime("%d-%b-%YT%H_%M"), RESULTS_CSV]))
     results_dataframe.to_csv(path_and_filename)
     get_logger().info("Saved results in " + path_and_filename)
 def _instantiate_the_optimizer(self,
                                configuration_dictionary: dict) -> None:
     optimizer = load_optimizer(configuration_dictionary['optimizer'])
     model_parameters = list(self.model.parameters())
     try:
         # Momentum-based optimizers (e.g. SGD) accept the momentum
         # argument; fall back for optimizers such as Adam that do not.
         self.optimizer = optimizer(model_parameters,
                                    lr=0.001,
                                    momentum=0.9)
     except TypeError:
         self.optimizer = optimizer(model_parameters, lr=0.001)
     get_logger().info('Optimizer: {}'.format(
         configuration_dictionary['optimizer']))
Example #8
 def start(self) -> Dict:
     get_logger().info('Starting Grid Search')
     configuration_id = ''  # aggregate results go to the results root
     for configuration in self.grid_search_configurations:
         configuration_dictionary, dataloaders = self._build_a_configuration(configuration)
         self._train_a_single_configuration(configuration_dictionary['configuration_id'],
                                            dataloaders,
                                            configuration_dictionary['epochs'],
                                            configuration_dictionary['validation_period'])
     self.saver.save_results(self.results, configuration_id)
     get_logger().info('Finished Grid Search')
     return self.results
Example #9
 def save_distance_maps(self,
                        distance_maps: List[Tuple],
                        configuration_id: str = '') -> None:
     current_folder = os.path.join(self.results_directory, configuration_id)
     os.makedirs(current_folder, exist_ok=True)
     path_and_filename = os.path.join(
         current_folder, "_".join(
             [datetime.now().strftime("%d-%b-%YT%H_%M"), DISTANCE_MAPS]))
     with open(path_and_filename, 'wb') as file:
         pickle.dump(distance_maps, file)
     get_logger().info("Saved inference outputs in " + path_and_filename)
 def do_train_step(self, training_data: DataLoader, epoch: int) -> float:
     training_loss = []
     for data in training_data:
         node_features, all_neighbors, labels = self._send_to_device(data)
         self.optimizer.zero_grad()
         outputs = self.model(node_features,
                              all_neighbors,
                              batch_size=labels.shape[0])
         loss = self.loss_function(outputs, labels, node_features)
         training_loss.append(loss.item())
         loss.backward()
         self.optimizer.step()
     average_loss = float(np.average(training_loss))
     get_logger().info('[Iteration %d] training loss: %.6f' %
                       (epoch, average_loss))
     return average_loss
 def _instantiate_the_model(self, configuration_dictionary: dict,
                            data_dimensions: dict) -> None:
     self.model = load_model(configuration_dictionary['model'])
     self.model = self.model(
         time_steps=configuration_dictionary['time_steps'],
         number_of_nodes=data_dimensions["number_of_nodes"],
         number_of_node_features=data_dimensions["number_of_node_features"],
         fully_connected_layer_input_size=data_dimensions[
             "fully_connected_layer_input_size"],
         fully_connected_layer_output_size=data_dimensions[
             "fully_connected_layer_output_size"],
         device=self.device)
     self.model.to(self.device)
     get_logger().info(
         'Loaded the {} model on {}. Model size: {} MB'.format(
             configuration_dictionary['model'], self.device,
             self.model.get_model_size()))
Example #12
 def _train_a_single_configuration(self,
                                   configuration_id: str,
                                   dataloaders: Tuple[DataLoader, DataLoader, DataLoader],
                                   epochs: int,
                                   validation_period: int) -> None:
     get_logger().info('Starting training: {}'.format(configuration_id))
     training_data, validation_data, test_data = dataloaders
     validation_loss_max = np.inf
     for epoch in range(1, epochs + 1):
         training_loss = self.trainer.do_train_step(training_data, epoch)
         self.results['training_loss'][configuration_id].update({epoch: training_loss})
         if epoch % validation_period == 0:
             validation_loss = self.trainer.do_evaluate_step(validation_data, epoch)
             self._save_best_model(configuration_id, epoch, validation_loss, validation_loss_max)
             self.results['validation_loss'][configuration_id].update({epoch: validation_loss})
     test_loss = self.trainer.do_evaluate_step(test_data)
     self.results['test_loss'][configuration_id].update({'final_epoch': test_loss})
     get_logger().info('Finished training: {}'.format(configuration_id))
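The loop indexes self.results by metric name and configuration id before any epoch has been recorded, so the dictionary must be pre-initialized. A hypothetical layout consistent with those accesses (metric -> configuration_id -> {epoch: loss}):

from collections import defaultdict

# Assumed shape of self.results; defaultdict(dict) lets new
# configuration ids be updated without explicit initialization.
results = {
    'training_loss': defaultdict(dict),
    'validation_loss': defaultdict(dict),
    'test_loss': defaultdict(dict),
}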
def load_loss_function(
        configuration: dict,
        postgres_connector: PostgresConnector = None
) -> Tuple[Union[Module, LossWithPenalty], bool]:
    # Returns the loss function together with a flag indicating whether
    # it carries a penalty term.
    if configuration["loss_function"] in loss_functions:
        if 'penalty' in configuration["loss_function"].lower():
            penalty_dictionary = build_penalty_loss(postgres_connector)
            return LossWithPenalty(
                loss_function=loss_functions[configuration["loss_function"]](),
                penalty=penalty_dictionary,
                scaling_factor=configuration['scaling_factor'],
                penalty_decimals=configuration['penalty_decimals'],
                batch_size=configuration['batch_size']), True
        return loss_functions[configuration["loss_function"]](), False
    get_logger().error("The {} loss function is not available".format(
        configuration["loss_function"]))
    raise ValueError("The {} loss function is not available".format(
        configuration["loss_function"]))
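As with the optimizers, a module-level loss_functions registry is assumed. A minimal sketch mapping names to torch.nn loss classes, instantiated at lookup time (the entries are illustrative):

from typing import Dict, Type

import torch.nn as nn

# Hypothetical registry assumed by load_loss_function.
loss_functions: Dict[str, Type[nn.Module]] = {
    "MSELoss": nn.MSELoss,
    "L1Loss": nn.L1Loss,
    "SmoothL1Loss": nn.SmoothL1Loss,
}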
 def do_evaluate_step(self,
                      evaluation_data: DataLoader,
                      epoch: int = None) -> float:
     # Evaluate without gradient tracking; return NaN when there is no
     # data so the annotated return type always holds.
     evaluation_loss = float('nan')
     with to.no_grad():
         if len(evaluation_data):
             batch_losses = []
             for data in evaluation_data:
                 node_features, all_neighbors, labels = self._send_to_device(
                     data)
                 outputs = self.model(node_features,
                                      all_neighbors,
                                      batch_size=labels.shape[0])
                 loss = self.loss_function(outputs, labels, node_features)
                 batch_losses.append(loss.item())
             evaluation_loss = float(np.average(batch_losses))
             if epoch is not None:
                 get_logger().info('[Iteration %d] validation loss: %.6f' %
                                   (epoch, evaluation_loss))
             else:
                 get_logger().info('Test loss: %.6f' % evaluation_loss)
         else:
             get_logger().warning('No evaluation data found!')
     return evaluation_loss
def start_inference(parameters_path: str) -> None:
    get_logger().info("Starting inference")
    init_environment_variables(parameters_path)
    use_case = UseCaseFactory()
    inference = use_case.build(use_case_name="inference")
    inference.start()
 def _instantiate_the_loss_function(self, configuration: dict) -> None:
     loss_function, has_penalty = load_loss_function(
         configuration, self.postgres_connector)
     self.loss_function = LossFunctionWrapper(loss_function, has_penalty)
     get_logger().info('Loss function: {}'.format(
         configuration['loss_function']))
def load_model(model_selection: str) -> Type[nn.Module]:
    # The registry stores model classes, which the caller instantiates.
    if model_selection in models:
        return models[model_selection]
    get_logger().error("The " + model_selection + " model is not available")
    raise ValueError("The " + model_selection + " model is not available")
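The models registry follows the same pattern; a sketch with a placeholder entry, since the project's model classes are not shown in this listing:

from typing import Dict, Type

import torch.nn as nn

# Hypothetical registry assumed by load_model; the key and class are
# placeholders, not the original project's models.
models: Dict[str, Type[nn.Module]] = {
    "GCN": nn.Module,  # replace with the actual model class
}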
 def start(self) -> None:
     try:
         self.use_case.start()
     except Exception:
         # Log the failure with the full traceback.
         get_logger().exception("Use case execution failed")