def create_grid_search(dataset_name: str,
                       data_directory: str,
                       model_directory: str,
                       results_directory: str,
                       model: str,
                       device: str,
                       epochs: str,
                       loss_function_selection: str,
                       optimizer_selection: str,
                       batch_size: str,
                       validation_split: str,
                       test_split: str,
                       time_steps: str,
                       validation_period: str) -> MessagePassingNN:
    """Wire up a grid-search use case and wrap it in a MessagePassingNN facade.

    The hyperparameter arguments arrive as strings and are parsed into a
    search-space dictionary by GridSearchParametersParser before being handed
    to GridSearch together with the data path, preprocessor, trainer and saver.
    """
    data_path = _get_data_path(data_directory, dataset_name)
    search_space = GridSearchParametersParser().get_grid_search_dictionary(
        model,
        epochs,
        loss_function_selection,
        optimizer_selection,
        batch_size,
        validation_split,
        test_split,
        time_steps,
        validation_period)
    preprocessor = DataPreprocessor()
    grid_search = GridSearch(data_path,
                             preprocessor,
                             Trainer(preprocessor, device),
                             search_space,
                             Saver(model_directory, results_directory))
    return MessagePassingNN(grid_search)
    def test_start(self):
        """End-to-end check: Inference.start() on a Postgres-backed dataset
        writes a pickled distance-map file into the results directory.
        """
        # Given
        checkpoint_path = os.path.join(
            "tests", "test_data", "model-checkpoints-test",
            "configuration&id__model&RNN__epochs&10__loss_function&MSE__optimizer"
            "&Adagrad__batch_size&100__validation_split&0.2__test_split"
            "&0.1__time_steps&1__validation_period&5",
            "Epoch_5_model_state_dictionary.pth")
        results_directory = os.path.join('tests', 'results_inference')
        preprocessor = DataPreprocessor()
        # Connector is kept on self so the insert/teardown helpers can use it.
        self.postgres_connector = PostgresConnector()
        self._insert_test_data(dataset_size=1)
        inference = Inference(GraphDataset(self.postgres_connector),
                              preprocessor,
                              Loader("RNN"),
                              Inferencer(preprocessor, "cpu"),
                              Saver(checkpoint_path, results_directory))

        # When
        inference.start()

        # Then
        # Output filename is timestamped to the minute; recompute it here.
        expected_filename = (datetime.now().strftime("%d-%b-%YT%H_%M")
                             + "_distance_maps.pickle")
        self.assertTrue(
            os.path.isfile(
                os.path.join(results_directory, expected_filename)))

        # Tear down
        self._truncate_table()
 def _build_grid_search(self, grid_search_parameters) -> GridSearch:
     """Assemble a GridSearch over a Postgres-backed dataset using the
     configurations expanded from *grid_search_parameters*.
     """
     connector = PostgresConnector()
     preprocessor = DataPreprocessor()
     configurations = self._get_all_grid_search_configurations(grid_search_parameters)
     return GridSearch(GraphDataset(connector),
                       preprocessor,
                       Trainer(preprocessor, self.device, connector),
                       configurations,
                       Saver(self.model_directory, self.results_directory))
 def _build_inference(self) -> Inference:
     """Assemble an Inference use case; the model name is read from the
     MODEL environment variable (raises KeyError if unset).
     """
     self.model = os.environ['MODEL']
     preprocessor = DataPreprocessor()
     connector = PostgresConnector()
     return Inference(GraphDataset(connector),
                      preprocessor,
                      Loader(self.model),
                      Inferencer(preprocessor, self.device),
                      Saver(self.model_directory, self.results_directory))
def create_inference(dataset_name: str,
                     data_directory: str,
                     model_directory: str,
                     results_directory: str,
                     model: str,
                     device: str) -> MessagePassingNN:
    """Wire up an inference use case and wrap it in a MessagePassingNN facade."""
    # NOTE(review): path built by plain concatenation, unlike create_grid_search
    # which uses _get_data_path — confirm whether the trailing slash is required
    # before unifying the two.
    data_path = data_directory + dataset_name + "/"
    preprocessor = DataPreprocessor()
    inference = Inference(data_path,
                          preprocessor,
                          Loader(model),
                          Inferencer(preprocessor, device),
                          Saver(model_directory, results_directory))
    return MessagePassingNN(inference)
# --- Example #6 (scraped snippet separator; original marker "Пример #6", score 0) ---
 def setUp(self) -> None:
     """Prepare file-system fixtures and the collaborators shared by the tests."""
     self.features = BASE_GRAPH_NODE_FEATURES
     self.adjacency_matrix = BASE_GRAPH
     # Labels are the flattened adjacency matrix.
     self.labels = BASE_GRAPH.view(-1)
     self.dataset = 'training-test-data'
     self.tests_data_directory = 'tests/test_data/'
     self.data_path = self.tests_data_directory + self.dataset + "/"
     self.repository = FileSystemRepository(self.tests_data_directory, self.dataset)
     self.data_preprocessor = DataPreprocessor()
     self.data_preprocessor.enable_test_mode()
     self.model_trainer = Trainer(self.data_preprocessor, "cpu")
     self.saver = Saver('tests/model_checkpoints', 'tests/grid_search_results')
# --- Example #7 (scraped snippet separator; original marker "Пример #7", score 0) ---
    def test_start(self):
        """End-to-end check: Inference.start() in test mode, fed from
        file-system data, writes a pickled distance-map file into the
        results directory.
        """
        # Given
        dataset_size = 1
        # Minimal synthetic graph: to.ones(4, 2) node features, dense 4x4
        # adjacency, and a flattened 16-element label vector.
        features = to.ones(4, 2)
        adjacency_matrix = to.ones(4, 4)
        labels = to.ones(16)
        dataset = 'inference-test-data'
        tests_data_directory = 'tests/test_data/'
        # Checkpoint path; the directory name encodes the grid-search
        # configuration the checkpoint was produced with.
        tests_model_directory = "tests/test_data/model-checkpoints-test/configuration&id__model&" + \
                                "RNN__epochs&10__loss_function&MSE__optimizer&Adagrad__batch_size&" + \
                                "100__validation_split&0.2__test_split&0.1__time_steps&1__validation_period&" + \
                                "5/Epoch_5_model_state_dictionary.pth"
        tests_results_directory = 'tests/results_inference'
        device = "cpu"
        repository = FileSystemRepository(tests_data_directory, dataset)
        data_path = tests_data_directory + dataset + "/"
        data_preprocessor = DataPreprocessor()
        data_preprocessor.enable_test_mode()
        loader = Loader("RNN")
        inferencer = Inferencer(data_preprocessor, device)
        saver = Saver(tests_model_directory, tests_results_directory)
        inference = Inference(data_path,
                              data_preprocessor,
                              loader,
                              inferencer,
                              saver,
                              test_mode=True)

        # Persist the synthetic graph so Inference can read it back;
        # the returned filename lists are needed for cleanup below.
        adjacency_matrix_filenames, features_filenames, labels_filenames = self._save_test_data(
            adjacency_matrix, dataset_size, features, labels, repository)

        # When
        inference.start()

        # Then
        # Output filename is timestamped to the minute, so recompute the same
        # stamp here; NOTE(review): flaky if the minute rolls over between
        # start() and this line.
        filename_expected = datetime.now().strftime(
            "%d-%b-%YT%H_%M") + "_distance_maps.pickle"
        self.assertTrue(
            os.path.isfile(tests_results_directory + "/" + filename_expected))

        # Tear down
        self._remove_files(dataset_size, features_filenames,
                           adjacency_matrix_filenames, labels_filenames,
                           tests_data_directory, dataset,
                           tests_results_directory)
# --- Example #8 (scraped snippet separator; original marker "Пример #8", score 0) ---
 def setUp(self) -> None:
     """Prepare graph fixtures and Postgres-backed collaborators for the tests."""
     self.features = BASE_GRAPH_NODE_FEATURES
     self.adjacency_matrix = BASE_GRAPH
     # Labels are the flattened adjacency matrix.
     self.labels = BASE_GRAPH.view(-1)
     self.dataset_name = 'training-test-data'
     self.tests_data_directory = os.path.join('tests', 'test_data')
     self.data_path = os.path.join("./", self.tests_data_directory, self.dataset_name)
     self.repository = FileSystemRepository(self.tests_data_directory, self.dataset_name)
     self.data_preprocessor = DataPreprocessor()
     self.postgres_connector = PostgresConnector()
     self.model_trainer = Trainer(self.data_preprocessor, "cpu", self.postgres_connector)
     self.saver = Saver(os.path.join('tests', 'model_checkpoints'),
                        os.path.join('tests', 'grid_search_results'))