def test_calculate_radius(self):
    map_dim = MapDimension(self.MAP_ROWS, self.MAP_COLS)
    total_iter_cnt = torch.tensor(1000.0)
    radius = Radius(map_dim, total_iter_cnt)

    iter_cnt = 100
    r = [radius.radius(i) for i in range(iter_cnt)]

    self.assertEqual(len(r), iter_cnt)
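# Illustration only (not the library's implementation): the test above checks
# just the number of radius values, not the decay itself. A common SOM
# schedule shrinks the neighbourhood radius exponentially; the helper below is
# a hedged sketch of that formula with made-up names (decayed_radius,
# time_constant), assuming torch is available as in the tests.
import torch

def decayed_radius(initial_radius: torch.Tensor, iteration: int,
                   time_constant: torch.Tensor) -> torch.Tensor:
    # sigma(t) = sigma_0 * exp(-t / lambda)
    return initial_radius * torch.exp(-iteration / time_constant)

# Example: a radius of 15 roughly halves after 700 of 1000 iterations.
# decayed_radius(torch.tensor(15.0), 700, torch.tensor(1000.0)) -> ~7.45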
def test_provide_neighbourhood_radius(self):
    map_dim = MapDimension(10, 15, 3)
    total_iter_cnt = torch.tensor(100, dtype=torch.float64)

    radius = SomContainer.get_neighbourhood_radius(map_dim, total_iter_cnt)

    self.assertIsInstance(radius, Radius)
@classmethod
def setUpClass(cls):
    cls.__map_dim = MapDimension(cls.MAP_DIM, cls.MAP_DIM, cls.FEATS_CNT)
    cls.__map_indices = MapIndicesBuilder().build(cls.__map_dim)
    cls.__bmu = Bmu(EuclideanDistance(), cls.__map_indices)
    cls.__total_iter_cnt = torch.tensor(100, dtype=torch.float64)
    cls.__learning_rate = LearningRate(0.01, cls.__total_iter_cnt)
    cls.__radius = Radius(cls.__map_dim, cls.__total_iter_cnt)
    cls.__weights_updater = WeightsUpdater(cls.__map_indices)
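# Illustration only: WeightsUpdater's exact rule is not visible in these
# tests. The textbook SOM update pulls every unit toward the sample, weighted
# by a Gaussian of its grid distance to the BMU; the sketch below shows that
# formula with made-up names and assumes torch is imported as above.
import torch

def som_update(weights, sample, bmu_coords, learning_rate, radius, grid):
    # grid: (rows, cols, 2) map coordinates; weights: (rows, cols, features)
    sq_grid_dist = torch.sum(
        (grid.float() - torch.tensor(bmu_coords).float()) ** 2, dim=-1)
    influence = torch.exp(-sq_grid_dist / (2.0 * radius ** 2))  # (rows, cols)
    return weights + learning_rate * influence.unsqueeze(-1) * (sample - weights)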
def test_get_som(self):
    map_dim = MapDimension(10, 10, 3)
    board_dir = os.path.join(os.path.dirname(__file__), "board")
    train_params = TrainParams(50, 1, 10, 10, board_dir)

    som = SomContainer.get_som(
        map_dim, 0.001, 100.0, (-1, 1), board_dir, train_params)

    self.assertIsInstance(som, Som)
def test_build_indices(self):
    map_dim = MapDimension(5, 10, 2)
    map_indices = MapIndicesBuilder()

    indices = map_indices.build(map_dim)

    self.assertIsInstance(indices, torch.Tensor)
    self.assertEqual(indices.shape,
                     (map_dim.rows, map_dim.cols, map_dim.features))
    # Each cell of the grid should hold its own (row, col) coordinates.
    for row_num in range(map_dim.rows):
        for col_num in range(map_dim.cols):
            expected_value = np.asarray([row_num, col_num])
            np.testing.assert_array_equal(
                indices[row_num, col_num].numpy(), expected_value)
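# Illustration only: the layout asserted above (a rows x cols x 2 grid where
# cell [r, c] holds the coordinates [r, c]) can be produced with plain torch,
# e.g. as below. This is a hedged sketch, not MapIndicesBuilder's actual code;
# it assumes a PyTorch version that supports torch.meshgrid(..., indexing="ij").
import torch

def build_index_grid(rows: int, cols: int) -> torch.Tensor:
    # Pair every row coordinate with every column coordinate.
    row_idx, col_idx = torch.meshgrid(torch.arange(rows), torch.arange(cols),
                                      indexing="ij")
    return torch.stack((row_idx, col_idx), dim=-1)

# build_index_grid(5, 10).shape -> torch.Size([5, 10, 2])
# build_index_grid(5, 10)[2, 7] -> tensor([2, 7])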
@classmethod
def setUpClass(cls):
    cls.__logger = cls.__setup_logger()
    cls.__corpus_file = cls.__setup_file(cls.FEATS_CNT, 0, cls.ROWS_CNT)
    cls.__map_dim = MapDimension(cls.MAP_SIZE, cls.MAP_SIZE, cls.FEATS_CNT)
    cls.__map_indices = MapIndicesBuilder().build(cls.__map_dim)
    cls.__bmu = Bmu(EuclideanDistance(), cls.__map_indices)
    cls.__dataset = CsvDatasetFactory([cls.__corpus_file])
    cls.__total_iter_cnt = torch.tensor(cls.ROWS_CNT / cls.BATCH_SIZE,
                                        dtype=torch.float64)
    cls.__learning_rate = LearningRate(cls.LEARNING_RATE, cls.__total_iter_cnt)
    cls.__summary_writer = SomSummaryWriter(cls.EVAL_ROOT_DIR)
    cls.__radius = Radius(cls.__map_dim, cls.__total_iter_cnt)
    cls.__weights_updater = WeightsUpdater(cls.__map_indices)
@classmethod
def setUpClass(cls):
    np.set_printoptions(threshold=np.inf)
    cls.__corpus_files = cls.__setup_corpus_files()
    cls.__logger = cls.__setup_logger()
    cls.__map_dim = MapDimension(cls.MAP_SIZE, cls.MAP_SIZE, 3)
    cls.__map_indices = MapIndicesBuilder().build(cls.__map_dim)
    cls.__bmu = Bmu(EuclideanDistance(), cls.__map_indices)
    cls.__dataset = CsvDatasetFactory(cls.__corpus_files, False)
    cls.__total_iter_cnt = torch.tensor(
        cls.SAMPLES_IN_FILE * len(cls.__corpus_files) / cls.BATCH_SIZE,
        dtype=torch.float64)
    cls.__learning_rate = LearningRate(cls.LEARNING_RATE, cls.__total_iter_cnt)
    cls.__radius = Radius(cls.__map_dim, cls.__total_iter_cnt)
    cls.__weights_updater = WeightsUpdater(cls.__map_indices)
    cls.__weights_plot = WeightsPlot()
    cls.__summary_writer = SomSummaryWriter(cls.EVAL_ROOT_DIR)
    cls.__weights_sver = WeightsSaver(3)
def test_get_som_model(self):
    dist = EuclideanDistance()
    map_indices = torch.tensor(np.asarray((10, 10, 2)))
    bmu = Bmu(dist, map_indices)
    total_iter_cnt = torch.tensor(100, dtype=torch.float64)
    learning_rate = LearningRate(0.001, total_iter_cnt)
    map_dim = MapDimension(10, 10, 3)
    radius = Radius(map_dim, total_iter_cnt)
    weights_updater = WeightsUpdater(map_indices)
    w_range = (-1.0, 1.0)

    som_model = SomContainer.get_som_model(
        bmu, learning_rate, map_dim, map_indices, radius,
        weights_updater, w_range)

    self.assertIsInstance(som_model, SomModel)
@classmethod
def setUpClass(cls):
    cls.__w_tensor = torch.tensor(cls.W)
    cls.__v_tensor = torch.tensor(cls.V)
    w_shape = cls.W.shape
    cls.__bmu = Bmu(EuclideanDistance(), MapDimension(w_shape[0], w_shape[1]))
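# Illustration only: Bmu itself is not defined in these tests. Conceptually,
# the best matching unit of a SOM is the map cell whose weight vector lies
# closest to the input sample under Euclidean distance. The sketch below
# shows that idea with made-up names; it is not the library's Bmu class.
import torch

def best_matching_unit(weights: torch.Tensor, sample: torch.Tensor):
    # weights: (rows, cols, features); sample: (features,)
    sq_dists = torch.sum((weights - sample) ** 2, dim=-1)
    flat_idx = int(torch.argmin(sq_dists))
    return divmod(flat_idx, sq_dists.shape[1])  # -> (row, col) of the BMU

# best_matching_unit(torch.rand(4, 6, 3), torch.rand(3)) -> e.g. (2, 5)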