def setUp(self) -> None:
    """Prepare the generic test case by instantiating the class under test."""
    # Seed all RNGs first so every test run is reproducible.
    set_random_seed(seed=42)
    # Work on a copy so the class-level kwargs are never mutated by the hook.
    instantiation_kwargs = dict(self.kwargs or {})
    instantiation_kwargs = self._pre_instantiation_hook(kwargs=instantiation_kwargs)
    self.instance = self.cls(**instantiation_kwargs)
    self.post_instantiation_hook()
def _pre_instantiation_hook(
    self, kwargs: MutableMapping[str, Any]
) -> MutableMapping[str, Any]:  # noqa: D102
    kwargs = super()._pre_instantiation_hook(kwargs=kwargs)
    # set_random_seed returns a tuple; element 1 is the torch generator.
    self.generator = set_random_seed(seed=self.seed)[1]
    # Use the Nations training factory both as fixture state and constructor input.
    self.triples_factory = Nations().training
    kwargs["triples_factory"] = self.triples_factory
    return kwargs
def test_tensor_sum(self):
    """Test tensor_sum."""
    generator = set_random_seed(seed=42)[1]
    for shapes in _generate_shapes(generator=generator):
        tensors = [torch.rand(*shape) for shape in shapes]
        # the fused sum must agree with plain sequential addition
        expected = sum(tensors)
        assert torch.allclose(tensor_sum(*tensors), expected)
def test_tensor_product(self):
    """Test tensor_product."""
    _, generator, _ = set_random_seed(seed=42)
    for shapes in _generate_shapes(generator=generator):
        tensors = [torch.rand(*shape) for shape in shapes]
        result = tensor_product(*tensors)
        # compare result to sequential multiplication
        # (previous comment said "addition" — a copy-paste slip from test_tensor_sum)
        expected = functools.reduce(operator.mul, tensors[1:], tensors[0])
        assert torch.allclose(result, expected)
def setUp(self) -> None:
    """Set up the test case with a triples factory and model."""
    _, self.generator, _ = set_random_seed(42)
    # Build the training factory from the Nations dataset.
    self.factory = Nations(create_inverse_triples=self.create_inverse_triples).training
    extra_kwargs = self.model_kwargs or {}
    self.model = self.model_cls(
        self.factory,
        embedding_dim=self.embedding_dim,
        **extra_kwargs,
    ).to_device_()
def test_broadcast_cat(self):
    """Test broadcast_cat."""
    _, generator, _ = set_random_seed(seed=42)
    for shapes in _generate_shapes(generator=generator):
        tensors = [torch.rand(*shape) for shape in shapes]
        for cat_dim in range(len(tensors[0].shape)):
            result = broadcast_cat(tensors, dim=cat_dim)
            # along cat_dim the sizes add up; on every other axis
            # broadcasting yields the maximum size
            expected_shape = tuple(
                sum(sizes) if axis == cat_dim else max(sizes)
                for axis, sizes in enumerate(zip(*shapes))
            )
            assert result.shape == expected_shape
def test_get_optimal_sequence(self):
    """Test ``get_optimal_sequence()``."""
    _, generator, _ = set_random_seed(seed=42)
    for shapes in _generate_shapes(generator=generator):
        opt_cost, opt_seq = get_optimal_sequence(*shapes)
        # the reported cost must match a direct estimate for that order
        assert opt_cost == estimate_cost_of_sequence(*(shapes[i] for i in opt_seq))
        # exhaustively verify no permutation is cheaper than the claimed optimum
        assert all(
            estimate_cost_of_sequence(*(shapes[i] for i in perm)) >= opt_cost
            for perm in itertools.permutations(range(len(shapes)))
        )
def test_estimate_cost_of_add_sequence(self):
    """Test ``estimate_cost_of_sequence()`` on addition sequences."""
    # NOTE(review): the docstring previously named ``estimate_cost_of_add_sequence()``,
    # which is not what the body calls — presumably a stale name after a rename.
    _, generator, _ = set_random_seed(seed=42)
    # create random arrays, estimate the cost of their addition, and measure some
    # execution times; then check that estimated cost and measured time correlate.
    data = []
    for shapes in _generate_shapes(generator=generator):
        arrays = [torch.empty(*shape) for shape in shapes]
        cost = estimate_cost_of_sequence(*(a.shape for a in arrays))
        n_samples, time = timeit.Timer(stmt="sum(arrays)", globals=dict(arrays=arrays)).autorange()
        consumption = time / n_samples
        data.append((cost, consumption))
    a = numpy.asarray(data)
    # check for strong correlation between estimated costs and measured execution time
    assert numpy.corrcoef(x=a[:, 0], y=a[:, 1])[0, 1] > 0.8
def test_get_optimal_sequence_caching(self):
    """Test caching of ``get_optimal_sequence()``."""
    _, generator, _ = set_random_seed(seed=42)
    for shapes in _generate_shapes(iterations=10, generator=generator):
        # time the first (uncached) call
        start = timeit.default_timer()
        get_optimal_sequence(*shapes)
        first_time = timeit.default_timer() - start
        # repeated calls should hit the cache and therefore run faster
        timer = timeit.Timer(
            stmt="get_optimal_sequence(*shapes)",
            globals=dict(
                get_optimal_sequence=get_optimal_sequence,
                shapes=shapes,
            ),
        )
        samples, total = timer.autorange()
        assert total / samples < first_time
def pre_setup_hook(self) -> None:
    """Instantiate a generator for usage in the test case."""
    # keep only the torch generator (middle element) from set_random_seed's return
    _, self.generator, _ = set_random_seed(seed=42)