def test_forward_single_input(self, get_circuit, output_dim, n_qubits):
    """Check that forward() handles an unbatched input vector and produces an
    output tensor whose shape is simply (output_dim,)."""
    circuit, weight_shapes = get_circuit
    qlayer = TorchLayer(circuit, weight_shapes)

    single_input = torch.Tensor(np.ones(n_qubits))
    result = qlayer.forward(single_input)

    expected_shape = torch.Size((output_dim,))
    assert result.shape == expected_shape
def test_forward(self, get_circuit, output_dim, n_qubits):
    """Check that forward() handles a batched input (batch size 2) and
    produces an output tensor of shape (2, output_dim)."""
    circuit, weight_shapes = get_circuit
    qlayer = TorchLayer(circuit, weight_shapes)

    batched_input = torch.Tensor(np.ones((2, n_qubits)))
    result = qlayer.forward(batched_input)

    expected_shape = torch.Size((2, output_dim))
    assert result.shape == expected_shape
def test_forward_broadcasting(self, get_circuit, output_dim, middle_dim, batch_size, n_qubits):
    """Check that forward() broadcasts over a multi-dimensional batched input,
    producing an output of shape (batch_size, middle_dim, output_dim), and that
    gradients still flow back to every trainable weight."""
    circuit, weight_shapes = get_circuit
    qlayer = TorchLayer(circuit, weight_shapes)

    multi_dim_input = torch.Tensor(np.ones((batch_size, middle_dim, n_qubits)))
    trainable_weights = qlayer.qnode_weights.values()

    result = qlayer.forward(multi_dim_input)
    # Backpropagate a ones-cotangent to populate .grad on each weight.
    result.backward(torch.ones_like(result))

    weight_grads = [param.grad for param in trainable_weights]
    # Every weight must have received a gradient.
    assert weight_grads.count(None) == 0

    expected_shape = torch.Size((batch_size, middle_dim, output_dim))
    assert result.shape == expected_shape