def test_kron(backend, dtype, pivots):
    """ Checks that Tensor.kron() works.
  """
    pivotA, pivotB = pivots
    shapeA = (2, 3, 4)
    shapeB = (1, 2)
    shapeC = (2, 3, 4, 1, 2)
    A, _ = testing_utils.safe_randn(shapeA, backend, dtype)
    if A is not None:
        B, _ = testing_utils.safe_randn(shapeB, backend, dtype)
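        # Exercise every combination of default (None) and explicit pivot axes.
        # The reference below is the Kronecker product of the two matricized
        # factors, written as an einsum and reshaped to the joint shape.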
        if pivotA is None and pivotB is None:
            C = tensornetwork.kron(A, B)
            matrixA = tensornetwork.pivot(A, pivot_axis=-1)
            matrixB = tensornetwork.pivot(B, pivot_axis=-1)
        elif pivotA is None:
            C = tensornetwork.kron(A, B, pivot_axisB=pivotB)
            matrixA = tensornetwork.pivot(A, pivot_axis=-1)
            matrixB = tensornetwork.pivot(B, pivot_axis=pivotB)
        elif pivotB is None:
            C = tensornetwork.kron(A, B, pivot_axisA=pivotA)
            matrixA = tensornetwork.pivot(A, pivot_axis=pivotA)
            matrixB = tensornetwork.pivot(B, pivot_axis=-1)
        else:
            C = tensornetwork.kron(A,
                                   B,
                                   pivot_axisA=pivotA,
                                   pivot_axisB=pivotB)
            matrixA = tensornetwork.pivot(A, pivot_axis=pivotA)
            matrixB = tensornetwork.pivot(B, pivot_axis=pivotB)

        Ctest = C.backend.einsum("ij,kl->ikjl", matrixA.array, matrixB.array)
        Ctest = C.backend.reshape(Ctest, shapeC)
        np.testing.assert_allclose(C.array, Ctest)
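
The examples in this listing rely on testing_utils.safe_randn (and safe_zeros), which are not shown here. The sketch below is a hypothetical reading of the contract the tests assume, not the library's actual helper: a (Tensor, raw array) pair comes back, or (None, None) when the chosen backend cannot handle the requested dtype, which is why each test guards on A is not None. The _safe_randn_sketch name and the tensornetwork.Tensor(array, backend=...) constructor call are assumptions for illustration.

# Hypothetical sketch only; assumes the same np / tensornetwork imports as the
# surrounding tests.
def _safe_randn_sketch(shape, backend, dtype):
  init = np.random.randn(*shape).astype(dtype)
  try:
    tensor = tensornetwork.Tensor(init, backend=backend)
  except (TypeError, ValueError):
    # Treat an unsupported dtype/backend combination as "skip this case".
    return None, None
  return tensor, init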
Example #2
def test_tensor_matmul(backend, dtype):
  """ Checks that Tensor@Tensor works.
  """
  shape = (3, 3)
  A, initA = testing_utils.safe_randn(shape, backend, dtype)
  B, initB = testing_utils.safe_randn(shape, backend, dtype)
  if A is not None and B is not None:
    testA = A.backend.convert_to_tensor(initA)
    testB = B.backend.convert_to_tensor(initB)
    result = A @ B
    result2 = A.backend.matmul(testA, testB)
    np.testing.assert_allclose(result.array, result2)
Example #3
def test_tensor_subtraction(backend, dtype):
  """ Checks that Tensor-Tensor works.
  """
  shape = (2, 3, 1)
  A, initA = testing_utils.safe_randn(shape, backend, dtype)
  B, initB = testing_utils.safe_randn(shape, backend, dtype)
  if A is not None and B is not None:
    testA = A.backend.convert_to_tensor(initA)
    testB = B.backend.convert_to_tensor(initB)
    result = A - B
    result2 = A.backend.subtraction(testA, testB)
    np.testing.assert_allclose(result.array, result2)
Example #4
def test_ncon_builder(backend):
  a, _ = testing_utils.safe_randn((2, 2, 2), backend, np.float32)
  b, _ = testing_utils.safe_randn((2, 2, 2), backend, np.float32)
  c, _ = testing_utils.safe_randn((2, 2, 2), backend, np.float32)
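  # Calling a Tensor with ncon-style axis labels returns a builder that records
  # (tensor, axes) pairs; chaining builders with @ accumulates a contraction.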
  tmp = a(2, 1, -1)
  assert tmp.tensors[0] is a
  assert tmp.axes[0] == [2, 1, -1]
  builder = a(2, 1, -1) @ b(2, 3, -2) @ c(1, 3, -3)
  assert builder.tensors == [a, b, c]
  assert builder.axes == [[2, 1, -1], [2, 3, -2], [1, 3, -3]]
  np.testing.assert_allclose(
      ncon_interface.ncon(
          [a, b, c], 
          [[2, 1, -1], [2, 3, -2], [1, 3, -3]], 
          backend=backend).array,
      ncon_interface.finalize(builder).array)
Example #5
def test_tensor_ops_raise(dtype):
  """ Checks that tensor operators raise the right error.
  """
  shape = (2, 3, 1)
  A, _ = testing_utils.safe_randn(shape, "numpy", dtype)
  B, _ = testing_utils.safe_randn(shape, "jax", dtype)
  with pytest.raises(ValueError):
    _ = A * B
  with pytest.raises(ValueError):
    _ = A + B
  with pytest.raises(ValueError):
    _ = A - B
  with pytest.raises(ValueError):
    _ = A / B
  with pytest.raises(ValueError):
    _ = A @ B
Example #6
def test_tensor_flatten(backend, dtype):
  """ Checks that Tensor.flatten() works.
  """
  shape = (2, 3, 1)
  A, init = testing_utils.safe_randn(shape, backend, dtype)
  if A is not None:
    np.testing.assert_allclose(A.flatten().array, init.flatten())
Example #7
def test_tensor_conjugate(backend, dtype):
  """ Checks that Tensor.conjugate() works.
  """
  shape = (2, 3, 1)
  A, init = testing_utils.safe_randn(shape, backend, dtype)
  if A is not None:
    np.testing.assert_allclose(A.conjugate().array, A.backend.conj(init))
Example #8
def test_trace(backend, dtype):
    """ Checks that Tensor.trace() works.
  """
    shape = (2, 3, 3)
    A, _ = testing_utils.safe_randn(shape, backend, dtype)
    if A is not None:
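        # The backend trace is taken over the last two (matching) axes.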
        np.testing.assert_allclose(
            tensornetwork.trace(A).array, A.backend.trace(A.array))
Example #9
def test_tensor_reshape(backend, dtype):
  """ Checks that Tensor.copy() works.
  """
  shape = (2, 3, 1)
  newshape = (6, 1)
  A, init = testing_utils.safe_randn(shape, backend, dtype)
  if A is not None:
    np.testing.assert_allclose(A.reshape(newshape).array,
                               init.reshape(newshape))
Example #10
def test_tensor_hconj(backend, dtype):
  """ Checks that Tensor.hconj() works.
  """
  shape = (2, 3, 1)
  permutation = (1, 2, 0)
  A, init = testing_utils.safe_randn(shape, backend, dtype)
  if A is not None:
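    # hconj should match an explicit conjugate followed by a transpose with
    # the given permutation.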
    test = A.backend.convert_to_tensor(init)
    test = A.backend.transpose(A.backend.conj(test), perm=permutation)
    np.testing.assert_allclose(A.hconj(perm=permutation).array, test)
Example #11
def test_tensor_scalar_rsubtraction(backend, dtype):
  """ Checks that scalar-Tensor works.
  """
  shape = (2, 3, 1)
  A, initA = testing_utils.safe_randn(shape, backend, dtype)
  B = 2.
  if A is not None:
    testA = A.backend.convert_to_tensor(initA)
    result = B - A
    result2 = A.backend.subtraction(B, testA)
    np.testing.assert_allclose(result.array, result2)
Example #12
def test_tensor_scalar_addition(backend, dtype):
  """ Checks that Tensor+scalar works.
  """
  shape = (2, 3, 1)
  A, initA = testing_utils.safe_randn(shape, backend, dtype)
  B = 2.
  if A is not None:
    testA = A.backend.convert_to_tensor(initA)
    result = A + B
    result2 = A.backend.addition(testA, B)
    np.testing.assert_allclose(result.array, result2)
Example #13
def test_tensor_divide(backend, dtype):
  """ Checks that Tensor/Tensor works.
  """
  shape = (2, 3, 1)
  A, initA = testing_utils.safe_randn(shape, backend, dtype)
  B, _ = testing_utils.safe_zeros(shape, backend, dtype)
  if A is not None:
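    # safe_zeros gives an all-zero tensor; shift it to all ones so the
    # division below is well defined.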
    B = B + 1
    testA = A.backend.convert_to_tensor(initA)
    result = A / B
    result2 = A.backend.divide(testA, B.array)
    np.testing.assert_allclose(result.array, result2)
Example #14
def test_pivot(backend, dtype, pivotA):
    """ Checks that Tensor.pivot() works.
  """
    shapeA = (2, 3, 4, 2)
    A, _ = testing_utils.safe_randn(shapeA, backend, dtype)
    if A is not None:
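        # pivot reshapes the tensor into a matrix by grouping its axes on
        # either side of pivot_axis; the default split is at the last axis.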
        if pivotA is None:
            matrixA = tensornetwork.pivot(A)
            tA = A.backend.pivot(A.array, pivot_axis=-1)
        else:
            matrixA = tensornetwork.pivot(A, pivot_axis=pivotA)
            tA = A.backend.pivot(A.array, pivot_axis=pivotA)
        np.testing.assert_allclose(matrixA.array, tA)
Example #15
def test_transpose_vs_backend(backend, dtype):
    """
    Tests that transpose yields the same result as the backend equivalent.
    """
    shape = (3, 2, 4)
    permutation = (1, 2, 0)
    tensor, array = testing_utils.safe_randn(shape, backend, dtype)

    if tensor is not None:
        backend_obj = backends.backend_factory.get_backend(backend)
        test = backend_obj.convert_to_tensor(array)
        test = backend_obj.transpose(test, perm=permutation)
        tensor_test = tensornetwork.transpose(tensor, perm=permutation)
        np.testing.assert_allclose(test, tensor_test.array)