def test_arrayexpr_convert_indexed_to_array_and_back_to_matrix():
    # Round-trip: indexed scalar element -> array expression -> matrix form.
    bilinear = a.T * b
    element = bilinear[0, 0]
    converted = convert_indexed_to_array(element)
    assert converted == ArrayElement(
        ArrayContraction(ArrayTensorProduct(a, b), (0, 2)), [0, 0])

    # Elementwise sum of two matrices.
    indexed = M[i, j] + N[i, j]
    arr, _ = _convert_indexed_to_array(indexed)
    assert convert_array_to_matrix(arr) == M + N

    # Transposition is recovered from swapped indices.
    indexed = M[i, j] + N[j, i]
    arr, _ = _convert_indexed_to_array(indexed)
    assert convert_array_to_matrix(arr) == M + N.T

    # Sum of two tensor products (no shared indices).
    indexed = M[i, j] * N[k, l] + N[i, j] * M[k, l]
    arr, _ = _convert_indexed_to_array(indexed)
    assert convert_array_to_matrix(arr) == ArrayAdd(
        ArrayTensorProduct(M, N),
        ArrayTensorProduct(N, M))

    # Element of a matrix product converts back to the product itself.
    indexed = (M * N * P)[i, j]
    arr, _ = _convert_indexed_to_array(indexed)
    assert convert_array_to_matrix(arr) == M * N * P

    # Explicit summation over the contracted index.
    indexed = Sum(M[i, j] * (N * P)[j, m], (j, 0, k - 1))
    arr, _ = _convert_indexed_to_array(indexed)
    assert convert_array_to_matrix(arr) == M * N * P

    # Expanded product of sums: four matrix-product terms come back out.
    indexed = Sum((P[j, m] + P[m, j]) * (M[i, j] * N[m, n] + N[i, j] * M[m, n]),
                  (j, 0, k - 1), (m, 0, k - 1))
    arr, _ = _convert_indexed_to_array(indexed)
    assert convert_array_to_matrix(arr) == \
        M * P * N + M * P.T * N + N * P * M + N * P.T * M
def test_arrayexpr_convert_array_element_to_array_expression():
    # Rank-1 symbols of symbolic length k.
    A = ArraySymbol("A", (k,))
    B = ArraySymbol("B", (k,))

    # A summed product of matching indices is a contraction.
    expr = Sum(A[i] * B[i], (i, 0, k - 1))
    result = convert_indexed_to_array(expr)
    assert result == ArrayContraction(ArrayTensorProduct(A, B), (0, 1))

    # Without the Sum, the repeated index denotes a diagonal.
    expr = A[i] * B[i]
    result = convert_indexed_to_array(expr)
    assert result == ArrayDiagonal(ArrayTensorProduct(A, B), (0, 1))

    # Distinct free indices give a tensor product; their order follows
    # the requested index ordering.
    expr = A[i] * B[j]
    result = convert_indexed_to_array(expr, [i, j])
    assert result == ArrayTensorProduct(A, B)
    result = convert_indexed_to_array(expr, [j, i])
    assert result == ArrayTensorProduct(B, A)
def test_arrayexpr_convert_indexed_to_array_broadcast():
    A = ArraySymbol("A", (3, 3))
    B = ArraySymbol("B", (3, 3))

    # Adding terms with disjoint free indices broadcasts each term with a
    # OneArray over the missing axes.
    expr = A[i, j] + B[k, l]
    ones33 = OneArray(3, 3)
    target = ArrayAdd(ArrayTensorProduct(A, ones33),
                      ArrayTensorProduct(ones33, B))
    assert convert_indexed_to_array(expr) == target
    assert convert_indexed_to_array(expr, [i, j, k, l]) == target
    # A different index ordering introduces axis permutations.
    assert convert_indexed_to_array(expr, [l, k, i, j]) == ArrayAdd(
        PermuteDims(ArrayTensorProduct(ones33, A), [1, 0, 2, 3]),
        PermuteDims(ArrayTensorProduct(B, ones33), [1, 0, 2, 3]))

    # One shared index: each term is padded by a rank-1 OneArray.
    expr = A[i, j] + B[j, k]
    ones3 = OneArray(3)
    assert convert_indexed_to_array(expr, [i, j, k]) == ArrayAdd(
        ArrayTensorProduct(A, ones3),
        ArrayTensorProduct(ones3, B))

    # Broadcasting also works with symbolic dimensions.
    C = ArraySymbol("C", (d0, d1))
    D = ArraySymbol("D", (d3, d1))
    expr = C[i, j] + D[k, j]
    assert convert_indexed_to_array(expr, [i, j, k]) == ArrayAdd(
        ArrayTensorProduct(C, OneArray(d3)),
        PermuteDims(ArrayTensorProduct(OneArray(d0), D), [0, 2, 1]))

    # Subtraction of the same symbol over different free indices.
    X = ArraySymbol("X", (5, 3))
    expr = X[i, n] - X[j, n]
    assert convert_indexed_to_array(expr, [i, j, n]) == ArrayAdd(
        ArrayTensorProduct(-1, OneArray(5), X),
        PermuteDims(ArrayTensorProduct(X, OneArray(5)), [0, 2, 1]))

    # Incompatible symbolic shapes cannot be broadcast together.
    raises(ValueError, lambda: convert_indexed_to_array(C[i, j] + D[i, j]))
def from_index_summation(expr, first_index=None, last_index=None, dimensions=None):
    r"""
    Parse expression of matrices with explicitly summed indices into a
    matrix expression without indices, if possible.

    This transformation expressed in mathematical notation:

    `\sum_{j=0}^{N-1} A_{i,j} B_{j,k} \Longrightarrow \mathbf{A}\cdot \mathbf{B}`

    Optional parameter ``first_index``: specify which free index to use as
    the index starting the expression.

    Examples
    ========

    >>> from sympy import MatrixSymbol, MatrixExpr, Sum
    >>> from sympy.abc import i, j, k, l, N
    >>> A = MatrixSymbol("A", N, N)
    >>> B = MatrixSymbol("B", N, N)
    >>> expr = Sum(A[i, j]*B[j, k], (j, 0, N-1))
    >>> MatrixExpr.from_index_summation(expr)
    A*B

    Transposition is detected:

    >>> expr = Sum(A[j, i]*B[j, k], (j, 0, N-1))
    >>> MatrixExpr.from_index_summation(expr)
    A.T*B

    Detect the trace:

    >>> expr = Sum(A[i, i], (i, 0, N-1))
    >>> MatrixExpr.from_index_summation(expr)
    Trace(A)

    More complicated expressions:

    >>> expr = Sum(A[i, j]*B[k, j]*A[l, k], (j, 0, N-1), (k, 0, N-1))
    >>> MatrixExpr.from_index_summation(expr)
    A*B.T*A.T
    """
    # Local imports avoid a circular dependency at module load time.
    from sympy.tensor.array.expressions.conv_indexed_to_array import convert_indexed_to_array
    from sympy.tensor.array.expressions.conv_array_to_matrix import convert_array_to_matrix

    # Collect the requested leading indices, skipping the ones not given.
    leading = [idx for idx in (first_index, last_index) if idx is not None]
    array_form = convert_indexed_to_array(expr, first_indices=leading)
    return convert_array_to_matrix(array_form)
def test_arrayexpr_convert_indexed_to_array_expression():
    # Explicit summation over a shared index becomes a contraction.
    summed = Sum(A[i] * B[i], (i, 0, 3))
    converted = convert_indexed_to_array(summed)
    assert converted == ArrayContraction(ArrayTensorProduct(A, B), (0, 1))

    # A single matrix product.
    product = M * N
    expected = ArrayContraction(ArrayTensorProduct(M, N), (1, 2))
    assert convert_indexed_to_array(product[i, j]) == expected

    # Repeated factors in a chained product.
    product = M * N * M
    expected = _array_contraction(_array_tensor_product(M, M, N), (1, 4), (2, 5))
    assert convert_indexed_to_array(product[i, j]) == expected

    # Three-factor chains, with and without a transpose.
    converted = convert_indexed_to_array((M * N * P)[i, j])
    assert converted == _array_contraction(
        ArrayTensorProduct(M, N, P), (1, 2), (3, 4))

    converted = convert_indexed_to_array((M * N.T * P)[i, j])
    assert converted == _array_contraction(
        ArrayTensorProduct(M, N, P), (1, 3), (2, 4))

    # A scalar coefficient rides along as an extra tensor factor.
    scaled = -2 * M * N
    converted = convert_indexed_to_array(scaled[i, j])
    assert converted == ArrayContraction(ArrayTensorProduct(-2, M, N), (1, 2))
def test_arrayexpr_convert_indexed_to_array_out_of_bounds():
    # Summation limits that disagree with the matrix dimension k must be
    # rejected: wrong upper bound (4 or k instead of k-1) or a lower bound
    # that does not start at 0.
    bad_sums = [
        Sum(M[i, i], (i, 0, 4)),
        Sum(M[i, i], (i, 0, k)),
        Sum(M[i, i], (i, 1, k - 1)),
        Sum(M[i, j] * N[j, m], (j, 0, 4)),
        Sum(M[i, j] * N[j, m], (j, 0, k)),
        Sum(M[i, j] * N[j, m], (j, 1, k - 1)),
    ]
    for bad in bad_sums:
        # Bind `bad` as a default to avoid late-binding in the lambda.
        raises(ValueError, lambda bad=bad: convert_indexed_to_array(bad))
def _arrayify(self, indexed):
    """Best-effort conversion of ``indexed`` to an array expression.

    Returns the converted array expression on success; if the conversion
    fails for any reason, returns ``indexed`` unchanged (deliberate
    fallback — callers treat this as optional rewriting).
    """
    # Local import avoids a circular dependency at module load time.
    from sympy.tensor.array.expressions.conv_indexed_to_array import convert_indexed_to_array
    try:
        converted = convert_indexed_to_array(indexed)
    except Exception:
        # Intentionally broad: any failure means "leave the input as-is".
        return indexed
    return converted