Example #1
import numpy as np
import os
import sys
import tempfile

_SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
sys.path.append(_SCRIPT_PATH)

from tools import mlir_pytaco_api as pt
from tools import testing_utils as utils

# Define the CSR format.
csr = pt.format([pt.dense, pt.compressed], [0, 1])
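# (Illustrative sketch, not part of the original script: the second argument to
# pt.format is the mode ordering, so swapping it should give a CSC-like format.
# The name `csc` below is hypothetical and unused.)
csc = pt.format([pt.dense, pt.compressed], [1, 0])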

# Read matrices A and B from file and size the output matrix C from their shapes.
A = pt.read(os.path.join(_SCRIPT_PATH, "data/A.mtx"), csr)
B = pt.read(os.path.join(_SCRIPT_PATH, "data/B.mtx"), csr)
C = pt.tensor([A.shape[0], B.shape[1]], csr)

# Define the kernel.
i, j, k = pt.get_index_vars(3)
C[i, j] = A[i, k] * B[k, j]
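# Note: the index variable k appears only on the right-hand side, so the
# expression is summed over k, giving the usual sparse matrix-matrix product.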

# Force evaluation of the kernel by writing out C.
with tempfile.TemporaryDirectory() as test_dir:
    golden_file = os.path.join(_SCRIPT_PATH, "data/gold_C.tns")
    out_file = os.path.join(test_dir, "C.tns")
    pt.write(out_file, C)
    #
    # CHECK: Compare result True
    #
    print(f"Compare result {utils.compare_sparse_tns(golden_file, out_file)}")
Example #2
import numpy as np
import os
import sys

_SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
sys.path.append(_SCRIPT_PATH)

from tools import mlir_pytaco_api as pt

# See http://tensor-compiler.org/docs/scientific_computing/index.html.

compressed = pt.compressed
dense = pt.dense

# Define formats for storing the sparse matrix and dense vectors.
csr = pt.format([dense, compressed])
dv = pt.format([dense])
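# (Assumption for illustration: omitting the ordering argument keeps the modes
# in their natural order, so this csr matches the explicit [0, 1] ordering used
# in Example #1.)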

# Load a sparse matrix (stored in the Matrix Market format) and store it
# as a CSR matrix.  The matrix in this test is a reduced version of the data
# downloaded from here:
# https://www.cise.ufl.edu/research/sparse/MM/Boeing/pwtk.tar.gz
# To run the program with the original matrix, download it and replace this
# path with the actual path to the downloaded file.
A = pt.read(os.path.join(_SCRIPT_PATH, "data/pwtk.mtx"), csr)

# These two lines have been modified from the original program to use static
# data to support result comparison.
x = pt.from_array(np.full((A.shape[1], ), 1, dtype=np.float64))
z = pt.from_array(np.full((A.shape[0], ), 2, dtype=np.float64))

# Declare the result to be a dense vector.
y = pt.tensor([A.shape[0]], dv)

# Declare index vars.
i, j = pt.get_index_vars(2)

# Define the SpMV computation.
y[i] = A[i, j] * x[j] + z[i]
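
# (Illustrative sketch, not part of the original script: as in Example #1,
# evaluation is lazy, so writing the result out forces the SpMV kernel to
# compile and run.  The output path "y.tns" is just a placeholder.)
pt.write("y.tns", y)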
Example #3
import numpy as np
import os
import sys

_SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
sys.path.append(_SCRIPT_PATH)

from tools import mlir_pytaco_api as pt

###### This PyTACO part is taken from the TACO open-source project. ######
# See http://tensor-compiler.org/docs/data_analytics/index.html.

compressed = pt.compressed
dense = pt.dense

# Define formats for storing the sparse tensor and dense matrices.
csf = pt.format([compressed, compressed, compressed])
rm = pt.format([dense, dense])

# Load a sparse three-dimensional tensor from file (stored in the FROSTT
# format) and store it as a compressed sparse fiber tensor. We use a small
# tensor for the purpose of testing. To run the program using the data from
# the real application, please download the data from:
# http://frostt.io/tensors/nell-2/
B = pt.read(os.path.join(_SCRIPT_PATH, "data/nell-2.tns"), csf)

# These two lines have been modified from the original program to use static
# data to support result comparison.
C = pt.from_array(np.full((B.shape[1], 25), 1, dtype=np.float64))
D = pt.from_array(np.full((B.shape[2], 25), 2, dtype=np.float64))

# Declare the result to be a dense matrix.
A = pt.tensor([B.shape[0], 25], rm)

# Declare index vars.
i, j, k, l = pt.get_index_vars(4)

# Define the MTTKRP computation.
A[i, j] = B[i, k, l] * D[l, j] * C[k, j]
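
# (Illustrative sketch, not part of the original script: as in Example #1,
# writing the result out forces the MTTKRP kernel to compile and run.  The
# output path "A.tns" is just a placeholder.)
pt.write("A.tns", A)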