def test_spmv():
    """Multiply a random CSR matrix by a random vector with the DaCe `spmv`
    program and validate the result against scipy's sparse dot product."""
    W.set(64)
    H.set(64)
    nnz.set(640)
    rows, cols, nonzeros = H.get(), W.get(), nnz.get()

    print('Sparse Matrix-Vector Multiplication %dx%d (%d non-zero elements)' %
          (cols, rows, nonzeros))

    # CSR storage plus the dense input/output vectors.
    A_row = dace.ndarray([H + 1], dtype=dace.uint32)
    A_col = dace.ndarray([nnz], dtype=dace.uint32)
    A_val = dace.ndarray([nnz], dtype=dace.float32)
    x = dace.ndarray([W], dace.float32)
    b = dace.ndarray([H], dace.float32)

    # Spread the nonzeros uniformly over the rows; the division remainder
    # is absorbed by the last row.
    per_row = nonzeros // rows
    last_row = per_row + (nonzeros % rows)
    if last_row > cols:
        print('Too many nonzeros per row')
        exit(1)

    # Row-pointer array: cumulative sum of the per-row nonzero counts.
    A_row[0] = dace.uint32(0)
    A_row[1:rows] = dace.uint32(per_row)
    A_row[-1] = dace.uint32(last_row)
    A_row = np.cumsum(A_row, dtype=np.uint32)

    # Sorted, distinct column indices for each row.
    for row in range(rows - 1):
        A_col[per_row * row:per_row * (row + 1)] = \
            np.sort(np.random.choice(cols, per_row, replace=False))
    # Last row may hold more entries than the others.
    A_col[per_row * (rows - 1):] = np.sort(
        np.random.choice(cols, last_row, replace=False))

    A_val[:] = np.random.rand(nonzeros).astype(dace.float32.type)

    #########################

    x[:] = np.random.rand(cols).astype(dace.float32.type)
    b[:] = dace.float32(0)

    # Setup regression: scipy view over the very same CSR buffers.
    A_sparse = scipy.sparse.csr_matrix((A_val, A_col, A_row),
                                       shape=(rows, cols))

    sdfg = spmv.to_sdfg()
    vectorize(sdfg, 'j')
    sdfg(A_row=A_row, A_col=A_col, A_val=A_val, x=x, b=b, H=H, W=W, nnz=nnz)

    if dace.Config.get_bool('profiling'):
        dace.timethis('spmv', 'scipy', 0, A_sparse.dot, x)

    diff = np.linalg.norm(A_sparse.dot(x) - b) / float(rows)
    print("Difference:", diff)
    print("==== Program end ====")
    assert diff <= 1e-5
def test_mandelbrot_fpga():
    """Build the Mandelbrot sample SDFG, apply the FPGA transformation,
    run it on a 64x64 grid, and return the transformed SDFG.

    Returns:
        The SDFG after ``FPGATransformSDFG`` has been applied and executed.
    """
    mandelbrot = import_sample(Path("simple") / "mandelbrot.py")
    h, w, max_iterations = 64, 64, 1000
    out = dace.ndarray([h, w], dtype=dace.uint16)
    # Fix: initialize with the array's own dtype (was dace.uint32(0), which
    # relied on an implicit narrowing conversion into the uint16 buffer).
    out[:] = dace.uint16(0)
    sdfg = mandelbrot.mandelbrot.to_sdfg()
    sdfg.apply_transformations(FPGATransformSDFG)
    sdfg(output=out, MAXITER=max_iterations, W=w, H=h)
    return sdfg
# Command-line interface: optional positional width, height and iteration cap.
parser = argparse.ArgumentParser()
for name, default in (("W", 64), ("H", 64), ("MAXITER", 1000)):
    parser.add_argument(name, type=int, nargs="?", default=default)
args = vars(parser.parse_args())

W.set(args["W"])
H.set(args["H"])
MAXITER.set(args["MAXITER"])

print('Mandelbrot %dx%d (iterations=%d)' % (W.get(), H.get(), MAXITER.get()))

# Zero-initialized output buffer for the iteration counts.
out = dace.ndarray([H, W], dtype=dace.uint16)
out[:] = dace.uint32(0)

# Run DaCe program
mandelbrot(out, MAXITER)

print('Result:')
printmatrix(out)

# Uncomment to output a PNG file
#import png
#with open('dacebrot.png', 'wb') as fp:
#    w = png.Writer(W.get(), H.get(), greyscale=True, bitdepth=8)
#    mn = np.min(out)
#    mx = np.max(out)
#    w.write(fp, 255.0 * (out - mn) / (mx - mn))
# CSR storage for the sparse matrix plus dense input/output vectors.
A_row = dace.ndarray([H + 1], dtype=dace.uint32)
A_col = dace.ndarray([nnz], dtype=dace.uint32)
A_val = dace.ndarray([nnz], dtype=dace.float32)
x = dace.ndarray([W], dace.float32)
b = dace.ndarray([H], dace.float32)

# Distribute the nonzeros evenly across rows; the division remainder is
# absorbed by the last row.
nnz_per_row = nnz.get() // H.get()
nnz_last_row = nnz_per_row + nnz.get() % H.get()
if nnz_last_row > W.get():
    print('Too many nonzeros per row')
    exit(1)

# RANDOMIZE SPARSE MATRIX
# Row pointers are the running total of the per-row nonzero counts.
A_row[0] = dace.uint32(0)
A_row[1:H.get()] = dace.uint32(nnz_per_row)
A_row[-1] = dace.uint32(nnz_last_row)
A_row = np.cumsum(A_row, dtype=np.uint32)

# Each row receives a sorted sample of distinct column indices.
for row in range(H.get() - 1):
    begin = nnz_per_row * row
    A_col[begin:begin + nnz_per_row] = np.sort(
        np.random.choice(W.get(), nnz_per_row, replace=False))
# The last row may carry the extra remainder entries.
A_col[nnz_per_row * (H.get() - 1):] = np.sort(
    np.random.choice(W.get(), nnz_last_row, replace=False))

A_val[:] = np.random.rand(nnz.get()).astype(dace.float32.type)

#########################
    # NOTE(review): this chunk opens inside an if/else whose condition is not
    # visible in this view; from the branch below it presumably tests
    # args["specialize"] — TODO confirm against the full file.
    histogram = make_sdfg(True)
    # Bake H, W and num_bins into the SDFG as compile-time constants.
    histogram.specialize(dict(H=H, W=W, num_bins=num_bins))
else:
    histogram = make_sdfg(False)
    # Only the bin count is fixed; H and W remain runtime symbols.
    histogram.specialize(dict(num_bins=num_bins))

H.set(args["H"])
W.set(args["W"])

print("Histogram {}x{} ({}specialized)".format(
    H.get(), W.get(), "" if args["specialize"] else "not "))

# Random input matrix and zero-initialized output histogram.
A = dace.ndarray([H, W], dtype=dtype)
hist = dace.ndarray([num_bins], dtype=dace.uint32)

A[:] = np.random.rand(H.get(), W.get()).astype(dace.float32.type)
hist[:] = dace.uint32(0)

# The specialized SDFG already knows H and W; the generic one takes them
# as runtime arguments.
if args["specialize"]:
    histogram(A=A, hist=hist)
else:
    histogram(A=A, H=H, W=W, hist=hist)

if dace.Config.get_bool('profiling'):
    dace.timethis('histogram', 'numpy', (H.get() * W.get()), np.histogram, A,
                  num_bins)

# Compare against numpy; the first and last bins are excluded on both sides,
# presumably because edge-bin semantics differ — TODO confirm.
diff = np.linalg.norm(
    np.histogram(A, bins=num_bins.get(), range=(0.0, 1.0))[0][1:-1] -
    hist[1:-1])
print("Difference:", diff)
# Setup inputs
A_row = np.empty([args.H + 1], dtype=np.uint32)
A_col = np.empty([args.nnz], dtype=np.uint32)
A_val = np.random.rand(args.nnz).astype(np.float32)
x = np.random.rand(args.W).astype(np.float32)

# Nonzeros are distributed evenly over the rows; whatever remains after the
# integer division is appended to the final row.
nnz_per_row = args.nnz // args.H
nnz_last_row = nnz_per_row + (args.nnz % args.H)
if nnz_last_row > args.W:
    print('Too many nonzeros per row')
    exit(1)

# Randomize sparse matrix structure
# The row-pointer array is the running total of per-row nonzero counts.
A_row[0] = dace.uint32(0)
A_row[1:args.H] = dace.uint32(nnz_per_row)
A_row[-1] = dace.uint32(nnz_last_row)
A_row = np.cumsum(A_row, dtype=np.uint32)

# Every row gets a sorted sample of distinct column indices.
for row in range(args.H - 1):
    begin = nnz_per_row * row
    A_col[begin:begin + nnz_per_row] = np.sort(
        np.random.choice(args.W, nnz_per_row, replace=False))
# The last row may hold the extra remainder entries.
A_col[nnz_per_row * (args.H - 1):] = np.sort(
    np.random.choice(args.W, nnz_last_row, replace=False))

#########################
# Run program