def test_solve_callable(self, zero_vec, solve_mat, solve_vec):
    loopy.set_caching_enabled(False)
    k = loopy.make_kernel(
        ["{[i,j] : 0 <= i,j < 2}"],
        """
        x[:] = solve(A[:,:], b[:])
        """,
        [
            loopy.GlobalArg('x', dtype=np.float64, shape=(2, )),
            loopy.GlobalArg('A', dtype=np.float64, shape=(2, 2)),
            loopy.GlobalArg('b', dtype=np.float64, shape=(2, )),
        ],
        target=loopy.CTarget(),
        name="callable_kernel2",
        lang_version=(2018, 2))
    k = loopy.register_function_id_to_in_knl_callable_mapper(
        k, solve_fn_lookup)
    code = loopy.generate_code_v2(k).device_code()
    # str.replace returns a new string, so the result must be reassigned
    code = code.replace('void callable_kernel2', 'static void callable_kernel2')
    loopykernel = op2.Kernel(code, k.name, ldargs=["-llapack"])
    # x (zero_vec) is written by the kernel; A (solve_mat) and b (solve_vec) are read
    args = [zero_vec(op2.WRITE), solve_mat(op2.READ), solve_vec(op2.READ)]
    op2.par_loop(loopykernel, solve_mat.dataset.set, *args)
    expected = np.linalg.solve(solve_mat.data, solve_vec.data)
    assert np.allclose(expected, zero_vec.data)
def setUp(self):
    lp.set_caching_enabled(False)
    if not self.is_setup:
        utils.setup_logging()
        # load equations
        self.dirpath = os.path.dirname(os.path.realpath(__file__))
        gasname = os.path.join(self.dirpath, 'test.cti')
        # first check test config (this overrides the default test.cti above)
        gasname = get_mechanism_file()
        # load the gas
        gas = ct.Solution(gasname)
        # the mechanism
        elems, specs, reacs = read_mech_ct(gasname)
        # and finally check for a test platform
        platform = get_platform_file()
        try:
            if platform is None:
                platform = ''
                raise OSError
            platform = build_and_validate('test_platform_schema.yaml',
                                          platform)
        except (OSError, IOError):
            logger = logging.getLogger(__name__)
            logger.warning('Test platform file {} was not found, reverting '
                           'to default.'.format(platform))
            platform = None
        self.store = storage(platform, gas, specs, reacs)
        self.is_setup = True
def test_inverse_callable(self, zero_mat, inv_mat):
    loopy.set_caching_enabled(False)
    k = loopy.make_kernel(
        ["{[i,j] : 0 <= i,j < 2}"],
        """
        B[:,:] = inv(A[:,:])
        """,
        [
            loopy.GlobalArg('B', dtype=np.float64, shape=(2, 2)),
            loopy.GlobalArg('A', dtype=np.float64, shape=(2, 2))
        ],
        target=loopy.CTarget(),
        name="callable_kernel",
        lang_version=(2018, 2))
    k = loopy.register_function_id_to_in_knl_callable_mapper(
        k, inv_fn_lookup)
    code = loopy.generate_code_v2(k).device_code()
    # str.replace returns a new string, so reassign the result
    code = code.replace('void callable_kernel', 'static void callable_kernel')
    loopykernel = op2.Kernel(code, k.name, ldargs=["-llapack"])
    op2.par_loop(loopykernel, zero_mat.dataset.set,
                 zero_mat(op2.WRITE), inv_mat(op2.READ))
    expected = np.linalg.inv(inv_mat.data)
    assert np.allclose(expected, zero_mat.data)
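# The two callable tests above only check that the generated, LAPACK-backed
# kernels reproduce the NumPy reference results. A minimal sketch of that
# reference computation using nothing but NumPy (the dat/mat fixtures and the
# LAPACK plumbing are assumed to exist elsewhere in the test suite):
import numpy as np

A = np.array([[4.0, 1.0], [1.0, 3.0]])
b = np.array([1.0, 2.0])

x_expected = np.linalg.solve(A, b)   # what test_solve_callable compares against
B_expected = np.linalg.inv(A)        # what test_inverse_callable compares against

assert np.allclose(A @ x_expected, b)
assert np.allclose(A @ B_expected, np.eye(2))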
def main(args=None):
    lp.set_caching_enabled(False)
    utils.setup_logging()
    if args is None:
        # command line arguments
        parser = ArgumentParser(description='Tests pyJac versus an'
                                            ' autodifferentiated jacobian\n')
        parser.add_argument('-w', '--working_directory',
                            type=str,
                            default='performance',
                            help='Directory storing the mechanisms / data.')
        parser.add_argument('-t', '--test_matrix',
                            type=str,
                            help='The platforms / tests to run, as well as '
                                 'possible memory limits. For an example see '
                                 'the pyjac/examples/test_matrix.yaml included '
                                 'with pyJac.')
        parser.add_argument('-r', '--runtype',
                            choices=['jac', 'spec', 'both'],
                            default='both',
                            help='The type of validation test to run: '
                                 'Jacobian [jac], species rates [spec], or [both].')
        parser.add_argument('-p', '--prefix',
                            type=str,
                            default='',
                            help='A prefix to store the output of this test in '
                                 'for each mechanism in the working_directory. '
                                 'This can be a helpful tool on a cluster to '
                                 'run multiple tests at once on different platforms.')
        args = parser.parse_args()

    methods = []
    if args.runtype == 'jac':
        methods = [jacobian_tester]
    elif args.runtype == 'spec':
        methods = [species_rate_tester]
    else:
        methods = [species_rate_tester, jacobian_tester]

    for m in methods:
        m(args.working_directory, args.test_matrix, args.prefix)
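# main() also accepts a pre-built argument namespace, so the parser can be
# bypassed when the tester is driven from another script. A minimal sketch,
# assuming the default 'performance' working directory and a hypothetical
# test-matrix path:
from argparse import Namespace

example_args = Namespace(working_directory='performance',
                         test_matrix='test_matrix.yaml',  # hypothetical path
                         runtype='spec',
                         prefix='')
# main(example_args) would then run only the species-rate validation.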
def setUp(self):
    lp.set_caching_enabled(False)
    if not self.is_setup:
        utils.setup_logging()
        # first check test config
        gasname = get_mechanism_file()
        # load the gas
        gas = ct.Solution(gasname)
        # the mechanism
        elems, specs, reacs = read_mech_ct(gasname)
        # get sort type
        sorting = get_rxn_sorting()
        if sorting != reaction_sorting.none:
            # get ordering
            ordering = sort_reactions(reacs, sorting, return_order=True)
            # and apply
            reacs = sort_reactions(reacs, sorting)
            ct_reacs = gas.reactions()
            # and apply to gas
            gas = ct.Solution(thermo='IdealGas', kinetics='GasKinetics',
                              species=gas.species(),
                              reactions=[ct_reacs[i] for i in ordering])
            # and reassign
            utils.reassign_species_lists(reacs, specs)
        # and finally check for a test platform
        platform = get_platform_file()
        try:
            if platform is None:
                platform = ''
                raise OSError
            platform = build_and_validate('test_platform_schema.yaml',
                                          platform)
        except (OSError, IOError):
            logger = logging.getLogger(__name__)
            logger.warning('Test platform file {} was not found, reverting '
                           'to default.'.format(platform))
            platform = None
        self.store = storage(platform, gas, specs, reacs)
        self.is_setup = True
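# The reordering step above rebuilds the Cantera Solution with its reactions
# permuted. A minimal standalone sketch of that pattern, assuming a Cantera
# installation that ships the bundled GRI-Mech 3.0 mechanism and accepts the
# same thermo/kinetics names used above:
import cantera as ct

gas = ct.Solution('gri30.yaml')
reactions = gas.reactions()
order = list(reversed(range(len(reactions))))  # any permutation works here
reordered = ct.Solution(thermo='IdealGas', kinetics='GasKinetics',
                        species=gas.species(),
                        reactions=[reactions[i] for i in order])
assert reordered.n_reactions == gas.n_reactions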
from pystella.stencil import Stencil, StreamingStencil
from pystella.reduction import Reduction, FieldStatistics
from pystella.histogram import Histogrammer, FieldHistogrammer
from pystella.step import (RungeKutta4, RungeKutta3SSP, RungeKutta3Heun,
                           RungeKutta3Nystrom, RungeKutta3Ralston,
                           RungeKutta2Midpoint, RungeKutta2Ralston,
                           LowStorageRK54, LowStorageRK3Williamson,
                           LowStorageRK3Inhomogeneous, LowStorageRK3SSP)
from pystella.derivs import FiniteDifferencer
from pystella.decomp import DomainDecomposition
from pystella.expansion import Expansion
from pystella.fourier import (DFT, RayleighGenerator, Projector, PowerSpectra,
                              SpectralCollocator, SpectralPoissonSolver)

from loopy import set_caching_enabled
set_caching_enabled(True)

import logging
logger = logging.getLogger(__name__)


def choose_device_and_make_context(platform_choice=None, device_choice=None):
    """
    A wrapper to choose a device and create a :class:`pyopencl.Context` on
    a particular device.

    :arg platform_choice: An integer specifying which element of the
        :class:`list` returned by :func:`pyopencl.get_platforms` to choose.
        Defaults to *None*, in which case an NVIDIA platform is preferred;
        if none is found, the first available platform is chosen.
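# A minimal sketch of what such a helper typically does with pyopencl; the
# helper names below are hypothetical and the NVIDIA preference is only
# illustrative of the behaviour described in the docstring:
import pyopencl as cl


def _pick_platform(platform_choice=None):
    platforms = cl.get_platforms()
    if platform_choice is not None:
        return platforms[platform_choice]
    # prefer an NVIDIA platform, fall back to the first one found
    for plat in platforms:
        if 'NVIDIA' in plat.name:
            return plat
    return platforms[0]


def _make_context(platform_choice=None, device_choice=None):
    devices = _pick_platform(platform_choice).get_devices()
    device = devices[device_choice if device_choice is not None else 0]
    return cl.Context(devices=[device])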
""" Parallel simulation of 105 connectomes from a multiple schlerosis study. A lot of this is data munging for the moment. """ import os.path import numpy as np import loopy as lp from scipy import sparse lp.set_caching_enabled(False) from tvb_hpc import model, coupling, network, utils, compiler, scheme LOG = utils.getLogger('sep645') import glob # load data, cache as npz sep_npz_fname = 'data/sep.npz' if not os.path.exists(sep_npz_fname): LOG.info('reading from txt files..') W = np.array(utils.loadtxt_many('data/sep/*/*_N.txt')) L = np.array(utils.loadtxt_many('data/sep/*/*_dist.txt')) np.savez(sep_npz_fname, W=W, L=L) else: LOG.info('reading from npz') npz = np.load(sep_npz_fname) W = npz['W'] L = npz['L'] assert W.shape == L.shape
           'GlobalDataSet', 'MixedDataSet', 'Halo', 'Dat', 'MixedDat', 'Mat',
           'Global', 'Map', 'MixedMap', 'Sparsity', 'par_loop', 'ParLoop',
           'DatView']


def ParLoop(kernel, *args, **kwargs):
    if isinstance(kernel, types.FunctionType):
        return PyParLoop(kernel, *args, **kwargs)
    else:
        return SeqParLoop(kernel, *args, **kwargs)


_initialised = False

# turn off loopy caching because pyop2 kernels are cached already
loopy.set_caching_enabled(False)


def initialised():
    """Check whether PyOP2 has been initialised but not yet finalised."""
    return _initialised


@collective
def init(**kwargs):
    """Initialise PyOP2: select the backend and potentially other
    configuration options.

    :arg debug: The level of debugging output.
    :arg comm: The MPI communicator to use for parallel communication,
        defaults to `MPI_COMM_WORLD`
import loopy

__all__ = ['configuration', 'READ', 'WRITE', 'RW', 'INC', 'MIN', 'MAX',
           'ON_BOTTOM', 'ON_TOP', 'ON_INTERIOR_FACETS', 'ALL',
           'debug', 'info', 'warning', 'error', 'critical', 'initialised',
           'set_log_level', 'MPI', 'init', 'exit', 'Kernel', 'Set',
           'ExtrudedSet', 'MixedSet', 'Subset', 'DataSet', 'GlobalDataSet',
           'MixedDataSet', 'Halo', 'Dat', 'MixedDat', 'Mat', 'Global',
           'Map', 'MixedMap', 'Sparsity', 'par_loop', 'DatView']

_initialised = False

# turn off loopy caching because pyop2 kernels are cached already
loopy.set_caching_enabled(False)


def initialised():
    """Check whether PyOP2 has been initialised but not yet finalised."""
    return _initialised


@collective
def init(**kwargs):
    """Initialise PyOP2: select the backend and potentially other
    configuration options.

    :arg debug: The level of debugging output.
    :arg comm: The MPI communicator to use for parallel communication,
        defaults to `MPI_COMM_WORLD`
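# A sketch of the typical lifecycle around this module, using only names the
# snippet itself exports; any configuration keyword passed to init() here is
# an assumption rather than a documented default:
from pyop2 import op2

op2.init()       # select the backend / configuration before any par_loop
try:
    pass         # build Sets, Dats and Kernels, then launch par_loops here
finally:
    op2.exit()   # finalise PyOP2 once the computation is done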