def compile(self, code):
    # Note, we do not add carray or particle_array as nnps_base would
    # have been rebuilt anyway if they changed.
    depends = ["pysph.base.nnps_base"]
    self._ext_mod = ExtModule(code, verbose=True, depends=depends)
    self._module = self._ext_mod.load()
    return self._module
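For context, a minimal sketch of how an ExtModule built this way is typically driven; the source string and the printed result are assumptions modelled on the test snippets further below, not part of the original example:

from pysph.base.ext_module import ExtModule

code = "def f():\n    return 'hello world'"   # hypothetical source string
ext = ExtModule(code, verbose=True, depends=["pysph.base.nnps_base"])
mod = ext.load()   # builds the extension on first use, then imports it
print(mod.f())     # -> 'hello world'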
Example #2
def _check_compile(root):
    with mock.patch('shutil.copy') as m:
        s = ExtModule("print('hello')", root=root)
        s.build()
    if m.called:
        # If it was called, do the copy to mimic the action.
        shutil.copy(*m.call_args[0])
    return m.call_count
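A hedged sketch of how _check_compile might be exercised in a test; the assertion about caching is an inference from the snippets here, not the original test:

def test_compile_reuses_existing_extension(self):
    # The first build should copy the freshly compiled extension into root ...
    first = _check_compile(self.root)
    # ... and a second build of identical source should find it already there.
    second = _check_compile(self.root)
    self.assertTrue(second <= first)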
Example #3
    def test_rebuild_when_dependencies_change(self):
        # Given.
        data = self.data
        depends = ["test_rebuild"]
        s = ExtModule(data, root=self.root, depends=depends)
        fname = self._create_dummy_module()
        f_stat = os.stat(fname)

        with self._add_root_to_sys_path():
            # When
            self.assertTrue(s.should_recompile())
            s.build()

            # Then.
            self.assertFalse(s.should_recompile())

            # Now let's re-create the module and try again.

            # When.
            fname = self._create_dummy_module()
            # Update the timestamp to make it newer, otherwise we need to
            # sleep.
            os.utime(fname, (f_stat.st_atime, f_stat.st_mtime + 10))

            # Then.
            self.assertTrue(s.should_recompile())
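The idea the test exercises, as a hedged sketch: a module listed in depends forces recompilation whenever its source is newer than the built extension. The helper name and the bare mtime comparison below are assumptions, not PySPH's actual implementation:

import os

def _dependency_is_newer(dep_src_path, ext_path):
    # Rebuild when the dependency's source was modified after the extension
    # was last built; this mirrors what should_recompile() appears to check
    # in the test above.
    return os.stat(dep_src_path).st_mtime > os.stat(ext_path).st_mtime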
Example #4
    def compile(self):
        """Compile the generated code to an extension module and
        setup the objects that need this by calling their setup_compiled_module.
        """
        if self.ext_mod is not None:
            return
        code = self._get_code()
        # Note, we do not add carray or particle_array as nnps_base would have
        # been rebuilt anyway if they changed.
        depends = ["pysph.base.nnps_base"]
        self.ext_mod = ExtModule(code, verbose=True, depends=depends)
        mod = self.ext_mod.load()
        self.module = mod

        self.acceleration_eval_helper.setup_compiled_module(mod)
        cython_a_eval = self.acceleration_eval.c_acceleration_eval
        if self.integrator is not None:
            self.integrator_helper.setup_compiled_module(mod, cython_a_eval)
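A minimal hedged sketch of driving this compile step; a_eval and integrator stand in for objects produced by the usual PySPH setup and are assumptions here:

compiler = SPHCompiler(a_eval, integrator)
compiler.compile()   # generates the Cython source, builds and loads the
                     # extension, then wires the compiled objects into the helpers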
Example #5
def test_default_root(self):
    try:
        data = self.data
        s = ExtModule(data)
        self.assertTrue(exists(join(s.root, 'build')))
        self.assertEqual(s.hash, get_md5(data))
        self.assertEqual(s.code, data)
        self.assertTrue(exists(s.src_path))
        self.assertEqual(data, open(s.src_path).read())
    finally:
        os.unlink(s.src_path)
Example #6
def _check_write_source(root):
    """Used to create an ExtModule and test if a file was opened.

    It returns the number of times "open" was called.
    """
    m = mock.mock_open()
    with mock.patch('pysph.base.ext_module.open', m, create=True):
        s = ExtModule("print('hello')", root=root)
    if m.called:
        with open(*m.call_args[0]) as fp:
            fp.write("junk")
    return m.call_count
Example #7
    def test_constructor(self):
        data = self.data
        s = ExtModule(data, root=self.root)
        self.assertTrue(exists(join(self.root, 'build')))

        self.assertEqual(s.hash, get_md5(data))
        self.assertEqual(s.code, data)
        expect_name = 'm_%s' % (s.hash)
        self.assertEqual(s.name, expect_name)
        self.assertEqual(s.src_path, join(self.root, expect_name + '.pyx'))
        self.assertEqual(s.ext_path,
                         join(self.root, expect_name + get_config_var('SO')))

        self.assertTrue(exists(s.src_path))
        self.assertEqual(data, open(s.src_path).read())
Example #8
def _check_write_source(root):
    """Used to create an ExtModule and test if a file was opened.

    It returns the number of times "open" was called.
    """
    m = mock.mock_open()
    orig_side_effect = m.side_effect

    def _side_effect(*args, **kw):
        with open(*args, **kw) as fp:
            fp.write("junk")
        return orig_side_effect(*args, **kw)

    m.side_effect = _side_effect

    with mock.patch('pysph.base.ext_module.open', m, create=True):
        ExtModule("print('hello')", root=root)
    return m.call_count
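Either variant of _check_write_source can be exercised the same way; a hedged sketch, assuming ExtModule writes its source exactly once when the file is missing:

def test_source_is_written_once(self):
    count = _check_write_source(self.root)
    self.assertEqual(count, 1)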
Example #9
class SPHCompiler(object):
    def __init__(self, acceleration_eval, integrator):
        self.acceleration_eval = acceleration_eval
        self.acceleration_eval_helper = AccelerationEvalCythonHelper(
            self.acceleration_eval)
        self.integrator = integrator
        self.integrator_helper = IntegratorCythonHelper(
            integrator, self.acceleration_eval_helper)
        self.ext_mod = None
        self.module = None

    #### Public interface. ####################################################
    def compile(self):
        """Compile the generated code to an extension module and
        setup the objects that need this by calling their setup_compiled_module.
        """
        if self.ext_mod is not None:
            return
        code = self._get_code()
        # Note, we do not add carray or particle_array as nnps_base would have
        # been rebuilt anyway if they changed.
        depends = ["pysph.base.nnps_base"]
        self.ext_mod = ExtModule(code, verbose=True, depends=depends)
        mod = self.ext_mod.load()
        self.module = mod

        self.acceleration_eval_helper.setup_compiled_module(mod)
        cython_a_eval = self.acceleration_eval.c_acceleration_eval
        if self.integrator is not None:
            self.integrator_helper.setup_compiled_module(mod, cython_a_eval)

    #### Private interface. ####################################################
    def _get_code(self):
        main = self.acceleration_eval_helper.get_code()
        integrator_code = self.integrator_helper.get_code()
        return main + integrator_code
class AccelerationEvalCythonHelper(object):
    def __init__(self, acceleration_eval):
        self.object = acceleration_eval
        self.config = get_config()
        self.all_array_names = get_all_array_names(self.object.particle_arrays)
        self.known_types = get_known_types_for_arrays(self.all_array_names)
        self._ext_mod = None
        self._module = None

    ##########################################################################
    # Public interface.
    ##########################################################################
    def get_code(self):
        path = join(dirname(__file__), 'acceleration_eval_cython.mako')
        template = Template(filename=path)
        main = template.render(helper=self)
        return main

    def setup_compiled_module(self, module):
        # Create the compiled module.
        object = self.object
        acceleration_eval = module.AccelerationEval(object.kernel,
                                                    object.all_group.equations,
                                                    object.particle_arrays)
        object.set_compiled_object(acceleration_eval)

    def compile(self, code):
        # Note, we do not add carray or particle_array as nnps_base would
        # have been rebuilt anyway if they changed.
        depends = ["pysph.base.nnps_base"]
        self._ext_mod = ExtModule(code, verbose=True, depends=depends)
        self._module = self._ext_mod.load()
        return self._module

    ##########################################################################
    # Mako interface.
    ##########################################################################
    def get_array_decl_for_wrapper(self):
        array_names = self.all_array_names
        decl = []
        for a_type in sorted(array_names.keys()):
            props = array_names[a_type]
            decl.append('cdef public {a_type} {attrs}'.format(
                a_type=a_type, attrs=', '.join(sorted(props))))
        return '\n'.join(decl)

    def get_header(self):
        object = self.object
        headers = []
        headers.extend(get_code(object.kernel))

        # get headers from the Equations
        for equation in object.all_group.equations:
            headers.extend(get_code(equation))

        # Kernel wrappers.
        cg = CythonGenerator(known_types=self.known_types)
        cg.parse(object.kernel)
        headers.append(cg.get_code())

        # Equation wrappers.
        self.known_types['KERNEL'] = KnownType(
            object.kernel.__class__.__name__)
        headers.append(object.all_group.get_equation_wrappers(
            self.known_types))

        return '\n'.join(headers)

    def get_equation_defs(self):
        return self.object.all_group.get_equation_defs()

    def get_equation_init(self):
        return self.object.all_group.get_equation_init()

    def get_kernel_defs(self):
        return 'cdef public %s kernel' % (
            self.object.kernel.__class__.__name__)

    def get_kernel_init(self):
        object = self.object
        return 'self.kernel = %s(**kernel.__dict__)' % (
            object.kernel.__class__.__name__)

    def get_variable_declarations(self):
        group = self.object.all_group
        ctx = group.context
        return group.get_variable_declarations(ctx)

    def get_array_declarations(self):
        group = self.object.all_group
        src, dest = group.get_array_names()
        src.update(dest)
        return group.get_array_declarations(src, self.known_types)

    def get_dest_array_setup(self, dest_name, eqs_with_no_source, sources,
                             real):
        src, dest_arrays = eqs_with_no_source.get_array_names()
        for g in sources.values():
            s, d = g.get_array_names()
            dest_arrays.update(d)
        lines = ['NP_DEST = self.%s.size(real=%s)' % (dest_name, real)]
        lines += ['%s = dst.%s.data' % (n, n[2:]) for n in sorted(dest_arrays)]
        return '\n'.join(lines)

    def get_src_array_setup(self, src_name, eq_group):
        src_arrays, dest = eq_group.get_array_names()
        lines = ['NP_SRC = self.%s.size()' % src_name]
        lines += ['%s = src.%s.data' % (n, n[2:]) for n in sorted(src_arrays)]
        return '\n'.join(lines)

    def get_parallel_block(self):
        if self.config.use_openmp:
            return "with nogil, parallel():"
        else:
            return "if True: # Placeholder used for OpenMP."

    def get_parallel_range(self, start, stop=None, step=1):
        if stop is None:
            stop = start
            start = 0

        args = "{start},{stop},{step}"
        if self.config.use_openmp:
            schedule = self.config.omp_schedule[0]
            chunksize = self.config.omp_schedule[1]

            if schedule is not None:
                args = args + ", schedule='{schedule}'"

            if chunksize is not None:
                args = args + ", chunksize={chunksize}"

            args = args.format(start=start,
                               stop=stop,
                               step=step,
                               schedule=schedule,
                               chunksize=chunksize)
            return "prange({})".format(args)

        else:
            args = args.format(start=start, stop=stop, step=step)
            return "range({})".format(args)

    def get_particle_array_names(self):
        parrays = [pa.name for pa in self.object.particle_arrays]
        return ', '.join(parrays)
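To make get_parallel_range concrete, a short hedged illustration of the strings it renders for the Mako template; setting the config attributes directly and the ('dynamic', 64) schedule are assumptions used only for this example:

helper.config.use_openmp = False
helper.get_parallel_range('NP_DEST')
# -> "range(0,NP_DEST,1)"

helper.config.use_openmp = True
helper.config.omp_schedule = ('dynamic', 64)
helper.get_parallel_range('NP_DEST')
# -> "prange(0,NP_DEST,1, schedule='dynamic', chunksize=64)"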
Example #11
def test_load_module(self):
    data = self.data
    s = ExtModule(data, root=self.root)
    mod = s.load()
    self.assertEqual(mod.f(), "hello world")
    self.assertTrue(exists(s.ext_path))
Example #13
        ll_neighbor_times_cell.append(time() - t1)

    data = dict(
        bs_update=bs_update_times,
        ll_update=ll_update_times,
        bs_nb=bs_neighbor_times,
        bs_cell_nb=bs_neighbor_times_cell,
        ll_nb=ll_neighbor_times,
        ll_cell_nb=ll_neighbor_times_cell
    )
    results = pd.DataFrame(data=data, index=np)
    return results
"""

_ext_mod = ExtModule(code)
mod = _ext_mod.load()
bench_nnps = mod.bench_nnps


def bench_random_distribution():
    arrays = []
    for numPoints in _numPoints:
        dx = numpy.power(1.0/numPoints, 1.0/3.0)
        xa = random.random(numPoints)
        ya = random.random(numPoints)
        za = random.random(numPoints)
        ha = numpy.ones_like(xa) * 2*dx
        gida = numpy.arange(numPoints).astype(numpy.uint32)

        # create the particle array