def cyimport(import_path):
    """Import a Cython module if available, otherwise return None (and skip
    any relevant tests)."""
    if not HAVE_CYTHON:
        return None
    import pyximport
    installers = pyximport.install()
    # fromlist must be non-empty so __import__ returns the leaf submodule
    # rather than the top-level package.
    module = __import__(import_path, fromlist=[True])
    pyximport.uninstall(*installers)
    return module
"""Utilities for reading real time clocks and keeping soft real time constraints.""" import os import time import platform import subprocess import multiprocessing from cffi import FFI # Build and load cython module import pyximport installer = pyximport.install(inplace=True, build_dir='/tmp') from common.clock import monotonic_time, sec_since_boot # pylint: disable=no-name-in-module, import-error pyximport.uninstall(*installer) assert monotonic_time assert sec_since_boot ffi = FFI() ffi.cdef("long syscall(long number, ...);") libc = ffi.dlopen(None) def set_realtime_priority(level): if os.getuid() != 0: print("not setting priority, not root") return if platform.machine() == "x86_64": NR_gettid = 186 elif platform.machine() == "aarch64": NR_gettid = 178 else: raise NotImplementedError
def unimport():
    """Undo the pyximport setup done at module import time.

    Removes the pyximport meta-path hooks (``importers`` is expected to be
    the tuple returned by a module-level ``pyximport.install()``) and drops
    the compiled test extension from the import cache so a subsequent import
    goes through a fresh compile/import cycle.
    """
    pyximport.uninstall(*importers)
    # pop() with a default so this is a no-op if the module was never loaded.
    sys.modules.pop('zmq.tests.cython_ext', None)
# enabling cython globally. # # 2. build_dir: Also this variable is set in install(). We use the # default value, that is a folder in the home-dir. # # 3. load_dynamic will load the module but not extend the globald # directory. We rely on the fact that the loading has been already # performed and call the import * explicitely # # Since the .so is compiled outside of the PYTHON_PATH, there is # no ambiguity when importing the parser: the only way to load the # cython version is by the so_path that targets .pyxbld . # import imp pyx = pyximport.install() pyximport.uninstall(*pyx) build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') path = os.path.join(os.path.dirname(__file__), "parser.py") name="pysmt.smtlib.parser.parser" so_path = pyximport.build_module(name, path, pyxbuild_dir=build_dir) mod = imp.load_dynamic(name, so_path) assert mod.__file__ == so_path, (mod.__file__, so_path) # print(so_path) from pysmt.smtlib.parser.parser import * # End of preamble #
def _get_Delaunay(ndim, periodic=False, parallel=False, bit64=False,
                  overwrite=False, comm=None):
    r"""Dynamically import module for nD Delaunay triangulation and return
    the associated class.

    Args:
        ndim (int): Dimensionality that module should have.
        periodic (bool, optional): If True, the periodic triangulation
            class is returned. Defaults to False.
        parallel (bool, optional): If True, the parallel triangulation
            class is returned. Defaults to False.
        bit64 (bool, optional): If True, the 64bit triangulation class is
            returned. Defaults to False.
        overwrite (bool, optional): If True, generated extension files are
            re-generated. Defaults to False.
        comm (`mpi4py.Comm`, optional): MPI communicator. If provided,
            import of the requested module will be attempted first on the
            root process, then imported on all processes. This prevents
            pyximport compilation being called multiple times and causing
            a race condition. If not provided, import is done on all
            processes.

    Returns:
        class: Delaunay triangulation class.

    """
    rank = 0
    if comm is not None:
        rank = comm.Get_rank()
    modname = _delaunay_filename('module', ndim, periodic=periodic,
                                 parallel=parallel, bit64=bit64)
    clsname = _delaunay_filename('pyclass', ndim, periodic=periodic,
                                 parallel=parallel, bit64=bit64)
    # Barrier for non-root processes: they wait here until the root process
    # has finished compiling/importing the extension.
    if comm is not None:
        if rank > 0:
            comm.Barrier()
    # Create extension (gen is truthy when new files were generated and a
    # compile is needed).
    gen = _make_ext(ndim, periodic=periodic, parallel=parallel,
                    bit64=bit64, overwrite=overwrite)
    # If generated extension, install pyximport so the import below compiles
    # the freshly generated .pyx sources.
    if gen:
        importers = pyximport.install(
            setup_args={"include_dirs": np.get_include()},
            reload_support=True)
        # Stop obnoxious -Wstrict-prototypes warning with c++.
        cfg_vars = distutils.sysconfig.get_config_vars()
        for key, value in cfg_vars.items():
            # isinstance is the idiomatic (and subclass-safe) type check;
            # the original used `type(value) == str`.
            if isinstance(value, str):
                cfg_vars[key] = value.replace("-Wstrict-prototypes", "")
    # Import the requested triangulation class.
    out = getattr(importlib.import_module(modname), clsname)
    # If generated extension, uninstall pyximport so cython compilation is
    # not left enabled globally.
    if gen:
        pyximport.uninstall(*importers)
    # Root releases the other processes once its import succeeded; they then
    # import the already-compiled module.
    if comm is not None:
        if rank == 0:
            comm.Barrier()
    return out
# enabling cython globally. # # 2. build_dir: Also this variable is set in install(). We use the # default value, that is a folder in the home-dir. # # 3. load_dynamic will load the module but not extend the globald # directory. We rely on the fact that the loading has been already # performed and call the import * explicitely # # Since the .so is compiled outside of the PYTHON_PATH, there is # no ambiguity when importing the parser: the only way to load the # cython version is by the so_path that targets .pyxbld . # import imp pyx = pyximport.install() pyximport.uninstall(*pyx) build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') path = os.path.join(os.path.dirname(__file__), "parser.py") name = "pysmt.smtlib.parser.parser" so_path = pyximport.build_module(name, path, pyxbuild_dir=build_dir) mod = imp.load_dynamic(name, so_path) assert mod.__file__ == so_path, (mod.__file__, so_path) # print(so_path) from pysmt.smtlib.parser.parser import * # End of preamble # # #
# Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library. import numpy as np from nose.tools import * from landmarks_datasets import BioId from PythonWrapper.tree_based_regression import ForestRegressorTraining as CppForestRegressorTraining from PythonWrapper.tree_based_regression import AlignmentMethod, AlignmentMethodTraining import pyximport install = pyximport.install() from tree_based_regression_training import CyForestRegressorTraining pyximport.uninstall(*install) path = "models/alignment/" class TestRandomForestTraining: @classmethod def setup_class(cls): dataset = BioId() cls.images = dataset.loadImages() cls.ground_truth = dataset.loadGroundTruth() def setup(self): self.random_forest_training = CppForestRegressorTraining(5, 5) self.reference = CyForestRegressorTraining(5, 5) self.training_images = self.images[:500]
def setup_pyx_import():
    """Generator fixture: enable pyximport for the duration of a test.

    Installs the pyximport hooks before yielding control and removes them
    again when the generator is resumed for teardown.
    """
    import pyximport
    hooks = pyximport.install()
    yield
    pyximport.uninstall(*hooks)
from random import random

# Compile and import the Cython helpers, then remove the pyximport hooks so
# other imports are unaffected.
import pyximport
importers = pyximport.install()
from common_fast import rand_int, rand_double, rand_bool
pyximport.uninstall(*importers)

# Word pool used to build benchmark payloads (trailing spaces are part of
# the tokens).
WORDS = ["foo ", "bar ", "baz ", "qux ", "quux ", "corge ", "grault ",
         "garply ", "waldo ", "fred ", "plugh ", "xyzzy ", "thud "]


def from_bytes_helper(klass):
    # Return a deserializer for `klass`: a callable that parses a serialized
    # message into a fresh instance.  ParseFromString suggests `klass` is a
    # protobuf-style message class — confirm against callers.
    def helper(text):
        obj = klass()
        obj.ParseFromString(text)
        return obj
    return helper


def pass_by_object(reuse, iters, benchmark):
    # Run `iters` request/response round trips, passing message objects
    # directly to the benchmark's handler and validating each response.
    for _ in range(iters):
        request = benchmark.Request()
        expected = benchmark.setup(request)
        response = benchmark.Response()
        benchmark.handle(request, response)
        if not benchmark.check(response, expected):
            raise ValueError('Expected {}'.format(expected))


def pass_by_bytes(reuse, iters, benchmark):
    # Same round-trip benchmark but serializing to bytes at each hop.
    # NOTE(review): the body is truncated in this chunk — the serialization
    # and response handling continue beyond the visible code.
    for _ in range(iters):
        request = benchmark.Request()
        expected = benchmark.setup(request)
__author__ = "Wiadufa Chen <*****@*****.**>" __version__ = "2.5" import pyximport a, b = pyximport.install() from mydupfilekiller.core import * from mydupfilekiller.console import * from mydupfilekiller.gui import * from mydupfilekiller.exceptions import * pyximport.uninstall(a, b)