def cython(self, line, cell):
    """Compile and import everything from a Cython code cell.

    The contents of the cell are written to a `.pyx` file in the
    directory `IPYTHONDIR/cython` using a filename with the hash of the
    code. This file is then cythonized and compiled. The resulting module
    is imported and all of its symbols are injected into the user's
    namespace. The usage is similar to that of `%%cython_pyximport` but
    you don't have to pass a module name::

        %%cython
        def f(x): return 2.0*x
    """
    args = parse_argstring(self.cython, line)
    # Cython's parser wants a trailing newline on the cell source.
    code = cell if cell.endswith('\n') else cell + '\n'
    lib_dir = os.path.join(self.shell.ipython_dir, 'cython')
    cython_include_dirs = ['.']
    force = args.force
    quiet = True
    ctx = Context(cython_include_dirs, default_options)
    # Cache key covers everything that could change the compiled result.
    key = code, sys.version_info, sys.executable, Cython.__version__
    module_name = "_cython_magic_" + hashlib.md5(
        str(key).encode('utf-8')).hexdigest()
    module_path = os.path.join(lib_dir, module_name + self.so_ext)

    if not os.path.exists(lib_dir):
        os.makedirs(lib_dir)

    if force or not os.path.isfile(module_path):
        # Copy the parsed-args list: appending to args.include directly
        # would mutate the argparse namespace in place.
        c_include_dirs = list(args.include)
        if 'numpy' in code:
            import numpy
            c_include_dirs.append(numpy.get_include())
        pyx_file = os.path.join(lib_dir, module_name + '.pyx')
        pyx_file = py3compat.cast_bytes_py2(
            pyx_file, encoding=sys.getfilesystemencoding())
        with io.open(pyx_file, 'w', encoding='utf-8') as f:
            f.write(code)
        extension = Extension(
            name=module_name,
            sources=[pyx_file],
            include_dirs=c_include_dirs,
            extra_compile_args=args.compile_args,
            libraries=args.lib,
        )
        build_extension = self._get_build_extension()
        try:
            build_extension.extensions = cythonize(
                [extension], ctx=ctx, quiet=quiet)
        except CompileError:
            # Compilation errors were already reported; nothing to import.
            return
        build_extension.build_temp = os.path.dirname(pyx_file)
        build_extension.build_lib = lib_dir
        build_extension.run()
        self._code_cache[key] = module_name

    module = imp.load_dynamic(module_name, module_path)
    self._import_all(module)
def create_dependency_tree(ctx=None, quiet=False):
    """Return the module-wide DependencyTree singleton, building it lazily.

    The first call constructs the tree (with a default Context when *ctx*
    is not supplied); later calls return the cached instance unchanged.
    """
    global _dep_tree
    if _dep_tree is not None:
        return _dep_tree
    if ctx is None:
        ctx = Context(["."], CompilationOptions(default_options))
    _dep_tree = DependencyTree(ctx, quiet=quiet)
    return _dep_tree
def unbound_symbols(code, context=None):
    """Return the names referenced in *code* that are neither declared in
    it nor Python builtins.

    Parses *code* as Cython, runs the compiler pipeline up to declaration
    analysis, then filters the collected symbols against the module scope
    and the builtins module.
    """
    code = to_unicode(code)
    if context is None:
        context = Context([], default_options)
    from Cython.Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
    tree = parse_from_strings('(tree fragment)', code)
    # Run only as much of the pipeline as needed to analyse declarations.
    for phase in context.create_pipeline(pxd=False):
        if phase is None:
            continue
        tree = phase(tree)
        if isinstance(phase, AnalyseDeclarationsTransform):
            break
    symbol_collector = AllSymbols()
    symbol_collector(tree)
    unbound = []
    # `__builtin__` only exists on Python 2; it was renamed to `builtins`
    # in Python 3, so the bare `import __builtin__` raised ImportError there.
    try:
        import builtins
    except ImportError:
        import __builtin__ as builtins
    for name in symbol_collector.names:
        if not tree.scope.lookup(name) and not hasattr(builtins, name):
            unbound.append(name)
    return unbound
def unbound_symbols(code, context=None):
    """Return names referenced in *code* that are neither declared in it
    nor available as builtins.

    The code is parsed as Cython and the compiler pipeline is run only up
    to the declaration-analysis phase before symbols are collected.
    """
    code = to_unicode(code)
    if context is None:
        context = Context([], default_options)
    from Cython.Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
    tree = parse_from_strings('(tree fragment)', code)
    for phase in context.create_pipeline(pxd=False):
        if phase is not None:
            tree = phase(tree)
            if isinstance(phase, AnalyseDeclarationsTransform):
                break
    collector = AllSymbols()
    collector(tree)
    # Python 3 renamed `__builtin__` to `builtins`; support both.
    try:
        import builtins
    except ImportError:
        import __builtin__ as builtins
    scope = tree.scope
    return [name for name in collector.names
            if not scope.lookup(name) and not hasattr(builtins, name)]
def unbound_symbols(code, context=None):
    """Return a tuple of names used in *code* that are neither declared in
    it nor builtins.

    Uses the Pipeline API to run the compiler up to declaration analysis,
    then subtracts the builtin namespace from the unbound-symbol set.
    """
    if context is None:
        context = Context([], default_options)
    from Cython.Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
    tree = parse_from_strings('(tree fragment)', code)
    for phase in Pipeline.create_pipeline(context, 'pyx'):
        if phase is not None:
            tree = phase(tree)
            if isinstance(phase, AnalyseDeclarationsTransform):
                break
    # Python 3 renamed `__builtin__` to `builtins`; support both.
    try:
        import builtins
    except ImportError:
        import __builtin__ as builtins
    builtin_names = set(dir(builtins))
    return tuple(UnboundSymbols()(tree) - builtin_names)
def check_cythonized_extensions(self):
    """Resolve the distribution's ext_modules into a Cython extension list.

    Ensures an `include_path` option is present, builds a compilation
    Context from the options, and runs `create_extension_list` over the
    declared extension modules.
    """
    options = self.distribution.ext_modules.options
    if 'include_path' not in options:
        options['include_path'] = ['.']
    # FIX: the module is `Cython.Build.Dependencies` (plural); importing
    # from `Cython.Build.Dependency` raises ImportError at call time.
    from Cython.Build.Dependencies import create_extension_list
    from Cython.Compiler.Main import Context
    from Cython.Compiler.Options import CompilationOptions
    c_options = CompilationOptions(**options)
    ctx = Context.from_options(c_options)
    options = c_options
    module_list, _ = create_extension_list(
        self.distribution.ext_modules,
        ctx=ctx,
        **options,
    )
# Cython is a hard build requirement; fail early with a helpful hint.
try:
    from Cython.Build.Dependencies import cythonize_one
except ImportError:
    print(
        "You need cython. https://cython.org/, pip install cython --user")
    sys.exit(1)

from Cython.Build.Dependencies import create_extension_list
from Cython.Build.Dependencies import create_dependency_tree

try:
    # Newer Cython: the Context is constructed from compilation options.
    from Cython.Compiler.Main import Context
    from Cython.Compiler.Options import CompilationOptions, default_options
    c_options = CompilationOptions(default_options)
    ctx = Context.from_options(c_options)
except ImportError:
    # Older Cython keeps everything in Cython.Compiler.Main and the
    # options object builds its own context.
    from Cython.Compiler.Main import Context, CompilationOptions, default_options
    c_options = CompilationOptions(default_options)
    ctx = c_options.create_context()

import glob

# Collect every .pyx source, both directly under the cython/pygame tree
# and one directory level below it.
_pyx_patterns = (
    os.path.join('src_c', 'cython', 'pygame', '*.pyx'),
    os.path.join('src_c', 'cython', 'pygame', '**', '*.pyx'),
)
pyx_files = []
for _pattern in _pyx_patterns:
    pyx_files.extend(glob.glob(_pattern))

pyx_files, pyx_meta = create_extension_list(pyx_files, ctx=ctx)
deps = create_dependency_tree(ctx)
queue = []
def cython_inline(code, get_type=unsafe_type,
                  lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
                  cython_include_dirs=None, force=False, quiet=False,
                  locals=None, globals=None, **kwds):
    """Compile *code* as the body of a Cython function and invoke it.

    Unbound symbols in *code* are resolved from **kwds**, then the
    caller's locals, then globals. The generated module is cached on disk
    keyed by the code, the argument signature, and the interpreter/Cython
    versions; *force* rebuilds it regardless.

    Fixes over the previous revision:
    - `cython_include_dirs` defaulted to a shared mutable list.
    - kwds was mutated (`del`) while iterating its live `.items()` view,
      a RuntimeError on Python 3.
    - `kwds.keys().sort()` fails on Python 3 (views have no `.sort`).
    - the literal-substitution loop reused the name `key`, clobbering the
      cache key before `_code_cache[key]` was written.
    """
    if cython_include_dirs is None:
        # None-sentinel instead of a mutable default argument.
        cython_include_dirs = ['.']
    if get_type is None:
        get_type = lambda x: 'object'
    code = to_unicode(code)
    orig_code = code
    code, literals = strip_string_literals(code)
    code = strip_common_indent(code)
    ctx = Context(cython_include_dirs, default_options)
    if locals is None:
        locals = inspect.currentframe().f_back.f_back.f_locals
    if globals is None:
        globals = inspect.currentframe().f_back.f_back.f_globals
    try:
        for symbol in unbound_symbols(code):
            if symbol in kwds:
                continue
            elif symbol in locals:
                kwds[symbol] = locals[symbol]
            elif symbol in globals:
                kwds[symbol] = globals[symbol]
            else:
                print("Couldn't find ", symbol)
    except AssertionError:
        if not quiet:
            # Parsing from strings not fully supported (e.g. cimports).
            print("Could not parse code as a string (to extract unbound symbols).")
    cimports = []
    # Snapshot the items: deleting from kwds while iterating the live view
    # raises RuntimeError on Python 3.
    for name, arg in list(kwds.items()):
        if arg is cython_module:
            cimports.append('\ncimport cython as %s' % name)
            del kwds[name]
    # sorted() works on both Python 2 and 3; dict views have no .sort().
    arg_names = sorted(kwds)
    arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
    key = orig_code, arg_sigs, sys.version_info, sys.executable, Cython.__version__
    module_name = "_cython_inline_" + hashlib.md5(
        str(key).encode('utf-8')).hexdigest()
    so_ext = [ext for ext, _, mod_type in imp.get_suffixes()
              if mod_type == imp.C_EXTENSION][0]
    module_path = os.path.join(lib_dir, module_name + so_ext)

    if not os.path.exists(lib_dir):
        os.makedirs(lib_dir)
    if force or not os.path.isfile(module_path):
        cflags = []
        c_include_dirs = []
        qualified = re.compile(r'([.\w]+)[.]')
        for type, _ in arg_sigs:
            m = qualified.match(type)
            if m:
                cimports.append('\ncimport %s' % m.groups()[0])
                # one special case
                if m.groups()[0] == 'numpy':
                    import numpy
                    c_include_dirs.append(numpy.get_include())
                    # cflags.append('-Wno-unused')
        module_body, func_body = extract_func_code(code)
        params = ', '.join(['%s %s' % a for a in arg_sigs])
        module_code = """
%(module_body)s
%(cimports)s
def __invoke(%(params)s):
%(func_body)s
""" % {'cimports': '\n'.join(cimports),
       'module_body': module_body,
       'params': params,
       'func_body': func_body}
        # Re-insert stripped string literals. Use distinct loop names so
        # the cache `key` above is not shadowed.
        for literal_key, literal_value in literals.items():
            module_code = module_code.replace(literal_key, literal_value)
        pyx_file = os.path.join(lib_dir, module_name + '.pyx')
        with open(pyx_file, 'w') as fh:
            fh.write(module_code)
        extension = Extension(
            name=module_name,
            sources=[pyx_file],
            include_dirs=c_include_dirs,
            extra_compile_args=cflags)
        dist = Distribution()
        # Ensure the build respects distutils configuration by parsing
        # the configuration files
        config_files = dist.find_config_files()
        dist.parse_config_files(config_files)
        build_extension = build_ext(dist)
        build_extension.finalize_options()
        build_extension.extensions = cythonize([extension], ctx=ctx, quiet=quiet)
        build_extension.build_temp = os.path.dirname(pyx_file)
        build_extension.build_lib = lib_dir
        build_extension.run()
        _code_cache[key] = module_name

    module = imp.load_dynamic(module_name, module_path)
    arg_list = [kwds[arg] for arg in arg_names]
    return module.__invoke(*arg_list)
def create_context(cython_include_dirs):
    """Build a fresh Cython compiler Context for the given include dirs.

    A copy of *cython_include_dirs* is taken so the caller's sequence is
    never shared with the Context.
    """
    from Cython.Compiler.Main import Context, default_options
    include_dirs = list(cython_include_dirs)
    return Context(include_dirs, default_options)
def agglomerate(
        affs,
        thresholds,
        gt=None,
        fragments=None,
        aff_threshold_low=0.0001,
        aff_threshold_high=0.9999,
        return_merge_history=False,
        scoring_function='OneMinus<MeanAffinity<RegionGraphType, ScoreValue>>',
        discretize_queue=0,
        force_rebuild=False):
    '''Compute segmentations from an affinity graph for several thresholds.

    Passed volumes need to be converted into contiguous memory arrays.
    This will be done for you if needed, but you can save memory by making
    sure your volumes are already C_CONTIGUOUS.

    Parameters
    ----------
    affs: numpy array, float32, 4 dimensional
        The affinities as an array with affs[channel][z][y][x].
    thresholds: list of float32
        The thresholds to compute segmentations for. For each threshold,
        one segmentation is returned.
    gt: numpy array, uint32, 3 dimensional (optional)
        An optional ground-truth segmentation as an array with gt[z][y][x].
        If given, metrics
    fragments: numpy array, uint64, 3 dimensional (optional)
        An optional volume of fragments to use, instead of the build-in
        zwatershed.
    aff_threshold_low: float, default 0.0001
    aff_threshold_high: float, default 0.9999,
        Thresholds on the affinities for the initial segmentation step.
    return_merge_history: bool
        If set to True, the returning tuple will contain a merge history,
        relative to the previous segmentation.
    scoring_function: string, default 'OneMinus<MeanAffinity<RegionGraphType, ScoreValue>>'
        A C++ type string specifying the edge scoring function to use. See
        https://github.com/funkey/waterz/blob/master/waterz/backend/MergeFunctions.hpp
        for available functions, and
        https://github.com/funkey/waterz/blob/master/waterz/backend/Operators.hpp
        for operators to combine them.
    discretize_queue: int
        If set to non-zero, a bin queue with that many bins will be used
        to approximate the priority queue for merge operations.
    force_rebuild:
        Force the rebuild of the module. Only needed for development.

    Returns
    -------
    Results are returned as tuples from a generator object, and only
    computed on-the-fly when iterated over. This way, you can ask for
    hundreds of thresholds while at any point only one segmentation is
    stored in memory.

    Depending on the given parameters, the returned values are a subset of
    the following items (in that order):

    segmentation
        The current segmentation (numpy array, uint64, 3 dimensional).
    metrics (only if ground truth was provided)
        A dictionary with the keys 'V_Rand_split', 'V_Rand_merge',
        'V_Info_split', and 'V_Info_merge'.
    merge_history (only if return_merge_history is True)
        A list of dictionaries with keys 'a', 'b', 'c', and 'score',
        indicating that region a got merged with b into c with the given
        score.

    Examples
    --------
        affs = ...
        gt = ...

        # only segmentation
        for segmentation in agglomerate(affs, range(100,10000,100)):
            # ...

        # segmentation with merge history
        for segmentation, merge_history in agglomerate(
                affs, range(100,10000,100), return_merge_history = True):
            # ...

        # segmentation with merge history and metrics compared to gt
        for segmentation, metrics, merge_history in agglomerate(
                affs, range(100,10000,100), gt, return_merge_history = True):
            # ...
    '''
    import sys, os
    import shutil
    import glob
    import numpy
    import fcntl
    try:
        import hashlib
    except ImportError:
        import md5 as hashlib
    from distutils.core import Distribution, Extension
    from distutils.command.build_ext import build_ext
    from distutils.sysconfig import get_config_vars, get_python_inc
    import Cython
    from Cython.Compiler.Main import Context, default_options
    from Cython.Build.Dependencies import cythonize

    def _hash_source(path):
        # Context manager closes the handle; the previous inline
        # `open(f).read()` leaked one file descriptor per source file.
        with open(path, 'r') as f:
            return hashlib.md5(f.read().encode('utf-8')).hexdigest()

    # compile frontend.pyx for given scoring function
    source_dir = os.path.dirname(os.path.abspath(__file__))
    source_files = [
        os.path.join(source_dir, 'frontend.pyx'),
        os.path.join(source_dir, 'c_frontend.h'),
        os.path.join(source_dir, 'c_frontend.cpp'),
        os.path.join(source_dir, 'evaluate.hpp')
    ]
    source_files += glob.glob(source_dir + '/backend/*.hpp')
    source_files.sort()
    source_files_hashes = [_hash_source(f) for f in source_files]
    key = (scoring_function, discretize_queue, source_files_hashes,
           sys.version_info, sys.executable, Cython.__version__)
    module_name = 'waterz_' + hashlib.md5(str(key).encode('utf-8')).hexdigest()
    lib_dir = os.path.expanduser('~/.cython/inline')

    # since this could be called concurrently, there is no good way to check
    # whether the directory already exists
    try:
        os.makedirs(lib_dir)
    except OSError:
        # Directory already exists or was created by a concurrent process.
        pass

    # make sure the same module is not build concurrently
    with open(os.path.join(lib_dir, module_name + '.lock'), 'w') as lock_file:
        fcntl.lockf(lock_file, fcntl.LOCK_EX)
        try:
            if lib_dir not in sys.path:
                sys.path.append(lib_dir)
            if force_rebuild:
                raise ImportError
            else:
                __import__(module_name)
                print("Re-using already compiled waterz version")
        except ImportError:
            print("Compiling waterz in " + str(lib_dir))
            cython_include_dirs = ['.']
            ctx = Context(cython_include_dirs, default_options)
            include_dir = os.path.join(lib_dir, module_name)
            if not os.path.exists(include_dir):
                os.makedirs(include_dir)
            include_dirs = [
                source_dir,
                include_dir,
                os.path.join(source_dir, 'backend'),
                os.path.dirname(get_python_inc()),
                numpy.get_include(),
            ]
            # The scoring function and queue type are baked into the
            # module via generated headers, hence the per-key module name.
            scoring_function_header = os.path.join(include_dir,
                                                   'ScoringFunction.h')
            with open(scoring_function_header, 'w') as f:
                f.write('typedef %s ScoringFunctionType;' % scoring_function)
            queue_header = os.path.join(include_dir, 'Queue.h')
            with open(queue_header, 'w') as f:
                if discretize_queue == 0:
                    f.write(
                        'template<typename T, typename S> using QueueType = PriorityQueue<T, S>;'
                    )
                else:
                    f.write(
                        'template<typename T, typename S> using QueueType = BinQueue<T, S, %d>;'
                        % discretize_queue)
            # cython requires that the pyx file has the same name as the
            # module
            shutil.copy(os.path.join(source_dir, 'frontend.pyx'),
                        os.path.join(lib_dir, module_name + '.pyx'))
            shutil.copy(os.path.join(source_dir, 'c_frontend.cpp'),
                        os.path.join(lib_dir, module_name + '_c_frontend.cpp'))
            # Remove the "-Wstrict-prototypes" compiler option, which isn't
            # valid for C++.
            cfg_vars = get_config_vars()
            if "CFLAGS" in cfg_vars:
                cfg_vars["CFLAGS"] = cfg_vars["CFLAGS"].replace(
                    "-Wstrict-prototypes", "")
            extension = Extension(
                module_name,
                sources=[
                    os.path.join(lib_dir, module_name + '.pyx'),
                    os.path.join(lib_dir, module_name + '_c_frontend.cpp')
                ],
                include_dirs=include_dirs,
                language='c++',
                extra_link_args=['-std=c++11'],
                extra_compile_args=['-std=c++11', '-w'])
            build_extension = build_ext(Distribution())
            build_extension.finalize_options()
            build_extension.extensions = cythonize([extension],
                                                   quiet=True,
                                                   nthreads=2)
            build_extension.build_temp = lib_dir
            build_extension.build_lib = lib_dir
            build_extension.run()

    return __import__(module_name).agglomerate(affs, thresholds, gt,
                                               fragments, aff_threshold_low,
                                               aff_threshold_high,
                                               return_merge_history)
def __compile(
        scoring_function='OneMinus<MeanAffinity<RegionGraphType, ScoreValue>>',
        discretize_queue=0,
        force_rebuild=False):
    """Compile (or reuse) the waterz agglomerate extension module.

    The scoring function and queue type are baked into generated headers,
    so the module name is a hash over the configuration and the source
    files. Returns the module name; the module directory is added to
    sys.path so it can be imported by the caller.

    Fixes over the previous revision: the makedirs guard no longer uses a
    bare `except:` (which also swallowed KeyboardInterrupt), and the
    source-file hashing no longer leaks open file handles.
    """
    import sys
    import os
    import shutil
    import glob
    import numpy
    import fcntl
    try:
        import hashlib
    except ImportError:
        import md5 as hashlib
    from distutils.core import Distribution, Extension
    from distutils.command.build_ext import build_ext
    from distutils.sysconfig import get_config_vars, get_python_inc
    import Cython
    from Cython.Compiler.Main import Context, default_options
    from Cython.Build.Dependencies import cythonize

    def _hash_source(path):
        # Close each handle promptly instead of leaking it.
        with open(path, 'r') as f:
            return hashlib.md5(f.read().encode('utf-8')).hexdigest()

    # compile frontend.pyx for given scoring function
    source_dir = os.path.dirname(os.path.abspath(__file__))
    source_files = [
        os.path.join(source_dir, 'agglomerate.pyx'),
        os.path.join(source_dir, 'frontend_agglomerate.h'),
        os.path.join(source_dir, 'frontend_agglomerate.cpp')
    ]
    source_files += glob.glob(source_dir + '/backend/*.hpp')
    source_files.sort()
    source_files_hashes = [_hash_source(f) for f in source_files]
    key = (scoring_function, discretize_queue, source_files_hashes,
           sys.version_info, sys.executable, Cython.__version__)
    module_name = 'waterz_' + hashlib.md5(str(key).encode('utf-8')).hexdigest()
    lib_dir = os.path.expanduser('~/.cython/inline')

    # since this could be called concurrently, there is no good way to check
    # whether the directory already exists
    try:
        os.makedirs(lib_dir)
    except OSError:
        # Directory already exists or was created by a concurrent process.
        pass

    # make sure the same module is not build concurrently
    with open(os.path.join(lib_dir, module_name + '.lock'), 'w') as lock_file:
        fcntl.lockf(lock_file, fcntl.LOCK_EX)
        try:
            if lib_dir not in sys.path:
                sys.path.append(lib_dir)
            if force_rebuild:
                raise ImportError
            else:
                print("Re-using already compiled waterz version")
                return module_name
        except ImportError:
            print("Compiling waterz in " + str(lib_dir))
            cython_include_dirs = ['.']
            ctx = Context(cython_include_dirs, default_options)
            include_dir = os.path.join(lib_dir, module_name)
            if not os.path.exists(include_dir):
                os.makedirs(include_dir)
            include_dirs = [
                source_dir,
                include_dir,
                os.path.join(source_dir, 'backend'),
                os.path.dirname(get_python_inc()),
                numpy.get_include(),
            ]
            scoring_function_header = os.path.join(include_dir,
                                                   'ScoringFunction.h')
            with open(scoring_function_header, 'w') as f:
                f.write('typedef %s ScoringFunctionType;' % scoring_function)
            queue_header = os.path.join(include_dir, 'Queue.h')
            with open(queue_header, 'w') as f:
                if discretize_queue == 0:
                    f.write(
                        'template<typename T, typename S> using QueueType = PriorityQueue<T, S>;'
                    )
                else:
                    f.write(
                        'template<typename T, typename S> using QueueType = BinQueue<T, S, %d>;'
                        % discretize_queue)
            # cython requires that the pyx file has the same name as the
            # module
            shutil.copy(os.path.join(source_dir, 'agglomerate.pyx'),
                        os.path.join(lib_dir, module_name + '.pyx'))
            shutil.copy(
                os.path.join(source_dir, 'frontend_agglomerate.cpp'),
                os.path.join(lib_dir,
                             module_name + '_frontend_agglomerate.cpp'))
            # Remove the "-Wstrict-prototypes" compiler option, which isn't
            # valid for C++.
            cfg_vars = get_config_vars()
            if "CFLAGS" in cfg_vars:
                cfg_vars["CFLAGS"] = cfg_vars["CFLAGS"].replace(
                    "-Wstrict-prototypes", "")
            extension = Extension(
                module_name,
                sources=[
                    os.path.join(lib_dir, module_name + '.pyx'),
                    os.path.join(lib_dir,
                                 module_name + '_frontend_agglomerate.cpp')
                ],
                include_dirs=include_dirs,
                language='c++',
                extra_link_args=['-std=c++11'],
                extra_compile_args=['-std=c++11', '-w'])
            build_extension = build_ext(Distribution())
            build_extension.finalize_options()
            build_extension.extensions = cythonize([extension],
                                                   quiet=True,
                                                   nthreads=2)
            build_extension.build_temp = lib_dir
            build_extension.build_lib = lib_dir
            build_extension.run()

    return module_name
def _create_context(cython_include_dirs):
    """Return a new Cython Context over a copy of *cython_include_dirs*."""
    include_dirs = list(cython_include_dirs)
    return Context(include_dirs, default_options)
def malis(affs, gt, force_rebuild=False):
    """JIT-compile the pymalis extension (if needed) and run MALIS on
    (*affs*, *gt*).

    The module name is a hash over the source files and the interpreter/
    Cython versions; already-compiled versions are reused unless
    *force_rebuild* is set. A lock file serializes concurrent builds.

    Fixes over the previous revision: the makedirs guard no longer uses a
    bare `except:` (which also swallowed KeyboardInterrupt), and the
    source-file hashing no longer leaks open file handles.
    """
    import sys, os
    import shutil
    import glob
    import numpy
    import fcntl
    try:
        import hashlib
    except ImportError:
        import md5 as hashlib
    from distutils.core import Distribution, Extension
    from distutils.command.build_ext import build_ext
    from distutils.sysconfig import get_config_vars, get_python_inc
    import Cython
    from Cython.Compiler.Main import Context, default_options
    from Cython.Build.Dependencies import cythonize

    def _hash_source(path):
        # Close each handle promptly instead of leaking it.
        with open(path, 'r') as f:
            return hashlib.md5(f.read().encode('utf-8')).hexdigest()

    source_dir = os.path.dirname(os.path.abspath(__file__))
    source_files = [
        os.path.join(source_dir, 'frontend.pyx'),
        os.path.join(source_dir, 'malis_loss_layer.hpp'),
        os.path.join(source_dir, 'malis_loss_layer.cpp'),
        os.path.join(source_dir, 'c_frontend.h'),
    ]
    source_files.sort()
    source_files_hashes = [_hash_source(f) for f in source_files]
    key = (source_files_hashes, sys.version_info, sys.executable,
           Cython.__version__)
    module_name = 'pymalis_' + hashlib.md5(
        str(key).encode('utf-8')).hexdigest()
    lib_dir = os.path.expanduser('~/.cython/inline')

    # since this could be called concurrently, there is no good way to check
    # whether the directory already exists
    try:
        os.makedirs(lib_dir)
    except OSError:
        # Directory already exists or was created by a concurrent process.
        pass

    # make sure the same module is not build concurrently
    with open(os.path.join(lib_dir, module_name + '.lock'), 'w') as lock_file:
        fcntl.lockf(lock_file, fcntl.LOCK_EX)
        try:
            if lib_dir not in sys.path:
                sys.path.append(lib_dir)
            if force_rebuild:
                raise ImportError
            else:
                __import__(module_name)
                print("Re-using already compiled pymalis version")
        except ImportError:
            print("Compiling pymalis in " + str(lib_dir))
            cython_include_dirs = ['.']
            ctx = Context(cython_include_dirs, default_options)
            scoring_function_include_dir = os.path.join(lib_dir, module_name)
            if not os.path.exists(scoring_function_include_dir):
                os.makedirs(scoring_function_include_dir)
            include_dirs = [
                source_dir,
                os.path.dirname(get_python_inc()),
                numpy.get_include(),
            ]
            # cython requires that the pyx file has the same name as the
            # module
            shutil.copy(os.path.join(source_dir, 'frontend.pyx'),
                        os.path.join(lib_dir, module_name + '.pyx'))
            shutil.copy(
                os.path.join(source_dir, 'malis_loss_layer.cpp'),
                os.path.join(lib_dir,
                             module_name + '_malis_loss_layer.cpp'))
            # Remove the "-Wstrict-prototypes" compiler option, which isn't
            # valid for C++.
            cfg_vars = get_config_vars()
            if "CFLAGS" in cfg_vars:
                cfg_vars["CFLAGS"] = cfg_vars["CFLAGS"].replace(
                    "-Wstrict-prototypes", "")
            extension = Extension(
                module_name,
                sources=[
                    os.path.join(lib_dir, module_name + '.pyx'),
                    os.path.join(lib_dir,
                                 module_name + '_malis_loss_layer.cpp')
                ],
                include_dirs=include_dirs,
                language='c++',
                extra_link_args=['-std=c++11'],
                extra_compile_args=['-std=c++11', '-w'])
            build_extension = build_ext(Distribution())
            build_extension.finalize_options()
            build_extension.extensions = cythonize([extension],
                                                   quiet=True,
                                                   nthreads=2)
            build_extension.build_temp = lib_dir
            build_extension.build_lib = lib_dir
            build_extension.run()

    return __import__(module_name).malis(affs, gt)