def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False,
              exclude_failures=False, **options):
    """
    Compile a set of source modules into C/C++ files and return a list of
    distutils Extension objects for them.

    As module list, pass either a glob pattern, a list of glob patterns or a
    list of Extension objects.  The latter allows you to configure the
    extensions separately through the normal distutils options.

    When using glob patterns, you can exclude certain module names explicitly
    by passing them into the 'exclude' option.

    For parallel compilation, set the 'nthreads' option to the number of
    concurrent builds.

    For a broad 'try to compile' mode that ignores compilation failures and
    simply excludes the failed extensions, pass 'exclude_failures=True'.  Note
    that this only really makes sense for compiling .py files which can also
    be used without compilation.

    Additional compilation options can be passed as keyword arguments.
    """
    # Avoid a shared mutable default argument; None means "exclude nothing".
    if exclude is None:
        exclude = []
    if 'include_path' not in options:
        options['include_path'] = ['.']
    if 'common_utility_include_dir' in options:
        if options.get('cache'):
            raise NotImplementedError("common_utility_include_dir does not yet work with caching")
        if not os.path.exists(options['common_utility_include_dir']):
            os.makedirs(options['common_utility_include_dir'])
    c_options = CompilationOptions(**options)
    cpp_options = CompilationOptions(**options)
    cpp_options.cplus = True
    ctx = c_options.create_context()
    options = c_options
    module_list = create_extension_list(
        module_list,
        exclude=exclude,
        ctx=ctx,
        quiet=quiet,
        exclude_failures=exclude_failures,
        aliases=aliases)
    deps = create_dependency_tree(ctx, quiet=quiet)
    build_dir = getattr(options, 'build_dir', None)

    modules_by_cfile = {}
    to_compile = []
    for m in module_list:
        if build_dir:
            root = os.path.realpath(os.path.abspath(find_root_package_dir(m.sources[0])))

            def copy_to_build_dir(filepath, root=root):
                # Mirror a file that lives inside the package tree into
                # build_dir, preserving its relative directory structure.
                filepath_abs = os.path.realpath(os.path.abspath(filepath))
                if os.path.isabs(filepath):
                    filepath = filepath_abs
                if filepath_abs.startswith(root):
                    mod_dir = os.path.join(build_dir,
                                           os.path.dirname(_relpath(filepath, root)))
                    if not os.path.isdir(mod_dir):
                        os.makedirs(mod_dir)
                    shutil.copy(filepath, mod_dir)

            for dep in m.depends:
                copy_to_build_dir(dep)

        new_sources = []
        for source in m.sources:
            base, ext = os.path.splitext(source)
            if ext in ('.pyx', '.py'):
                if m.language == 'c++':
                    c_file = base + '.cpp'
                    options = cpp_options
                else:
                    c_file = base + '.c'
                    options = c_options

                # setup for out of place build directory if enabled
                if build_dir:
                    c_file = os.path.join(build_dir, c_file)
                    dir = os.path.dirname(c_file)
                    if not os.path.isdir(dir):
                        os.makedirs(dir)

                if os.path.exists(c_file):
                    c_timestamp = os.path.getmtime(c_file)
                else:
                    # A missing output file is treated as infinitely old.
                    c_timestamp = -1

                # Priority goes first to modified files, second to direct
                # dependents, and finally to indirect dependents.
                if c_timestamp < deps.timestamp(source):
                    dep_timestamp, dep = deps.timestamp(source), source
                    priority = 0
                else:
                    dep_timestamp, dep = deps.newest_dependency(source)
                    priority = 2 - (dep in deps.immediate_dependencies(source))
                if force or c_timestamp < dep_timestamp:
                    if not quiet:
                        if source == dep:
                            print("Compiling %s because it changed." % source)
                        else:
                            print("Compiling %s because it depends on %s." % (source, dep))
                    if not force and hasattr(options, 'cache'):
                        extra = m.language
                        fingerprint = deps.transitive_fingerprint(source, extra)
                    else:
                        fingerprint = None
                    to_compile.append((priority, source, c_file, fingerprint,
                                       quiet, options, not exclude_failures))
                new_sources.append(c_file)
                if c_file not in modules_by_cfile:
                    modules_by_cfile[c_file] = [m]
                else:
                    modules_by_cfile[c_file].append(m)
            else:
                new_sources.append(source)
                if build_dir:
                    copy_to_build_dir(source)
        m.sources = new_sources

    if hasattr(options, 'cache'):
        if not os.path.exists(options.cache):
            os.makedirs(options.cache)
    to_compile.sort()
    if nthreads:
        # Requires multiprocessing (or Python >= 2.6)
        try:
            import multiprocessing
            pool = multiprocessing.Pool(nthreads)
        except (ImportError, OSError):
            print("multiprocessing required for parallel cythonization")
            nthreads = 0
        else:
            try:
                pool.map(cythonize_one_helper, to_compile)
            finally:
                pool.close()
    if not nthreads:
        for args in to_compile:
            cythonize_one(*args[1:])

    if exclude_failures:
        failed_modules = set()
        # Py2/Py3 compatibility: dict.iteritems() does not exist on Python 3;
        # .items() works on both.
        for c_file, modules in modules_by_cfile.items():
            if not os.path.exists(c_file):
                failed_modules.update(modules)
            elif os.path.getsize(c_file) < 200:
                f = io_open(c_file, 'r', encoding='iso8859-1')
                try:
                    if f.read(len('#error ')) == '#error ':
                        # dead compilation result
                        failed_modules.update(modules)
                finally:
                    f.close()
        if failed_modules:
            for module in failed_modules:
                module_list.remove(module)
            print("Failed compilations: %s" % ', '.join(sorted([
                module.name for module in failed_modules])))

    if hasattr(options, 'cache'):
        cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100))
    # cythonize() is often followed by the (non-Python-buffered)
    # compiler output, flush now to avoid interleaving output.
    sys.stdout.flush()
    return module_list
def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False,
              exclude_failures=False, **options):
    """
    Compile a set of source modules into C/C++ files and return a list of
    distutils Extension objects for them.

    As module list, pass either a glob pattern, a list of glob patterns or a
    list of Extension objects.  The latter allows you to configure the
    extensions separately through the normal distutils options.

    When using glob patterns, you can exclude certain module names explicitly
    by passing them into the 'exclude' option.

    For parallel compilation, set the 'nthreads' option to the number of
    concurrent builds.

    For a broad 'try to compile' mode that ignores compilation failures and
    simply excludes the failed extensions, pass 'exclude_failures=True'.  Note
    that this only really makes sense for compiling .py files which can also
    be used without compilation.

    Additional compilation options can be passed as keyword arguments.
    """
    # Avoid a shared mutable default argument; None means "exclude nothing".
    if exclude is None:
        exclude = []
    if 'include_path' not in options:
        options['include_path'] = ['.']
    if 'common_utility_include_dir' in options:
        if options.get('cache'):
            # 'raise E, msg' is Python-2-only syntax; the call form is valid
            # on both Python 2 and 3.
            raise NotImplementedError("common_utility_include_dir does not yet work with caching")
        if not os.path.exists(options['common_utility_include_dir']):
            os.makedirs(options['common_utility_include_dir'])
    c_options = CompilationOptions(**options)
    cpp_options = CompilationOptions(**options)
    cpp_options.cplus = True
    ctx = c_options.create_context()
    options = c_options
    module_list = create_extension_list(
        module_list,
        exclude=exclude,
        ctx=ctx,
        quiet=quiet,
        exclude_failures=exclude_failures,
        aliases=aliases)
    deps = create_dependency_tree(ctx, quiet=quiet)
    build_dir = getattr(options, 'build_dir', None)

    modules_by_cfile = {}
    to_compile = []
    for m in module_list:
        if build_dir:
            root = os.path.realpath(
                os.path.abspath(find_root_package_dir(m.sources[0])))

            def copy_to_build_dir(filepath, root=root):
                # Mirror a file that lives inside the package tree into
                # build_dir, preserving its relative directory structure.
                filepath_abs = os.path.realpath(os.path.abspath(filepath))
                if os.path.isabs(filepath):
                    filepath = filepath_abs
                if filepath_abs.startswith(root):
                    mod_dir = os.path.join(
                        build_dir, os.path.dirname(_relpath(filepath, root)))
                    if not os.path.isdir(mod_dir):
                        os.makedirs(mod_dir)
                    shutil.copy(filepath, mod_dir)

            for dep in m.depends:
                copy_to_build_dir(dep)

        new_sources = []
        for source in m.sources:
            base, ext = os.path.splitext(source)
            if ext in ('.pyx', '.py'):
                if m.language == 'c++':
                    c_file = base + '.cpp'
                    options = cpp_options
                else:
                    c_file = base + '.c'
                    options = c_options

                # setup for out of place build directory if enabled
                if build_dir:
                    c_file = os.path.join(build_dir, c_file)
                    dir = os.path.dirname(c_file)
                    if not os.path.isdir(dir):
                        os.makedirs(dir)

                if os.path.exists(c_file):
                    c_timestamp = os.path.getmtime(c_file)
                else:
                    # A missing output file is treated as infinitely old.
                    c_timestamp = -1

                # Priority goes first to modified files, second to direct
                # dependents, and finally to indirect dependents.
                if c_timestamp < deps.timestamp(source):
                    dep_timestamp, dep = deps.timestamp(source), source
                    priority = 0
                else:
                    dep_timestamp, dep = deps.newest_dependency(source)
                    priority = 2 - (dep in deps.immediate_dependencies(source))
                if force or c_timestamp < dep_timestamp:
                    if not quiet:
                        if source == dep:
                            print("Compiling %s because it changed." % source)
                        else:
                            print("Compiling %s because it depends on %s." % (source, dep))
                    if not force and hasattr(options, 'cache'):
                        extra = m.language
                        fingerprint = deps.transitive_fingerprint(source, extra)
                    else:
                        fingerprint = None
                    to_compile.append((priority, source, c_file, fingerprint,
                                       quiet, options, not exclude_failures))
                new_sources.append(c_file)
                if c_file not in modules_by_cfile:
                    modules_by_cfile[c_file] = [m]
                else:
                    modules_by_cfile[c_file].append(m)
            else:
                new_sources.append(source)
                if build_dir:
                    copy_to_build_dir(source)
        m.sources = new_sources

    if hasattr(options, 'cache'):
        if not os.path.exists(options.cache):
            os.makedirs(options.cache)
    to_compile.sort()
    if nthreads:
        # Requires multiprocessing (or Python >= 2.6)
        try:
            import multiprocessing
            pool = multiprocessing.Pool(nthreads)
        except (ImportError, OSError):
            print("multiprocessing required for parallel cythonization")
            nthreads = 0
        else:
            # Always close the pool, even if a worker raises; the original
            # leaked the pool's worker processes on failure.
            try:
                pool.map(cythonize_one_helper, to_compile)
            finally:
                pool.close()
    if not nthreads:
        for args in to_compile:
            cythonize_one(*args[1:])

    if exclude_failures:
        failed_modules = set()
        # Py2/Py3 compatibility: dict.iteritems() does not exist on Python 3;
        # .items() works on both.
        for c_file, modules in modules_by_cfile.items():
            if not os.path.exists(c_file):
                failed_modules.update(modules)
            elif os.path.getsize(c_file) < 200:
                f = io_open(c_file, 'r', encoding='iso8859-1')
                try:
                    if f.read(len('#error ')) == '#error ':
                        # dead compilation result
                        failed_modules.update(modules)
                finally:
                    f.close()
        if failed_modules:
            for module in failed_modules:
                module_list.remove(module)
            print("Failed compilations: %s" % ', '.join(sorted(
                [module.name for module in failed_modules])))

    if hasattr(options, 'cache'):
        cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100))
    # cythonize() is often followed by the (non-Python-buffered)
    # compiler output, flush now to avoid interleaving output.
    sys.stdout.flush()
    return module_list
def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False,
                          exclude_failures=False):
    """
    Expand glob patterns and/or Extension templates into a list of distutils
    Extension objects, one per matched Cython (.py/.pyx) source file.

    Extensions in 'patterns' that contain no Cython source are passed through
    unchanged.  Files matching any pattern in 'exclude' are skipped, as are
    files whose fully qualified module name is already given by an explicit
    Extension in 'patterns'.  When 'exclude_failures' is true, files whose
    distutils metadata cannot be parsed are silently dropped instead of
    raising.
    """
    # Avoid a shared mutable default argument; None means "exclude nothing".
    if exclude is None:
        exclude = []
    if not isinstance(patterns, (list, tuple)):
        patterns = [patterns]
    explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
    seen = set()
    deps = create_dependency_tree(ctx, quiet=quiet)
    to_exclude = set()
    if not isinstance(exclude, list):
        exclude = [exclude]
    for pattern in exclude:
        to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))

    module_list = []
    for pattern in patterns:
        if isinstance(pattern, str):
            filepattern = pattern
            template = None
            name = '*'
            base = None
            exn_type = Extension
        elif isinstance(pattern, Extension):
            # Use the first .py/.pyx source as the file pattern.
            for filepattern in pattern.sources:
                if os.path.splitext(filepattern)[1] in ('.py', '.pyx'):
                    break
            else:
                # ignore non-cython modules
                module_list.append(pattern)
                continue
            template = pattern
            name = template.name
            base = DistutilsInfo(exn=template)
            exn_type = template.__class__
        else:
            raise TypeError(pattern)
        for filename in extended_iglob(filepattern):
            if os.path.abspath(filename) in to_exclude:
                continue
            if '*' in name:
                module_name = deps.fully_qualified_name(filename)
                if module_name in explicit_modules:
                    continue
            else:
                module_name = name
            if module_name not in seen:
                try:
                    kwds = deps.distutils_info(filename, aliases, base).values
                except Exception:
                    if exclude_failures:
                        continue
                    raise
                if base is not None:
                    # Fill in template defaults without overriding per-file
                    # settings.
                    for key, value in base.values.items():
                        if key not in kwds:
                            kwds[key] = value
                sources = [filename]
                if template is not None:
                    sources += [m for m in template.sources if m != filepattern]
                if 'sources' in kwds:
                    # allow users to add .c files etc.
                    for source in kwds['sources']:
                        source = encode_filename_in_py2(source)
                        if source not in sources:
                            sources.append(source)
                    del kwds['sources']
                if 'depends' in kwds:
                    depends = resolve_depends(
                        kwds['depends'],
                        (kwds.get('include_dirs') or []) + [find_root_package_dir(filename)])
                    if template is not None:
                        # Always include everything from the template.
                        depends = list(set(template.depends).union(set(depends)))
                    kwds['depends'] = depends
                module_list.append(exn_type(name=module_name, sources=sources, **kwds))
                # Bug fix: record the name the guard above actually checks.
                # The original added 'name', which is '*' for glob patterns,
                # so duplicate modules were never deduplicated.
                seen.add(module_name)
    return module_list
def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False,
                          exclude_failures=False):
    """
    Expand glob patterns and/or Extension templates into a list of distutils
    Extension objects, one per matched Cython (.py/.pyx) source file.

    Extensions in 'patterns' whose first source is not a Cython file are
    passed through unchanged.  Files matching any pattern in 'exclude' are
    skipped, as are files whose fully qualified module name is already given
    by an explicit Extension in 'patterns'.  When 'exclude_failures' is true,
    files whose distutils metadata cannot be parsed are silently dropped
    instead of raising.
    """
    # Avoid a shared mutable default argument; None means "exclude nothing".
    if exclude is None:
        exclude = []
    if not isinstance(patterns, (list, tuple)):
        patterns = [patterns]
    explicit_modules = set(
        [m.name for m in patterns if isinstance(m, Extension)])
    seen = set()
    deps = create_dependency_tree(ctx, quiet=quiet)
    to_exclude = set()
    if not isinstance(exclude, list):
        exclude = [exclude]
    for pattern in exclude:
        to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))

    module_list = []
    for pattern in patterns:
        if isinstance(pattern, str):
            filepattern = pattern
            template = None
            name = '*'
            base = None
            exn_type = Extension
        elif isinstance(pattern, Extension):
            filepattern = pattern.sources[0]
            if os.path.splitext(filepattern)[1] not in ('.py', '.pyx'):
                # ignore non-cython modules
                module_list.append(pattern)
                continue
            template = pattern
            name = template.name
            base = DistutilsInfo(exn=template)
            exn_type = template.__class__
        else:
            raise TypeError(pattern)
        for filename in extended_iglob(filepattern):
            if os.path.abspath(filename) in to_exclude:
                continue
            if '*' in name:
                module_name = deps.fully_qualified_name(filename)
                if module_name in explicit_modules:
                    continue
            else:
                module_name = name
            if module_name not in seen:
                try:
                    kwds = deps.distutils_info(filename, aliases, base).values
                except Exception:
                    if exclude_failures:
                        continue
                    raise
                if base is not None:
                    # Fill in template defaults without overriding per-file
                    # settings.
                    for key, value in base.values.items():
                        if key not in kwds:
                            kwds[key] = value
                sources = [filename]
                if template is not None:
                    sources += template.sources[1:]
                if 'sources' in kwds:
                    # allow users to add .c files etc.
                    for source in kwds['sources']:
                        source = encode_filename_in_py2(source)
                        if source not in sources:
                            sources.append(source)
                    del kwds['sources']
                if 'depends' in kwds:
                    depends = resolve_depends(
                        kwds['depends'],
                        (kwds.get('include_dirs') or []) + [find_root_package_dir(filename)])
                    if template is not None:
                        # Always include everything from the template.
                        depends = list(
                            set(template.depends).union(set(depends)))
                    kwds['depends'] = depends
                module_list.append(
                    exn_type(name=module_name, sources=sources, **kwds))
                # Bug fix: record the name the guard above actually checks.
                # The original added 'name', which is '*' for glob patterns,
                # so duplicate modules were never deduplicated.
                seen.add(module_name)
    return module_list