def get_tag(self):
    supported_tags = pep425tags.get_supported()

    if self.distribution.is_pure():
        if self.universal:
            impl = 'py2.py3'
        else:
            impl = self.python_tag
        tag = (impl, 'none', 'any')
    else:
        plat_name = self.plat_name
        if plat_name is None:
            plat_name = get_platform()
        plat_name = plat_name.replace('-', '_').replace('.', '_')
        impl_name = get_abbr_impl()
        impl_ver = get_impl_ver()
        # PEP 3149 -- no SOABI in Py 2
        # For PyPy?
        # "pp%s%s" % (sys.pypy_version_info.major,
        #             sys.pypy_version_info.minor)
        abi_tag = sysconfig.get_config_vars().get('SOABI', 'none')
        if abi_tag.startswith('cpython-'):
            abi_tag = 'cp' + abi_tag.rsplit('-', 1)[-1]
        tag = (impl_name + impl_ver, abi_tag, plat_name)
        # XXX switch to this alternate implementation for non-pure:
        assert tag == supported_tags[0]
    return tag
def python_is_optimized():
    cflags = sysconfig.get_config_vars()['PY_CFLAGS']
    final_opt = ""
    for opt in cflags.split():
        if opt.startswith('-O'):
            final_opt = opt
    return (final_opt and final_opt != '-O0')
def get_tag(self):
    supported_tags = pep425tags.get_supported()

    purity = self.distribution.is_pure()
    impl_ver = get_impl_ver()
    abi_tag = 'none'
    plat_name = 'any'
    impl_name = 'py'
    if purity:
        wheel = self.distribution.get_option_dict('wheel')
        if 'universal' in wheel:
            # please don't define this in your global configs
            val = wheel['universal'][1].strip()
            if val.lower() in ('1', 'true', 'yes'):
                impl_name = 'py2.py3'
                impl_ver = ''
        tag = (impl_name + impl_ver, abi_tag, plat_name)
    else:
        plat_name = self.plat_name
        if plat_name is None:
            plat_name = get_platform()
        plat_name = plat_name.replace('-', '_').replace('.', '_')
        impl_name = get_abbr_impl()
        # PEP 3149 -- no SOABI in Py 2
        # For PyPy?
        # "pp%s%s" % (sys.pypy_version_info.major,
        #             sys.pypy_version_info.minor)
        abi_tag = sysconfig.get_config_vars().get('SOABI', abi_tag)
        if abi_tag.startswith('cpython-'):
            abi_tag = 'cp' + abi_tag.rsplit('-', 1)[-1]
        tag = (impl_name + impl_ver, abi_tag, plat_name)
        # XXX switch to this alternate implementation for non-pure:
        assert tag == supported_tags[0]
    return tag
def python_is_optimized():
    cflags = sysconfig.get_config_vars()["PY_CFLAGS"]
    final_opt = ""
    for opt in cflags.split():
        if opt.startswith("-O"):
            final_opt = opt
    return final_opt not in ("", "-O0", "-Og")
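A minimal usage sketch (not from the original sources) showing how a helper like python_is_optimized() above is typically wired into a test suite; the class and test names here are hypothetical.

import unittest

@unittest.skipUnless(python_is_optimized(),
                     "requires an optimized (non -O0/-Og) Python build")
class OptimizedBuildOnlyTests(unittest.TestCase):
    # placeholder test; real suites put optimization-sensitive checks here
    def test_placeholder(self):
        self.assertTrue(python_is_optimized())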
def test_version_int(self):
    source = self.mkdtemp()
    target = self.mkdtemp()
    expected = self.write_sample_scripts(source)

    cmd = self.get_build_scripts_cmd(
        target, [os.path.join(source, fn) for fn in expected])
    cmd.finalize_options()

    # http://bugs.python.org/issue4524
    #
    # On linux-g++-32 with command line `./configure --enable-ipv6
    # --with-suffix=3`, python is compiled okay but the build scripts
    # failed when writing the name of the executable
    old = sysconfig.get_config_vars().get('VERSION')
    sysconfig._CONFIG_VARS['VERSION'] = 4
    try:
        cmd.run()
    finally:
        if old is not None:
            sysconfig._CONFIG_VARS['VERSION'] = old

    built = os.listdir(target)
    for name in expected:
        self.assertTrue(name in built)
def test_main():
    cflags = sysconfig.get_config_vars()["PY_CFLAGS"]
    final_opt = ""
    for opt in cflags.split():
        if opt.startswith("-O"):
            final_opt = opt
    if final_opt and final_opt != "-O0":
        raise unittest.SkipTest("Python was built with compiler optimizations, "
                                "tests can't reliably succeed")

    run_unittest(PrettyPrintTests,
                 PyListTests,
                 StackNavigationTests,
                 PyBtTests,
                 PyPrintTests,
                 PyLocalsTests)
def get_context(self):
    ret = {}
    try:
        ret['sysconfig'] = sysconfig.get_config_vars()
    except:
        pass
    try:
        ret['paths'] = sysconfig.get_paths()
    except:
        pass
    return ret
def customize_compiler(compiler):
    """Do any platform-specific customization of a CCompiler instance.

    Mainly needed on Unix, so we can plug in the information that
    varies across Unices and is stored in Python's Makefile.
    """
    if compiler.compiler_type == "unix":
        (cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \
            sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
                                      'CCSHARED', 'LDSHARED', 'SO', 'AR',
                                      'ARFLAGS')

        if 'CC' in os.environ:
            cc = os.environ['CC']
        if 'CXX' in os.environ:
            cxx = os.environ['CXX']
        if 'LDSHARED' in os.environ:
            ldshared = os.environ['LDSHARED']
        if 'CPP' in os.environ:
            cpp = os.environ['CPP']
        else:
            cpp = cc + " -E"  # not always
        if 'LDFLAGS' in os.environ:
            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
        if 'CFLAGS' in os.environ:
            cflags = opt + ' ' + os.environ['CFLAGS']
            ldshared = ldshared + ' ' + os.environ['CFLAGS']
        if 'CPPFLAGS' in os.environ:
            cpp = cpp + ' ' + os.environ['CPPFLAGS']
            cflags = cflags + ' ' + os.environ['CPPFLAGS']
            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
        if 'AR' in os.environ:
            ar = os.environ['AR']
        if 'ARFLAGS' in os.environ:
            archiver = ar + ' ' + os.environ['ARFLAGS']
        else:
            if ar_flags is not None:
                archiver = ar + ' ' + ar_flags
            else:
                # see if its the proper default value
                # mmm I don't want to backport the makefile
                archiver = ar + ' rc'

        cc_cmd = cc + ' ' + cflags
        compiler.set_executables(
            preprocessor=cpp,
            compiler=cc_cmd,
            compiler_so=cc_cmd + ' ' + ccshared,
            compiler_cxx=cxx,
            linker_so=ldshared,
            linker_exe=cc,
            archiver=archiver)

        compiler.shared_lib_extension = so_ext
def build_extensions(self): if sys.platform.startswith("linux"): # Allow a custom C compiler through the environment variables. This # allows Komodo to build using a gcc compiler that's not first on # the path. compiler = os.environ.get('CC') if compiler is not None: import sysconfig (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') args = {} args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags self.compiler.set_executables(**args) elif sys.platform == "darwin": compiler = os.environ.get('CC') if compiler is not None: import sysconfig (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') args = {} # clang does not support the '-std=gnu99' option - so remove it. cflags = cflags.replace('-std=gnu99', '') args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags self.compiler.set_executables(**args) build_ext.build_extensions(self)
def run(args):
    assert len(args) == 0
    import_modules()
    import time
    t_start = time.time()
    from libtbx import introspection
    import os
    import sysconfig
    print "After script imports:"
    print " wall clock time: %.2f" % (time.time() - t_start)
    print
    mb = 1024 * 1024
    lib = os.path.join(
        os.environ["LIBTBX_BUILD"],  # intentionally not using libtbx.env for speed
        "lib")
    ext_so = []
    for node in os.listdir(lib):
        pylibext = sysconfig.get_config_vars("SO")[0]
        if (node.endswith("_ext" + pylibext)):
            ext_so.append(node.split(".")[0])
    ext_so.sort(cmp_so)
    print "Before importing extensions:"
    vmi = introspection.virtual_memory_info()
    vmi.show(prefix=" ")
    prev_vms = vmi.get_bytes('VmSize:')
    prev_rss = vmi.get_bytes('VmRSS:')
    print " wall clock time: %.2f" % (time.time() - t_start)
    print
    for so in ext_so:
        t0 = time.time()
        exec("import %s" % so)
        vmi = introspection.virtual_memory_info()
        vms = vmi.get_bytes('VmSize:')
        rss = vmi.get_bytes('VmRSS:')
        if (vms is not None):  # won't work on Mac
            print "%.2f %3.0f %3.0f %s" % (
                time.time() - t0, (vms - prev_vms) / mb, (rss - prev_rss) / mb, so)
        else:
            assert (sys.platform in ["darwin", "win32"])
            print "%.2f %s" % (time.time() - t0, so)
        prev_vms = vms
        prev_rss = rss
    print
    print "After importing all extensions:"
    introspection.virtual_memory_info().show(prefix=" ")
    print " wall clock time: %.2f" % (time.time() - t_start)
    print
def find_boost_cflags():
    # Get Boost dir (code copied from ufc/src/utils/python/ufc_utils/build.py)
    # Set a default directory for the boost installation
    if sys.platform == "darwin":
        # Use Brew as default
        default = os.path.join(os.path.sep, "usr", "local")
    else:
        default = os.path.join(os.path.sep, "usr")

    # If BOOST_DIR is not set use default directory
    boost_inc_dir = ""
    boost_lib_dir = ""
    boost_math_tr1_lib = "boost_math_tr1"
    boost_dir = os.getenv("BOOST_DIR", default)
    boost_is_found = False
    for inc_dir in ["", "include"]:
        if os.path.isfile(os.path.join(boost_dir, inc_dir, "boost", "version.hpp")):
            boost_inc_dir = os.path.join(boost_dir, inc_dir)
            break
    libdir_multiarch = "lib/" + sysconfig.get_config_vars().get("MULTIARCH", "")
    for lib_dir in ["", "lib", libdir_multiarch, "lib64"]:
        for ext in [".so", "-mt.so", ".dylib", "-mt.dylib"]:
            _lib = os.path.join(boost_dir, lib_dir, "lib" + boost_math_tr1_lib + ext)
            if os.path.isfile(_lib):
                if "-mt" in _lib:
                    boost_math_tr1_lib += "-mt"
                boost_lib_dir = os.path.join(boost_dir, lib_dir)
                break
    if boost_inc_dir != "" and boost_lib_dir != "":
        boost_is_found = True

    if boost_is_found:
        boost_cflags = " -I%s -L%s" % (boost_inc_dir, boost_lib_dir)
        boost_linkflags = "-l%s" % boost_math_tr1_lib
    else:
        boost_cflags = ""
        boost_linkflags = ""
        info_red("""The Boost library was not found.
If Boost is installed in a nonstandard location,
set the environment variable BOOST_DIR.
Forms using bessel functions will fail to build.
""")
    return boost_cflags, boost_linkflags
def test_version_int(self):
    source = self.mkdtemp()
    target = self.mkdtemp()
    expected = self.write_sample_scripts(source)
    cmd = self.get_build_scripts_cmd(
        target, [os.path.join(source, fn) for fn in expected])
    cmd.finalize_options()

    old = sysconfig.get_config_vars().get("VERSION")
    sysconfig._CONFIG_VARS["VERSION"] = 4
    try:
        cmd.run()
    finally:
        if old is not None:
            sysconfig._CONFIG_VARS["VERSION"] = old

    built = os.listdir(target)
    for name in expected:
        self.assertIn(name, built)
    return
def get_python_dynlib():
    """
    python -c "import utool; print(utool.get_python_dynlib())"

    get_python_dynlib

    Returns:
        ?: dynlib

    Example:
        >>> # DOCTEST_DISABLE
        >>> from utool.util_cplat import *  # NOQA
        >>> dynlib = get_python_dynlib()
        >>> print(dynlib)
        /usr/lib/x86_64-linux-gnu/libpython2.7.so
    """
    import sysconfig
    cfgvars = sysconfig.get_config_vars()
    dynlib = os.path.join(cfgvars['LIBDIR'], cfgvars['MULTIARCH'],
                          cfgvars['LDLIBRARY'])
    if not exists(dynlib):
        dynlib = os.path.join(cfgvars['LIBDIR'], cfgvars['LDLIBRARY'])
        assert exists(dynlib)
    return dynlib
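A hedged sketch (not part of the snippet above): once get_python_dynlib() has located the shared libpython, it can be handed to ctypes to confirm the file really is loadable; the variable names here are illustrative only.

import ctypes

libpython_path = get_python_dynlib()     # helper defined above
libpython = ctypes.CDLL(libpython_path)  # raises OSError if the path is not a loadable library
print("loaded", libpython_path)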
def test_ext_fullpath(self):
    ext = sysconfig.get_config_vars()['SO']
    # building lxml.etree inplace
    #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c')
    #etree_ext = Extension('lxml.etree', [etree_c])
    #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
    dist = Distribution()
    cmd = build_ext(dist)
    cmd.inplace = True
    cmd.distribution.package_dir = 'src'
    cmd.distribution.packages = ['lxml', 'lxml.html']
    curdir = os.getcwd()
    wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
    path = cmd.get_ext_fullpath('lxml.etree')
    self.assertEqual(wanted, path)

    # building lxml.etree not inplace
    cmd.inplace = False
    cmd.build_lib = os.path.join(curdir, 'tmpdir')
    wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
    path = cmd.get_ext_fullpath('lxml.etree')
    self.assertEqual(wanted, path)

    # building twisted.runner.portmap not inplace
    build_py = cmd.get_finalized_command('build_py')
    build_py.package_dir = None
    cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
    path = cmd.get_ext_fullpath('twisted.runner.portmap')
    wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
                          'portmap' + ext)
    self.assertEqual(wanted, path)

    # building twisted.runner.portmap inplace
    cmd.inplace = True
    path = cmd.get_ext_fullpath('twisted.runner.portmap')
    wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
    self.assertEqual(wanted, path)
def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
    # The _tkinter module, using frameworks. Since frameworks are quite
    # different the UNIX search logic is not sharable.
    from os.path import join, exists
    framework_dirs = [
        '/Library/Frameworks',
        '/System/Library/Frameworks/',
        join(os.getenv('HOME'), '/Library/Frameworks')
    ]

    sysroot = macosx_sdk_root()

    # Find the directory that contains the Tcl.framework and Tk.framework
    # bundles.
    # XXX distutils should support -F!
    for F in framework_dirs:
        # both Tcl.framework and Tk.framework should be present
        for fw in 'Tcl', 'Tk':
            if is_macosx_sdk_path(F):
                if not exists(join(sysroot, F[1:], fw + '.framework')):
                    break
            else:
                if not exists(join(F, fw + '.framework')):
                    break
        else:
            # ok, F is now directory with both frameworks. Continure
            # building
            break
    else:
        # Tk and Tcl frameworks not found. Normal "unix" tkinter search
        # will now resume.
        return 0

    # For 8.4a2, we must add -I options that point inside the Tcl and Tk
    # frameworks. In later release we should hopefully be able to pass
    # the -F option to gcc, which specifies a framework lookup path.
    #
    include_dirs = [
        join(F, fw + '.framework', H)
        for fw in ('Tcl', 'Tk')
        for H in ('Headers', 'Versions/Current/PrivateHeaders')
    ]

    # For 8.4a2, the X11 headers are not included. Rather than include a
    # complicated search, this is a hard-coded path. It could bail out
    # if X11 libs are not found...
    include_dirs.append('/usr/X11R6/include')
    frameworks = ['-framework', 'Tcl', '-framework', 'Tk']

    # All existing framework builds of Tcl/Tk don't support 64-bit
    # architectures.
    cflags = sysconfig.get_config_vars('CFLAGS')[0]
    archs = re.findall('-arch\s+(\w+)', cflags)

    tmpfile = os.path.join(self.build_temp, 'tk.arch')
    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)

    # Note: cannot use os.popen or subprocess here, that
    # requires extensions that are not available here.
    if is_macosx_sdk_path(F):
        os.system(
            "file %s/Tk.framework/Tk | grep 'for architecture' > %s"
            % (os.path.join(sysroot, F[1:]), tmpfile))
    else:
        os.system(
            "file %s/Tk.framework/Tk | grep 'for architecture' > %s"
            % (F, tmpfile))

    with open(tmpfile) as fp:
        detected_archs = []
        for ln in fp:
            a = ln.split()[-1]
            if a in archs:
                detected_archs.append(ln.split()[-1])
    os.unlink(tmpfile)

    for a in detected_archs:
        frameworks.append('-arch')
        frameworks.append(a)

    ext = Extension(
        '_tkinter',
        ['_tkinter.c', 'tkappinit.c'],
        define_macros=[('WITH_APPINIT', 1)],
        include_dirs=include_dirs,
        libraries=[],
        extra_compile_args=frameworks[2:],
        extra_link_args=frameworks,
    )
    self.extensions.append(ext)
    return 1
def build_project(args):
    """
    Build a dev version of the project.

    Returns
    -------
    site_dir
        site-packages directory where it was installed

    """
    import sysconfig

    root_ok = [
        os.path.exists(os.path.join(ROOT_DIR, fn)) for fn in PROJECT_ROOT_FILES
    ]
    if not all(root_ok):
        print("To build the project, run runtests.py in "
              "git checkout or unpacked source")
        sys.exit(1)

    dst_dir = os.path.join(ROOT_DIR, 'build', 'testenv')

    env = dict(os.environ)
    cmd = [sys.executable, 'setup.py']

    # Always use ccache, if installed
    env['PATH'] = os.pathsep.join(EXTRA_PATH + env.get('PATH', '').split(os.pathsep))
    cvars = sysconfig.get_config_vars()
    compiler = env.get('CC') or cvars.get('CC', '')
    if 'gcc' in compiler:
        # Check that this isn't clang masquerading as gcc.
        if sys.platform != 'darwin' or 'gnu-gcc' in compiler:
            # add flags used as werrors
            warnings_as_errors = ' '.join([
                # from tools/travis-test.sh
                '-Werror=vla',
                '-Werror=nonnull',
                '-Werror=pointer-arith',
                '-Wlogical-op',
                # from sysconfig
                '-Werror=unused-function',
            ])
            env['CFLAGS'] = warnings_as_errors + ' ' + env.get('CFLAGS', '')
    if args.debug or args.gcov:
        # assume everyone uses gcc/gfortran
        env['OPT'] = '-O0 -ggdb'
        env['FOPT'] = '-O0 -ggdb'
        if args.gcov:
            env['OPT'] = '-O0 -ggdb'
            env['FOPT'] = '-O0 -ggdb'
            env['CC'] = cvars['CC'] + ' --coverage'
            env['CXX'] = cvars['CXX'] + ' --coverage'
            env['F77'] = 'gfortran --coverage '
            env['F90'] = 'gfortran --coverage '
            env['LDSHARED'] = cvars['LDSHARED'] + ' --coverage'
            env['LDFLAGS'] = " ".join(cvars['LDSHARED'].split()[1:]) + ' --coverage'

    cmd += ["build"]
    if args.parallel > 1:
        cmd += ["-j", str(args.parallel)]
    if args.warn_error:
        cmd += ["--warn-error"]
    if args.cpu_baseline:
        cmd += ["--cpu-baseline", args.cpu_baseline]
    if args.cpu_dispatch:
        cmd += ["--cpu-dispatch", args.cpu_dispatch]
    if args.disable_optimization:
        cmd += ["--disable-optimization"]
    if args.simd_test is not None:
        cmd += ["--simd-test", args.simd_test]
    if args.debug_info:
        cmd += ["build_src", "--verbose-cfg"]
    # Install; avoid producing eggs so numpy can be imported from dst_dir.
    cmd += [
        'install', '--prefix=' + dst_dir, '--single-version-externally-managed',
        '--record=' + dst_dir + 'tmp_install_log.txt'
    ]

    from distutils.sysconfig import get_python_lib
    site_dir = get_python_lib(prefix=dst_dir, plat_specific=True)
    site_dir_noarch = get_python_lib(prefix=dst_dir, plat_specific=False)
    # easy_install won't install to a path that Python by default cannot see
    # and isn't on the PYTHONPATH. Plus, it has to exist.
    if not os.path.exists(site_dir):
        os.makedirs(site_dir)
    if not os.path.exists(site_dir_noarch):
        os.makedirs(site_dir_noarch)
    env['PYTHONPATH'] = site_dir + os.pathsep + site_dir_noarch

    log_filename = os.path.join(ROOT_DIR, 'build.log')

    if args.show_build_log:
        ret = subprocess.call(cmd, env=env, cwd=ROOT_DIR)
    else:
        log_filename = os.path.join(ROOT_DIR, 'build.log')
        print("Building, see build.log...")
        with open(log_filename, 'w') as log:
            p = subprocess.Popen(cmd, env=env, stdout=log, stderr=log,
                                 cwd=ROOT_DIR)
            try:
                # Wait for it to finish, and print something to indicate the
                # process is alive, but only if the log file has grown (to
                # allow continuous integration environments kill a hanging
                # process accurately if it produces no output)
                last_blip = time.time()
                last_log_size = os.stat(log_filename).st_size
                while p.poll() is None:
                    time.sleep(0.5)
                    if time.time() - last_blip > 60:
                        log_size = os.stat(log_filename).st_size
                        if log_size > last_log_size:
                            print("    ... build in progress")
                            last_blip = time.time()
                            last_log_size = log_size

                ret = p.wait()
            except:
                p.kill()
                p.wait()
                raise

    if ret == 0:
        print("Build OK")
    else:
        if not args.show_build_log:
            with open(log_filename, 'r') as f:
                print(f.read())
            print("Build failed!")
        sys.exit(1)

    return site_dir, site_dir_noarch
try:
    import llfuse
    # Does this version of llfuse support ns precision?
    have_fuse_mtime_ns = hasattr(llfuse.EntryAttributes, "st_mtime_ns")
except ImportError:
    have_fuse_mtime_ns = False

has_lchflags = hasattr(os, "lchflags")

# The mtime get/set precision varies on different OS and Python versions
if "HAVE_FUTIMENS" in getattr(posix, "_have_functions", []):
    st_mtime_ns_round = 0
elif "HAVE_UTIMES" in sysconfig.get_config_vars():
    st_mtime_ns_round = -6
else:
    st_mtime_ns_round = -9

if sys.platform.startswith("netbsd"):
    st_mtime_ns_round = -4  # only >1 microsecond resolution here?

# Ensure that the loggers exist for all tests
setup_logging()


class BaseTestCase(unittest.TestCase):
    """
    """
def run(script, args, build_dir=None, include_dirs=None, verbose=False):
    """
    The main MrHooker entry function.

    @param script
        The .pyx script file used to build the LD_PRELOAD library.
    @param args
        The command to execute as a subprocess.
    @param build_dir
        The build directory, into which shared libraries are stored. If this
        is None, then the library will be built in a temporary directory and
        discarded at exit. Otherwise, the directory will keep the build
        directories.
    @param include_dirs
        List of include directories to search for Pyrex header files (.pxd)
        and C header files.
    """
    if verbose:
        print "Script path: ", script
        print "Command: ", args
        if build_dir is None:
            print "Build directory:", build_dir, "(temporary directory)"
        else:
            print "Build directory:", build_dir
        print "Include directories:", include_dirs

    # Locate libpython, in the likely event that the child process does not
    # load Python itself.
    import sysconfig
    libpython = "/".join(sysconfig.get_config_vars("LIBPL", "LDLIBRARY"))

    # Locate the "_init_preload" shared library.
    import imp
    (file_, _init_preload, desc) = imp.find_module("_init_preload", __path__)

    tempdir = None
    if not build_dir:
        # Create a temporary directory to store the shared library in. We'll
        # delete it when we're done.
        import tempfile
        tempdir = tempfile.mkdtemp()
        build_dir = tempdir

    try:
        from pyximport import pyxbuild
        from Cython.Distutils.extension import Extension

        # Get the module name from the script file.
        module_name = os.path.splitext(os.path.basename(script))[0]

        # Make sure the include directories are absolute, since the extension
        # may be built in another directory.
        if include_dirs:
            include_dirs = map(os.path.abspath, include_dirs)

        # Create and build an Extension.
        ext = Extension(module_name, [script], include_dirs=include_dirs)
        out_fname = pyxbuild.pyx_to_dll(
            script, ext, build_in_temp=True, pyxbuild_dir=build_dir)

        env = os.environ.copy()
        env["LD_PRELOAD"] = " ".join([libpython, out_fname, _init_preload])
        env["MRHOOKER_MODULE"] = module_name
        if verbose:
            print "Executing command with:\n" + \
                "    LD_PRELOAD =", env["LD_PRELOAD"] + "\n" + \
                "    MRHOOKER_MODULE =", module_name + "\n"

        import subprocess
        rc = subprocess.call(args, env=env)
        if verbose:
            print "Command completed with return code '%d'" % rc
        return rc
    finally:
        if tempdir:
            import shutil
            shutil.rmtree(tempdir)
try:
    with tempfile.NamedTemporaryFile() as file:
        platform.set_flags(file.name, stat.UF_NODUMP)
except OSError:
    has_lchflags = False

try:
    import llfuse
    has_llfuse = True or llfuse  # avoids "unused import"
except ImportError:
    has_llfuse = False

# The mtime get/set precision varies on different OS and Python versions
if posix and 'HAVE_FUTIMENS' in getattr(posix, '_have_functions', []):
    st_mtime_ns_round = 0
elif 'HAVE_UTIMES' in sysconfig.get_config_vars():
    st_mtime_ns_round = -6
else:
    st_mtime_ns_round = -9

if sys.platform.startswith('netbsd'):
    st_mtime_ns_round = -4  # only >1 microsecond resolution here?


@contextmanager
def unopened_tempfile():
    with tempfile.TemporaryDirectory() as tempdir:
        yield os.path.join(tempdir, "file")


@functools.lru_cache()
        PACKAGE_DATA.extend(
            join(root.replace(str(Path("src") / "bison") + os.path.sep, ''), f)
            for f in files)
elif sys.platform.startswith('linux'):
    libs = ['dl']
    # extra_compile_args += ['-DCYTHON_TRACE=1']
    bisondynlibModule = str(
        Path("src") / "bison" / "c" / "bisondynlib-linux.c")
elif sys.platform.startswith('darwin'):
    libs = ['dl']
    bisondynlibModule = str(
        Path("src") / "bison" / "c" / "bisondynlib-linux.c")
    import sysconfig
    v: dict = sysconfig.get_config_vars()
    v['LDSHARED'] = v['LDSHARED'].replace('-bundle', '-dynamiclib')
else:
    raise RuntimeError(f"Platform '{sys.platform}' is not supported.")

# cython
SOURCES = [
    str(Path("src") / "bison" / "cython" / "bison_.pyx"),
    str(Path("src") / "bison" / "c" / "bison_callback.c"),
    bisondynlibModule
]

# compile with cython if available
try:
    from Cython.Distutils import build_ext
    from Cython.Distutils.extension import Extension
def test_EXT_SUFFIX_in_vars(self):
    import _imp
    vars = sysconfig.get_config_vars()
    self.assertIsNotNone(vars['SO'])
    self.assertEqual(vars['SO'], vars['EXT_SUFFIX'])
    self.assertEqual(vars['EXT_SUFFIX'], _imp.extension_suffixes()[0])
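A minimal, hedged sketch (not from the test above): portable code often falls back from EXT_SUFFIX to the legacy SO key, since sysconfig.get_config_var() returns None for unknown names instead of raising.

import sysconfig

ext_suffix = (sysconfig.get_config_var("EXT_SUFFIX")
              or sysconfig.get_config_var("SO")  # legacy key, absent on newer Pythons
              or ".so")                          # last-resort assumption
print("extension modules end with", ext_suffix)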
def detect_modules(self):
    # On Debian /usr/local is always used, so we don't include it twice
    # only change this for cross builds for 3.3, issues on Mageia
    if cross_compiling:
        self.add_gcc_paths()
    self.add_multiarch_paths()

    # Add paths specified in the environment variables LDFLAGS and
    # CPPFLAGS for header and library files.
    # We must get the values from the Makefile and not the environment
    # directly since an inconsistently reproducible issue comes up where
    # the environment variable is not set even though the value were passed
    # into configure and stored in the Makefile (issue found on OS X 10.3).
    for env_var, arg_name, dir_list in (
            ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
            ('LDFLAGS', '-L', self.compiler.library_dirs),
            ('CPPFLAGS', '-I', self.compiler.include_dirs)):
        env_val = sysconfig.get_config_var(env_var)
        if env_val:
            # To prevent optparse from raising an exception about any
            # options in env_val that it doesn't know about we strip out
            # all double dashes and any dashes followed by a character
            # that is not for the option we are dealing with.
            #
            # Please note that order of the regex is important! We must
            # strip out double-dashes first so that we don't end up with
            # substituting "--Long" to "-Long" and thus lead to "ong" being
            # used for a library directory.
            env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
                             ' ', env_val)
            parser = optparse.OptionParser()
            # Make sure that allowing args interspersed with options is
            # allowed
            parser.allow_interspersed_args = True
            parser.error = lambda msg: None
            parser.add_option(arg_name, dest="dirs", action="append")
            options = parser.parse_args(env_val.split())[0]
            if options.dirs:
                for directory in reversed(options.dirs):
                    add_dir_to_list(dir_list, directory)

    if os.path.normpath(sys.base_prefix) != '/usr' \
            and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
        # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework
        # (PYTHONFRAMEWORK is set) to avoid
        # linking problems when
        # building a framework with different architectures than
        # the one that is currently installed (issue #7473)
        add_dir_to_list(self.compiler.library_dirs,
                        sysconfig.get_config_var("LIBDIR"))
        add_dir_to_list(self.compiler.include_dirs,
                        sysconfig.get_config_var("INCLUDEDIR"))

    # lib_dirs and inc_dirs are used to search for files;
    # if a file is found in one of those directories, it can
    # be assumed that no additional -I,-L directives are needed.
    if not cross_compiling:
        lib_dirs = self.compiler.library_dirs + [
            '/lib64', '/usr/lib64',
            '/lib', '/usr/lib',
        ]
        inc_dirs = self.compiler.include_dirs + ['/usr/include']
    else:
        lib_dirs = self.compiler.library_dirs[:]
        inc_dirs = self.compiler.include_dirs[:]
    exts = []
    missing = []

    config_h = sysconfig.get_config_h_filename()
    with open(config_h) as file:
        config_h_vars = sysconfig.parse_config_h(file)

    srcdir = sysconfig.get_config_var('srcdir')

    # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
    if host_platform in ['osf1', 'unixware7', 'openunix8']:
        lib_dirs += ['/usr/ccs/lib']

    # HP-UX11iv3 keeps files in lib/hpux folders.
    if host_platform == 'hp-ux11':
        lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']

    if host_platform == 'darwin':
        # This should work on any unixy platform ;-)
        # If the user has bothered specifying additional -I and -L flags
        # in OPT and LDFLAGS we might as well use them here.
        #
        # NOTE: using shlex.split would technically be more correct, but
        # also gives a bootstrap problem. Let's hope nobody uses
        # directories with whitespace in the name to store libraries.
        cflags, ldflags = sysconfig.get_config_vars('CFLAGS', 'LDFLAGS')
        for item in cflags.split():
            if item.startswith('-I'):
                inc_dirs.append(item[2:])

        for item in ldflags.split():
            if item.startswith('-L'):
                lib_dirs.append(item[2:])

    # Check for MacOS X, which doesn't need libm.a at all
    math_libs = ['m']
    if host_platform == 'darwin':
        math_libs = []

    # XXX Omitted modules: gl, pure, dl, SGI-specific modules

    # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm:
    if (True or 'gdbm' in dbm_order and
            self.compiler.find_library_file(lib_dirs, 'gdbm')):
        exts.append(Extension('_gdbm', ['Modules/_gdbmmodule.c'],
                              libraries=['gdbm']))
    else:
        missing.append('_gdbm')

    self.extensions.extend(exts)

    # Call the method for detecting whether _tkinter can be compiled
    self.detect_tkinter(inc_dirs, lib_dirs)

    if '_tkinter' not in [e.name for e in self.extensions]:
        missing.append('_tkinter')

    return missing
import sys
import sysconfig
import platform
import imp

sys.stdout.write('Version: ' + str(sys.version).replace('\n', ' '))
sys.stdout.write('\nVersionNumber: ' + str(sys.version_info.major) + '.' +
                 str(sys.version_info.minor))
if not platform.system() == 'Windows':
    sys.stdout.write('\nLIBPL: ' + sysconfig.get_config_vars('LIBPL')[0])
    sys.stdout.write('\nLIBDIR: ' + sysconfig.get_config_vars('LIBDIR')[0])
    sys.stdout.write('\nPREFIX: ' + sysconfig.get_config_vars('prefix')[0])
    sys.stdout.write('\nEXEC_PREFIX: ' + sysconfig.get_config_vars('exec_prefix')[0])
sys.stdout.write("\nArchitecture: " + platform.architecture()[0])

try:
    import numpy
    sys.stdout.write('\nNumpyPath: ' + str(numpy.__path__[0]))
    sys.stdout.write('\nNumpyVersion: ' + str(numpy.__version__))
except Exception:
    pass

try:
    sys.stdout.write('\nTensorflowPath: ' + str(imp.find_module('tensorflow')[1]))
except Exception:
    pass
def build_extensions(self):
    # Detect which modules should be compiled
    old_so = self.compiler.shared_lib_extension
    # Workaround PEP 3149 stuff
    self.compiler.shared_lib_extension = os.environ.get("SO", ".so")
    try:
        missing = self.detect_modules()
    finally:
        self.compiler.shared_lib_extension = old_so

    # Remove modules that are present on the disabled list
    extensions = [
        ext for ext in self.extensions
        if ext.name not in disabled_module_list
    ]
    # move ctypes to the end, it depends on other modules
    ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
    if "_ctypes" in ext_map:
        ctypes = extensions.pop(ext_map["_ctypes"])
        extensions.append(ctypes)
    self.extensions = extensions

    # Fix up the autodetected modules, prefixing all the source files
    # with Modules/.
    srcdir = sysconfig.get_config_var('srcdir')
    if not srcdir:
        # Maybe running on Windows but not using CYGWIN?
        raise ValueError("No source directory; cannot proceed.")
    srcdir = os.path.abspath(srcdir)
    moddirlist = [os.path.join(srcdir, 'Modules')]

    # Fix up the paths for scripts, too
    self.distribution.scripts = [
        os.path.join(srcdir, filename)
        for filename in self.distribution.scripts
    ]

    # Python header files
    headers = [sysconfig.get_config_h_filename()]
    headers += glob(os.path.join(sysconfig.get_path('include'), "*.h"))

    for ext in self.extensions[:]:
        ext.sources = [
            find_module_file(filename, moddirlist)
            for filename in ext.sources
        ]
        if ext.depends is not None:
            ext.depends = [
                find_module_file(filename, moddirlist)
                for filename in ext.depends
            ]
        else:
            ext.depends = []
        # re-compile extensions if a header file has been changed
        ext.depends.extend(headers)

        # If a module has already been built statically,
        # don't build it here
        if ext.name in sys.builtin_module_names:
            self.extensions.remove(ext)

    # Parse Modules/Setup and Modules/Setup.local to figure out which
    # modules are turned on in the file.
    remove_modules = []
    for filename in ('Modules/Setup', 'Modules/Setup.local'):
        input = text_file.TextFile(filename, join_lines=1)
        while 1:
            line = input.readline()
            if not line:
                break
            line = line.split()
            remove_modules.append(line[0])
        input.close()

    for ext in self.extensions[:]:
        if ext.name in remove_modules:
            self.extensions.remove(ext)

    # When you run "make CC=altcc" or something similar, you really want
    # those environment variables passed into the setup.py phase. Here's
    # a small set of useful ones.
    compiler = os.environ.get('CC')
    args = {}
    # unfortunately, distutils doesn't let us provide separate C and C++
    # compilers
    if compiler is not None:
        (ccshared, cppflags, cflags) = \
            sysconfig.get_config_vars('CCSHARED', 'CPPFLAGS', 'CFLAGS')
        cppflags = ' '.join(
            [f for f in cppflags.split() if not f.startswith('-I')])
        args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cppflags + ' ' + cflags
    self.compiler.set_executables(**args)

    build_ext.build_extensions(self)

    longest = max([len(e.name) for e in self.extensions])
    if self.failed:
        longest = max(longest, max([len(name) for name in self.failed]))

    def print_three_column(lst):
        lst.sort(key=str.lower)
        # guarantee zip() doesn't drop anything
        while len(lst) % 3:
            lst.append("")
        for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
            print("%-*s %-*s %-*s" % (longest, e, longest, f, longest, g))

    if missing:
        print()
        print("Python build finished, but the necessary bits to build "
              "these modules were not found:")
        print_three_column(missing)
        print("To find the necessary bits, look in setup.py in"
              " detect_modules() for the module's name.")
        print()

    if self.failed:
        failed = self.failed[:]
        print()
        print("Failed to build these modules:")
        print_three_column(failed)
        print()
class Uncollectable(object):
    """Create a reference cycle with multiple __del__ methods.

    An object in a reference cycle will never have zero references,
    and so must be garbage collected. If one or more objects in the
    cycle have __del__ methods, the gc refuses to guess an order,
    and leaves the cycle uncollected."""

    def __init__(self, partner=None):
        if partner is None:
            self.partner = Uncollectable(partner=self)
        else:
            self.partner = partner

    def __tp_del__(self):
        pass


if sysconfig.get_config_vars().get('PY_CFLAGS', ''):
    BUILD_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS'])
else:
    # Usually, sys.gettotalrefcount() is only present if Python has been
    # compiled in debug mode. If it's missing, expect that Python has
    # been released in release mode: with NDEBUG defined.
    BUILD_WITH_NDEBUG = (not hasattr(sys, 'gettotalrefcount'))

### Tests
###############################################################################


class GCTests(unittest.TestCase):
    def test_list(self):
        l = []
from distutils.extension import Extension
from distutils.core import setup
from Cython.Build import build_ext

# LINK_FILES = ['fit_box_mod.o','fit_box_mod_wrapper.o']
# LINK_FILES = [f for f in os.listdir("/s/tchlux/TestingData/fmodpy_fit_box_mod") if (f[-2:] == ".o")]
LINK_FILES = [f for f in os.listdir() if (f[-2:] == ".o")]
MODULE_NAME = 'fit_box_mod'
CYTHON_SOURCE = ['fit_box_mod.pyx']
COMPILE_ARGS = ['-fPIC', '-O3']
LINK_ARGS = ['-llapack', '-lblas']

# This might not work on all systems, need to investigate further
os.environ["CC"] = "gfortran"
# Get the linker options used to build python
linker_options = sysconfig.get_config_vars().get("BLDSHARED", "").split(" ")[1:]
# Set the linker, with appropriate options
os.environ["LDSHARED"] = "gfortran " + " ".join(linker_options)

print("SETUP_PY: Compiling extension module with '%s'." % os.environ["CC"])

ext_modules = [
    Extension(MODULE_NAME, CYTHON_SOURCE,
              extra_compile_args=COMPILE_ARGS,
              extra_link_args=LINK_ARGS + LINK_FILES)
]

print("SETUP_PY: Extension module read, calling setup...")

setup(
def build_project(args):
    """
    Build a dev version of the project.

    Returns
    -------
    site_dir
        site-packages directory where it was installed

    """
    root_ok = [
        os.path.exists(os.path.join(ROOT_DIR, fn)) for fn in PROJECT_ROOT_FILES
    ]
    if not all(root_ok):
        print("To build the project, run runtests.py in "
              "git checkout or unpacked source")
        sys.exit(1)

    dst_dir = os.path.join(ROOT_DIR, 'build', 'testenv')

    env = dict(os.environ)
    cmd = [sys.executable, 'setup.py']

    # Always use ccache, if installed
    env['PATH'] = os.pathsep.join(EXTRA_PATH + env.get('PATH', '').split(os.pathsep))

    if args.debug or args.gcov:
        # assume everyone uses gcc/gfortran
        env['OPT'] = '-O0 -ggdb'
        env['FOPT'] = '-O0 -ggdb'
        if args.gcov:
            from sysconfig import get_config_vars
            cvars = get_config_vars()
            env['OPT'] = '-O0 -ggdb'
            env['FOPT'] = '-O0 -ggdb'
            env['CC'] = env.get('CC', cvars['CC']) + ' --coverage'
            env['CXX'] = env.get('CXX', cvars['CXX']) + ' --coverage'
            env['F77'] = 'gfortran --coverage '
            env['F90'] = 'gfortran --coverage '
            env['LDSHARED'] = cvars['LDSHARED'] + ' --coverage'
            env['LDFLAGS'] = " ".join(cvars['LDSHARED'].split()[1:]) +\
                             ' --coverage'

    cmd += ['build']
    if args.parallel > 1:
        cmd += ['-j', str(args.parallel)]
    # Install; avoid producing eggs so SciPy can be imported from dst_dir.
    cmd += [
        'install', '--prefix=' + dst_dir, '--single-version-externally-managed',
        '--record=' + dst_dir + 'tmp_install_log.txt'
    ]

    from sysconfig import get_path
    py_path = get_path('platlib')
    site_dir = os.path.join(dst_dir, get_path_suffix(py_path, 3))

    # easy_install won't install to a path that Python by default cannot see
    # and isn't on the PYTHONPATH. Plus, it has to exist.
    if not os.path.exists(site_dir):
        os.makedirs(site_dir)
    env['PYTHONPATH'] = os.pathsep.join((site_dir, env.get('PYTHONPATH', '')))

    log_filename = os.path.join(ROOT_DIR, 'build.log')
    start_time = datetime.datetime.now()

    if args.show_build_log:
        ret = subprocess.call(cmd, env=env, cwd=ROOT_DIR)
    else:
        log_filename = os.path.join(ROOT_DIR, 'build.log')
        print("Building, see build.log...")
        with open(log_filename, 'w') as log:
            p = subprocess.Popen(cmd, env=env, stdout=log, stderr=log,
                                 cwd=ROOT_DIR)
            try:
                # Wait for it to finish, and print something to indicate the
                # process is alive, but only if the log file has grown (to
                # allow continuous integration environments kill a hanging
                # process accurately if it produces no output)
                last_blip = time.time()
                last_log_size = os.stat(log_filename).st_size
                while p.poll() is None:
                    time.sleep(0.5)
                    if time.time() - last_blip > 60:
                        log_size = os.stat(log_filename).st_size
                        if log_size > last_log_size:
                            elapsed = datetime.datetime.now() - start_time
                            print("    ... build in progress ({0} "
                                  "elapsed)".format(elapsed))
                            last_blip = time.time()
                            last_log_size = log_size

                ret = p.wait()
            except:  # noqa: E722
                p.terminate()
                raise

    elapsed = datetime.datetime.now() - start_time

    if ret == 0:
        print("Build OK ({0} elapsed)".format(elapsed))
    else:
        if not args.show_build_log:
            with open(log_filename, 'r') as f:
                print(f.read())
            print("Build failed! ({0} elapsed)".format(elapsed))
        sys.exit(1)

    return site_dir
from setuptools import setup, find_packages, Extension
from codecs import open
import io
import os
import os.path
import os
import sys
import sysconfig

pjoin = os.path.join
here = os.path.abspath(os.path.dirname(__file__))
PREFIX = sysconfig.get_config_vars()["prefix"]
name = "aat"
CPU_COUNT = os.cpu_count()

# *************************************** #
# Numpy build path and compiler toolchain #
# *************************************** #
try:
    # enable numpy faster compiler
    from numpy.distutils.ccompiler import CCompiler_compile
    import distutils.ccompiler
    distutils.ccompiler.CCompiler.compile = CCompiler_compile
    os.environ["NPY_NUM_BUILD_JOBS"] = str(CPU_COUNT)
except ImportError:
    pass  # no numpy


def get_version(file, name="__version__"):
    path = os.path.realpath(file)
# sysconfig_get_config_vars_by_name.py
import sysconfig

bases = sysconfig.get_config_vars('base', 'platbase', 'userbase')
print('Directory base:')
for b in bases:
    print('  ', b)
def run(self): """Run the install.""" install.run(self) # Copy so we do not overwrite the user's environment later. env = os.environ.copy() # Sometimes we want to manually build the C++ (like in Docker) if env.get('MOE_NO_BUILD_CPP', 'False') == 'True': return package_dir = os.path.join(self.install_lib, 'moe') build_dir = os.path.join(package_dir, 'build') cmake_path = find_path( MoeExecutable( env_var='MOE_CMAKE_PATH', exe_name='cmake', )) cmake_options = env.get('MOE_CMAKE_OPTS', '') if cmake_options == '': print("MOE_CMAKE_OPTS not set. Passing no extra args to cmake.") else: print("Passing '{0:s}' args from MOE_CMAKE_OPTS to cmake.".format( cmake_options)) # Set env dict with cc and/or cxx path # To find C/C++ compilers, we first try read MOE_CC/CXX_PATH and if they exist, we write to CC/CXX. # Then we read and pass CC/CXX to cmake if they are set. if 'MOE_CC_PATH' in env: env['CC'] = env['MOE_CC_PATH'] if 'MOE_CXX_PATH' in env: env['CXX'] = env['MOE_CXX_PATH'] if 'CC' in env: print("Passing CC={0:s} to cmake.".format(env['CC'])) if 'CXX' in env: print("Passing CXX={0:s} to cmake.".format(env['CXX'])) # rm the build directory if it exists if os.path.exists(build_dir): shutil.rmtree(build_dir) os.mkdir(build_dir) local_build_dir = os.path.join(here, 'moe', 'build') if os.path.exists(local_build_dir): shutil.rmtree(local_build_dir) os.mkdir(local_build_dir) cpp_location = os.path.join(here, 'moe', 'optimal_learning', 'cpp') # Reformat options string: options & args that are separated by whitespace on the command line # must be passed to subprocess.Popen in separate list elements. cmake_options_split = shlex.split(cmake_options) # Get info on Python paths (for the currently running Python) that we need to discover # header/library locations for MOE includepy, libdir, instsoname = sysconfig.get_config_vars( 'INCLUDEPY', 'LIBDIR', 'INSTSONAME') # The meaning of 'LIBDIR' and 'INSTSONAME' is platform-specific. Handle the various cases. # Initial value: 'LIBDIR' usually contains what we want, but one OSX case requires a different path. moe_python_library_base = libdir if not sys.platform.startswith('darwin'): # For other platforms (only Linux is tested!) libdir + instsoname yields the shared object location. if not sys.platform.startswith('linux'): warnings.warn( "Automatic path discovery untested outside of OSX and Linux. Taking our best guess.\n" "Please check PYTHON_INCLUDE_DIR and PYTHON_LIBRARY paths below." ) else: # on OSX versions that we tested against, LIBDIR looks like: # /opt/local/Library/Frameworks/Python.framework/Versions/2.7/lib # and INSTSONAME looks like: # Python.framework/Versions/2.7/Python # What we want is: # /opt/local/Library/Frameworks/Python.framework/Versions/2.7/Python # This is easy in Linux b/c INSTSONAME is the name of a dynamic library file # in foo/bar/lib whereas in OSX it's a partial path. # Instead, remove the overlapping part of both paths to construct the result. instsoname_components = instsoname.rsplit('/', 1) framework_subpath = instsoname_components[ 0] # Probably is similar to 'Python.framework/Versions/2.7' libdir_base, libdir_framework_subpath, libdir_subpath = libdir.rpartition( framework_subpath) if not libdir_base and not libdir_framework_subpath: # Did not find framework_subpath; the expected overlap isn't there so just join libdir + instsoname warnings.warn( "Unexpected OSX library paths.\n" "Please check PYTHON_INCLUDE_DIR and PYTHON_LIBRARY paths below." 
) else: # Found framework_subpath overlap so skip the overlapped components moe_python_library_base = libdir_base moe_python_include_dir = includepy moe_python_library = os.path.join(moe_python_library_base, instsoname) # Print the Python paths we found so that the user can verify them if something goes wrong. print( 'PYTHON_INCLUDE_DIR (Expected full path to where Python.h is found): {0:s}' .format(moe_python_include_dir)) print( 'PYTHON_LIBRARY (Expected path to Python shared object; e.g., libpython2.7.so or .dylib): {0:s}' .format(moe_python_library)) # Build the full cmake command using properly tokenized options cmake_full_command = [ cmake_path, '-DMOE_PYTHON_INCLUDE_DIR=' + moe_python_include_dir, '-DMOE_PYTHON_LIBRARY=' + moe_python_library, ] cmake_full_command.extend(cmake_options_split) cmake_full_command.append(cpp_location) # Run cmake proc = subprocess.Popen( cmake_full_command, cwd=local_build_dir, env=env, ) proc.wait() # Compile everything proc = subprocess.Popen(["make"], cwd=local_build_dir, env=env) proc.wait() GPP_so = os.path.join(local_build_dir, 'GPP.so') build_init = os.path.join(local_build_dir, '__init__.py') shutil.copyfile(GPP_so, os.path.join(build_dir, 'GPP.so')) shutil.copyfile(build_init, os.path.join(build_dir, '__init__.py'))
def test_get_platform(self):
    # windows XP, 32bits
    os.name = 'nt'
    sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
                   '[MSC v.1310 32 bit (Intel)]')
    sys.platform = 'win32'
    self.assertEqual(get_platform(), 'win32')

    # windows XP, amd64
    os.name = 'nt'
    sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
                   '[MSC v.1310 32 bit (Amd64)]')
    sys.platform = 'win32'
    self.assertEqual(get_platform(), 'win-amd64')

    # macbook
    os.name = 'posix'
    sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) '
                   '\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]')
    sys.platform = 'darwin'
    self._set_uname(('Darwin', 'macziade', '8.11.1',
                     ('Darwin Kernel Version 8.11.1: '
                      'Wed Oct 10 18:23:28 PDT 2007; '
                      'root:xnu-792.25.20~1/RELEASE_I386'), 'PowerPC'))
    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'

    get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
                                   '-fwrapv -O3 -Wall -Wstrict-prototypes')

    maxint = sys.maxsize
    try:
        sys.maxsize = 2147483647
        self.assertEqual(get_platform(), 'macosx-10.3-ppc')
        sys.maxsize = 9223372036854775807
        self.assertEqual(get_platform(), 'macosx-10.3-ppc64')
    finally:
        sys.maxsize = maxint

    self._set_uname(('Darwin', 'macziade', '8.11.1',
                     ('Darwin Kernel Version 8.11.1: '
                      'Wed Oct 10 18:23:28 PDT 2007; '
                      'root:xnu-792.25.20~1/RELEASE_I386'), 'i386'))
    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'

    get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
                                   '-fwrapv -O3 -Wall -Wstrict-prototypes')

    maxint = sys.maxsize
    try:
        sys.maxsize = 2147483647
        self.assertEqual(get_platform(), 'macosx-10.3-i386')
        sys.maxsize = 9223372036854775807
        self.assertEqual(get_platform(), 'macosx-10.3-x86_64')
    finally:
        sys.maxsize = maxint

    # macbook with fat binaries (fat, universal or fat64)
    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4'
    get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')

    self.assertEqual(get_platform(), 'macosx-10.4-fat')

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')

    self.assertEqual(get_platform(), 'macosx-10.4-intel')

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')

    self.assertEqual(get_platform(), 'macosx-10.4-fat3')

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')

    self.assertEqual(get_platform(), 'macosx-10.4-universal')

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')

    self.assertEqual(get_platform(), 'macosx-10.4-fat64')

    for arch in ('ppc', 'i386', 'x86_64', 'ppc64'):
        _osx_support._remove_original_values(get_config_vars())
        get_config_vars()['CFLAGS'] = ('-arch %s -isysroot '
                                       '/Developer/SDKs/MacOSX10.4u.sdk '
                                       '-fno-strict-aliasing -fno-common '
                                       '-dynamic -DNDEBUG -g -O3' % arch)

        self.assertEqual(get_platform(), 'macosx-10.4-%s' % arch)

    # linux debian sarge
    os.name = 'posix'
    sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) '
                   '\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]')
    sys.platform = 'linux2'
    self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7',
                     '#1 Mon Apr 30 17:25:38 CEST 2007', 'i686'))

    self.assertEqual(get_platform(), 'linux-i686')
def test_SO_in_vars(self):
    vars = sysconfig.get_config_vars()
    self.assertIsNotNone(vars['SO'])
    self.assertEqual(vars['SO'], vars['EXT_SUFFIX'])
import re
import sysconfig
from subprocess import check_output

from setuptools import setup, Extension, find_packages
from Cython.Build import cythonize
import numpy

extra_compile_args = ['-std=c++11']

# Super hacky way of determining if clang or gcc is being used
CC = sysconfig.get_config_vars().get('CC', 'gcc').split(' ')[0]
out = check_output([CC, '--version'])
if re.search('apple *llvm', str(out.lower())):
    extra_compile_args.append('-stdlib=libc++')

extensions = [
    Extension(
        "riemann.data.graph_dataset",
        ["riemann/data/graph_dataset.pyx"],
        include_dirs=[numpy.get_include(), "."],
        extra_compile_args=extra_compile_args,
        language='c++',
    )
]

ext_modules = cythonize(extensions)

install_requires = ["Cython", 'torch', 'numpy']

setup(name='riemanniannlp',
      version='0.1',
            if error.errno == errno.ENOENT:
                logger.exception('Error getting SHA1:\n{0}'.format(
                    ' '.join(cmd)))
            else:
                raise
        else:
            webpi_sha1 = webpi_sha1_output.rsplit(
                '\r\n', 2)[-2].split(' ', 1)[0]
    finally:
        os.chdir(cwd)

    msdeploy_url_template = getattr(
        distribution, 'msdeploy_url_template', None)
    if not msdeploy_url_template:
        msdeploy_url_template = self.msdeploy_url_template
    kwargs = sysconfig.get_config_vars()
    kwargs.update(distribution.metadata.__dict__)
    distribution.msdeploy_url = msdeploy_url_template.format(
        letter=distribution.msdeploy_file[0],
        msdeploy_file=distribution.msdeploy_file,
        msdeploy_package=distribution.msdeploy_package,
        msdeploy_package_url=distribution.msdeploy_package_url,
        **kwargs)

    distribution.webpi_size = int(round(webpi_size / 1024.0))
    distribution.webpi_sha1 = webpi_sha1
    return distribution

def add_dist(self, name):
    pkg_dist = pkg_resources.get_distribution(name)
    pkg_info = pkg_dist.get_metadata('PKG-INFO')
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 Doug Hellmann. All rights reserved.
#
"""All configuration variables.
"""
#end_pymotw_header

import sysconfig

bases = sysconfig.get_config_vars('base', 'platbase', 'userbase')
print 'Base directories:'
for b in bases:
    print ' ', b
# PANDAS_CI=1 is set by ci/setup_env.sh
if os.environ.get("PANDAS_CI", "0") == "1":
    extra_compile_args.append("-Werror")
if debugging_symbols_requested:
    extra_compile_args.append("-g")
    extra_compile_args.append("-UNDEBUG")
    extra_compile_args.append("-O0")

# Build for at least macOS 10.9 when compiling on a 10.9 system or above,
# overriding CPython distutils behaviour which is to target the version that
# python was built for. This may be overridden by setting
# MACOSX_DEPLOYMENT_TARGET before calling setup.py
if is_platform_mac():
    if "MACOSX_DEPLOYMENT_TARGET" not in os.environ:
        current_system = platform.mac_ver()[0]
        python_target = get_config_vars().get("MACOSX_DEPLOYMENT_TARGET",
                                              current_system)
        target_macos_version = "10.9"
        parsed_macos_version = parse_version(target_macos_version)
        if (parse_version(str(python_target)) < parsed_macos_version
                and parse_version(current_system) >= parsed_macos_version):
            os.environ["MACOSX_DEPLOYMENT_TARGET"] = target_macos_version

    if sys.version_info[:2] == (3, 8):  # GH 33239
        extra_compile_args.append("-Wno-error=deprecated-declarations")

    # https://github.com/pandas-dev/pandas/issues/35559
    extra_compile_args.append("-Wno-error=unreachable-code")

# enable coverage by building cython files by setting the environment variable
# "PANDAS_CYTHON_COVERAGE" (with a Truthy value) or by running build_ext
# with `--with-cython-coverage` enabled
if sys.platform in ('win32', 'win64'):
    emit('target_system', 'msw')
else:
    # ASSume.
    emit('target_system', 'posix')

# build identifier (unused)
emit('uuid', str(uuid.uuid1()))

emit('python', sys.executable)
pyversion = sysconfig.get_config_var('VERSION')
emit('python_version', pyversion)
pyabi = sysconfig.get_config_var('ABIFLAGS') or ''
emit('python_abi', pyabi)
pyspec = 'python' + pyversion + pyabi

emit('python_cflags', ' '.join([
    '-I' + sysconfig.get_config_var('INCLUDEPY'),
]))

emit('project_version', project.version)

libdir = sysconfig.get_config_var('LIBDIR')
libpy = '-l' + pyspec
emit('python_ldflags', ' '.join([
    '-L' + libdir,
    libpy,
] + sysconfig.get_config_vars('SHLIBS', 'SYSLIBS', 'LDFLAGS')))
def build_extension(self, ext):
    import pybind11

    suffix = get_config_vars()["EXT_SUFFIX"]
    python_includes = get_paths()["include"]
    pybind_includes = pybind11.get_include()

    extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
    # required for auto-detection of auxiliary "native" libs
    if not extdir.endswith(os.path.sep):
        extdir += os.path.sep

    cmake_args = [
        "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=" + extdir,
        "-DPYTHON_EXECUTABLE=" + sys.executable,
        "-DPYBIND_INCLUDES=" + pybind_includes,
        "-DPYTHON_INCLUDES=" + python_includes,
        "-DSUFFIX=" + suffix,
    ]

    cfg = "Debug" if self.debug else "Release"
    build_args = ["--config", cfg]

    if platform.system() == "Windows":
        cmake_args += [
            "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}".format(cfg.upper(), extdir)
        ]
        if sys.maxsize > 2 ** 32:
            cmake_args += ["-A", "x64"]
        build_args += ["--", "/m"]
    else:
        cmake_args += [
            "-DCMAKE_BUILD_TYPE=" + cfg,
            "-O3",
        ]
        build_args += ["--", "-j2"]

    env = os.environ.copy()
    env["CXXFLAGS"] = '{} -DVERSION_INFO=\\"{}\\"'.format(
        env.get("CXXFLAGS", ""), self.distribution.get_version()
    )
    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)

    out = subprocess.Popen(
        ["cmake", ext.sourcedir] + cmake_args,
        cwd=self.build_temp,
        env=env,
        stdout=subprocess.PIPE,
    )
    result = out.communicate()[0]
    print(result.decode())

    out = subprocess.Popen(
        ["cmake", "--build", ".", "--target", "psvWave_cpp"] + build_args,
        cwd=self.build_temp,
    )
    result = out.communicate()
    print(result)
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import setuptools
import sysconfig


def read(filename):
    """Utility function that returns a files contents."""
    return open(os.path.join(os.path.dirname(__file__), filename)).read()


# Work around: -Wstrict-prototypes flag is invalid for C++.
(opt, ) = sysconfig.get_config_vars("OPT")
if opt:
    os.environ["OPT"] = " ".join(flag for flag in opt.split()
                                 if flag != "-Wstrict-prototypes")

# Module description.
setuptools.setup(
    name="python-chess",
    version="0.0.5",
    author="Niklas Fiekas",
    author_email="*****@*****.**",
    description="A chess library.",
    long_description=read("README.rst"),
    license="GPL3",
    keywords="chess fen pgn polyglot",
    url="http://github.com/niklasf/python-chess",
# coding=utf-8
# Using sysconfig
import sysconfig

print sysconfig.get_config_var('Py_ENABLE_SHARED')
print sysconfig.get_config_var('LIBDIR')
print sysconfig.get_config_vars('AR', "CXX")

print sysconfig.get_scheme_names()
print sysconfig.get_path_names()
print sysconfig.get_python_version()
print sysconfig.get_platform()
# return true if current python installation was built from source
print sysconfig.is_python_build()
print sysconfig.get_config_h_filename()
print sysconfig._get_makefile_filename()
    INCLUDE.append(numpy.get_include())
except ImportError:
    print ("numpy is required")
    raise
except:
    # go safely through the rest and make your bundle
    assert sys.platform == "win32"

try:
    os.environ['CFLAGS']
except KeyError:
    os.environ['CFLAGS'] = ""

try:
    import sysconfig
    compiler = sysconfig.get_config_vars()['CC']
    compiler = os.environ['CC']
except:
    pass

# Flag for numpy
os.environ['CFLAGS'] += " -Wno-unused-function"
# Mute the ugly trick for value/value*
os.environ['CFLAGS'] += " -Wno-int-conversion"
os.environ['CFLAGS'] += " -Wno-incompatible-pointer-types"

# Compiler specific
if compiler == "clang":
    # Other warning on a Python flag (not my fault...)
    os.environ['CFLAGS'] += " -Wno-unknown-warning-option"
# in the file named "LICENSE.txt" included with this software distribution # and also available online as http://sbml.org/software/libsbml/license.html # ----------------------------------------------------------------------- -->*/ import glob import os import sys import shutil import platform from sysconfig import get_config_vars from setuptools import setup, Extension current_dir = os.path.dirname(os.path.realpath(__file__)) # remove -Wstrict-prototypes (opt, ) = get_config_vars('OPT') if opt is not None: os.environ['OPT'] = " ".join(flag for flag in opt.split() if flag != '-Wstrict-prototypes') # we need to switch the __init__.py file based on the python version # as python 3 uses a different syntax for metaclasses if sys.version_info >= (3, 0): # this is python 3.x if os.path.exists(current_dir + '/libsbml/__init__.py'): os.remove(current_dir + '/libsbml/__init__.py') shutil.copyfile(current_dir + '/script/libsbml3.py', current_dir + '/libsbml/__init__.py') else: # this is an older python if os.path.exists(current_dir + '/libsbml/__init__.py'):
class Uncollectable(object):
    """Create a reference cycle with multiple __del__ methods.

    An object in a reference cycle will never have zero references,
    and so must be garbage collected. If one or more objects in the
    cycle have __del__ methods, the gc refuses to guess an order,
    and leaves the cycle uncollected."""

    def __init__(self, partner=None):
        if partner is None:
            self.partner = Uncollectable(partner=self)
        else:
            self.partner = partner

    def __tp_del__(self):
        pass


if sysconfig.get_config_vars().get('PY_CFLAGS', ''):
    BUILD_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS'])
else:
    # Usually, sys.gettotalrefcount() is only present if Python has been
    # compiled in debug mode. If it's missing, expect that Python has
    # been released in release mode: with NDEBUG defined.
    BUILD_WITH_NDEBUG = (not hasattr(sys, 'gettotalrefcount'))

### Tests
###############################################################################


class GCTests(unittest.TestCase):
    def test_list(self):
        l = []
        l.append(l)
        gc.collect()
def finalize_options(self): """Finalizes options.""" # This method (and its helpers, like 'finalize_unix()', # 'finalize_other()', and 'select_scheme()') is where the default # installation directories for modules, extension modules, and # anything else we care to install from a Python module # distribution. Thus, this code makes a pretty important policy # statement about how third-party stuff is added to a Python # installation! Note that the actual work of installation is done # by the relatively simple 'install_*' commands; they just take # their orders from the installation directory options determined # here. # Check for errors/inconsistencies in the options; first, stuff # that's wrong on any platform. if ((self.prefix or self.exec_prefix or self.home) and (self.install_base or self.install_platbase)): raise DistutilsOptionError( "must supply either prefix/exec-prefix/home or " + "install-base/install-platbase -- not both") if self.home and (self.prefix or self.exec_prefix): raise DistutilsOptionError( "must supply either home or prefix/exec-prefix -- not both") if self.user and (self.prefix or self.exec_prefix or self.home or self.install_base or self.install_platbase): raise DistutilsOptionError( "can't combine user with prefix, " "exec_prefix/home, or install_(plat)base") # Next, stuff that's wrong (or dubious) only on certain platforms. if os.name != "posix": if self.exec_prefix: self.warn("exec-prefix option ignored on this platform") self.exec_prefix = None # Now the interesting logic -- so interesting that we farm it out # to other methods. The goal of these methods is to set the final # values for the install_{lib,scripts,data,...} options, using as # input a heady brew of prefix, exec_prefix, home, install_base, # install_platbase, user-supplied versions of # install_{purelib,platlib,lib,scripts,data,...}, and the # install schemes. Phew! self.dump_dirs("pre-finalize_{unix,other}") if os.name == 'posix': self.finalize_unix() else: self.finalize_other() self.dump_dirs("post-finalize_{unix,other}()") # Expand configuration variables, tilde, etc. in self.install_base # and self.install_platbase -- that way, we can use $base or # $platbase in the other installation directories and not worry # about needing recursive variable expansion (shudder). py_version = sys.version.split()[0] (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix') try: abiflags = sys.abiflags except AttributeError: # sys.abiflags may not be defined on all platforms. abiflags = '' self.config_vars = { 'dist_name': self.distribution.get_name(), 'dist_version': self.distribution.get_version(), 'dist_fullname': self.distribution.get_fullname(), 'py_version': py_version, 'py_version_short': '%d.%d' % sys.version_info[:2], 'py_version_nodot': '%d%d' % sys.version_info[:2], 'sys_prefix': prefix, 'prefix': prefix, 'sys_exec_prefix': exec_prefix, 'exec_prefix': exec_prefix, 'abiflags': abiflags, 'platlibdir': getattr(sys, 'platlibdir', 'lib'), 'implementation_lower': _get_implementation().lower(), 'implementation': _get_implementation(), 'platsubdir': sysconfig.get_config_var('platsubdir'), } if HAS_USER_SITE: self.config_vars['userbase'] = self.install_userbase self.config_vars['usersite'] = self.install_usersite self.expand_basedirs() self.dump_dirs("post-expand_basedirs()") # Now define config vars for the base directories so we can expand # everything else. 
    self.config_vars['base'] = self.install_base
    self.config_vars['platbase'] = self.install_platbase
    self.config_vars['installed_base'] = (
        sysconfig.get_config_vars()['installed_base'])

    if DEBUG:
        from pprint import pprint
        print("config vars:")
        pprint(self.config_vars)

    # Expand "~" and configuration variables in the installation
    # directories.
    self.expand_dirs()

    self.dump_dirs("post-expand_dirs()")

    # Create directories in the home dir:
    if self.user:
        self.create_home_path()

    # Pick the actual directory to install all modules to: either
    # install_purelib or install_platlib, depending on whether this
    # module distribution is pure or not.  Of course, if the user
    # already specified install_lib, use their selection.
    if self.install_lib is None:
        if self.distribution.has_ext_modules():  # has extensions: non-pure
            self.install_lib = self.install_platlib
        else:
            self.install_lib = self.install_purelib

    # Convert directories from Unix /-separated syntax to the local
    # convention.
    self.convert_paths('lib', 'purelib', 'platlib',
                       'scripts', 'data', 'headers',
                       'userbase', 'usersite')  # Deprecated

    # Well, we're not actually fully completely finalized yet: we still
    # have to deal with 'extra_path', which is the hack for allowing
    # non-packagized module distributions (hello, Numerical Python!) to
    # get their own directories.
    self.handle_extra_path()
    self.install_libbase = self.install_lib  # needed for .pth file
    self.install_lib = os.path.join(self.install_lib, self.extra_dirs)

    # If a new root directory was supplied, make all the installation
    # dirs relative to it.
    if self.root is not None:
        self.change_roots('libbase', 'lib', 'purelib', 'platlib',
                          'scripts', 'data', 'headers')

    self.dump_dirs("after prepending root")

    # Find out the build directories, ie. where to install from.
    self.set_undefined_options('build',
                               ('build_base', 'build_base'),
                               ('build_lib', 'build_lib'))
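The '$base' and '$platbase' placeholders defined above are later substituted into the install scheme paths. A minimal sketch of that expansion using string.Template; the scheme string below is illustrative, not an actual distutils scheme entry.

# Hedged sketch, assuming a $base-style scheme template like the ones the
# install command expands; the template path here is made up for illustration.
import string
import sys
import sysconfig

config_vars = {
    'base': sysconfig.get_config_var('prefix'),
    'py_version_short': '%d.%d' % sys.version_info[:2],
}
scheme_template = '$base/lib/python$py_version_short/site-packages'
print(string.Template(scheme_template).substitute(config_vars))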
def test_get_config_vars(self):
    cvars = get_config_vars()
    self.assertIsInstance(cvars, dict)
    self.assertTrue(cvars)
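A companion illustration of the other calling convention, written as a standalone sketch rather than as part of the test class above: with explicit names, get_config_vars() returns a list of values instead of the whole dict.

# Hedged companion example: name-based lookups return a list (None for
# names the current build does not define).
import sysconfig

prefix, exec_prefix = sysconfig.get_config_vars('prefix', 'exec_prefix')
assert isinstance(sysconfig.get_config_vars(), dict)
print(prefix, exec_prefix)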
suffix = sysconfig.get_config_var('EXT_SUFFIX')
if suffix is None:
    suffix = ".so"

# Try to get git hash
try:
    import subprocess
    ghash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode("ascii")
    ghash_arg = "-DGITHASH=" + ghash.strip()
except Exception:
    ghash_arg = "-DGITHASH=895c45a0ae3a8d0441688dbdd3201744614fffc3"  # GITHASHAUTOUPDATE

extra_link_args = []
if sys.platform == 'darwin':
    from distutils import sysconfig
    vars = sysconfig.get_config_vars()
    vars['LDSHARED'] = vars['LDSHARED'].replace('-bundle', '-shared')
    extra_link_args = ['-Wl,-install_name,@rpath/librebound' + suffix]

libreboundmodule = Extension('librebound',
                             sources=['src/rebound.c',
                                      'src/integrator_ias15.c',
                                      'src/integrator_whfast.c',
                                      'src/integrator_hermes.c',
                                      'src/integrator_mercurius.c',
                                      'src/integrator_leapfrog.c',
                                      'src/integrator_janus.c',
                                      'src/integrator_sei.c',
                                      'src/integrator.c',
                                      'src/gravity.c',
                                      'src/boundary.c',
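The EXT_SUFFIX fallback above can be made slightly more defensive; key availability varies across interpreter versions, so treat the lookup order below as an assumption, not a rule. The helper name is illustrative.

# Hedged sketch of a defensive suffix lookup: prefer EXT_SUFFIX, fall back
# to the legacy 'SO' key on older interpreters, then to a plain '.so'.
import sysconfig

def guess_ext_suffix():
    for key in ('EXT_SUFFIX', 'SO'):
        value = sysconfig.get_config_var(key)
        if value:
            return value
    return '.so'

print(guess_ext_suffix())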
def log_startup_info():
    global _pre_log_buffer
    if len(_pre_log_buffer) > 0:
        _log.info('early startup log buffer:')
    for line in _pre_log_buffer:
        _log.info(' ' + line)
    del _pre_log_buffer

    _log.info('GNUmed client version [%s] on branch [%s]', current_client_version, current_client_branch)
    _log.info('Platform: %s', platform.uname())
    _log.info(('Python %s on %s (%s)' % (sys.version, sys.platform, os.name)).replace('\n', '<\\n>'))
    try:
        import lsb_release
        _log.info('lsb_release: %s', lsb_release.get_distro_information())
    except ImportError:
        pass

    _log.info('module <sys> info:')
    attrs2skip = ['__doc__', 'copyright', '__name__', '__spec__']
    for attr_name in dir(sys):
        if attr_name in attrs2skip:
            continue
        if attr_name.startswith('set'):
            continue
        attr = getattr(sys, attr_name)
        if not attr_name.startswith('get'):
            _log.info('%s: %s', attr_name.rjust(30), attr)
            continue
        if callable(attr):
            try:
                _log.info('%s: %s', attr_name.rjust(30), attr())
            except Exception:
                _log.exception('%s: <cannot log>', attr_name.rjust(30))
            continue

    _log.info('module <platform> info:')
    attrs2skip = ['__doc__', '__copyright__', '__name__', '__spec__', '__cached__', '__builtins__']
    for attr_name in dir(platform):
        if attr_name in attrs2skip:
            continue
        if attr_name.startswith('set'):
            continue
        attr = getattr(platform, attr_name)
        if callable(attr):
            if attr_name.startswith('_'):
                _log.info('%s: %s', attr_name.rjust(30), attr)
                continue
            try:
                _log.info('%s: %s', attr_name.rjust(30), attr())
            except Exception:
                _log.exception('%s: <cannot log>', attr_name.rjust(30))
            continue
        _log.info('%s: %s', attr_name.rjust(30), attr)
        continue

    _log.info('module <os> info:')
    for n in os.confstr_names:
        _log.info('%s: %s', ('confstr[%s]' % n).rjust(40), os.confstr(n))
    for n in os.sysconf_names:
        try:
            _log.info('%s: %s', ('sysconf[%s]' % n).rjust(40), os.sysconf(n))
        except Exception:
            _log.exception('%s: <invalid> ??', ('sysconf[%s]' % n).rjust(30))
    os_attrs = ['name', 'ctermid', 'getcwd', 'get_exec_path', 'getegid', 'geteuid', 'getgid', 'getgroups',
                'getlogin', 'getpgrp', 'getpid', 'getppid', 'getresuid', 'getresgid', 'getuid',
                'supports_bytes_environ', 'uname', 'get_terminal_size', 'pathconf_names', 'times',
                'cpu_count', 'curdir', 'pardir', 'sep', 'altsep', 'extsep', 'pathsep', 'defpath',
                'linesep', 'devnull']
    for attr_name in os_attrs:
        attr = getattr(os, attr_name)
        if callable(attr):
            try:
                _log.info('%s: %s', attr_name.rjust(40), attr())
            except Exception as exc:
                _log.error('%s: a callable, but call failed (%s)', attr_name.rjust(40), exc)
            continue
        _log.info('%s: %s', attr_name.rjust(40), attr)

    _log.info('process environment:')
    for key, val in os.environ.items():
        _log.info(' %s: %s' % (('${%s}' % key).rjust(40), val))

    import sysconfig
    _log.info('module <sysconfig> info:')
    _log.info(' platform [%s] -- python version [%s]', sysconfig.get_platform(), sysconfig.get_python_version())
    _log.info(' sysconfig.get_paths():')
    paths = sysconfig.get_paths()
    for path in paths:
        _log.info('%s: %s', path.rjust(40), paths[path])
    _log.info(' sysconfig.get_config_vars():')
    conf_vars = sysconfig.get_config_vars()
    for var in conf_vars:
        _log.info('%s: %s', var.rjust(45), conf_vars[var])
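The same sysconfig diagnostics can also be captured as plain data instead of log lines; a hedged, purely illustrative sketch (not part of the GNUmed code) that collects them into a dict suitable for JSON output.

# Hedged sketch: snapshot sysconfig information for structured logging.
import json
import sysconfig

def sysconfig_snapshot():
    return {
        'platform': sysconfig.get_platform(),
        'python_version': sysconfig.get_python_version(),
        'paths': sysconfig.get_paths(),
        'config_vars': {k: str(v) for k, v in sysconfig.get_config_vars().items()},
    }

if __name__ == '__main__':
    print(json.dumps(sysconfig_snapshot(), indent=2, sort_keys=True))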
#!/usr/bin/env python3
# encoding: utf-8
#
# Copyright (c) 2010 Doug Hellmann. All rights reserved.
#
"""All configuration variables.
"""
#end_pymotw_header

import sysconfig

config_values = sysconfig.get_config_vars()
print('Found {} configuration settings'.format(len(config_values.keys())))

print('\nSome highlights:\n')

print(' Installation prefixes:')
print(' prefix={prefix}'.format(**config_values))
print(' exec_prefix={exec_prefix}'.format(**config_values))

print('\n Version info:')
print(' py_version={py_version}'.format(**config_values))
print(' py_version_short={py_version_short}'.format(**config_values))
print(' py_version_nodot={py_version_nodot}'.format(**config_values))

print('\n Base directories:')
print(' base={base}'.format(**config_values))
print(' platbase={platbase}'.format(**config_values))
print(' userbase={userbase}'.format(**config_values))
print(' srcdir={srcdir}'.format(**config_values))
def test_get_platform(self):
    # windows XP, 32bits
    os.name = 'nt'
    sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
                   '[MSC v.1310 32 bit (Intel)]')
    sys.platform = 'win32'
    self.assertEqual(get_platform(), 'win32')

    # windows XP, amd64
    os.name = 'nt'
    sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
                   '[MSC v.1310 32 bit (Amd64)]')
    sys.platform = 'win32'
    self.assertEqual(get_platform(), 'win-amd64')

    # windows XP, itanium
    os.name = 'nt'
    sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
                   '[MSC v.1310 32 bit (Itanium)]')
    sys.platform = 'win32'
    self.assertEqual(get_platform(), 'win-ia64')

    # macbook
    os.name = 'posix'
    sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) '
                   '\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]')
    sys.platform = 'darwin'
    self._set_uname(('Darwin', 'macziade', '8.11.1',
                     ('Darwin Kernel Version 8.11.1: '
                      'Wed Oct 10 18:23:28 PDT 2007; '
                      'root:xnu-792.25.20~1/RELEASE_I386'),
                     'PowerPC'))
    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
    get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
                                   '-fwrapv -O3 -Wall -Wstrict-prototypes')

    maxint = sys.maxsize
    try:
        sys.maxsize = 2147483647
        self.assertEqual(get_platform(), 'macosx-10.3-ppc')
        sys.maxsize = 9223372036854775807
        self.assertEqual(get_platform(), 'macosx-10.3-ppc64')
    finally:
        sys.maxsize = maxint

    self._set_uname(('Darwin', 'macziade', '8.11.1',
                     ('Darwin Kernel Version 8.11.1: '
                      'Wed Oct 10 18:23:28 PDT 2007; '
                      'root:xnu-792.25.20~1/RELEASE_I386'),
                     'i386'))
    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
    get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
                                   '-fwrapv -O3 -Wall -Wstrict-prototypes')

    maxint = sys.maxsize
    try:
        sys.maxsize = 2147483647
        self.assertEqual(get_platform(), 'macosx-10.3-i386')
        sys.maxsize = 9223372036854775807
        self.assertEqual(get_platform(), 'macosx-10.3-x86_64')
    finally:
        sys.maxsize = maxint

    # macbook with fat binaries (fat, universal or fat64)
    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4'
    get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-fat')

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-intel')

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-fat3')

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-universal')

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-fat64')

    for arch in ('ppc', 'i386', 'x86_64', 'ppc64'):
        _osx_support._remove_original_values(get_config_vars())
        get_config_vars()['CFLAGS'] = ('-arch %s -isysroot '
                                       '/Developer/SDKs/MacOSX10.4u.sdk '
                                       '-fno-strict-aliasing -fno-common '
                                       '-dynamic -DNDEBUG -g -O3' % arch)
        self.assertEqual(get_platform(), 'macosx-10.4-%s' % arch)

    # linux debian sarge
    os.name = 'posix'
    sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) '
                   '\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]')
    sys.platform = 'linux2'
    self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7',
                     '#1 Mon Apr 30 17:25:38 CEST 2007', 'i686'))
    self.assertEqual(get_platform(), 'linux-i686')
from setuptools import setup, find_packages, Extension
from codecs import open
import io
import os
import os.path
import sys
import sysconfig

pjoin = os.path.join
here = os.path.abspath(os.path.dirname(__file__))
PREFIX = sysconfig.get_config_vars()['prefix']

name = 'aat'
CPU_COUNT = os.cpu_count()

# *************************************** #
# Numpy build path and compiler toolchain #
# *************************************** #
try:
    # enable numpy's faster compiler
    from numpy.distutils.ccompiler import CCompiler_compile
    import distutils.ccompiler
    distutils.ccompiler.CCompiler.compile = CCompiler_compile
    os.environ['NPY_NUM_BUILD_JOBS'] = str(CPU_COUNT)
except ImportError:
    pass  # no numpy


def get_version(file, name='__version__'):
    path = os.path.realpath(file)
    version_ns = {}
def test_get_platform(self):
    # windows XP, 32bits
    os.name = "nt"
    sys.version = "2.4.4 (#71, Oct 18 2006, 08:34:43) " "[MSC v.1310 32 bit (Intel)]"
    sys.platform = "win32"
    self.assertEqual(get_platform(), "win32")

    # windows XP, amd64
    os.name = "nt"
    sys.version = "2.4.4 (#71, Oct 18 2006, 08:34:43) " "[MSC v.1310 32 bit (Amd64)]"
    sys.platform = "win32"
    self.assertEqual(get_platform(), "win-amd64")

    # windows XP, itanium
    os.name = "nt"
    sys.version = "2.4.4 (#71, Oct 18 2006, 08:34:43) " "[MSC v.1310 32 bit (Itanium)]"
    sys.platform = "win32"
    self.assertEqual(get_platform(), "win-ia64")

    # macbook
    os.name = "posix"
    sys.version = "2.5 (r25:51918, Sep 19 2006, 08:49:13) " "\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]"
    sys.platform = "darwin"
    self._set_uname(
        (
            "Darwin",
            "macziade",
            "8.11.1",
            ("Darwin Kernel Version 8.11.1: " "Wed Oct 10 18:23:28 PDT 2007; " "root:xnu-792.25.20~1/RELEASE_I386"),
            "PowerPC",
        )
    )
    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()["MACOSX_DEPLOYMENT_TARGET"] = "10.3"
    get_config_vars()["CFLAGS"] = "-fno-strict-aliasing -DNDEBUG -g " "-fwrapv -O3 -Wall -Wstrict-prototypes"

    maxint = sys.maxint
    try:
        sys.maxint = 2147483647
        self.assertEqual(get_platform(), "macosx-10.3-ppc")
        sys.maxint = 9223372036854775807
        self.assertEqual(get_platform(), "macosx-10.3-ppc64")
    finally:
        sys.maxint = maxint

    self._set_uname(
        (
            "Darwin",
            "macziade",
            "8.11.1",
            ("Darwin Kernel Version 8.11.1: " "Wed Oct 10 18:23:28 PDT 2007; " "root:xnu-792.25.20~1/RELEASE_I386"),
            "i386",
        )
    )
    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()["MACOSX_DEPLOYMENT_TARGET"] = "10.3"
    get_config_vars()["CFLAGS"] = "-fno-strict-aliasing -DNDEBUG -g " "-fwrapv -O3 -Wall -Wstrict-prototypes"

    maxint = sys.maxint
    try:
        sys.maxint = 2147483647
        self.assertEqual(get_platform(), "macosx-10.3-i386")
        sys.maxint = 9223372036854775807
        self.assertEqual(get_platform(), "macosx-10.3-x86_64")
    finally:
        sys.maxint = maxint

    # macbook with fat binaries (fat, universal or fat64)
    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()["MACOSX_DEPLOYMENT_TARGET"] = "10.4"
    get_config_vars()["CFLAGS"] = (
        "-arch ppc -arch i386 -isysroot "
        "/Developer/SDKs/MacOSX10.4u.sdk "
        "-fno-strict-aliasing -fno-common "
        "-dynamic -DNDEBUG -g -O3"
    )
    self.assertEqual(get_platform(), "macosx-10.4-fat")

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()["CFLAGS"] = (
        "-arch x86_64 -arch i386 -isysroot "
        "/Developer/SDKs/MacOSX10.4u.sdk "
        "-fno-strict-aliasing -fno-common "
        "-dynamic -DNDEBUG -g -O3"
    )
    self.assertEqual(get_platform(), "macosx-10.4-intel")

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()["CFLAGS"] = (
        "-arch x86_64 -arch ppc -arch i386 -isysroot "
        "/Developer/SDKs/MacOSX10.4u.sdk "
        "-fno-strict-aliasing -fno-common "
        "-dynamic -DNDEBUG -g -O3"
    )
    self.assertEqual(get_platform(), "macosx-10.4-fat3")

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()["CFLAGS"] = (
        "-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot "
        "/Developer/SDKs/MacOSX10.4u.sdk "
        "-fno-strict-aliasing -fno-common "
        "-dynamic -DNDEBUG -g -O3"
    )
    self.assertEqual(get_platform(), "macosx-10.4-universal")

    _osx_support._remove_original_values(get_config_vars())
    get_config_vars()["CFLAGS"] = (
        "-arch x86_64 -arch ppc64 -isysroot "
        "/Developer/SDKs/MacOSX10.4u.sdk "
        "-fno-strict-aliasing -fno-common "
        "-dynamic -DNDEBUG -g -O3"
    )
    self.assertEqual(get_platform(), "macosx-10.4-fat64")

    for arch in ("ppc", "i386", "x86_64", "ppc64"):
        _osx_support._remove_original_values(get_config_vars())
        get_config_vars()["CFLAGS"] = (
            "-arch %s -isysroot "
            "/Developer/SDKs/MacOSX10.4u.sdk "
            "-fno-strict-aliasing -fno-common "
            "-dynamic -DNDEBUG -g -O3" % (arch,)
        )
        self.assertEqual(get_platform(), "macosx-10.4-%s" % (arch,))

    # linux debian sarge
    os.name = "posix"
    sys.version = "2.3.5 (#1, Jul 4 2007, 17:28:59) " "\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]"
    sys.platform = "linux2"
    self._set_uname(("Linux", "aglae", "2.6.21.1dedibox-r7", "#1 Mon Apr 30 17:25:38 CEST 2007", "i686"))
    self.assertEqual(get_platform(), "linux-i686")
    print('wooOptions.ompThreads =', str(wooOptions.ompThreads))
    warnings.warn(
        'ompThreads==%d ignored, using %d since ompCores are specified.' %
        (wooOptions.ompThreads, len(cc)))
    wooOptions.ompThreads = len(cc)
    wooOsEnviron['GOMP_CPU_AFFINITY'] = ' '.join([str(cc[0])] + [str(c) for c in cc])
    wooOsEnviron['OMP_NUM_THREADS'] = str(len(cc))
elif wooOptions.ompThreads:
    wooOsEnviron['OMP_NUM_THREADS'] = str(wooOptions.ompThreads)
elif 'OMP_NUM_THREADS' not in os.environ:
    import multiprocessing
    wooOsEnviron['OMP_NUM_THREADS'] = str(multiprocessing.cpu_count())

import sysconfig
soSuffix = sysconfig.get_config_vars()['SO']

#if WIN and 'TERM' in os.environ:
#    # unbuffered output on windows, in case we're in a real terminal
#    # http://stackoverflow.com/a/881751
#    import msvcrt
#    msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)

#
# QUIRKS
#
# not in Windows, and not when running without X (the check is not very reliable)
if not WIN and (wooOptions.quirks & wooOptions.quirkIntel) and 'DISPLAY' in os.environ:
    import os, subprocess
    try:
        vgas = subprocess.check_output("LC_ALL=C lspci | grep VGA",
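The OMP_NUM_THREADS fallback chain above can be isolated into a small helper; a hedged sketch under the assumption that an explicit environment setting should always win over the CPU-count default. The helper name is illustrative.

# Hedged sketch: honour an explicit OMP_NUM_THREADS, otherwise default to
# the machine's CPU count.
import multiprocessing
import os

def default_omp_threads(env=os.environ):
    value = env.get('OMP_NUM_THREADS')
    if value:
        return int(value)
    return multiprocessing.cpu_count()

print(default_omp_threads())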
def test_get_platform(self):
    # windows XP, 32bits
    os.name = 'nt'
    sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
                   '[MSC v.1310 32 bit (Intel)]')
    sys.platform = 'win32'
    self.assertEqual(get_platform(), 'win32')

    # windows XP, amd64
    os.name = 'nt'
    sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
                   '[MSC v.1310 32 bit (Amd64)]')
    sys.platform = 'win32'
    self.assertEqual(get_platform(), 'win-amd64')

    # windows XP, itanium
    os.name = 'nt'
    sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
                   '[MSC v.1310 32 bit (Itanium)]')
    sys.platform = 'win32'
    self.assertEqual(get_platform(), 'win-ia64')

    # macbook
    os.name = 'posix'
    sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) '
                   '\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]')
    sys.platform = 'darwin'
    self._set_uname(('Darwin', 'macziade', '8.11.1',
                     ('Darwin Kernel Version 8.11.1: '
                      'Wed Oct 10 18:23:28 PDT 2007; '
                      'root:xnu-792.25.20~1/RELEASE_I386'),
                     'PowerPC'))
    get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
    get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
                                   '-fwrapv -O3 -Wall -Wstrict-prototypes')

    maxint = sys.maxint
    try:
        sys.maxint = 2147483647
        self.assertEqual(get_platform(), 'macosx-10.3-ppc')
        sys.maxint = 9223372036854775807
        self.assertEqual(get_platform(), 'macosx-10.3-ppc64')
    finally:
        sys.maxint = maxint

    self._set_uname(('Darwin', 'macziade', '8.11.1',
                     ('Darwin Kernel Version 8.11.1: '
                      'Wed Oct 10 18:23:28 PDT 2007; '
                      'root:xnu-792.25.20~1/RELEASE_I386'),
                     'i386'))
    get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
    get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
                                   '-fwrapv -O3 -Wall -Wstrict-prototypes')

    maxint = sys.maxint
    try:
        sys.maxint = 2147483647
        self.assertEqual(get_platform(), 'macosx-10.3-i386')
        sys.maxint = 9223372036854775807
        self.assertEqual(get_platform(), 'macosx-10.3-x86_64')
    finally:
        sys.maxint = maxint

    # macbook with fat binaries (fat, universal or fat64)
    get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4'
    get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-fat')

    get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-intel')

    get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-fat3')

    get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-universal')

    get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot '
                                   '/Developer/SDKs/MacOSX10.4u.sdk '
                                   '-fno-strict-aliasing -fno-common '
                                   '-dynamic -DNDEBUG -g -O3')
    self.assertEqual(get_platform(), 'macosx-10.4-fat64')

    for arch in ('ppc', 'i386', 'ppc64', 'x86_64'):
        get_config_vars()['CFLAGS'] = ('-arch %s -isysroot '
                                       '/Developer/SDKs/MacOSX10.4u.sdk '
                                       '-fno-strict-aliasing -fno-common '
                                       '-dynamic -DNDEBUG -g -O3' % (arch,))
        self.assertEqual(get_platform(), 'macosx-10.4-%s' % (arch,))

    # macosx with ARCHFLAGS set and empty _CONFIG_VARS
    os.environ['ARCHFLAGS'] = '-arch i386'
    sysconfig._CONFIG_VARS = None

    # this will attempt to recreate the _CONFIG_VARS based on environment
    # variables; used to check a problem with PyPy's _init_posix
    # implementation; see: issue 705
    get_config_vars()

    # linux debian sarge
    os.name = 'posix'
    sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) '
                   '\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]')
    sys.platform = 'linux2'
    self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7',
                     '#1 Mon Apr 30 17:25:38 CEST 2007', 'i686'))
    self.assertEqual(get_platform(), 'linux-i686')
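These tests mutate the shared dict returned by get_config_vars() directly, which relies on careful manual restoration. A hedged sketch, not taken from any of the test suites above, of a context manager that patches selected entries and puts them back afterwards; all names in it are illustrative.

# Hedged sketch: temporarily override config vars and restore them, so
# CFLAGS/MACOSX_DEPLOYMENT_TARGET tweaks cannot leak between tests.
import contextlib
import sysconfig

_MISSING = object()

@contextlib.contextmanager
def patched_config_vars(**overrides):
    cvars = sysconfig.get_config_vars()   # the live, shared dict
    saved = {name: cvars.get(name, _MISSING) for name in overrides}
    cvars.update(overrides)
    try:
        yield cvars
    finally:
        for name, old in saved.items():
            if old is _MISSING:
                cvars.pop(name, None)
            else:
                cvars[name] = old

with patched_config_vars(CFLAGS='-arch x86_64 -DNDEBUG'):
    print(sysconfig.get_config_var('CFLAGS'))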
python_min_ver = (3, 6, 0)
python_ver = (version_info.major, version_info.minor, version_info.micro)
if python_ver < python_min_ver:
    txt = 'Python version {0}.{1}.{2} ' \
          'lower than the required version >= {3}.{4}.{5}.'
    warnings.warn(txt.format(*(python_ver + python_min_ver)))

# The next block ensures that we build a link-time linkable dynamic library for
# OSX builds instead of a bundle.
#
# Snippet from http://stackoverflow.com/a/32765319/2299947
if sys.platform == 'darwin':
    vars = sysconfig.get_config_vars()
    vars['LDSHARED'] = vars['LDSHARED'].replace('-bundle', '-dynamiclib')

# Whether we're installing via a wheel or not
is_building_tick = any(arg in (
    "build",
    "build_ext",
    "bdist",
    "bdist_wheel",
    "develop",
) for arg in sys.argv)

# Obtain the numpy include directory.
# This logic works across numpy versions.
numpy_available = False
numpy_include = ""
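An alternative, hedged sketch of the same idea: instead of mutating the shared dict in-process, derive a patched LDSHARED value and export it in the environment, under the assumption that the distutils-style compiler customization consults the LDSHARED environment variable. Illustrative only; not part of the setup script above.

# Hedged alternative to the in-process patch above.
import os
import sys
import sysconfig

if sys.platform == 'darwin':
    ldshared = sysconfig.get_config_var('LDSHARED') or ''
    os.environ.setdefault('LDSHARED', ldshared.replace('-bundle', '-dynamiclib'))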
# sysconfig_get_config_vars.py
import sysconfig

config_values = sysconfig.get_config_vars()
print('Found {} configuration settings'.format(
    len(config_values.keys())))

print('\nSome highlights:\n')

print(' Installation prefixes:')
print(' prefix={prefix}'.format(**config_values))
print(' exec_prefix={exec_prefix}'.format(**config_values))

print('\n Version info:')
print(' py_version={py_version}'.format(**config_values))
print(' py_version_short={py_version_short}'.format(
    **config_values))
print(' py_version_nodot={py_version_nodot}'.format(
    **config_values))

print('\n Base directories:')
print(' base={base}'.format(**config_values))
print(' platbase={platbase}'.format(**config_values))
print(' userbase={userbase}'.format(**config_values))
print(' srcdir={srcdir}'.format(**config_values))

print('\n Compiler and linker flags:')
print(' LDFLAGS={LDFLAGS}'.format(**config_values))
print(' BASECFLAGS={BASECFLAGS}'.format(**config_values))
print(' Py_ENABLE_SHARED={Py_ENABLE_SHARED}'.format(
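Not every key printed above is defined on every platform (build-related keys such as LDFLAGS or BASECFLAGS are typically absent on Windows), so a defensive variant avoids KeyError. A hedged sketch; the placeholder text is arbitrary.

# Hedged sketch: defensive lookups for keys that may be missing.
import sysconfig

config_values = sysconfig.get_config_vars()
for key in ('LDFLAGS', 'BASECFLAGS', 'Py_ENABLE_SHARED', 'srcdir'):
    print(' {}={}'.format(key, config_values.get(key, '<undefined>')))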
def do_custom_build(self, env):
    # We're using a system freetype
    if options.get('system_freetype'):
        return

    tarball = f'freetype-{LOCAL_FREETYPE_VERSION}.tar.gz'
    src_path = get_and_extract_tarball(
        urls=[
            (f'https://downloads.sourceforge.net/project/freetype'
             f'/freetype2/{LOCAL_FREETYPE_VERSION}/{tarball}'),
            (f'https://download.savannah.gnu.org/releases/freetype'
             f'/{tarball}')
        ],
        sha=LOCAL_FREETYPE_HASH,
        dirname=f'freetype-{LOCAL_FREETYPE_VERSION}',
    )

    if sys.platform == 'win32':
        libfreetype = 'libfreetype.lib'
    else:
        libfreetype = 'libfreetype.a'
    if (src_path / 'objs' / '.libs' / libfreetype).is_file():
        return  # Bail out because we have already built FreeType.

    print(f"Building freetype in {src_path}")
    if sys.platform != 'win32':  # compilation on non-windows
        env = {
            **env,
            **{
                var: value
                for var, value in sysconfig.get_config_vars().items()
                if var in {"CC", "CFLAGS", "CXX", "CXXFLAGS", "LD", "LDFLAGS"}
            },
        }
        env["CFLAGS"] = env.get("CFLAGS", "") + " -fPIC"
        configure = [
            "./configure", "--with-zlib=no", "--with-bzip2=no",
            "--with-png=no", "--with-harfbuzz=no", "--enable-static",
            "--disable-shared"
        ]
        host = sysconfig.get_config_var('BUILD_GNU_TYPE')
        if host is not None:  # May be unset on PyPy.
            configure.append(f"--host={host}")
        subprocess.check_call(configure, env=env, cwd=src_path)
        if 'GNUMAKE' in env:
            make = env['GNUMAKE']
        elif 'MAKE' in env:
            make = env['MAKE']
        else:
            try:
                output = subprocess.check_output(['make', '-v'],
                                                 stderr=subprocess.DEVNULL)
            except subprocess.CalledProcessError:
                output = b''
            if b'GNU' not in output and b'makepp' not in output:
                make = 'gmake'
            else:
                make = 'make'
        subprocess.check_call([make], env=env, cwd=src_path)
    else:  # compilation on windows
        shutil.rmtree(src_path / "objs", ignore_errors=True)
        msbuild_platform = (
            'x64' if platform.architecture()[0] == '64bit' else 'Win32')
        base_path = Path("build/freetype-2.6.1/builds/windows")
        vc = 'vc2010'
        sln_path = (
            base_path / vc / "freetype.sln"
        )
        # https://developercommunity.visualstudio.com/comments/190992/view.html
        (sln_path.parent / "Directory.Build.props").write_text("""
<Project>
 <PropertyGroup>
  <!-- The following line *cannot* be split over multiple lines. -->
  <WindowsTargetPlatformVersion>$([Microsoft.Build.Utilities.ToolLocationHelper]::GetLatestSDKTargetPlatformVersion('Windows', '10.0'))</WindowsTargetPlatformVersion>
 </PropertyGroup>
</Project>
""")
        # It is not a trivial task to determine PlatformToolset to plug it
        # into msbuild command, and Directory.Build.props will not override
        # the value in the project file.
        # The DefaultPlatformToolset is from Microsoft.Cpp.Default.props
        with open(base_path / vc / "freetype.vcxproj", 'r+b') as f:
            toolset_repl = b'PlatformToolset>$(DefaultPlatformToolset)<'
            vcxproj = f.read().replace(b'PlatformToolset>v100<',
                                       toolset_repl)
            assert toolset_repl in vcxproj, (
                'Upgrading Freetype might break this')
            f.seek(0)
            f.truncate()
            f.write(vcxproj)

        cc = get_ccompiler()
        cc.initialize()  # Get msbuild in the %PATH% of cc.spawn.
        cc.spawn(["msbuild", str(sln_path),
                  "/t:Clean;Build",
                  f"/p:Configuration=Release;Platform={msbuild_platform}"])
        # Move to the corresponding Unix build path.
        (src_path / "objs" / ".libs").mkdir()
        # Be robust against change of FreeType version.
        lib_path, = (src_path / "objs" / vc / msbuild_platform).glob(
            "freetype*.lib")
        shutil.copy2(lib_path, src_path / "objs/.libs/libfreetype.lib")
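A stripped-down, hedged sketch of the environment preparation used on the non-Windows branch above: overlay a few compiler-related sysconfig values onto a copy of os.environ before running an external configure/make step. The helper name and the extra flag are illustrative.

# Hedged sketch of the env-overlay pattern.
import os
import sysconfig

def build_env(extra_cflags='-fPIC'):
    env = dict(os.environ)
    wanted = {'CC', 'CFLAGS', 'CXX', 'CXXFLAGS', 'LD', 'LDFLAGS'}
    env.update({k: str(v) for k, v in sysconfig.get_config_vars().items()
                if k in wanted})
    env['CFLAGS'] = (env.get('CFLAGS', '') + ' ' + extra_cflags).strip()
    return env

print(build_env()['CFLAGS'])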
import unittest

from attic.helpers import st_mtime_ns
from attic.xattr import get_all

try:
    import llfuse
    # Does this version of llfuse support ns precision?
    have_fuse_mtime_ns = hasattr(llfuse.EntryAttributes, 'st_mtime_ns')
except ImportError:
    have_fuse_mtime_ns = False

# The mtime get/set precision varies on different OS and Python versions
if 'HAVE_FUTIMENS' in getattr(posix, '_have_functions', []):
    st_mtime_ns_round = 0
elif 'HAVE_UTIMES' in sysconfig.get_config_vars():
    st_mtime_ns_round = -6
else:
    st_mtime_ns_round = -9

has_mtime_ns = sys.version >= '3.3'
utime_supports_fd = os.utime in getattr(os, 'supports_fd', {})


class AtticTestCase(unittest.TestCase):
    """
    """
    assert_in = unittest.TestCase.assertIn
    assert_not_in = unittest.TestCase.assertNotIn
    assert_equal = unittest.TestCase.assertEqual
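A hedged usage sketch of the precision value computed above, assuming timestamps are compared as integer nanoseconds: rounding with a negative ndigits drops the digits the platform cannot reliably preserve. The helper name is illustrative.

# Hedged sketch: apply the detected mtime precision before comparing.
def round_mtime_ns(mtime_ns, st_mtime_ns_round):
    # round(x, -6) keeps microsecond precision, round(x, -9) whole seconds,
    # round(x, 0) keeps full nanosecond precision.
    return round(mtime_ns, st_mtime_ns_round)

assert round_mtime_ns(1234567891, -9) == 1000000000
assert round_mtime_ns(1234567891, 0) == 1234567891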