def test_iter_ordering():
    """Obtainer.iter must rank wheel > egg > sdist regardless of input order."""
    interp = PythonInterpreter.get()
    source_pkg = SourcePackage('psutil-0.6.1.tar.gz')
    egg_pkg = EggPackage('psutil-0.6.1-py%s-%s.egg' % (interp.python, get_build_platform()))
    wheel_tag = get_build_platform().replace('-', '_').replace('.', '_').lower()
    wheel_pkg = WheelPackage('psutil-0.6.1-cp%s-none-%s.whl' % (
        interp.python.replace('.', ''), wheel_tag))
    requirement = Requirement.parse('psutil')
    expected = [wheel_pkg, egg_pkg, source_pkg]
    # The initial ordering handed to the obtainer must not matter.
    for ordering in ([source_pkg, egg_pkg, wheel_pkg], [egg_pkg, source_pkg, wheel_pkg]):
        assert list(FakeObtainer(ordering).iter(requirement)) == expected
def test_sorter_sort():
    """Sorter.sort ranks wheel > egg > sdist by default; a custom precedence
    reorders known package types and `filter` drops unknown ones."""
    interp = PythonInterpreter.get()
    source_pkg = SourcePackage('psutil-0.6.1.tar.gz')
    egg_pkg = EggPackage('psutil-0.6.1-py%s-%s.egg' % (interp.python, get_build_platform()))
    wheel_pkg = WheelPackage('psutil-0.6.1-cp%s-none-%s.whl' % (
        interp.python.replace('.', ''),
        get_build_platform().replace('-', '_').replace('.', '_').lower()))
    default_order = [wheel_pkg, egg_pkg, source_pkg]
    assert Sorter().sort([source_pkg, egg_pkg, wheel_pkg]) == default_order
    assert Sorter().sort([egg_pkg, source_pkg, wheel_pkg]) == default_order
    # A sorter that only knows eggs and wheels: the sdist is an unknown type.
    sorter = Sorter(precedence=(EggPackage, WheelPackage))
    assert sorter.sort([egg_pkg, source_pkg, wheel_pkg], filter=False) == [egg_pkg, wheel_pkg, source_pkg]
    assert sorter.sort([egg_pkg, source_pkg, wheel_pkg], filter=True) == [egg_pkg, wheel_pkg]
def finalize_options(self):
    """Resolve all unset bdist_venv options to their defaults and compute
    the output tarball path."""
    ei_cmd = self.get_finalized_command('egg_info')
    self.egg_info = ei_cmd.egg_info
    if self.bdist_dir is None:
        # Stage the build under the shared bdist base directory.
        bdist_base = self.get_finalized_command('bdist').bdist_base
        self.bdist_dir = os.path.join(bdist_base, 'venv')
    if self.no_plat_name:
        # Explicit opt-out: produce a platform-neutral archive name.
        self.plat_name = ''
    elif self.plat_name is None:
        self.plat_name = get_build_platform()
    if self.requirements is None and os.path.exists('requirements.txt'):
        # Default to the conventional requirements file when present.
        self.requirements = 'requirements.txt'
    if self.no_archive_root:
        self.archive_root = '.'
    elif self.archive_root is None:
        self.archive_root = self.distribution.get_fullname()
    elif not self.archive_root:
        # Empty string also means "no extra root directory".
        self.archive_root = '.'
    self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
    basename = self.distribution.get_fullname()
    if self.plat_name:
        # Tag platform-specific archives with platform and Python version.
        basename = '%s.%s-py%s' % (basename, self.plat_name, get_python_version())
    self.venv_output = os.path.join(self.dist_dir, basename + '.tar.gz')
def get_library_dirs():
    """Return the directories to search for FFTW link libraries."""
    from pkg_resources import get_build_platform
    platform = get_build_platform()
    dirs = []
    if platform in ('win32', 'win-amd64'):
        # On Windows the import libraries ship in the source tree and in
        # the interpreter's bin directory.
        dirs.append(os.path.join(os.getcwd(), 'pyfftw'))
        dirs.append(os.path.join(sys.prefix, 'bin'))
    if 'PYFFTW_LIB_DIR' in os.environ:
        # User-supplied override takes priority over sys.prefix/lib.
        dirs.append(os.environ['PYFFTW_LIB_DIR'])
    dirs.append(os.path.join(sys.prefix, 'lib'))
    if platform.startswith('freebsd'):
        # FreeBSD installs third-party libraries under /usr/local.
        dirs.append('/usr/local/lib')
    return dirs
def finalize_options(self):
    """Fill in the unset bdist_egg options and compute the output egg path."""
    ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
    self.egg_info = ei_cmd.egg_info
    if self.bdist_dir is None:
        # Build the pseudo-install tree under the shared bdist base.
        bdist_base = self.get_finalized_command("bdist").bdist_base
        self.bdist_dir = os.path.join(bdist_base, "egg")
    if self.plat_name is None:
        self.plat_name = get_build_platform()
    self.set_undefined_options("bdist", ("dist_dir", "dist_dir"))
    if self.egg_output is None:
        # Compute filename of the output egg; the platform tag is only
        # embedded when the distribution contains extension modules.
        basename = Distribution(
            None, None, ei_cmd.egg_name, ei_cmd.egg_version,
            get_python_version(),
            self.distribution.has_ext_modules() and self.plat_name,
        ).egg_name()
        self.egg_output = os.path.join(self.dist_dir, basename + ".egg")
def _package_name(self): egg_name = self._dist.egg_name() if self._dist.platform and not egg_name.endswith(self._dist.platform): egg_name = egg_name + '-' + self._dist.platform elif self._native_deps(): egg_name = egg_name + '-' + get_build_platform() return egg_name + '.egg'
def get_library_dirs():
    """Directories to search for link libraries; only Windows needs one."""
    from pkg_resources import get_build_platform
    dirs = []
    if get_build_platform() in ("win32", "win-amd64"):
        # The FFTW import libraries live alongside the pyfftw sources.
        dirs.append(os.path.join(os.getcwd(), "pyfftw"))
    return dirs
def get_library_dirs():
    """Return extra library search paths (only Windows needs any)."""
    from pkg_resources import get_build_platform
    on_windows = get_build_platform() in ('win32', 'win-amd64')
    return [os.path.join(os.getcwd(), 'pyfftw')] if on_windows else []
def test_iter_ordering():
    """iter() must prefer the platform egg over the source tarball."""
    interp = PythonInterpreter.get()
    source_pkg = SourcePackage('psutil-0.6.1.tar.gz')
    egg_pkg = EggPackage('psutil-0.6.1-py%s-%s.egg' % (interp.python, get_build_platform()))
    requirement = Requirement.parse('psutil')
    # Input order must not influence the result.
    for ordering in ([source_pkg, egg_pkg], [egg_pkg, source_pkg]):
        assert list(FakeObtainer(ordering).iter(requirement)) == [egg_pkg, source_pkg]
def get_package_data():
    """Map package name -> bundled data files (FFTW DLLs on Windows only)."""
    from pkg_resources import get_build_platform
    if get_build_platform() not in ("win32", "win-amd64"):
        return {}
    return {"pyfftw": ["libfftw3-3.dll", "libfftw3l-3.dll", "libfftw3f-3.dll"]}
def get_extensions(): from distutils.extension import Extension # will use static linking if STATIC_FFTW_DIR defined static_fftw_path = os.environ.get('STATIC_FFTW_DIR', None) link_static_fftw = static_fftw_path is not None common_extension_args = { 'include_dirs': get_include_dirs(), 'library_dirs': get_library_dirs()} try: from Cython.Build import cythonize sources = [os.path.join(os.getcwd(), 'pyfftw', 'pyfftw.pyx')] have_cython = True except ImportError as e: # no cython sources = [os.path.join(os.getcwd(), 'pyfftw', 'pyfftw.c')] if not os.path.exists(sources[0]): raise ImportError( str(e) + '. ' + 'Cython is required to build the initial .c file.') have_cython = False libraries = get_libraries() if link_static_fftw: from pkg_resources import get_build_platform if get_build_platform() in ('win32', 'win-amd64'): lib_pre = '' lib_ext = '.lib' else: lib_pre = 'lib' lib_ext = '.a' extra_link_args = [] for lib in libraries: extra_link_args.append( os.path.join(static_fftw_path, lib_pre + lib + lib_ext)) common_extension_args['extra_link_args'] = extra_link_args common_extension_args['libraries'] = [] else: # otherwise we use dynamic libraries common_extension_args['extra_link_args'] = [] common_extension_args['libraries'] = libraries ext_modules = [ Extension('pyfftw.pyfftw', sources=sources, **common_extension_args)] if have_cython: return cythonize(ext_modules) else: return ext_modules
def get_include_dirs():
    """Header search paths: project headers, pyfftw sources, and numpy."""
    import numpy
    from pkg_resources import get_build_platform
    cwd = os.getcwd()
    dirs = [os.path.join(cwd, "include"),
            os.path.join(cwd, "pyfftw"),
            numpy.get_include()]
    if get_build_platform() in ("win32", "win-amd64"):
        # Windows-specific compatibility headers.
        dirs.append(os.path.join(cwd, "include", "win"))
    return dirs
def get_libraries():
    """Names of the FFTW libraries to link against, per platform."""
    from pkg_resources import get_build_platform
    if get_build_platform() in ("win32", "win-amd64"):
        # Windows uses the DLL import-library naming convention.
        return ["libfftw3-3", "libfftw3f-3", "libfftw3l-3"]
    # Unix: the three precisions plus their threaded companions.
    return ["fftw3", "fftw3f", "fftw3l",
            "fftw3_threads", "fftw3f_threads", "fftw3l_threads"]
def get_include_dirs():
    """Return C header search paths, honouring the PYFFTW_INCLUDE override."""
    import numpy
    from pkg_resources import get_build_platform
    platform = get_build_platform()
    cwd = os.getcwd()
    dirs = [os.path.join(cwd, 'include'),
            os.path.join(cwd, 'pyfftw'),
            numpy.get_include(),
            os.path.join(sys.prefix, 'include')]
    if 'PYFFTW_INCLUDE' in os.environ:
        # User-supplied extra header directory.
        dirs.append(os.environ['PYFFTW_INCLUDE'])
    if platform in ('win32', 'win-amd64'):
        dirs.append(os.path.join(cwd, 'include', 'win'))
    if platform.startswith('freebsd'):
        # FreeBSD keeps third-party headers under /usr/local.
        dirs.append('/usr/local/include')
    return dirs
def lib_full_name(self, root_lib):
    """Absolute path of the static library for *root_lib* in static_fftw_dir."""
    # TODO use self.compiler.library_filename
    from pkg_resources import get_build_platform
    on_windows = get_build_platform() in ('win32', 'win-amd64')
    prefix, suffix = ('', '.lib') if on_windows else ('lib', '.a')
    return os.path.join(self.static_fftw_dir, prefix + root_lib + suffix)
def get_package_data():
    """Bundle the FFTW DLLs with the pyfftw package on Windows builds."""
    from pkg_resources import get_build_platform
    package_data = {}
    windows_platforms = ('win32', 'win-amd64')
    if get_build_platform() in windows_platforms:
        package_data['pyfftw'] = [
            'libfftw3-3.dll', 'libfftw3l-3.dll', 'libfftw3f-3.dll']
    return package_data
def get_libraries():
    """Link-library names; Windows uses the DLL import-library naming."""
    from pkg_resources import get_build_platform
    if get_build_platform() in ('win32', 'win-amd64'):
        return ['libfftw3-3', 'libfftw3f-3', 'libfftw3l-3']
    # Three precisions, each with a threaded companion library.
    base = ['fftw3', 'fftw3f', 'fftw3l']
    return base + [name + '_threads' for name in base]
def make_env_section(self):
    """Assemble the ordered environment section for provenance output."""
    # sys.version can carry trailing whitespace; strip each line so YAML
    # can use literal-block formatting.
    python_version = LiteralString(
        '\n'.join(line.strip() for line in sys.version.split('\n')))
    section = collections.OrderedDict()
    section['platform'] = pkg_resources.get_build_platform()
    section['python'] = python_version
    section['framework'] = qiime2.__version__
    section['plugins'] = self.plugins
    section['python-packages'] = self.capture_env()
    return section
def get_include_dirs():
    """Return the C header search paths for building the extension."""
    import numpy
    from pkg_resources import get_build_platform
    include_dirs = [
        os.path.join(os.getcwd(), 'include'),
        os.path.join(os.getcwd(), 'pyfftw'),
        numpy.get_include(),
    ]
    if get_build_platform() in ('win32', 'win-amd64'):
        # Windows compatibility headers.
        include_dirs += [os.path.join(os.getcwd(), 'include', 'win')]
    return include_dirs
def egg_name(self):
    """Derive the egg file name equivalent of this wheel."""
    wheel = self.wheel
    safe_name = pkg_resources.to_filename(pkg_resources.safe_name(wheel.name))
    safe_version = pkg_resources.to_filename(pkg_resources.safe_version(wheel.version))
    parts = [safe_name, safe_version, 'py%d.%d' % sys.version_info[:2]]
    # Any tag with a real ABI or architecture means the wheel is not pure
    # Python, so the egg name needs a platform suffix.
    impure = any(abi != 'none' or arch != 'any'
                 for _tag, abi, arch in wheel.tags)
    if impure:
        parts.append(pkg_resources.get_build_platform())
    return '-'.join(parts) + '.egg'
def ensure_bdist(self, req):
    """Return a binary egg for *req*, compiling one from source if needed."""
    log.debug('ensure bdist: %s', req)
    self._pi_local.find_packages(req)
    matching = [d for d in self._pi_local[req.key] if d in req]
    # An egg is usable if it is pure or built for this platform.
    usable_eggs = [
        d for d in matching
        if d.location.endswith('.egg')
        and d.platform in (None, get_build_platform())
    ]
    assert matching, 'No distributions found for %s' % req
    if usable_eggs:
        log.debug('Found bdist: %s', usable_eggs[0].location)
    else:
        usable_eggs = self.compile_bdist(matching[0])
        log.debug('Compiled bdist: %s', usable_eggs[0].location)
    return usable_eggs[0]
def get_package_data():
    """On Windows, bundle FFTW DLLs unless conda-forge already supplies them."""
    from pkg_resources import get_build_platform
    package_data = {}
    if get_build_platform() not in ('win32', 'win-amd64'):
        return package_data
    if 'PYFFTW_WIN_CONDAFORGE' in os.environ:
        # fftw3.dll, fftw3f.dll will already be on the path (via the
        # conda environment's \bin subfolder)
        return package_data
    # as download from http://www.fftw.org/install/windows.html
    package_data['pyfftw'] = [
        'libfftw3-3.dll', 'libfftw3l-3.dll', 'libfftw3f-3.dll']
    return package_data
def compile_bdist(self, dist):
    """Build an egg from *dist*'s source archive and cache it locally.

    Returns a one-element list holding a clone of *dist* that points at
    the newly built egg. Paths are path.py objects, so `/` joins and `+`
    concatenates onto the final component.
    """
    # `/` binds tighter than `+`, so this is "<cache>/<name>-<version>".
    build_dir = self._build_cache / dist.project_name + '-' + dist.version
    if build_dir.exists():
        # Start from a clean tree for reproducible builds.
        build_dir.rmtree()
    unpack_archive(dist.location, self._build_cache)
    dest_dir = self._local_cache / dist.project_name
    result = []
    self._build_an_egg(build_dir)
    fns = (build_dir / 'dist').listdir()
    assert len(fns) == 1, (
        "Don't know what to do with multiple files in the dist dir")
    # NOTE(review): pathname/ext are computed but never used.
    pathname, ext = path(fns[0]).splitext()
    dest_fn = dest_dir / (fns[0]).basename()
    path(fns[0]).copy(dest_fn)
    # The built egg is platform-specific; record the build platform.
    result.append(dist.clone(
        location=dest_fn, platform=get_build_platform()))
    return result
def init(local_run_value, package_path):
    u'''Initialize path values for sdaps.

    When running from the source tree (local_run), point everything at the
    build directory; otherwise search upward for an installed prefix.
    '''
    global local_run, build_dir, lib_build_dir, source_dir, prefix
    # Initialize local_run
    local_run = local_run_value
    base_dir = os.path.split(os.path.abspath(package_path))[0]
    if local_run:
        source_dir = base_dir
        from pkg_resources import get_build_platform
        from distutils.sysconfig import get_python_version
        # Initialize gettext from the in-tree message catalogs.
        init_gettext(os.path.join(
            base_dir, 'build', 'mo'))
        # Initialize build_dir
        build_dir = os.path.join(base_dir, 'build', 'share', 'sdaps')
        # Initialize lib_build_dir (distutils names it lib.<platform>-<pyver>).
        lib_build_dir = os.path.join(
            base_dir, 'build',
            'lib.%s-%s' % (get_build_platform(), get_python_version()),
            'sdaps')
    else:
        # Look for the data in the parent directories
        path = base_dir
        while True:
            if os.path.exists(os.path.join(path, 'share', 'sdaps')):
                prefix = path
                break
            new_path = os.path.split(path)[0]
            # If the path no longer changes we have reached the filesystem root.
            assert not path == new_path, "could not find locales"
            path = new_path
        # Initialize gettext from the installed locale directory.
        init_gettext(os.path.join(prefix, 'share', 'locale'))
def init(local_run_value, package_path):
    u'''Initialize path values for sdaps.

    When running from the source tree (local_run), point everything at the
    build directory; otherwise search upward for an installed prefix.
    '''
    global local_run, build_dir, lib_build_dir, source_dir, prefix
    # Initialize local_run
    local_run = local_run_value
    base_dir = os.path.split(os.path.abspath(package_path))[0]
    if local_run:
        source_dir = base_dir
        from pkg_resources import get_build_platform
        from distutils.sysconfig import get_python_version
        # Initialize gettext from the in-tree message catalogs.
        init_gettext(os.path.join(
            base_dir, 'build', 'mo'))
        # Initialize build_dir
        build_dir = os.path.join(base_dir, 'build', 'share', 'sdaps')
        # Initialize lib_build_dir (distutils names it lib.<platform>-<pyver>).
        lib_build_dir = os.path.join(
            base_dir, 'build',
            'lib.%s-%s' % (get_build_platform(), get_python_version()),
            'sdaps')
    else:
        # Initialize prefix: walk upward from base_dir until
        # path/share/sdaps exists. NOTE: the first check happens at the
        # parent of base_dir, not at base_dir itself.
        path = base_dir
        while True:
            new_path = os.path.split(path)[0]
            # If the path no longer changes we have reached the filesystem root.
            assert not path == new_path
            path = new_path
            if os.path.exists(os.path.join(path, 'share', 'sdaps')):
                prefix = path
                break
        # Initialize gettext from the installed locale directory.
        init_gettext(os.path.join(prefix, 'share', 'locale'))
# NOTE(review): this fragment is Python 2 and begins mid-`try`; the matching
# `try:` (which runs `dakota -v` and captures stdout/stderr) is not visible here.
except Exception, exc:
    print "Couldn't execute 'dakota -v':", exc
    sys.exit(1)
# Parse the version out of "DAKOTA version X ..." output.
fields = stdout.split()
if len(fields) >= 3 and \
   fields[0].upper() == 'DAKOTA' and fields[1] == 'version':
    dakota_version = fields[2]
else:
    print "Can't parse version from DAKOTA output %r" % stdout
    print " stderr output:", stderr
    sys.exit(1)
wrapper_version = '1'
# Egg name embeds both the DAKOTA and wrapper versions plus platform tags.
egg_dir = 'pyDAKOTA-%s_%s-py%s-%s.egg' % (dakota_version, wrapper_version,
                                          sys.version[0:3], get_build_platform())
# Assuming standard prefix-based install.
dakota_install = os.path.dirname(
    os.path.dirname(
        find_executable('dakota')))
dakota_bin = os.path.join(dakota_install, 'bin')
dakota_include = os.path.join(dakota_install, 'include')
dakota_lib = os.path.join(dakota_install, 'lib')
if not os.path.exists(dakota_bin) or \
   not os.path.exists(dakota_include) or \
   not os.path.exists(dakota_lib):
    print "Can't find", dakota_bin, 'or', dakota_include, 'or', dakota_lib, ', bummer'
    sys.exit(1)
# Read make macros from `install_dir`/include/Makefile.export.Dakota.
def finalize_options(self):
    """Finalize build_ext options, then patch the compiler configuration.

    Mutates distutils' global config vars in place (sysconfig caches the
    dict, so edits here affect the whole build) to strip flags that break
    the build on macOS and on toolchains without LTO-plugin support, and
    applies MSVC-specific workarounds.
    """
    build_ext.finalize_options(self)
    if self.compiler is None:
        compiler = get_default_compiler()
    else:
        compiler = self.compiler
    cfg_vars = sysconfig.get_config_vars()
    # Hack around OSX setting a -m flag
    if "macosx" in get_build_platform() and "CFLAGS" in cfg_vars:
        print("System C-flags:")
        print(cfg_vars["CFLAGS"])
        cflags = []
        for flag in cfg_vars["CFLAGS"].split():
            if flag in ["-m", "-isysroot"]:
                continue
            # Remove sdk links
            if flag.endswith(".sdk"):
                continue
            cflags.append(flag)
        cfg_vars["CFLAGS"] = " ".join(cflags)
        print("Editted C-flags:")
        print(cfg_vars["CFLAGS"])
    # Remove unsupported C-flags
    unsupported_flags = [
        "-fuse-linker-plugin", "-ffat-lto-objects", "-flto-partition=none"]
    for key in ["CFLAGS", "LDFLAGS", "LDSHARED"]:
        if key in cfg_vars:
            print("System {0}:".format(key))
            print(cfg_vars[key])
            flags = []
            for flag in cfg_vars[key].split():
                if flag in unsupported_flags:
                    continue
                flags.append(flag)
            cfg_vars[key] = " ".join(flags)
            print("Editted {0}:".format(key))
            print(cfg_vars[key])
    if compiler == 'msvc':
        # Add msvc specific hacks
        # Sort linking issues with init exported symbols
        def _get_export_symbols(self, ext):
            return ext.export_symbols
        # NOTE: patches the class attribute, affecting every build_ext instance.
        build_ext.get_export_symbols = _get_export_symbols
        if (sys.version_info.major, sys.version_info.minor) < (3, 3):
            # The check above is a nasty hack. We're using the python
            # version as a proxy for the MSVC version. 2008 doesn't
            # have stdint.h, so is needed. 2010 does.
            #
            # We need to add the path to msvc includes
            msvc_2008_path = (
                os.path.join(os.getcwd(), 'include', 'msvc_2008'))
            if self.include_dirs is not None:
                self.include_dirs.append(msvc_2008_path)
            else:
                self.include_dirs = [msvc_2008_path]
        elif (sys.version_info.major, sys.version_info.minor) < (3, 5):
            # Actually, it seems that appveyor doesn't have a stdint that
            # works, so even for 2010 we use our own (hacked) version
            # of stdint.
            # This should be pretty safe in whatever case.
            msvc_2010_path = (
                os.path.join(os.getcwd(), 'include', 'msvc_2010'))
            if self.include_dirs is not None:
                self.include_dirs.append(msvc_2010_path)
            else:
                self.include_dirs = [msvc_2010_path]
        # We need to prepend lib to all the library names
        _libraries = []
        for each_lib in self.libraries:
            _libraries.append('lib' + each_lib)
        self.libraries = _libraries
def print_system_info():
    """Print the build platform and the running Python version."""
    platform_name = pkg_resources.get_build_platform()
    print("Platform: " + platform_name)
    print("Python version: " + str(sys.version))
def ensure_coverage_importable():
  """Make a C-tracer-enabled coverage >= 3.7 importable, building it if needed.

  Python 2 code. If the system coverage module is too old or lacks the
  CTracer, it is unloaded and a vendored coverage 3.7.1 egg is built and
  prepended to sys.path.
  """
  try:
    from distutils.version import StrictVersion
    import coverage
    if (StrictVersion(coverage.__version__) < StrictVersion('3.7') or
        not coverage.collector.CTracer):
      # Too old or pure-python: forget it and fall through to the vendored egg.
      del sys.modules['coverage']
      del coverage
    else:
      return
  except ImportError:
    if sys.platform.startswith('win'):
      # In order to compile the coverage module on Windows we need to set the
      # 'VS90COMNTOOLS' environment variable. This usually point to the
      # installation folder of VS2008 but we can fake it to make it point to the
      # version of the toolchain checked in depot_tools.
      #
      # This variable usually point to the $(VsInstallDir)\Common7\Tools but is
      # only used to access %VS90COMNTOOLS%/../../VC/vcvarsall.bat and therefore
      # any valid directory respecting this structure can be used.
      vc_path = os.path.join(DEPOT_TOOLS_DIR, 'win_toolchain', 'vs2013_files',
                             'VC', 'bin')
      # If the toolchain isn't available then ask the user to fetch chromium in
      # order to install it.
      if not os.path.isdir(vc_path):
        print textwrap.dedent("""
        You probably don't have the Windows toolchain in your depot_tools
        checkout. Install it by running:
          fetch chromium
        """)
        sys.exit(1)
      os.environ['VS90COMNTOOLS'] = vc_path

  try:
    import setuptools  # pylint: disable=W0612
  except ImportError:
    print textwrap.dedent("""
    No compatible system-wide python-coverage package installed, and
    setuptools is not installed either.
    Please obtain setuptools by:
      Debian/Ubuntu:
        sudo apt-get install python-setuptools python-dev
      OS X:
        https://pypi.python.org/pypi/setuptools#unix-including-mac-os-x-curl
      Other:
        https://pypi.python.org/pypi/setuptools#installation-instructions
    """)
    sys.exit(1)

  from pkg_resources import get_build_platform
  try:
    # Python 2.7 or >= 3.2
    from sysconfig import get_python_version
  except ImportError:
    from distutils.sysconfig import get_python_version

  cov_dir = os.path.join(BASE_DIR, 'third_party', 'coverage-3.7.1')
  cov_egg = os.path.join(cov_dir, 'dist', 'coverage-3.7.1-py%s-%s.egg' % (
      get_python_version(), get_build_platform()))

  # The C-compiled coverage engine is WAY faster (and less buggy) than the pure
  # python version, so we build the dist_egg if necessary.
  if not os.path.exists(cov_egg):
    import subprocess
    print 'Building Coverage 3.7.1'
    p = subprocess.Popen([sys.executable, 'setup.py', 'bdist_egg'],
                         cwd=cov_dir, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    if p.returncode != 0:
      print 'Error while building :('
      print stdout
      print stderr
      if sys.platform.startswith('linux'):
        print textwrap.dedent("""
        You probably don't have the 'python-dev' package installed. Install
        it by running:
          sudo apt-get install python-dev
        """)
      else:
        print textwrap.dedent("""
        I'm not sure what's wrong, but your system seems incapable of building
        python extensions. Please fix that by installing a Python with headers
        and the approprite command-line build tools for your platform.
        """)
      sys.exit(1)

  sys.path.insert(0, cov_egg)
class bdist_egg(Command):
    """distutils command that builds a ``.egg`` distribution.

    Installs the project into a temporary pseudo-install tree, adds
    EGG-INFO metadata, stub loaders for C extensions, and missing package
    ``__init__`` files, then zips the tree into the output egg.
    """

    description = "create an \"egg\" distribution"

    user_options = [
        ('bdist-dir=', 'b',
         "temporary directory for creating the distribution"),
        ('plat-name=', 'p',
         "platform name to embed in generated filenames "
         "(default: %s)" % get_build_platform()),
        ('exclude-source-files', None,
         "remove all .py files from the generated egg"),
        ('keep-temp', 'k',
         "keep the pseudo-installation tree around after " +
         "creating the distribution archive"),
        ('dist-dir=', 'd',
         "directory to put final built distributions in"),
        ('skip-build', None,
         "skip rebuilding everything (for testing/debugging)"),
    ]

    boolean_options = ['keep-temp', 'skip-build', 'exclude-source-files']

    def initialize_options(self):
        # All options default to None/0 and are resolved in finalize_options.
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = 0
        self.egg_output = None
        self.exclude_source_files = None

    def finalize_options(self):
        """Resolve unset options and compute the output egg filename."""
        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
        self.egg_info = ei_cmd.egg_info
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'egg')
        if self.plat_name is None:
            self.plat_name = get_build_platform()
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
        if self.egg_output is None:
            # Compute filename of the output egg; the platform tag is used
            # only when the distribution has extension modules.
            basename = Distribution(
                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
                get_python_version(),
                self.distribution.has_ext_modules() and self.plat_name).egg_name()
            self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

    def do_install_data(self):
        # Hack for packages that install data to install's --install-lib
        self.get_finalized_command('install').install_lib = self.bdist_dir
        site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
        old, self.distribution.data_files = self.distribution.data_files, []
        for item in old:
            if isinstance(item, tuple) and len(item) == 2:
                if os.path.isabs(item[0]):
                    realpath = os.path.realpath(item[0])
                    normalized = os.path.normcase(realpath)
                    if normalized == site_packages or normalized.startswith(
                            site_packages + os.sep):
                        # Rewrite absolute site-packages paths to be relative.
                        item = realpath[len(site_packages) + 1:], item[1]
                    # XXX else: raise ???
            self.distribution.data_files.append(item)
        try:
            log.info("installing package data to %s" % self.bdist_dir)
            self.call_command('install_data', force=0, root=None)
        finally:
            # Always restore the distribution's original data_files list.
            self.distribution.data_files = old

    def get_outputs(self):
        return [self.egg_output]

    def call_command(self, cmdname, **kw):
        """Invoke reinitialized command `cmdname` with keyword args"""
        for dirname in INSTALL_DIRECTORY_ATTRS:
            kw.setdefault(dirname, self.bdist_dir)
        kw.setdefault('skip_build', self.skip_build)
        kw.setdefault('dry_run', self.dry_run)
        cmd = self.reinitialize_command(cmdname, **kw)
        self.run_command(cmdname)
        return cmd

    def run(self):
        """Build the pseudo-install tree, write EGG-INFO, and zip the egg."""
        # Generate metadata first
        self.run_command("egg_info")
        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s" % self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        old_root = instcmd.root
        instcmd.root = None
        cmd = self.call_command('install_lib', warn_dir=0)
        instcmd.root = old_root
        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        for (p, ext_name) in enumerate(ext_outputs):
            filename, ext = os.path.splitext(ext_name)
            # Each extension gets a .py stub loader next to it in the egg.
            pyfile = os.path.join(self.bdist_dir,
                                  strip_module(filename) + '.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s" % ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            ext_outputs[p] = ext_name.replace(os.sep, '/')
        to_compile.extend(self.make_init_files())
        if to_compile:
            cmd.byte_compile(to_compile)
        if self.distribution.data_files:
            self.do_install_data()
        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        egg_info = os.path.join(archive_root, 'EGG-INFO')
        self.mkpath(egg_info)
        if self.distribution.scripts:
            script_dir = os.path.join(egg_info, 'scripts')
            log.info("installing scripts to %s" % script_dir)
            self.call_command('install_scripts', install_dir=script_dir,
                              no_ep=1)
        native_libs = os.path.join(self.egg_info, "native_libs.txt")
        if all_outputs:
            log.info("writing %s" % native_libs)
            if not self.dry_run:
                libs_file = open(native_libs, 'wt')
                libs_file.write('\n'.join(all_outputs))
                libs_file.write('\n')
                libs_file.close()
        elif os.path.isfile(native_libs):
            # No native libs this time: drop a stale manifest if present.
            log.info("removing %s" % native_libs)
            if not self.dry_run:
                os.unlink(native_libs)
        self.copy_metadata_to(egg_info)
        write_safety_flag(os.path.join(archive_root, 'EGG-INFO'),
                          self.zip_safe())
        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
            log.warn(
                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                "Use the install_requires/extras_require setup() args instead."
            )
        if self.exclude_source_files:
            self.zap_pyfiles()
        # Make the archive
        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                     dry_run=self.dry_run)
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_egg', get_python_version(), self.egg_output))

    def zap_pyfiles(self):
        """Delete all .py sources from the staged tree (bytecode remains)."""
        log.info("Removing .py files from temporary directory")
        for base, dirs, files in walk_egg(self.bdist_dir):
            for name in files:
                if name.endswith('.py'):
                    path = os.path.join(base, name)
                    log.debug("Deleting %s", path)
                    os.unlink(path)

    def zip_safe(self):
        """Return the distribution's zip_safe flag, analyzing if unset."""
        safe = getattr(self.distribution, 'zip_safe', None)
        if safe is not None:
            return safe
        log.warn("zip_safe flag not set; analyzing archive contents...")
        return analyze_egg(self.bdist_dir, self.stubs)

    def make_init_files(self):
        """Create missing package __init__ files"""
        init_files = []
        for base, dirs, files in walk_egg(self.bdist_dir):
            if base == self.bdist_dir:
                # don't put an __init__ in the root
                continue
            for name in files:
                if name.endswith('.py'):
                    if '__init__.py' not in files:
                        pkg = base[len(self.bdist_dir) + 1:].replace(
                            os.sep, '.')
                        if self.distribution.has_contents_for(pkg):
                            log.warn("Creating missing __init__.py for %s",
                                     pkg)
                            filename = os.path.join(base, '__init__.py')
                            if not self.dry_run:
                                f = open(filename, 'w')
                                f.write(NS_PKG_STUB)
                                f.close()
                            init_files.append(filename)
                    break
            else:
                # not a package, don't traverse to subdirectories
                dirs[:] = []
        return init_files

    def copy_metadata_to(self, target_dir):
        # Copy every egg_info file into the EGG-INFO directory, preserving
        # sub-paths relative to the egg_info prefix.
        prefix = os.path.join(self.egg_info, '')
        for path in self.ei_cmd.filelist.files:
            if path.startswith(prefix):
                target = os.path.join(target_dir, path[len(prefix):])
                ensure_directory(target)
                self.copy_file(path, target)

    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro"""
        all_outputs = []
        ext_outputs = []
        paths = {self.bdist_dir: ''}
        for base, dirs, files in os.walk(self.bdist_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
                    all_outputs.append(paths[base] + filename)
            for filename in dirs:
                paths[os.path.join(base, filename)] = (paths[base] +
                                                       filename + '/')
        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                if isinstance(ext, Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                        ext_outputs.append(filename)
        return all_outputs, ext_outputs
class bdist_egg(Command):
    """setuptools command that builds a ``.egg`` distribution.

    Installs the project into a temporary pseudo-install tree, writes
    EGG-INFO metadata and extension stub loaders, optionally prepends an
    "eggsecutable" shell header, and zips the tree into the output egg.
    """

    description = "create an \"egg\" distribution"

    user_options = [
        ('bdist-dir=', 'b',
         "temporary directory for creating the distribution"),
        ('plat-name=', 'p',
         "platform name to embed in generated filenames "
         "(default: %s)" % get_build_platform()),
        ('exclude-source-files', None,
         "remove all .py files from the generated egg"),
        ('keep-temp', 'k',
         "keep the pseudo-installation tree around after " +
         "creating the distribution archive"),
        ('dist-dir=', 'd',
         "directory to put final built distributions in"),
        ('skip-build', None,
         "skip rebuilding everything (for testing/debugging)"),
    ]

    boolean_options = [
        'keep-temp', 'skip-build', 'exclude-source-files'
    ]

    def initialize_options(self):
        # All options default to None/0 and are resolved in finalize_options.
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = 0
        self.egg_output = None
        self.exclude_source_files = None

    def finalize_options(self):
        """Resolve unset options and compute the output egg filename."""
        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
        self.egg_info = ei_cmd.egg_info
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'egg')
        if self.plat_name is None:
            self.plat_name = get_build_platform()
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
        if self.egg_output is None:
            # Compute filename of the output egg; the platform tag is used
            # only when the distribution has extension modules.
            basename = Distribution(
                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
                get_python_version(),
                self.distribution.has_ext_modules() and self.plat_name
            ).egg_name()
            self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

    def do_install_data(self):
        # Hack for packages that install data to install's --install-lib
        self.get_finalized_command('install').install_lib = self.bdist_dir
        site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
        old, self.distribution.data_files = self.distribution.data_files, []
        for item in old:
            if isinstance(item, tuple) and len(item) == 2:
                if os.path.isabs(item[0]):
                    realpath = os.path.realpath(item[0])
                    normalized = os.path.normcase(realpath)
                    if normalized == site_packages or normalized.startswith(
                        site_packages + os.sep
                    ):
                        # Rewrite absolute site-packages paths to be relative.
                        item = realpath[len(site_packages) + 1:], item[1]
                    # XXX else: raise ???
            self.distribution.data_files.append(item)
        try:
            log.info("installing package data to %s", self.bdist_dir)
            self.call_command('install_data', force=0, root=None)
        finally:
            # Always restore the distribution's original data_files list.
            self.distribution.data_files = old

    def get_outputs(self):
        return [self.egg_output]

    def call_command(self, cmdname, **kw):
        """Invoke reinitialized command `cmdname` with keyword args"""
        for dirname in INSTALL_DIRECTORY_ATTRS:
            kw.setdefault(dirname, self.bdist_dir)
        kw.setdefault('skip_build', self.skip_build)
        kw.setdefault('dry_run', self.dry_run)
        cmd = self.reinitialize_command(cmdname, **kw)
        self.run_command(cmdname)
        return cmd

    def run(self):
        """Build the pseudo-install tree, write EGG-INFO, and zip the egg."""
        # Generate metadata first
        self.run_command("egg_info")
        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s", self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        old_root = instcmd.root
        instcmd.root = None
        if self.distribution.has_c_libraries() and not self.skip_build:
            self.run_command('build_clib')
        cmd = self.call_command('install_lib', warn_dir=0)
        instcmd.root = old_root
        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        for (p, ext_name) in enumerate(ext_outputs):
            filename, ext = os.path.splitext(ext_name)
            # Each extension gets a .py stub loader next to it in the egg.
            pyfile = os.path.join(self.bdist_dir,
                                  strip_module(filename) + '.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s", ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            ext_outputs[p] = ext_name.replace(os.sep, '/')
        if to_compile:
            cmd.byte_compile(to_compile)
        if self.distribution.data_files:
            self.do_install_data()
        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        egg_info = os.path.join(archive_root, 'EGG-INFO')
        self.mkpath(egg_info)
        if self.distribution.scripts:
            script_dir = os.path.join(egg_info, 'scripts')
            log.info("installing scripts to %s", script_dir)
            self.call_command('install_scripts', install_dir=script_dir,
                              no_ep=1)
        self.copy_metadata_to(egg_info)
        native_libs = os.path.join(egg_info, "native_libs.txt")
        if all_outputs:
            log.info("writing %s", native_libs)
            if not self.dry_run:
                ensure_directory(native_libs)
                libs_file = open(native_libs, 'wt')
                libs_file.write('\n'.join(all_outputs))
                libs_file.write('\n')
                libs_file.close()
        elif os.path.isfile(native_libs):
            # No native libs this time: drop a stale manifest if present.
            log.info("removing %s", native_libs)
            if not self.dry_run:
                os.unlink(native_libs)
        write_safety_flag(
            os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
        )
        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
            log.warn(
                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                "Use the install_requires/extras_require setup() args instead."
            )
        if self.exclude_source_files:
            self.zap_pyfiles()
        # Make the archive
        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                     dry_run=self.dry_run, mode=self.gen_header())
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_egg', get_python_version(), self.egg_output))

    def zap_pyfiles(self):
        """Delete .py sources and de-tag __pycache__ bytecode file names."""
        log.info("Removing .py files from temporary directory")
        for base, dirs, files in walk_egg(self.bdist_dir):
            for name in files:
                path = os.path.join(base, name)
                if name.endswith('.py'):
                    log.debug("Deleting %s", path)
                    os.unlink(path)
                if base.endswith('__pycache__'):
                    path_old = path
                    # Strip the interpreter magic tag, e.g.
                    # name.cpython-XY.pyc -> name.pyc, and move up one level.
                    pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                    m = re.match(pattern, name)
                    path_new = os.path.join(
                        base, os.pardir, m.group('name') + '.pyc')
                    log.info(
                        "Renaming file from [%s] to [%s]"
                        % (path_old, path_new))
                    try:
                        os.remove(path_new)
                    except OSError:
                        pass
                    os.rename(path_old, path_new)

    def zip_safe(self):
        """Return the distribution's zip_safe flag, analyzing if unset."""
        safe = getattr(self.distribution, 'zip_safe', None)
        if safe is not None:
            return safe
        log.warn("zip_safe flag not set; analyzing archive contents...")
        return analyze_egg(self.bdist_dir, self.stubs)

    def gen_header(self):
        """Return the zipfile open mode; prepend a shell header for
        'eggsecutable' entry points so the egg can be executed directly."""
        epm = EntryPoint.parse_map(self.distribution.entry_points or '')
        ep = epm.get('setuptools.installation', {}).get('eggsecutable')
        if ep is None:
            return 'w'  # not an eggsecutable, do it the usual way.
        if not ep.attrs or ep.extras:
            raise DistutilsSetupError(
                "eggsecutable entry point (%r) cannot have 'extras' "
                "or refer to a module" % (ep,)
            )
        pyver = sys.version[:3]
        pkg = ep.module_name
        full = '.'.join(ep.attrs)
        base = ep.attrs[0]
        basename = os.path.basename(self.egg_output)
        header = (
            "#!/bin/sh\n"
            'if [ `basename $0` = "%(basename)s" ]\n'
            'then exec python%(pyver)s -c "'
            "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
            "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
            '" "$@"\n'
            'else\n'
            '  echo $0 is not the correct name for this egg file.\n'
            '  echo Please rename it back to %(basename)s and try again.\n'
            '  exec false\n'
            'fi\n'
        ) % locals()
        if not self.dry_run:
            mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
            f = open(self.egg_output, 'w')
            f.write(header)
            f.close()
        # 'a': the zip content is appended after the shell header.
        return 'a'

    def copy_metadata_to(self, target_dir):
        "Copy metadata (egg info) to the target_dir"
        # normalize the path (so that a forward-slash in egg_info will
        # match using startswith below)
        norm_egg_info = os.path.normpath(self.egg_info)
        prefix = os.path.join(norm_egg_info, '')
        for path in self.ei_cmd.filelist.files:
            if path.startswith(prefix):
                target = os.path.join(target_dir, path[len(prefix):])
                ensure_directory(target)
                self.copy_file(path, target)

    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro"""
        all_outputs = []
        ext_outputs = []
        paths = {self.bdist_dir: ''}
        for base, dirs, files in sorted_walk(self.bdist_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
                    all_outputs.append(paths[base] + filename)
            for filename in dirs:
                paths[os.path.join(base, filename)] = (paths[base] +
                                                       filename + '/')
        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                if isinstance(ext, Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                        ext_outputs.append(filename)
        return all_outputs, ext_outputs
def get_extensions(no_mkl=False):
    """Build the list of C Extension objects for the eqcorrscan native library.

    Chooses compiler/linker flags per platform, optionally links FFTW
    statically (when the STATIC_FFTW_DIR environment variable is set), and
    optionally adds Intel MKL include/library paths.

    Parameters
    ----------
    no_mkl : bool
        When True, skip MKL detection entirely and link against the
        libraries reported by ``get_libraries()``.

    Returns
    -------
    list
        A single-element list holding the ``eqcorrscan.utils.lib.libutils``
        Extension, or an empty list on Read the Docs builds.
    """
    from distutils.extension import Extension
    # Read the Docs has no compiler toolchain; skip native builds there.
    if READ_THE_DOCS:
        return []
    # will use static linking if STATIC_FFTW_DIR defined
    static_fftw_path = os.environ.get('STATIC_FFTW_DIR', None)
    link_static_fftw = static_fftw_path is not None
    common_extension_args = {
        'include_dirs': get_include_dirs(),
        'library_dirs': get_library_dirs()}
    sources = [os.path.join('eqcorrscan', 'utils', 'src', 'multi_corr.c'),
               os.path.join('eqcorrscan', 'utils', 'src', 'time_corr.c'),
               os.path.join('eqcorrscan', 'utils', 'src', 'find_peaks.c'),
               os.path.join('eqcorrscan', 'utils', 'src',
                            'distance_cluster.c')]
    exp_symbols = export_symbols("eqcorrscan/utils/src/libutils.def")
    if get_build_platform() not in ('win32', 'win-amd64'):
        if get_build_platform().startswith('freebsd'):
            # Clang uses libomp, not libgomp
            extra_link_args = ['-lm', '-lomp']
        else:
            extra_link_args = ['-lm', '-lgomp']
        extra_compile_args = ['-fopenmp']
        # SSE2 vectorisation flags are only valid on x86; skip them on ARM.
        if all(arch not in get_build_platform()
               for arch in ['arm', 'aarch']):
            extra_compile_args.extend(['-msse2', '-ftree-vectorize'])
    else:
        # MSVC: OpenMP via /openmp, compile the C sources as C++ (/TP).
        extra_link_args = []
        extra_compile_args = ['/openmp', '/TP']
    libraries = get_libraries()
    if link_static_fftw:
        # Static FFTW: pass the archive files directly to the linker and
        # keep the 'libraries' list empty so no dynamic FFTW is pulled in.
        if get_build_platform() in ('win32', 'win-amd64'):
            lib_pre = ''
            lib_ext = '.lib'
        else:
            lib_pre = 'lib'
            lib_ext = '.a'
        for lib in libraries:
            extra_link_args.append(
                os.path.join(static_fftw_path, lib_pre + lib + lib_ext))
        common_extension_args['extra_link_args'] = extra_link_args
        common_extension_args['libraries'] = []
        common_extension_args['extra_compile_args'] = extra_compile_args
        common_extension_args['export_symbols'] = exp_symbols
    else:
        # otherwise we use dynamic libraries
        common_extension_args['extra_link_args'] = extra_link_args
        common_extension_args['extra_compile_args'] = extra_compile_args
        common_extension_args['export_symbols'] = exp_symbols
        # NOTE(review): the MKL handling is nested under the dynamic-link
        # branch so the static-FFTW path keeps libraries == [] — confirm
        # against the upstream setup script.
        if no_mkl:
            mkl = None
        else:
            mkl = get_mkl()
        if mkl is not None:
            # use MKL if we have it: mkl is assumed to be a triple of
            # (include_dirs, library_dirs, libraries) — TODO confirm.
            common_extension_args['include_dirs'].extend(mkl[0])
            common_extension_args['library_dirs'].extend(mkl[1])
            common_extension_args['libraries'] = mkl[2]
        else:
            common_extension_args['libraries'] = libraries
    ext_modules = [
        Extension('eqcorrscan.utils.lib.libutils', sources=sources,
                  **common_extension_args)]
    return ext_modules
def get_egg_name(dist):
    """Return the canonical egg file name for *dist*.

    Builds a throwaway ``pkg_resources.Distribution`` carrying the
    distribution's name, version, the current Python version and the
    current build platform, and asks it for its egg name.
    """
    probe = pkg_resources.Distribution(
        project_name=dist.get_name(),
        version=dist.get_version(),
        py_version=distutils.sysconfig.get_python_version(),
        platform=pkg_resources.get_build_platform(),
    )
    return probe.egg_name()
def ensure_coverage_importable():
    """Make a C-tracer-enabled coverage >= 3.7 importable, building it if needed.

    Python 2 module (print statements).  If a suitable system-wide coverage
    is already importable, returns immediately.  Otherwise falls through to
    a locally built coverage 3.7.1 egg under third_party/, building it with
    setup.py bdist_egg on first use, and prepends that egg to sys.path.
    Exits the process (sys.exit(1)) when the build prerequisites are missing.
    """
    try:
        from distutils.version import StrictVersion
        import coverage
        if (StrictVersion(coverage.__version__) < StrictVersion('3.7') or
                not coverage.collector.CTracer):
            # Installed coverage is too old or lacks the C tracer: forget it
            # so the locally built egg (set up below) wins the import.
            del sys.modules['coverage']
            del coverage
        else:
            return
    except ImportError:
        if sys.platform.startswith('win'):
            # In order to compile the coverage module on Windows we need to set the
            # 'VS90COMNTOOLS' environment variable. This usually point to the
            # installation folder of VS2008 but we can fake it to make it point to the
            # version of the toolchain checked in depot_tools.
            #
            # This variable usually point to the $(VsInstallDir)\Common7\Tools but is
            # only used to access %VS90COMNTOOLS%/../../VC/vcvarsall.bat and therefore
            # any valid directory respecting this structure can be used.
            vc_path = os.path.join(DEPOT_TOOLS_DIR, 'win_toolchain',
                                   'vs2013_files', 'VC', 'bin')
            # If the toolchain isn't available then ask the user to fetch chromium in
            # order to install it.
            if not os.path.isdir(vc_path):
                print textwrap.dedent("""
                You probably don't have the Windows toolchain in your depot_tools
                checkout. Install it by running:
                  fetch chromium
                """)
                sys.exit(1)
            os.environ['VS90COMNTOOLS'] = vc_path
    # setuptools is required to run the bdist_egg build below.
    try:
        import setuptools  # pylint: disable=W0612
    except ImportError:
        print textwrap.dedent("""
        No compatible system-wide python-coverage package installed, and
        setuptools is not installed either.

        Please obtain setuptools by:
          Debian/Ubuntu:
            sudo apt-get install python-setuptools python-dev
          OS X:
            https://pypi.python.org/pypi/setuptools#unix-including-mac-os-x-curl
          Other:
            https://pypi.python.org/pypi/setuptools#installation-instructions
        """)
        sys.exit(1)
    from pkg_resources import get_build_platform
    try:
        # Python 2.7 or >= 3.2
        from sysconfig import get_python_version
    except ImportError:
        from distutils.sysconfig import get_python_version
    cov_dir = os.path.join(BASE_DIR, 'third_party', 'coverage-3.7.1')
    cov_egg = os.path.join(
        cov_dir, 'dist',
        'coverage-3.7.1-py%s-%s.egg' % (get_python_version(),
                                        get_build_platform()))
    # The C-compiled coverage engine is WAY faster (and less buggy) than the pure
    # python version, so we build the dist_egg if necessary.
    if not os.path.exists(cov_egg):
        import subprocess
        print 'Building Coverage 3.7.1'
        p = subprocess.Popen([sys.executable, 'setup.py', 'bdist_egg'],
                             cwd=cov_dir,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
        if p.returncode != 0:
            print 'Error while building :('
            print stdout
            print stderr
            if sys.platform.startswith('linux'):
                print textwrap.dedent("""
                You probably don't have the 'python-dev' package installed.
                Install it by running:
                  sudo apt-get install python-dev
                """)
            else:
                print textwrap.dedent("""
                I'm not sure what's wrong, but your system seems incapable of
                building python extensions. Please fix that by installing a
                Python with headers and the approprite command-line build
                tools for your platform.
                """)
            sys.exit(1)
    # Prepend so the freshly built egg shadows any stale system coverage.
    sys.path.insert(0, cov_egg)
except ImportError as e: sources = [os.path.join(os.getcwd(), 'pyfftw', 'pyfftw.c')] if not os.path.exists(sources[0]): raise ImportError(str(e) + '. ' + 'Cython is required to build the initial .c file.') # We can't cythonize, but that's ok as it's been done already. from setuptools.command.build_ext import build_ext include_dirs = [os.path.join(os.getcwd(), 'include'), os.path.join(os.getcwd(), 'pyfftw'), numpy.get_include()] library_dirs = [] package_data = {} if get_build_platform() in ('win32', 'win-amd64'): libraries = ['libfftw3-3', 'libfftw3f-3', 'libfftw3l-3'] include_dirs.append(os.path.join(os.getcwd(), 'include', 'win')) library_dirs.append(os.path.join(os.getcwd(), 'pyfftw')) package_data['pyfftw'] = [ 'libfftw3-3.dll', 'libfftw3l-3.dll', 'libfftw3f-3.dll'] else: libraries = ['fftw3', 'fftw3f', 'fftw3l', 'fftw3_threads', 'fftw3f_threads', 'fftw3l_threads'] class custom_build_ext(build_ext): def finalize_options(self): build_ext.finalize_options(self) if self.compiler is None:
from pkg_resources import get_build_platform from setuptools import find_packages from Cython.Build import cythonize import distutils import numpy as np import os import sys include_dirs = [ np.get_include(), os.path.join(os.getcwd(), 'fftw'), os.path.join(os.getcwd(), 'source/') ] if get_build_platform() == 'win32': if sys.version_info[0] == 3: library_dirs = ['fftw/win32'] elif sys.version_info[0] == 2: library_dirs = ['fftw/win32/dll'] libraries = ['libfftw3-3', 'libfftw3f-3', 'libfftw3l-3'] elif get_build_platform() == 'win-amd64': if sys.version_info[0] == 3: library_dirs = ['fftw/win64'] elif sys.version_info[0] == 2: library_dirs = ['fftw/win64/dll'] libraries = ['libfftw3-3', 'libfftw3f-3', 'libfftw3l-3'] else: library_dirs = []
except Exception, exc: print "Couldn't execute 'dakota -v':", exc sys.exit(1) fields = stdout.split() if len(fields) >= 3 and \ fields[0].upper() == 'DAKOTA' and fields[1] == 'version': dakota_version = fields[2] else: print "Can't parse version from DAKOTA output %r" % stdout print " stderr output:", stderr sys.exit(1) wrapper_version = '1' egg_dir = 'carolina-%s_%s-py%s-%s.egg' % (dakota_version, wrapper_version, sys.version[0:3], get_build_platform()) # Assuming standard prefix-based install. dakota_install = os.path.dirname(os.path.dirname(find_executable('dakota'))) dakota_bin = os.path.join(dakota_install, 'bin') dakota_include = os.path.join(dakota_install, 'include') dakota_lib = os.path.join(dakota_install, 'lib') if not os.path.exists(dakota_bin) or \ not os.path.exists(dakota_include) or \ not os.path.exists(dakota_lib): print "Can't find", dakota_bin, 'or', dakota_include, 'or', dakota_lib, ', bummer' sys.exit(1) # Read make macros from `install_dir`/include/Makefile.export.Dakota. dakota_macros = {} with open(os.path.join(dakota_install, 'include',
if hasattr(os, 'add_dll_directory'): os.add_dll_directory(extra_dll_dir) if os.sys.platform == 'win32': __PyscesConfigDefault = PyscesConfig.__DefaultWin else: __PyscesConfigDefault = PyscesConfig.__DefaultPosix if DEBUG: print(time.strftime('1-%H:%M:%S')) eggdir = 'pysces-%s-py%s.%s-%s.egg' % ( __version__, os.sys.version_info[0], os.sys.version_info[1], get_build_platform(), ) for path in os.sys.path: chkPath = path.split(os.path.sep)[-1] if chkPath == 'pysces' and path != os.getcwd(): if os.path.isdir(os.path.join(path, 'pysces')): # for in-place development with setup.py develop install_dir = os.path.join(path, 'pysces') else: install_dir = path inipath = os.path.join(install_dir, 'pyscfg.ini') break elif chkPath == eggdir: install_dir = os.path.join(path, 'pysces') inipath = os.path.join(install_dir, 'pyscfg.ini') break
#tools.download_and_extract(download_file) from pkg_resources import get_build_platform import numpy as np sources = ['_wrapper.pyx','../../csources/ace_20121029/ace.c','../../csources/ace_20121029/acecli.c','../../csources/ace_20121029/basic.c','../../csources/ace_20121029/imageio.c'] include_dirs=[ np.get_include(),'.'] libraries = [] extra_compile_args=['-std=c99','-lm','-DUSE_LIBPNG'] libraries=[] library_dirs=[] if get_build_platform() in ('win32', 'win-amd64'): include_dirs+=['../../thirdparties/fftw','../../thirdparties/libpng'] libraries+=['libfftw3-3','libfftw3f-3','libfftw3l-3'] libraries+=['libpng12'] library_dirs+=['../../thirdparties/fftw'] library_dirs+=['../../thirdparties/libpng'] else: extra_compile_args+=['-lfftw3f'] libraries+=['fftw3f'] libraries+=['libpng'] if __name__ == '__main__': extensions = Extension('_wrapper',sources,extra_compile_args=extra_compile_args, libraries=libraries, library_dirs= library_dirs) from Cython.Build import cythonize
class bdist_spk(Command):
    """distutils/setuptools command that builds an ".spk" application archive.

    Performs a pseudo-installation of the library code and data files into a
    temporary tree, replaces every .py module with a marshalled, sha256-named
    bytecode file plus a manifest.json describing the app, then zips the tree
    into <dist_dir>/<name>.spk.
    """

    description = "create an \"spk\" distribution"

    user_options = [
        ('bdist-dir=', 'b',
         "temporary directory for creating the distribution"),
        ('plat-name=', 'p',
         "platform name to embed in generated filenames "
         "(default: %s)" % get_build_platform()),
        ('exclude-source-files', None,
         "remove all .py files from the generated spk"),
        ('keep-temp', 'k',
         "keep the pseudo-installation tree around after " +
         "creating the distribution archive"),
        ('dist-dir=', 'd',
         "directory to put final built distributions in"),
        ('skip-build', None,
         "skip rebuilding everything (for testing/debugging)"),
    ]

    boolean_options = ['keep-temp', 'skip-build', 'exclude-source-files']

    def initialize_options(self):
        # All options default to unset; finalize_options fills them in.
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = 0
        self.egg_output = None
        # Source stripping is on by default: the .spk ships bytecode only.
        self.exclude_source_files = True

    def finalize_options(self):
        # ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
        # self.egg_info = ei_cmd.egg_info
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'spk')
        if self.plat_name is None:
            self.plat_name = get_build_platform()
        # Inherit dist_dir from the parent 'bdist' command if not given.
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
        self.egg_output = os.path.join(
            self.dist_dir, self.distribution.get_name() + '.spk')

    def do_install_data(self):
        # Hack for packages that install data to install's --install-lib
        self.get_finalized_command('install').install_lib = self.bdist_dir
        site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
        # Temporarily swap out data_files with a rewritten copy in which
        # absolute site-packages targets become bdist-relative paths.
        old, self.distribution.data_files = self.distribution.data_files, []
        for item in old:
            if isinstance(item, tuple) and len(item) == 2:
                if os.path.isabs(item[0]):
                    realpath = os.path.realpath(item[0])
                    normalized = os.path.normcase(realpath)
                    if normalized == site_packages or normalized.startswith(
                            site_packages + os.sep):
                        item = realpath[len(site_packages) + 1:], item[1]
                    # XXX else: raise ???
            self.distribution.data_files.append(item)
        try:
            log.info("installing package data to %s" % self.bdist_dir)
            self.call_command('install_data', force=0, root=None)
        finally:
            # Always restore the original data_files list.
            self.distribution.data_files = old

    def get_outputs(self):
        # The single artifact this command produces.
        return [self.egg_output]

    def call_command(self, cmdname, **kw):
        """Invoke reinitialized command `cmdname` with keyword args"""
        # Point every install directory at the temporary bdist tree.
        for dirname in INSTALL_DIRECTORY_ATTRS:
            kw.setdefault(dirname, self.bdist_dir)
        kw.setdefault('skip_build', self.skip_build)
        kw.setdefault('dry_run', self.dry_run)
        cmd = self.reinitialize_command(cmdname, **kw)
        self.run_command(cmdname)
        return cmd

    def byte_compile(self, base_path, path):
        """Compile one .py file under *base_path* into a marshalled bytecode
        blob named by the sha256 of its dotted module name, and return its
        manifest metadata ({'module', 'filename', 'is_pkg'}).

        *base_path* and *path* are pathlib.Path objects.
        """
        log.info("Compiling %s" % path)
        relpath = path.relative_to(base_path)
        ext = path.suffix
        # Module name rooted under the fixed 'apps' namespace, e.g.
        # foo/bar.py -> apps.foo.bar.
        module = 'apps.{}'.format(
            str(relpath.as_posix()).replace(ext, '').replace('/', '.'))
        is_pkg = False
        if path.stem == '__init__':
            # Packages are addressed by their directory name.
            module = module.replace('.__init__', '')
            is_pkg = True
        print(module)
        filename = sha256(module.encode('ascii')).hexdigest()
        # optimize=2 strips docstrings and asserts from the shipped bytecode.
        data = compile(path.read_bytes(),
                       '<app | {} | {}>'.format(
                           self.distribution.get_name(), relpath.as_posix()),
                       'exec', dont_inherit=False, optimize=2)
        bincode = marshal.dumps(data)
        with open(os.path.join(self.bdist_dir, filename), 'wb') as ofile:
            ofile.write(bincode)
        metadata = {'module': module, 'filename': filename, 'is_pkg': is_pkg}
        return metadata

    def run(self):
        # Generate metadata first
        # self.run_command("egg_info")
        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s" % self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        # Neutralize any --root so install_lib lands in bdist_dir; restored
        # right after the sub-command runs.
        old_root = instcmd.root
        instcmd.root = None
        if self.distribution.has_c_libraries() and not self.skip_build:
            self.run_command('build_clib')
        cmd = self.call_command('install_lib', warn_dir=0, compile=False,
                                optimize=0)
        instcmd.root = old_root
        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        # Write a .py stub loader beside each compiled extension and
        # normalize the recorded extension paths to forward slashes.
        for (p, ext_name) in enumerate(ext_outputs):
            filename, ext = os.path.splitext(ext_name)
            pyfile = os.path.join(self.bdist_dir,
                                  strip_module(filename) + '.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s" % ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            ext_outputs[p] = ext_name.replace(os.sep, '/')
        # Marshal every installed .py (including the stubs just written)
        # and collect their manifest entries.
        modules = []
        bdist_path = Path(self.bdist_dir)
        for item in bdist_path.glob('**/*.py'):
            modules.append(self.byte_compile(bdist_path, item))
        # Application metadata pulled from setup() attributes; the custom
        # keys (title, logo, compatibility, uappid, configuration) are
        # project-specific setup keywords — TODO confirm against setup.py.
        app_config = dict(
            name=self.distribution.get_name(),
            title=self.distribution.attrs.get('title'),
            logo=os.path.basename(self.distribution.attrs.get('logo'))
                if self.distribution.attrs.get('logo') else None,
            compatibility=self.distribution.attrs.get('compatibility'),
            description=self.distribution.get_description(),
            version=self.distribution.get_version(),
            developer=dict(
                email=self.distribution.get_author_email(),
                name=self.distribution.get_author(),
                website=self.distribution.get_url(),
            ),
            uappid=self.distribution.attrs.get('uappid'),
            category=self.distribution.get_keywords(),
            configuration=self.distribution.attrs.get('configuration'))
        manifest = dict(app_config=app_config, modules=modules)
        with open(os.path.join(self.bdist_dir, 'manifest.json'),
                  'w') as ofile:
            json.dump(manifest, ofile)
        # Add the logo in the package
        if self.distribution.attrs.get('logo'):
            shutil.copy(self.distribution.attrs.get('logo'), self.bdist_dir)
        if to_compile:
            cmd.byte_compile(to_compile)
        if self.distribution.data_files:
            self.do_install_data()
        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        if self.exclude_source_files:
            self.zap_pyfiles()
        # Make the archive
        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                     dry_run=self.dry_run, mode='w')
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

    def zap_pyfiles(self):
        # Drop the .py sources so only the marshalled bytecode ships.
        log.info("Removing .py files from temporary directory")
        for base, dirs, files in walk_spk(self.bdist_dir):
            for name in files:
                if name.endswith('.py'):
                    path = os.path.join(base, name)
                    log.debug("Deleting %s", path)
                    os.unlink(path)

    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro"""
        all_outputs = []
        ext_outputs = []
        # Map each walked directory to its bdist-relative prefix ('' = root).
        paths = {self.bdist_dir: ''}
        for base, dirs, files in os.walk(self.bdist_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
                    all_outputs.append(paths[base] + filename)
            for filename in dirs:
                paths[os.path.join(base, filename)] = (paths[base] +
                                                       filename + '/')
        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                # Library objects are shared libs, not importable extensions.
                if isinstance(ext, Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                # 'dl-' prefixed files are skipped — presumably dynamic-load
                # helpers that need no stub; confirm against project docs.
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                        ext_outputs.append(filename)
        return all_outputs, ext_outputs
QSTEM is a program for quantitative image simulation in electron microscopy, including TEM, STEM and CBED image simulation. This project interfaces the QSTEM code with Python and the Atomic Simulation Environment (ASE) to provide a single environment for building models, simulating and analysing images. This package requires that the FFTW library has already been installed. """ include_dirs = [ np.get_include(), os.path.join(os.getcwd(), 'fftw'), os.path.join(os.getcwd(), 'source/'), ] # Detect Anaconda and use Anaconda's FFTW on Windows. is_conda = os.path.exists(os.path.join(sys.prefix, 'conda-meta')) is_windows = get_build_platform() in ['win32', 'win-amd64'] is_mac = 'macosx' in get_build_platform() if is_mac: is_highsierra_or_older = platform.mac_ver()[0] < '10.14' if not is_conda and get_build_platform() == 'win32': # 32-bit Windows and not Anaconda: Use FFTW packaged with PyQSTEM if sys.version_info[0] == 3: library_dirs = ['fftw/win32'] elif sys.version_info[0] == 2: library_dirs = ['fftw/win32/dll'] libraries = ['libfftw3-3', 'libfftw3f-3', 'libfftw3l-3'] elif not is_conda and get_build_platform() == 'win-amd64': # 64-bit Windows and not Anaconda: Use FFTW packaged with PyQSTEM if sys.version_info[0] == 3: library_dirs = ['fftw/win64']
except Exception, exc: print "Couldn't execute 'dakota -v':", exc sys.exit(1) fields = stdout.split() if len(fields) >= 3 and \ fields[0].upper() == 'DAKOTA' and fields[1] == 'version': dakota_version = fields[2] else: print "Can't parse version from DAKOTA output %r" % stdout print " stderr output:", stderr sys.exit(1) wrapper_version = '1' egg_dir = 'pyDAKOTA-%s_%s-py%s-%s.egg' % ( dakota_version, wrapper_version, sys.version[0:3], get_build_platform()) # Assuming standard prefix-based install. dakota_install = os.path.dirname(os.path.dirname(find_executable('dakota'))) dakota_bin = os.path.join(dakota_install, 'bin') dakota_include = os.path.join(dakota_install, 'include') dakota_lib = os.path.join(dakota_install, 'lib') if not os.path.exists(dakota_bin) or \ not os.path.exists(dakota_include) or \ not os.path.exists(dakota_lib): print "Can't find", dakota_bin, 'or', dakota_include, 'or', dakota_lib, ', bummer' sys.exit(1) # Read make macros from `install_dir`/include/Makefile.export.Dakota. dakota_macros = {} with open(os.path.join(dakota_install, 'include', 'Makefile.export.Dakota'),