import os, sys from distutils.core import setup from distutils.errors import DistutilsError from py2exe2msi import __version__ as VERSION PACKAGE_NAME = 'py2exe2msi' if sys.version_info < (2, 6): raise DistutilsError('this package requires Python 2.6 or later') setup( name=PACKAGE_NAME, version=VERSION, description= 'An easy way to create Windows standalone applications in Python', author='Artem Andreev', author_email='*****@*****.**', url='http://code.google.com/p/py2exe2msi/', packages=['py2exe2msi'], long_description='''py2exe2msi is an extension to distutils which creates MSI packages for py2exe-compiled applications''', classifiers=[ 'License :: OSI Approved', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: Microsoft', 'Operating System :: Microsoft :: Windows', 'Topic :: Software Development', 'Topic :: Software Development :: Build Tools', 'Topic :: System', 'Topic :: System :: Archiving',
module += '.' for name in sys.modules: if name.startswith(module): del_modules.append(name) list(map(sys.modules.__delitem__, del_modules)) test = unittest.main( None, None, self._argv, testLoader=self._resolve_as_ep(self.test_loader), testRunner=self._resolve_as_ep(self.test_runner), exit=False, ) if not test.result.wasSuccessful(): msg = 'Test failed: %s' % test.result self.announce(msg, log.ERROR) raise DistutilsError(msg) @property def _argv(self): return ['unittest'] + self.test_args @staticmethod def _resolve_as_ep(val): """ Load the indicated attribute value, called, as a as if it were specified as an entry point. """ if val is None: return parsed = EntryPoint.parse("x=" + val) return parsed.resolve()()
def run(self):
    """Build the Sphinx docs, zip them, and upload the zip to the package
    index.

    Steps: (1) run ``make html`` in doc/, (2) collect doc/_build/html into
    dist/<name>-docs.zip, (3) POST the zip as a ':action': 'doc_upload'
    multipart request to ``self.repository`` (modeled on the distutils
    'upload' command).  Python 2 code (StringIO/httplib/urlparse).

    Raises DistutilsError if sphinx-build fails; network errors are
    announced and swallowed.
    """
    import subprocess
    import shutil
    import zipfile
    import os
    import urllib
    import StringIO
    from base64 import standard_b64encode
    import httplib
    import urlparse

    # Extract the package name from distutils metadata
    meta = self.distribution.metadata
    name = meta.get_name()

    # Run sphinx from a clean build directory
    if os.path.exists('doc/_build'):
        shutil.rmtree('doc/_build')
    os.mkdir('doc/_build')

    p = subprocess.Popen(['make', 'html'], cwd='doc')
    exit_code = p.wait()  # renamed from 'exit' to avoid shadowing the builtin
    if exit_code != 0:
        raise DistutilsError("sphinx-build failed")

    # Collect sphinx output
    if not os.path.exists('dist'):
        os.mkdir('dist')
    zf = zipfile.ZipFile('dist/%s-docs.zip' % (name,), 'w',
                         compression=zipfile.ZIP_DEFLATED)

    for toplevel, dirs, files in os.walk('doc/_build/html'):
        for fn in files:
            fullname = os.path.join(toplevel, fn)
            relname = os.path.relpath(fullname, 'doc/_build/html')
            print ("%s -> %s" % (fullname, relname))
            zf.write(fullname, relname)
    zf.close()

    # Upload the results, this code is based on the distutils
    # 'upload' command.
    content = open('dist/%s-docs.zip' % (name,), 'rb').read()
    data = {
        ':action': 'doc_upload',
        'name': name,
        'content': ('%s-docs.zip' % (name,), content),
    }
    auth = "Basic " + standard_b64encode(self.username + ":" + self.password)

    boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
    sep_boundary = '\n--' + boundary
    end_boundary = sep_boundary + '--'

    body = StringIO.StringIO()
    for key, value in data.items():
        if not isinstance(value, list):
            value = [value]
        for value in value:
            if isinstance(value, tuple):
                fn = ';filename="%s"' % (value[0])
                value = value[1]
            else:
                fn = ''
            body.write(sep_boundary)
            body.write('\nContent-Disposition: form-data; name="%s"' % key)
            body.write(fn)
            body.write("\n\n")
            body.write(value)
    body.write(end_boundary)
    body.write('\n')
    body = body.getvalue()

    self.announce("Uploading documentation to %s" % (self.repository,),
                  log.INFO)

    schema, netloc, url, params, query, fragments = \
        urlparse.urlparse(self.repository)

    if schema == 'http':
        http = httplib.HTTPConnection(netloc)
    elif schema == 'https':
        http = httplib.HTTPSConnection(netloc)
    else:
        raise AssertionError("unsupported schema " + schema)

    data = ''
    loglevel = log.INFO
    try:
        http.connect()
        http.putrequest("POST", url)
        http.putheader('Content-type',
                       'multipart/form-data; boundary=%s' % boundary)
        http.putheader('Content-length', str(len(body)))
        http.putheader('Authorization', auth)
        http.endheaders()
        http.send(body)
    except socket.error as e:
        # BUG FIX: original did `e = socket.exc_info()[1]` -- the socket
        # module has no exc_info() (that is sys.exc_info()); bind the
        # exception directly instead.
        self.announce(str(e), log.ERROR)
        return

    r = http.getresponse()
    if r.status in (200, 301):
        self.announce('Upload succeeded (%s): %s' % (r.status, r.reason),
                      log.INFO)
    else:
        self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                      log.ERROR)
        print ('-' * 75)
        print (r.read())
        print ('-' * 75)
def build_a_library(self, build_info, lib_name, libraries):
    """Compile one entry of the 'libraries' option into a static library.

    *build_info* is the per-library option dict ('sources', 'macros',
    'include_dirs', 'config_fc', ...); *lib_name* the library name;
    *libraries* the full (name, build_info) list, used at the end to
    propagate inter-library dependencies.

    Raises DistutilsSetupError for a missing/invalid 'sources' list and
    DistutilsError when Fortran sources exist but no Fortran compiler
    was found.
    """
    # default compilers
    compiler = self.compiler
    fcompiler = self._f_compiler

    sources = build_info.get('sources')
    if sources is None or not is_sequence(sources):
        raise DistutilsSetupError(
            ("in 'libraries' option (library '%s'), " +
             "'sources' must be present and must be " +
             "a list of source filenames") % lib_name)
    sources = list(sources)

    # Split sources by language; F90 module sources force the f90 path.
    c_sources, cxx_sources, f_sources, fmodule_sources \
        = filter_sources(sources)
    requiref90 = not not fmodule_sources or \
        build_info.get('language', 'c') == 'f90'

    # save source type information so that build_ext can use it.
    source_languages = []
    if c_sources:
        source_languages.append('c')
    if cxx_sources:
        source_languages.append('c++')
    if requiref90:
        source_languages.append('f90')
    elif f_sources:
        source_languages.append('f77')
    build_info['source_languages'] = source_languages

    lib_file = compiler.library_filename(lib_name,
                                         output_dir=self.build_clib)
    depends = sources + build_info.get('depends', [])
    # Skip rebuild when the library is newer than all its inputs.
    if not (self.force or newer_group(depends, lib_file, 'newer')):
        log.debug("skipping '%s' library (up-to-date)", lib_name)
        return
    else:
        log.info("building '%s' library", lib_name)

    # Per-library Fortran compiler configuration overrides the global one;
    # a fresh fcompiler instance is created so the override stays local.
    config_fc = build_info.get('config_fc', {})
    if fcompiler is not None and config_fc:
        log.info('using additional config_fc from setup script '
                 'for fortran compiler: %s' % (config_fc, ))
        from numpy.distutils.fcompiler import new_fcompiler
        fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
                                  verbose=self.verbose,
                                  dry_run=self.dry_run,
                                  force=self.force,
                                  requiref90=requiref90,
                                  c_compiler=self.compiler)
        if fcompiler is not None:
            dist = self.distribution
            base_config_fc = dist.get_option_dict('config_fc').copy()
            base_config_fc.update(config_fc)
            fcompiler.customize(base_config_fc)

    # check availability of Fortran compilers
    if (f_sources or fmodule_sources) and fcompiler is None:
        raise DistutilsError("library %s has Fortran sources"
                             " but no Fortran compiler found" % (lib_name))

    if fcompiler is not None:
        fcompiler.extra_f77_compile_args = build_info.get(
            'extra_f77_compile_args') or []
        fcompiler.extra_f90_compile_args = build_info.get(
            'extra_f90_compile_args') or []

    macros = build_info.get('macros')
    include_dirs = build_info.get('include_dirs')
    if include_dirs is None:
        include_dirs = []
    extra_postargs = build_info.get('extra_compiler_args') or []

    include_dirs.extend(get_numpy_include_dirs())
    # where compiled F90 module files are:
    module_dirs = build_info.get('module_dirs') or []
    module_build_dir = os.path.dirname(lib_file)
    if requiref90:
        self.mkpath(module_build_dir)

    if compiler.compiler_type == 'msvc':
        # this hack works around the msvc compiler attributes
        # problem, msvc uses its own convention :(
        c_sources += cxx_sources
        cxx_sources = []

    objects = []
    if c_sources:
        log.info("compiling C sources")
        objects = compiler.compile(c_sources,
                                   output_dir=self.build_temp,
                                   macros=macros,
                                   include_dirs=include_dirs,
                                   debug=self.debug,
                                   extra_postargs=extra_postargs)

    if cxx_sources:
        log.info("compiling C++ sources")
        cxx_compiler = compiler.cxx_compiler()
        cxx_objects = cxx_compiler.compile(cxx_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs)
        objects.extend(cxx_objects)

    if f_sources or fmodule_sources:
        # Fortran compile flags are built separately from the C ones.
        extra_postargs = []
        f_objects = []

        if requiref90:
            if fcompiler.module_dir_switch is None:
                # Compiler drops .mod files in the cwd; snapshot what is
                # already there so only new ones are moved afterwards.
                existing_modules = glob('*.mod')
            extra_postargs += fcompiler.module_options(
                module_dirs, module_build_dir)

        if fmodule_sources:
            log.info("compiling Fortran 90 module sources")
            f_objects += fcompiler.compile(fmodule_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs)

        if requiref90 and self._f_compiler.module_dir_switch is None:
            # move new compiled F90 module files to module_build_dir
            for f in glob('*.mod'):
                if f in existing_modules:
                    continue
                t = os.path.join(module_build_dir, f)
                if os.path.abspath(f) == os.path.abspath(t):
                    continue
                if os.path.isfile(t):
                    os.remove(t)
                try:
                    self.move_file(f, module_build_dir)
                except DistutilsFileError:
                    log.warn('failed to move %r to %r'
                             % (f, module_build_dir))

        if f_sources:
            log.info("compiling Fortran sources")
            f_objects += fcompiler.compile(f_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs)
    else:
        f_objects = []

    if f_objects and not fcompiler.can_ccompiler_link(compiler):
        # Default linker cannot link Fortran object files, and results
        # need to be wrapped later. Instead of creating a real static
        # library, just keep track of the object files.
        listfn = os.path.join(self.build_clib, lib_name + '.fobjects')
        with open(listfn, 'w') as f:
            f.write("\n".join(os.path.abspath(obj) for obj in f_objects))

        listfn = os.path.join(self.build_clib, lib_name + '.cobjects')
        with open(listfn, 'w') as f:
            f.write("\n".join(os.path.abspath(obj) for obj in objects))

        # create empty "library" file for dependency tracking
        lib_fname = os.path.join(self.build_clib,
                                 lib_name + compiler.static_lib_extension)
        with open(lib_fname, 'wb') as f:
            pass
    else:
        # assume that default linker is suitable for
        # linking Fortran object files
        objects.extend(f_objects)
        compiler.create_static_lib(objects, lib_name,
                                   output_dir=self.build_clib,
                                   debug=self.debug)

    # fix library dependencies: pull in the 'libraries' of any sibling
    # library this one declares a dependency on.
    clib_libraries = build_info.get('libraries', [])
    for lname, binfo in libraries:
        if lname in clib_libraries:
            clib_libraries.extend(binfo.get('libraries', []))
    if clib_libraries:
        build_info['libraries'] = clib_libraries
def parse_requirement_arg(spec):
    """Parse *spec* into a Requirement, re-raising parse failures as a
    DistutilsError so callers see a build-tool level message."""
    try:
        parsed = Requirement.parse(spec)
    except ValueError:
        raise DistutilsError(
            "Not a URL, existing file, or requirement spec: %r" % (spec, ))
    return parsed
def get_common_options(build_ext):
    """Collect build options shared by all Horovod framework extensions.

    Reads the HOROVOD_GPU_ALLREDUCE / _ALLGATHER / _BROADCAST environment
    variables, validates them, probes CUDA/NCCL/DDL as needed, and returns
    a dict with MACROS, INCLUDES, SOURCES, COMPILE_FLAGS, LINK_FLAGS,
    LIBRARY_DIRS and LIBRARIES.

    Raises DistutilsError for an unsupported HOROVOD_GPU_* value.
    """
    cpp_flags = get_cpp_flags(build_ext)
    mpi_flags = get_mpi_flags()

    def _gpu_env(var, allowed):
        # Validate a HOROVOD_GPU_* env var; empty/unset means "no GPU
        # implementation" and is always accepted.  The generated message
        # is byte-identical to the three hand-written ones it replaces.
        value = os.environ.get(var)
        if value and value not in allowed:
            raise DistutilsError(
                '%s=%s is invalid, supported values are %s.'
                % (var, value,
                   ', '.join('"%s"' % a for a in [''] + allowed)))
        return value

    # Deduplicated validation of the three GPU-implementation env vars.
    gpu_allreduce = _gpu_env('HOROVOD_GPU_ALLREDUCE', ['MPI', 'NCCL', 'DDL'])
    gpu_allgather = _gpu_env('HOROVOD_GPU_ALLGATHER', ['MPI'])
    gpu_broadcast = _gpu_env('HOROVOD_GPU_BROADCAST', ['MPI'])

    # Any GPU implementation requires CUDA.
    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext, cpp_flags)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []

    if gpu_allreduce == 'NCCL':
        have_nccl = True
        nccl_include_dirs, nccl_lib_dirs, nccl_libs = get_nccl_vals(
            build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags)
    else:
        have_nccl = False
        nccl_include_dirs = nccl_lib_dirs = nccl_libs = []

    if gpu_allreduce == 'DDL':
        have_ddl = True
        ddl_include_dirs, ddl_lib_dirs = get_ddl_dirs()
    else:
        have_ddl = False
        ddl_include_dirs = ddl_lib_dirs = []

    MACROS = []
    INCLUDES = []
    SOURCES = []
    COMPILE_FLAGS = cpp_flags + shlex.split(mpi_flags)
    LINK_FLAGS = shlex.split(mpi_flags)
    LIBRARY_DIRS = []
    LIBRARIES = []

    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        LIBRARY_DIRS += cuda_lib_dirs
        LIBRARIES += ['cudart']

    if have_nccl:
        MACROS += [('HAVE_NCCL', '1')]
        INCLUDES += nccl_include_dirs
        # Keep NCCL symbols private to this extension.
        LINK_FLAGS += ['-Wl,--version-script=hide_nccl.lds']
        LIBRARY_DIRS += nccl_lib_dirs
        LIBRARIES += nccl_libs

    if have_ddl:
        MACROS += [('HAVE_DDL', '1')]
        INCLUDES += ddl_include_dirs
        LIBRARY_DIRS += ddl_lib_dirs
        LIBRARIES += ['ddl', 'ddl_pack']

    # First letter of the implementation ('M'/'N'/'D') is passed as a
    # char macro, matching the original code.
    if gpu_allreduce:
        MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]
    if gpu_allgather:
        MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]
    if gpu_broadcast:
        MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]

    return dict(MACROS=MACROS, INCLUDES=INCLUDES, SOURCES=SOURCES,
                COMPILE_FLAGS=COMPILE_FLAGS, LINK_FLAGS=LINK_FLAGS,
                LIBRARY_DIRS=LIBRARY_DIRS, LIBRARIES=LIBRARIES)
def build_extension(self, ext):
    """Compile and link one Extension, handling C, C++ and Fortran sources.

    Mirrors distutils' build_ext.build_extension but adds numpy-specific
    behavior: source filtering by language, F90 module handling, SIMD
    dispatch sources, and Fortran/C++ linker selection.

    Raises DistutilsSetupError for an invalid 'sources' list and
    DistutilsError when required compilers are missing.
    """
    sources = ext.sources
    if sources is None or not is_sequence(sources):
        raise DistutilsSetupError(
            ("in 'ext_modules' option (extension '%s'), " +
             "'sources' must be present and must be " +
             "a list of source filenames") % ext.name)
    sources = list(sources)

    if not sources:
        return

    fullname = self.get_ext_fullname(ext.name)
    if self.inplace:
        # In-place build: write the extension next to its package sources.
        modpath = fullname.split('.')
        package = '.'.join(modpath[0:-1])
        base = modpath[-1]
        build_py = self.get_finalized_command('build_py')
        package_dir = build_py.get_package_dir(package)
        ext_filename = os.path.join(package_dir,
                                    self.get_ext_filename(base))
    else:
        ext_filename = os.path.join(self.build_lib,
                                    self.get_ext_filename(fullname))
    depends = sources + ext.depends

    # Skip rebuild when the output is newer than all inputs.
    if not (self.force or newer_group(depends, ext_filename, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    extra_args = ext.extra_compile_args or []
    macros = ext.define_macros[:]
    for undef in ext.undef_macros:
        # 1-tuple marks an undef in the distutils macro convention.
        macros.append((undef, ))

    c_sources, cxx_sources, f_sources, fmodule_sources = \
        filter_sources(ext.sources)

    if self.compiler.compiler_type == 'msvc':
        if cxx_sources:
            # Needed to compile kiva.agg._agg extension.
            extra_args.append('/Zm1000')
        # this hack works around the msvc compiler attributes
        # problem, msvc uses its own convention :(
        c_sources += cxx_sources
        cxx_sources = []

    # Set Fortran/C++ compilers for compilation and linking.
    if ext.language == 'f90':
        fcompiler = self._f90_compiler
    elif ext.language == 'f77':
        fcompiler = self._f77_compiler
    else:  # in case ext.language is c++, for instance
        fcompiler = self._f90_compiler or self._f77_compiler
    if fcompiler is not None:
        # Old Extension objects may lack these attributes; default to [].
        fcompiler.extra_f77_compile_args = (
            ext.extra_f77_compile_args or []) if hasattr(
                ext, 'extra_f77_compile_args') else []
        fcompiler.extra_f90_compile_args = (
            ext.extra_f90_compile_args or []) if hasattr(
                ext, 'extra_f90_compile_args') else []
    cxx_compiler = self._cxx_compiler

    # check for the availability of required compilers
    if cxx_sources and cxx_compiler is None:
        raise DistutilsError("extension %r has C++ sources"
                             "but no C++ compiler found" % (ext.name))
    if (f_sources or fmodule_sources) and fcompiler is None:
        raise DistutilsError("extension %r has Fortran sources "
                             "but no Fortran compiler found" % (ext.name))
    if ext.language in ['f77', 'f90'] and fcompiler is None:
        self.warn("extension %r has Fortran libraries "
                  "but no Fortran linker found, using default linker"
                  % (ext.name))
    if ext.language == 'c++' and cxx_compiler is None:
        self.warn("extension %r has C++ libraries "
                  "but no C++ linker found, using default linker"
                  % (ext.name))

    kws = {'depends': ext.depends}
    output_dir = self.build_temp

    include_dirs = ext.include_dirs + get_numpy_include_dirs()

    # filtering C dispatch-table sources when optimization is not disabled,
    # otherwise treated as normal sources.
    copt_c_sources = []
    copt_baseline_flags = []
    copt_macros = []
    if not self.disable_optimization:
        bsrc_dir = self.get_finalized_command("build_src").build_src
        dispatch_hpath = os.path.join("numpy", "distutils", "include")
        dispatch_hpath = os.path.join(bsrc_dir, dispatch_hpath)
        include_dirs.append(dispatch_hpath)
        copt_build_src = None if self.inplace else bsrc_dir
        # Pull *.dispatch.c sources out of c_sources; they get compiled
        # by the CPU-dispatch machinery below instead.
        copt_c_sources = [
            c_sources.pop(c_sources.index(src))
            for src in c_sources[:] if src.endswith(".dispatch.c")
        ]
        copt_baseline_flags = self.compiler_opt.cpu_baseline_flags()
    else:
        copt_macros.append(("NPY_DISABLE_OPTIMIZATION", 1))

    c_objects = []
    if copt_c_sources:
        log.info("compiling C dispatch-able sources")
        c_objects += self.compiler_opt.try_dispatch(
            copt_c_sources,
            output_dir=output_dir,
            src_dir=copt_build_src,
            macros=macros + copt_macros,
            include_dirs=include_dirs,
            debug=self.debug,
            extra_postargs=extra_args,
            **kws)
    if c_sources:
        log.info("compiling C sources")
        c_objects += self.compiler.compile(
            c_sources,
            output_dir=output_dir,
            macros=macros + copt_macros,
            include_dirs=include_dirs,
            debug=self.debug,
            extra_postargs=extra_args + copt_baseline_flags,
            **kws)
    if cxx_sources:
        log.info("compiling C++ sources")
        c_objects += cxx_compiler.compile(
            cxx_sources,
            output_dir=output_dir,
            macros=macros + copt_macros,
            include_dirs=include_dirs,
            debug=self.debug,
            extra_postargs=extra_args + copt_baseline_flags,
            **kws)

    extra_postargs = []
    f_objects = []
    if fmodule_sources:
        log.info("compiling Fortran 90 module sources")
        module_dirs = ext.module_dirs[:]
        module_build_dir = os.path.join(
            self.build_temp, os.path.dirname(
                self.get_ext_filename(fullname)))

        self.mkpath(module_build_dir)
        if fcompiler.module_dir_switch is None:
            # Compiler drops .mod files in the cwd; snapshot what already
            # exists so only newly created ones are moved afterwards.
            existing_modules = glob('*.mod')
        extra_postargs += fcompiler.module_options(
            module_dirs, module_build_dir)
        f_objects += fcompiler.compile(fmodule_sources,
                                       output_dir=self.build_temp,
                                       macros=macros,
                                       include_dirs=include_dirs,
                                       debug=self.debug,
                                       extra_postargs=extra_postargs,
                                       depends=ext.depends)

        if fcompiler.module_dir_switch is None:
            # Move newly produced .mod files into the build directory.
            for f in glob('*.mod'):
                if f in existing_modules:
                    continue
                t = os.path.join(module_build_dir, f)
                if os.path.abspath(f) == os.path.abspath(t):
                    continue
                if os.path.isfile(t):
                    os.remove(t)
                try:
                    self.move_file(f, module_build_dir)
                except DistutilsFileError:
                    log.warn('failed to move %r to %r' %
                             (f, module_build_dir))
    if f_sources:
        log.info("compiling Fortran sources")
        f_objects += fcompiler.compile(f_sources,
                                       output_dir=self.build_temp,
                                       macros=macros,
                                       include_dirs=include_dirs,
                                       debug=self.debug,
                                       extra_postargs=extra_postargs,
                                       depends=ext.depends)

    if f_objects and not fcompiler.can_ccompiler_link(self.compiler):
        # The C linker cannot consume these Fortran objects directly;
        # they are handed to _process_unlinkable_fobjects below.
        unlinkable_fobjects = f_objects
        objects = c_objects
    else:
        unlinkable_fobjects = []
        objects = c_objects + f_objects

    if ext.extra_objects:
        objects.extend(ext.extra_objects)
    extra_args = ext.extra_link_args or []
    libraries = self.get_libraries(ext)[:]
    library_dirs = ext.library_dirs[:]

    linker = self.compiler.link_shared_object
    # Always use system linker when using MSVC compiler.
    if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'):
        # expand libraries with fcompiler libraries as we are
        # not using fcompiler linker
        self._libs_with_msvc_and_fortran(
            fcompiler, libraries, library_dirs)
    elif ext.language in ['f77', 'f90'] and fcompiler is not None:
        linker = fcompiler.link_shared_object
    if ext.language == 'c++' and cxx_compiler is not None:
        linker = cxx_compiler.link_shared_object

    if fcompiler is not None:
        objects, libraries = self._process_unlinkable_fobjects(
            objects, libraries,
            fcompiler, library_dirs,
            unlinkable_fobjects)

    linker(objects, ext_filename,
           libraries=libraries,
           library_dirs=library_dirs,
           runtime_library_dirs=ext.runtime_library_dirs,
           extra_postargs=extra_args,
           export_symbols=self.get_export_symbols(ext),
           debug=self.debug,
           build_temp=self.build_temp,
           target_lang=ext.language)
_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}" user_agent = _tmpl.format( py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools) def parse_requirement_arg(spec): try: <<<<<<< HEAD return Requirement.bbc_parse(spec) ======= return Requirement.parse(spec) >>>>>>> 241b678... create predictions except ValueError as e: raise DistutilsError( "Not a URL, existing file, or requirement spec: %r" % (spec,) ) from e def parse_bdist_wininst(name): """Return (base,pyversion) or (None,None) for possible .exe name""" lower = name.lower() base, py_ver, plat = None, None, None if lower.endswith('.exe'): if lower.endswith('.win32.exe'): base = name[:-10] plat = 'win32' elif lower.startswith('.win32-py', -16): py_ver = name[-7:-4]
pass # no-op def open_url(self, url, warning=None): if url.startswith('file:'): return local_open(url) try: return open_with_auth(url) except urllib2.HTTPError, v: return v except urllib2.URLError, v: reason = v.reason except httplib.HTTPException, v: reason = "%s: %s" % (v.__doc__ or v.__class__.__name__, v) if warning: self.warn(warning, reason) else: raise DistutilsError("Download error for %s: %s" % (url, reason)) def _download_url(self, scheme, url, tmpdir): # Determine download filename # name, fragment = egg_info_for_url(url) if name: while '..' in name: name = name.replace('..', '.').replace('\\', '_') else: name = "__downloaded__" # default if URL has no path contents if name.endswith('.egg.zip'): name = name[:-4] # strip the extra .zip before download filename = os.path.join(tmpdir, name)
def run(self):
    """Run the standard develop command, but only when a system-wide
    libsecp256k1 is available; otherwise abort with DistutilsError."""
    if has_system_lib():
        _develop.run(self)
        return
    raise DistutilsError(
        "This library is not usable in 'develop' mode when using the "
        'bundled libsecp256k1. See README for details.')
long_description = """\ This package creates a quaternion type in python, and further enables numpy to create and manipulate arrays of quaternions. The usual algebraic operations (addition and multiplication) are available, along with numerous properties like norm and various types of distance measures between two quaternions. There are also additional functions like "squad" and "slerp" interpolation, and conversions to and from axis-angle, matrix, and Euler-angle representations of rotations. The core of the code is written in C for speed. """ if __name__ == "__main__": import numpy from setuptools import setup, Extension # from distutils.core import setup, Extension from distutils.errors import DistutilsError if numpy.__dict__.get('quaternion') is not None: raise DistutilsError('The target NumPy already has a quaternion type') extension = Extension( name= 'quaternion.numpy_quaternion', # This is the name of the object file that will be compiled sources=['quaternion.c', 'numpy_quaternion.c'], extra_compile_args=['/O2' if on_windows else '-O3'], depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'], include_dirs=[numpy.get_include()]) setup( name='numpy-quaternion', # Uploaded to pypi under this name packages=['quaternion'], # This is the actual package name package_dir={'quaternion': ''}, ext_modules=[extension], version=version, install_requires=[ 'numpy>=1.13',
def fetch_build_egg(dist, req):  # noqa: C901  # is too complex (16)  # FIXME
    """Fetch an egg needed for building.

    Use pip/wheel to fetch/build a wheel.

    *dist* is the Distribution being built; *req* the requirement to
    satisfy.  Returns a pkg_resources Distribution for the fetched egg
    (cached eggs are reused when one already satisfies *req*).
    Raises DistutilsError for the unsupported `allow-hosts` option or a
    failed pip invocation.
    """
    warnings.warn(
        "setuptools.installer is deprecated. Requirements should "
        "be satisfied by a PEP 517 installer.",
        SetuptoolsDeprecationWarning,
    )
    # Warn if wheel is not available
    try:
        pkg_resources.get_distribution('wheel')
    except pkg_resources.DistributionNotFound:
        dist.announce('WARNING: The wheel package is not available.',
                      log.WARN)
    # Ignore environment markers; if supplied, it is required.
    req = strip_marker(req)
    # Take easy_install options into account, but do not override relevant
    # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
    # take precedence.
    opts = dist.get_option_dict('easy_install')
    if 'allow_hosts' in opts:
        raise DistutilsError('the `allow-hosts` option is not supported '
                             'when using pip to install requirements.')
    quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ
    if 'PIP_INDEX_URL' in os.environ:
        index_url = None
    elif 'index_url' in opts:
        index_url = opts['index_url'][1]
    else:
        index_url = None
    find_links = (
        _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts
        else []
    )
    if dist.dependency_links:
        find_links.extend(dist.dependency_links)
    eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
    environment = pkg_resources.Environment()
    # Reuse a previously fetched egg when one already satisfies req.
    for egg_dist in pkg_resources.find_distributions(eggs_dir):
        if egg_dist in req and environment.can_add(egg_dist):
            return egg_dist
    with tempfile.TemporaryDirectory() as tmpdir:
        cmd = [
            sys.executable, '-m', 'pip',
            '--disable-pip-version-check',
            'wheel', '--no-deps',
            '-w', tmpdir,
        ]
        if quiet:
            cmd.append('--quiet')
        if index_url is not None:
            cmd.extend(('--index-url', index_url))
        for link in find_links or []:
            cmd.extend(('--find-links', link))
        # If requirement is a PEP 508 direct URL, directly pass
        # the URL to pip, as `req @ url` does not work on the
        # command line.
        cmd.append(req.url or str(req))
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            raise DistutilsError(str(e)) from e
        # Convert the built wheel into a cached egg for the build.
        wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
        dist_location = os.path.join(eggs_dir, wheel.egg_name())
        wheel.install_as_egg(dist_location)
        dist_metadata = pkg_resources.PathMetadata(
            dist_location, os.path.join(dist_location, 'EGG-INFO'))
        dist = pkg_resources.Distribution.from_filename(
            dist_location, metadata=dist_metadata)
        return dist
    # NOTE(review): fragment — tail of exec_process(), whose definition
    # precedes this chunk.
    return sub.returncode, stdout, stderr

def distutils_exec_process(cmdline, silent, input=None, **kwargs):
    """Run *cmdline* via exec_process(), mapping failures to DistutilsError.

    (Python 2 code: old ``except OSError, e`` syntax.)  Returns the
    process stdout on success; raises DistutilsError when the executable
    is missing or exits nonzero.
    """
    try:
        returncode, stdout, stderr = exec_process(cmdline, silent, input,
                                                  **kwargs)
    except OSError, e:
        if e.errno == 2:
            # errno 2 == ENOENT: the executable itself was not found
            raise DistutilsError('"%s" is not present on this system'
                                 % cmdline[0])
        else:
            raise
    if returncode != 0:
        raise DistutilsError(
            'Got return value %d while executing "%s", stderr output was:\n%s'
            % (returncode, " ".join(cmdline), stderr.rstrip("\n")))
    return stdout

def get_makefile_variables(makefile):
    """Returns all variables in a makefile as a dict"""
    # `make -pR -f <makefile> <makefile>` dumps the internal database;
    # variable assignments are scraped from that output.
    stdout = distutils_exec_process(
        [get_make_cmd(), "-f", makefile, "-pR", makefile], True)
    return dict(tup for tup in
                re.findall("(^[a-zA-Z]\w+)\s*:?=\s*(.*)$", stdout,
                           re.MULTILINE))

def get_svn_repo_url(svn_dir):
    # NOTE(review): fragment — this definition continues beyond the
    # visible chunk.  Locale env vars are dropped so svn output is
    # machine-parseable.
    environment = dict((name, value)
                       for name, value in os.environ.iteritems()
                       if name != 'LANG' and not name.startswith('LC_'))
def run(self):
    """Always fail: this command exists only to surface the message held
    in ``self.description`` as a DistutilsError."""
    message = self.description
    raise DistutilsError(message)
def fully_define_extension(build_ext):
    """Populate the tensorflow_mpi_lib extension's build attributes.

    Probes TensorFlow include/lib dirs, MPI flags, and the optional
    CUDA/NCCL GPU backends selected through the HOROVOD_GPU_* environment
    variables, then assigns macros/includes/sources/flags/libraries onto
    the module-level ``tensorflow_mpi_lib`` Extension.

    Raises DistutilsError for unsupported HOROVOD_GPU_* values.
    """
    tf_include_dirs = get_tf_include_dirs()
    tf_lib_dirs = get_tf_lib_dirs()
    tf_libs = get_tf_libs(build_ext, tf_lib_dirs)
    tf_abi = get_tf_abi(build_ext, tf_include_dirs, tf_lib_dirs, tf_libs)
    mpi_flags = get_mpi_flags()

    gpu_allreduce = os.environ.get('HOROVOD_GPU_ALLREDUCE')
    if gpu_allreduce and gpu_allreduce != 'MPI' and gpu_allreduce != 'NCCL':
        raise DistutilsError('HOROVOD_GPU_ALLREDUCE=%s is invalid, supported '
                             'values are "", "MPI", "NCCL".' % gpu_allreduce)

    gpu_allgather = os.environ.get('HOROVOD_GPU_ALLGATHER')
    if gpu_allgather and gpu_allgather != 'MPI':
        raise DistutilsError('HOROVOD_GPU_ALLGATHER=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_allgather)

    gpu_broadcast = os.environ.get('HOROVOD_GPU_BROADCAST')
    if gpu_broadcast and gpu_broadcast != 'MPI':
        raise DistutilsError('HOROVOD_GPU_BROADCAST=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_broadcast)

    # Any GPU implementation requires CUDA.
    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []

    if gpu_allreduce == 'NCCL':
        have_nccl = True
        nccl_include_dirs, nccl_lib_dirs = get_nccl_dirs(
            build_ext, cuda_include_dirs, cuda_lib_dirs)
    else:
        have_nccl = False
        nccl_include_dirs = nccl_lib_dirs = []

    MACROS = []
    INCLUDES = tf_include_dirs
    SOURCES = ['horovod/tensorflow/mpi_message.cc',
               'horovod/tensorflow/mpi_ops.cc',
               'horovod/tensorflow/timeline.cc']
    COMPILE_FLAGS = ['-std=c++11', '-fPIC', '-O2'] + shlex.split(mpi_flags)
    LINK_FLAGS = shlex.split(mpi_flags)
    LIBRARY_DIRS = tf_lib_dirs
    LIBRARIES = tf_libs

    if tf_abi:
        COMPILE_FLAGS += ['-D%s=%s' % tf_abi]

    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        LIBRARY_DIRS += cuda_lib_dirs
        # BUG FIX: was `LIBRARIES = ['cudart']`, which discarded the
        # TensorFlow framework libraries collected in tf_libs above
        # (the parallel blocks for this function elsewhere in this file
        # use `+=`); append instead of overwrite.
        LIBRARIES += ['cudart']

    if have_nccl:
        MACROS += [('HAVE_NCCL', '1')]
        INCLUDES += nccl_include_dirs
        LIBRARY_DIRS += nccl_lib_dirs
        # BUG FIX: was `LIBRARIES = ['nccl']` — same clobbering problem.
        LIBRARIES += ['nccl']

    # First letter of the implementation ('M'/'N') is passed as a char macro.
    if gpu_allreduce:
        MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]
    if gpu_allgather:
        MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]
    if gpu_broadcast:
        MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]

    tensorflow_mpi_lib.define_macros = MACROS
    tensorflow_mpi_lib.include_dirs = INCLUDES
    tensorflow_mpi_lib.sources = SOURCES
    tensorflow_mpi_lib.extra_compile_args = COMPILE_FLAGS
    tensorflow_mpi_lib.extra_link_args = LINK_FLAGS
    tensorflow_mpi_lib.library_dirs = LIBRARY_DIRS
    tensorflow_mpi_lib.libraries = LIBRARIES
return local_open(url) try: return open_with_auth(url) except (ValueError, httplib.InvalidURL), v: msg = ' '.join([str(arg) for arg in v.args]) if warning: self.warn(warning, msg) else: raise DistutilsError('%s %s' % (url, msg)) except urllib2.HTTPError, v: return v except urllib2.URLError, v: if warning: self.warn(warning, v.reason) else: raise DistutilsError("Download error for %s: %s" % (url, v.reason)) except httplib.BadStatusLine, v: if warning: self.warn(warning, v.line) else: raise DistutilsError('%s returned a bad status line. ' 'The server might be down, %s' % \ (url, v.line)) except httplib.HTTPException, v: if warning: self.warn(warning, v) else: raise DistutilsError("Download error for %s: %s" % (url, v)) def _download_url(self, scheme, url, tmpdir): # Determine download filename
# This is a bit hackish: we are setting a global variable so that the main # numpy __init__ can detect if it is being loaded by the setup routine, to # avoid attempting to load components that aren't built yet. While ugly, it's # a lot more robust than what was previously being used. __builtin__.__NUMPY_SETUP__ = True # DO NOT REMOVE numpy.distutils IMPORT ! This is necessary for numpy.distutils' # monkey patching to work. import numpy.distutils from distutils.errors import DistutilsError try: import numscons except ImportError, e: msg = ["You cannot build numpy with scons without the numscons package "] msg.append("(Failure was: %s)" % e) raise DistutilsError('\n'.join(msg)) def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration(None, parent_package, top_path, setup_name = 'setupscons.py') config.set_options(ignore_setup_xxx_py=True, assume_default_configuration=True, delegate_options_to_subpackages=True, quiet=True) config.add_subpackage('numpy') config.add_data_files(('numpy','*.txt'), ('numpy','COMPATIBILITY'), ('numpy','site.cfg.example'),
def get_common_options(build_ext):
    """Collect build settings shared by all Horovod extension modules.

    Reads the HOROVOD_GPU_ALLREDUCE / _ALLGATHER / _BROADCAST environment
    variables to decide which GPU backends (CUDA, NCCL, DDL) to compile in.

    Parameters
    ----------
    build_ext : distutils build_ext command
        Passed through to the flag/dir discovery helpers.

    Returns
    -------
    dict with keys MACROS, INCLUDES, SOURCES, COMPILE_FLAGS, LINK_FLAGS,
    LIBRARY_DIRS, LIBRARIES.

    Raises
    ------
    DistutilsError
        If an environment variable holds an unsupported value, or NCCL and
        MPI GPU implementations are mixed without explicit opt-in.
    """
    cpp_flags = get_cpp_flags(build_ext)
    link_flags = get_link_flags(build_ext)
    mpi_flags = get_mpi_flags()

    # Validate each HOROVOD_GPU_* variable against its supported backends.
    gpu_allreduce = os.environ.get('HOROVOD_GPU_ALLREDUCE')
    if gpu_allreduce and gpu_allreduce != 'MPI' and gpu_allreduce != 'NCCL' and \
            gpu_allreduce != 'DDL':
        raise DistutilsError('HOROVOD_GPU_ALLREDUCE=%s is invalid, supported '
                             'values are "", "MPI", "NCCL", "DDL".' % gpu_allreduce)

    gpu_allgather = os.environ.get('HOROVOD_GPU_ALLGATHER')
    if gpu_allgather and gpu_allgather != 'MPI':
        raise DistutilsError('HOROVOD_GPU_ALLGATHER=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_allgather)

    gpu_broadcast = os.environ.get('HOROVOD_GPU_BROADCAST')
    if gpu_broadcast and gpu_broadcast != 'MPI':
        raise DistutilsError('HOROVOD_GPU_BROADCAST=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_broadcast)

    # Any GPU op implies CUDA headers/libs are needed.
    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext, cpp_flags)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []

    if gpu_allreduce == 'NCCL':
        have_nccl = True
        nccl_include_dirs, nccl_lib_dirs, nccl_libs = get_nccl_vals(
            build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags)
    else:
        have_nccl = False
        nccl_include_dirs = nccl_lib_dirs = nccl_libs = []

    if gpu_allreduce == 'DDL':
        have_ddl = True
        ddl_include_dirs, ddl_lib_dirs = get_ddl_dirs()
    else:
        have_ddl = False
        ddl_include_dirs = ddl_lib_dirs = []

    # Mixing NCCL allreduce with MPI allgather/broadcast can deadlock, so it
    # is rejected unless HOROVOD_ALLOW_MIXED_GPU_IMPL explicitly allows it.
    if (gpu_allreduce == 'NCCL' and (gpu_allgather == 'MPI' or gpu_broadcast == 'MPI')
            and not os.environ.get('HOROVOD_ALLOW_MIXED_GPU_IMPL')):
        raise DistutilsError('You should not mix NCCL and MPI GPU due to a possible deadlock.\n'
                             'If you\'re sure you want to mix them, set the '
                             'HOROVOD_ALLOW_MIXED_GPU_IMPL environment variable to \'1\'.')

    MACROS = []
    INCLUDES = []
    SOURCES = ['horovod/common/common.cc',
               'horovod/common/mpi_message.cc',
               'horovod/common/half.cc',
               'horovod/common/operations.cc',
               'horovod/common/timeline.cc']
    COMPILE_FLAGS = cpp_flags + shlex.split(mpi_flags)
    LINK_FLAGS = link_flags + shlex.split(mpi_flags)
    LIBRARY_DIRS = []
    LIBRARIES = []

    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        LIBRARY_DIRS += cuda_lib_dirs
        LIBRARIES += ['cudart']

    if have_nccl:
        MACROS += [('HAVE_NCCL', '1')]
        INCLUDES += nccl_include_dirs
        LIBRARY_DIRS += nccl_lib_dirs
        LIBRARIES += nccl_libs

    if have_ddl:
        MACROS += [('HAVE_DDL', '1')]
        INCLUDES += ddl_include_dirs
        LIBRARY_DIRS += ddl_lib_dirs
        LIBRARIES += ['ddl', 'ddl_pack']

    # The macro value is the first character of the backend name
    # ('M', 'N' or 'D'), quoted so it compiles as a C char literal.
    if gpu_allreduce:
        MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]
    if gpu_allgather:
        MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]
    if gpu_broadcast:
        MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]

    return dict(MACROS=MACROS,
                INCLUDES=INCLUDES,
                SOURCES=SOURCES,
                COMPILE_FLAGS=COMPILE_FLAGS,
                LINK_FLAGS=LINK_FLAGS,
                LIBRARY_DIRS=LIBRARY_DIRS,
                LIBRARIES=LIBRARIES)
def f2py_sources(self, sources, extension):
    """Run f2py over .pyf / Fortran sources, returning the updated source list.

    .pyf interface files are translated to C wrapper modules; plain Fortran
    files are collected and (when no .pyf is given) wrapped via an
    auto-generated module.  The fortranobject.c/.h support files are copied
    in as needed.

    NOTE(review): the ``numpy1.f2py`` imports below look like a rename of
    ``numpy.f2py`` -- confirm the ``numpy1`` package actually exists in this
    project before relying on this code path.
    """
    new_sources = []
    f2py_sources = []       # .pyf interface files found in `sources`
    f2py_targets = {}       # .pyf source -> generated C wrapper path
    f_sources = []          # plain Fortran sources
    target_dirs = []
    ext_name = extension.name.split('.')[-1]
    skip_f2py = 0           # set when the generated wrapper must be reused as-is

    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == '.pyf':  # F2PY interface file
            if self.inplace:
                target_dir = os.path.dirname(base)
            else:
                target_dir = appendpath(self.build_src, os.path.dirname(base))
            if os.path.isfile(source):
                # The .pyf module name must match the extension's last component.
                name = get_f2py_modulename(source)
                if name != ext_name:
                    raise DistutilsSetupError('mismatch of extension names: %s '
                                              'provides %r but expected %r' % (
                                                  source, name, ext_name))
                target_file = os.path.join(target_dir, name + 'module.c')
            else:
                # Missing .pyf: fall back to a previously generated wrapper.
                log.debug(' source %s does not exist: skipping f2py\'ing.' \
                          % (source))
                name = ext_name
                skip_f2py = 1
                target_file = os.path.join(target_dir, name + 'module.c')
                if not os.path.isfile(target_file):
                    log.warn(' target %s does not exist:\n '\
                             'Assuming %smodule.c was generated with '\
                             '"build_src --inplace" command.' \
                             % (target_file, name))
                    target_dir = os.path.dirname(base)
                    target_file = os.path.join(target_dir, name + 'module.c')
                    if not os.path.isfile(target_file):
                        raise DistutilsSetupError("%r missing" % (target_file, ))
                    log.info(' Yes! Using %r as up-to-date target.' \
                             % (target_file))
            target_dirs.append(target_dir)
            f2py_sources.append(source)
            f2py_targets[source] = target_file
            new_sources.append(target_file)
        elif fortran_ext_match(ext):
            f_sources.append(source)
        else:
            new_sources.append(source)

    # Nothing for f2py to do: pass the source list through unchanged.
    if not (f2py_sources or f_sources):
        return new_sources

    for d in target_dirs:
        self.mkpath(d)

    f2py_options = extension.f2py_options + self.f2py_opts
    if self.distribution.libraries:
        # Pick up f2py options from libraries this extension links against.
        for name, build_info in self.distribution.libraries:
            if name in extension.libraries:
                f2py_options.extend(build_info.get('f2py_options', []))
    log.info("f2py options: %s" % (f2py_options))

    if f2py_sources:
        if len(f2py_sources) != 1:
            raise DistutilsSetupError(
                'only one .pyf file is allowed per extension module but got'\
                ' more: %r' % (f2py_sources,))
        source = f2py_sources[0]
        target_file = f2py_targets[source]
        target_dir = os.path.dirname(target_file) or '.'
        depends = [source] + extension.depends
        # Regenerate only when forced or when a dependency is newer.
        if (self.force or newer_group(depends, target_file, 'newer')) \
                and not skip_f2py:
            log.info("f2py: %s" % (source))
            import numpy1.f2py
            numpy1.f2py.run_main(f2py_options + ['--build-dir', target_dir, source])
        else:
            log.debug(" skipping '%s' f2py interface (up-to-date)" % (source))
    else:
        #XXX TODO: --inplace support for sdist command
        # No .pyf: wrap the plain Fortran sources into an auto-named module.
        if is_sequence(extension):
            name = extension[0]
        else:
            name = extension.name
        target_dir = os.path.join(*([self.build_src]\
                                    +name.split('.')[:-1]))
        target_file = os.path.join(target_dir, ext_name + 'module.c')
        new_sources.append(target_file)
        depends = f_sources + extension.depends
        if (self.force or newer_group(depends, target_file, 'newer')) \
                and not skip_f2py:
            log.info("f2py:> %s" % (target_file))
            self.mkpath(target_dir)
            import numpy1.f2py
            numpy1.f2py.run_main(f2py_options + ['--lower', '--build-dir', target_dir] + \
                                 ['-m', ext_name] + f_sources)
        else:
            log.debug(" skipping f2py fortran files for '%s' (up-to-date)"\
                      % (target_file))

    if not os.path.isfile(target_file):
        raise DistutilsError("f2py target file %r not generated" % (target_file, ))

    # Every f2py-generated module also needs the fortranobject support files.
    build_dir = os.path.join(self.build_src, target_dir)
    target_c = os.path.join(build_dir, 'fortranobject.c')
    target_h = os.path.join(build_dir, 'fortranobject.h')
    log.info(" adding '%s' to sources." % (target_c))
    new_sources.append(target_c)
    if build_dir not in extension.include_dirs:
        log.info(" adding '%s' to include_dirs." % (build_dir))
        extension.include_dirs.append(build_dir)

    if not skip_f2py:
        # Copy fortranobject.c/.h from the f2py installation when outdated.
        import numpy1.f2py
        d = os.path.dirname(numpy1.f2py.__file__)
        source_c = os.path.join(d, 'src', 'fortranobject.c')
        source_h = os.path.join(d, 'src', 'fortranobject.h')
        if newer(source_c, target_c) or newer(source_h, target_h):
            self.mkpath(os.path.dirname(target_c))
            self.copy_file(source_c, target_c)
            self.copy_file(source_h, target_h)
    else:
        # When f2py was skipped the support files must already exist.
        if not os.path.isfile(target_c):
            raise DistutilsSetupError("f2py target_c file %r not found" % (target_c, ))
        if not os.path.isfile(target_h):
            raise DistutilsSetupError("f2py target_h file %r not found" % (target_h, ))

    # Pick up any generated Fortran wrapper files as additional sources.
    for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']:
        filename = os.path.join(target_dir, ext_name + name_ext)
        if os.path.isfile(filename):
            log.info(" adding '%s' to sources." % (filename))
            f_sources.append(filename)

    return new_sources + f_sources
class PackageIndex(Environment):
    """A distribution index that scans web pages for download URLs"""
    # NOTE: Python 2 code (``except X, v`` and ``<>`` syntax below).
    # Instance state:
    #   scanned_urls  - URLs already evaluated (avoids rescanning)
    #   fetched_urls  - URLs whose pages were actually retrieved
    #   package_pages - package key -> {page URL: True}
    #   allows        - host matcher compiled from the `hosts` patterns
    #   to_scan       - URLs deferred for prescan(); None once "online"

    def __init__(self, index_url="http://www.python.org/pypi", hosts=('*',), *args, **kw):
        Environment.__init__(self, *args, **kw)
        # Normalize the index URL to always end with a slash.
        self.index_url = index_url + "/"[:not index_url.endswith('/')]
        self.scanned_urls = {}
        self.fetched_urls = {}
        self.package_pages = {}
        self.allows = re.compile('|'.join(map(translate, hosts))).match
        self.to_scan = []

    def process_url(self, url, retrieve=False):
        """Evaluate a URL as a possible download, and maybe retrieve it"""
        url = fix_sf_url(url)
        if url in self.scanned_urls and not retrieve:
            return
        self.scanned_urls[url] = True
        if not URL_SCHEME(url):
            # No scheme: treat as a local filename/directory.
            self.process_filename(url)
            return
        else:
            dists = list(distros_for_url(url))
            if dists:
                if not self.url_ok(url):
                    return
                self.debug("Found link: %s", url)

        if dists or not retrieve or url in self.fetched_urls:
            map(self.add, dists)
            return  # don't need the actual page

        if not self.url_ok(url):
            self.fetched_urls[url] = True
            return

        self.info("Reading %s", url)
        f = self.open_url(url)
        self.fetched_urls[url] = self.fetched_urls[f.url] = True

        if 'html' not in f.headers.get('content-type', '').lower():
            f.close()  # not html, we can't process it
            return

        base = f.url  # handle redirects
        page = f.read()
        f.close()
        if url.startswith(self.index_url) and getattr(f, 'code', None) != 404:
            page = self.process_index(url, page)
        # Recurse into every link found on the page.
        for match in HREF.finditer(page):
            link = urlparse.urljoin(base, match.group(1))
            self.process_url(link)

    def process_filename(self, fn, nested=False):
        # process filenames or directories
        if not os.path.exists(fn):
            self.warn("Not found: %s", fn)
            return

        if os.path.isdir(fn) and not nested:
            # Scan a directory one level deep for distributions.
            path = os.path.realpath(fn)
            for item in os.listdir(path):
                self.process_filename(os.path.join(path, item), True)

        dists = distros_for_filename(fn)
        if dists:
            self.debug("Found: %s", fn)
            map(self.add, dists)

    def url_ok(self, url, fatal=False):
        # Check the URL's host against the --allow-hosts patterns.
        if self.allows(urlparse.urlparse(url)[1]):
            return True
        msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n"
        if fatal:
            raise DistutilsError(msg % url)
        else:
            self.warn(msg, url)

    def process_index(self, url, page):
        """Process the contents of a PyPI page"""
        def scan(link):
            # Process a URL to see if it's for a package page
            if link.startswith(self.index_url):
                parts = map(
                    urllib2.unquote, link[len(self.index_url):].split('/')
                )
                if len(parts) == 2 and '#' not in parts[1]:
                    # it's a package page, sanitize and index it
                    pkg = safe_name(parts[0])
                    ver = safe_version(parts[1])
                    self.package_pages.setdefault(pkg.lower(), {})[link] = True
                    return to_filename(pkg), to_filename(ver)
            return None, None

        # process an index page into the package-page index
        for match in HREF.finditer(page):
            scan(urlparse.urljoin(url, match.group(1)))

        pkg, ver = scan(url)  # ensure this page is in the page index
        if pkg:
            # process individual package page
            for new_url in find_external_links(url, page):
                # Process the found URL
                base, frag = egg_info_for_url(new_url)
                if base.endswith('.py') and not frag:
                    if ver:
                        new_url += '#egg=%s-%s' % (pkg, ver)
                    else:
                        self.need_version_info(url)
                self.scan_url(new_url)
            # Rewrite md5 fragment links so downloads can be verified.
            return PYPI_MD5.sub(
                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
            )
        else:
            return ""  # no sense double-scanning non-package pages

    def need_version_info(self, url):
        self.scan_all(
            "Page at %s links to .py file(s) without version info; an index "
            "scan is required.", url
        )

    def scan_all(self, msg=None, *args):
        if self.index_url not in self.fetched_urls:
            if msg:
                self.warn(msg, *args)
            self.info(
                "Scanning index of all packages (this may take a while)"
            )
        self.scan_url(self.index_url)

    def find_packages(self, requirement):
        self.scan_url(self.index_url + requirement.unsafe_name + '/')

        if not self.package_pages.get(requirement.key):
            # Fall back to safe version of the name
            self.scan_url(self.index_url + requirement.project_name + '/')

        if not self.package_pages.get(requirement.key):
            # We couldn't find the target package, so search the index page too
            self.not_found_in_index(requirement)

        for url in list(self.package_pages.get(requirement.key, ())):
            # scan each page that might be related to the desired package
            self.scan_url(url)

    def obtain(self, requirement, installer=None):
        self.prescan(); self.find_packages(requirement)
        for dist in self[requirement.key]:
            if dist in requirement:
                return dist
            self.debug("%s does not match %s", requirement, dist)
        return super(PackageIndex, self).obtain(requirement, installer)

    def check_md5(self, cs, info, filename, tfp):
        # `info` is the URL fragment, e.g. "md5=<32 hex digits>".
        if re.match('md5=[0-9a-f]{32}$', info):
            self.debug("Validating md5 checksum for %s", filename)
            if cs.hexdigest() <> info[4:]:
                tfp.close()
                os.unlink(filename)
                raise DistutilsError(
                    "MD5 validation failed for " + os.path.basename(filename) +
                    "; possible download problem?"
                )

    def add_find_links(self, urls):
        """Add `urls` to the list that will be prescanned for searches"""
        for url in urls:
            if (
                self.to_scan is None  # if we have already "gone online"
                or not URL_SCHEME(url)  # or it's a local file/directory
                or url.startswith('file:')
                or list(distros_for_url(url))  # or a direct package link
            ):
                # then go ahead and process it now
                self.scan_url(url)
            else:
                # otherwise, defer retrieval till later
                self.to_scan.append(url)

    def prescan(self):
        """Scan urls scheduled for prescanning (e.g. --find-links)"""
        if self.to_scan:
            map(self.scan_url, self.to_scan)
        self.to_scan = None  # from now on, go ahead and process immediately

    def not_found_in_index(self, requirement):
        if self[requirement.key]:  # we've seen at least one distro
            meth, msg = self.info, "Couldn't retrieve index page for %r"
        else:  # no distros seen for this name, might be misspelled
            meth, msg = (self.warn,
                         "Couldn't find index page for %r (maybe misspelled?)")
        meth(msg, requirement.unsafe_name)
        self.scan_all()

    def download(self, spec, tmpdir):
        """Locate and/or download `spec` to `tmpdir`, returning a local path

        `spec` may be a ``Requirement`` object, or a string containing a URL,
        an existing local filename, or a project/version requirement spec
        (i.e. the string form of a ``Requirement`` object).  If it is the URL
        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
        automatically created alongside the downloaded file.

        If `spec` is a ``Requirement`` object or a string containing a
        project/version requirement spec, this method returns the location of
        a matching distribution (possibly after downloading it to `tmpdir`).
        If `spec` is a locally existing file or directory name, it is simply
        returned unchanged.  If `spec` is a URL, it is downloaded to a subpath
        of `tmpdir`, and the local filename is returned.  Various errors may be
        raised if a problem occurs during downloading.
        """
        if not isinstance(spec, Requirement):
            scheme = URL_SCHEME(spec)
            if scheme:
                # It's a url, download it to tmpdir
                found = self._download_url(scheme.group(1), spec, tmpdir)
                base, fragment = egg_info_for_url(spec)
                if base.endswith('.py'):
                    found = self.gen_setup(found, fragment, tmpdir)
                return found
            elif os.path.exists(spec):
                # Existing file or directory, just return it
                return spec
            else:
                try:
                    spec = Requirement.parse(spec)
                except ValueError:
                    raise DistutilsError(
                        "Not a URL, existing file, or requirement spec: %r" %
                        (spec,)
                    )
        return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)

    def fetch_distribution(self, requirement, tmpdir, force_scan=False,
                           source=False, develop_ok=False):
        """Obtain a distribution suitable for fulfilling `requirement`

        `requirement` must be a ``pkg_resources.Requirement`` instance.
        If necessary, or if the `force_scan` flag is set, the requirement is
        searched for in the (online) package index as well as the locally
        installed packages.  If a distribution matching `requirement` is
        found, the returned distribution's ``location`` is the value you
        would have gotten from calling the ``download()`` method with the
        matching distribution's URL or filename.  If no matching distribution
        is found, ``None`` is returned.

        If the `source` flag is set, only source distributions and source
        checkout links will be considered.  Unless the `develop_ok` flag is
        set, development and system eggs (i.e., those using the ``.egg-info``
        format) will be ignored.
        """
        # process a Requirement
        self.info("Searching for %s", requirement)
        skipped = {}

        def find(req):
            # Find a matching distribution; may be called more than once
            for dist in self[req.key]:
                if dist.precedence == DEVELOP_DIST and not develop_ok:
                    if dist not in skipped:
                        self.warn("Skipping development or system egg: %s", dist)
                        skipped[dist] = 1
                    continue
                if dist in req and (dist.precedence <= SOURCE_DIST or not source):
                    self.info("Best match: %s", dist)
                    return dist.clone(
                        location=self.download(dist.location, tmpdir)
                    )

        if force_scan:
            self.prescan()
            self.find_packages(requirement)

        dist = find(requirement)
        if dist is None and self.to_scan is not None:
            self.prescan()  # scan deferred URLs, then retry
            dist = find(requirement)

        if dist is None and not force_scan:
            self.find_packages(requirement)
            dist = find(requirement)

        if dist is None:
            self.warn(
                "No local packages or download links found for %s%s",
                (source and "a source distribution of " or ""),
                requirement,
            )
        return dist

    def fetch(self, requirement, tmpdir, force_scan=False, source=False):
        """Obtain a file suitable for fulfilling `requirement`

        DEPRECATED; use the ``fetch_distribution()`` method now instead.  For
        backward compatibility, this routine is identical but returns the
        ``location`` of the downloaded distribution instead of a distribution
        object.
        """
        dist = self.fetch_distribution(requirement, tmpdir, force_scan, source)
        if dist is not None:
            return dist.location
        return None

    def gen_setup(self, filename, fragment, tmpdir):
        # Generate a trivial setup.py next to a bare .py download so it can
        # be installed; the #egg fragment supplies name and version.
        match = EGG_FRAGMENT.match(fragment)
        dists = match and [d for d in
                           interpret_distro_name(filename, match.group(1), None)
                           if d.version
                           ] or []

        if len(dists) == 1:  # unambiguous ``#egg`` fragment
            basename = os.path.basename(filename)

            # Make sure the file has been downloaded to the temp dir.
            if os.path.dirname(filename) != tmpdir:
                dst = os.path.join(tmpdir, basename)
                from setuptools.command.easy_install import samefile
                if not samefile(filename, dst):
                    shutil.copy2(filename, dst)
                    filename = dst

            file = open(os.path.join(tmpdir, 'setup.py'), 'w')
            file.write(
                "from setuptools import setup\n"
                "setup(name=%r, version=%r, py_modules=[%r])\n"
                % (
                    dists[0].project_name, dists[0].version,
                    os.path.splitext(basename)[0]
                )
            )
            file.close()
            return filename

        elif match:
            raise DistutilsError(
                "Can't unambiguously interpret project/version identifier %r; "
                "any dashes in the name or version should be escaped using "
                "underscores. %r" % (fragment, dists)
            )
        else:
            raise DistutilsError(
                "Can't process plain .py files without an '#egg=name-version'"
                " suffix to enable automatic setup script generation."
            )

    # Chunk size used when streaming downloads to disk.
    dl_blocksize = 8192

    def _download_to(self, url, filename):
        self.url_ok(url, True)  # raises error if not allowed
        self.info("Downloading %s", url)
        # Download the file
        fp, tfp, info = None, None, None
        try:
            if '#' in url:
                url, info = url.split('#', 1)
            fp = self.open_url(url)
            if isinstance(fp, urllib2.HTTPError):
                raise DistutilsError(
                    "Can't download %s: %s %s" % (url, fp.code, fp.msg)
                )
            cs = md5()
            headers = fp.info()
            blocknum = 0
            bs = self.dl_blocksize
            size = -1
            if "content-length" in headers:
                size = int(headers["Content-Length"])
                self.reporthook(url, filename, blocknum, bs, size)
            tfp = open(filename, 'wb')
            while True:
                block = fp.read(bs)
                if block:
                    cs.update(block)
                    tfp.write(block)
                    blocknum += 1
                    self.reporthook(url, filename, blocknum, bs, size)
                else:
                    break
            if info:
                self.check_md5(cs, info, filename, tfp)
            return headers
        finally:
            # Always release both handles, even on download errors.
            if fp:
                fp.close()
            if tfp:
                tfp.close()

    def reporthook(self, url, filename, blocknum, blksize, size):
        pass  # no-op

    def retry_sf_download(self, url, filename):
        try:
            return self._download_to(url, filename)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # Only sourceforge downloads are retried against mirrors.
            scheme, server, path, param, query, frag = urlparse.urlparse(url)
            if server != 'dl.sourceforge.net':
                raise

        mirror = get_sf_ip()

        while _sf_mirrors:
            self.warn("Download failed: %s", sys.exc_info()[1])
            url = urlparse.urlunparse((scheme, mirror, path, param, '', frag))
            try:
                return self._download_to(url, filename)
            except:
                _sf_mirrors.remove(mirror)  # don't retry the same mirror
                mirror = get_sf_ip()

        raise  # fail if no mirror works

    def open_url(self, url):
        if url.startswith('file:'):
            return local_open(url)
        try:
            request = urllib2.Request(url)
            request.add_header('User-Agent', user_agent)
            return urllib2.urlopen(request)
        except urllib2.HTTPError, v:
            # HTTP errors are returned (not raised) so callers can inspect them.
            return v
        except urllib2.URLError, v:
            raise DistutilsError("Download error: %s" % v.reason)
def fully_define_extension(build_ext):
    """Finalize build settings for the t_ring TensorFlow extension.

    Reads T_RING_GPU_ALLREDUCE / _ALLGATHER / _BROADCAST and COM_TYPE from
    the environment to decide whether CUDA and/or RDMA support is compiled
    in, then writes the resulting macros, sources and flags onto the
    module-level ``tensorflow_ring_lib`` Extension object.

    Raises
    ------
    DistutilsError
        If any T_RING_GPU_* variable holds an unsupported value.
    """
    check_tf_version()
    tf_compile_flags, tf_link_flags = get_tf_flags(build_ext)

    gpu_allreduce = os.environ.get('T_RING_GPU_ALLREDUCE')
    if gpu_allreduce and gpu_allreduce != 'TCP' and gpu_allreduce != 'RDMA':
        raise DistutilsError('T_RING_GPU_ALLREDUCE=%s is invalid, supported '
                             'values are "", "TCP", "RDMA".' % gpu_allreduce)

    gpu_allgather = os.environ.get('T_RING_GPU_ALLGATHER')
    if gpu_allgather and gpu_allgather != 'TCP' and gpu_allgather != 'RDMA':
        raise DistutilsError('T_RING_GPU_ALLGATHER=%s is invalid, supported '
                             'values are "", "TCP", "RDMA".' % gpu_allgather)

    # FIX: previously this read 'BCBUE_GPU_BROADCAST' while the error message
    # and the sibling options all use the T_RING_ prefix -- the broadcast
    # setting could never actually be picked up from the documented variable.
    gpu_broadcast = os.environ.get('T_RING_GPU_BROADCAST')
    if gpu_broadcast and gpu_broadcast != 'TCP' and gpu_broadcast != 'RDMA':
        raise DistutilsError('T_RING_GPU_BROADCAST=%s is invalid, supported '
                             'values are "", "TCP", "RDMA".' % gpu_broadcast)

    # Any GPU op implies CUDA headers/libs are needed.
    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []

    comm_type = os.environ.get('COM_TYPE')
    if comm_type == 'RDMA':
        have_rdma = True
        rdma_include_dirs, rdma_lib_dirs, rdma_link_flags = get_rdma_dirs(
            build_ext)
    else:
        have_rdma = False
        rdma_include_dirs = rdma_lib_dirs = rdma_link_flags = []

    MACROS = []
    INCLUDES = []
    SOURCES = [
        't_ring/tensorflow/MyRing.cpp',
        't_ring/tensorflow/ring_ops.cpp'
    ]
    if have_rdma:
        SOURCES += ['t_ring/tensorflow/rdma_t.cpp']
    COMPILE_FLAGS = ['-std=c++11', '-fPIC', '-Os'] + tf_compile_flags
    # FIX: copy tf_link_flags instead of aliasing it -- the += below would
    # otherwise mutate the list returned by get_tf_flags() in place.
    LINK_FLAGS = list(tf_link_flags)
    LIBRARY_DIRS = []
    LIBRARIES = []

    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        LIBRARY_DIRS += cuda_lib_dirs
        LIBRARIES += ['cudart']

    if have_rdma:
        MACROS += [('HAVE_RDMA', '1')]
        INCLUDES += rdma_include_dirs
        LIBRARY_DIRS += rdma_lib_dirs
        LINK_FLAGS += rdma_link_flags

    # The macro value is the first character of the transport name
    # ('T' for TCP, 'R' for RDMA), quoted to compile as a C char literal.
    if gpu_allreduce:
        MACROS += [('T_RING_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]
    if gpu_allgather:
        MACROS += [('T_RING_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]
    if gpu_broadcast:
        MACROS += [('T_RING_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]

    tensorflow_ring_lib.define_macros = MACROS
    tensorflow_ring_lib.include_dirs = INCLUDES
    tensorflow_ring_lib.sources = SOURCES
    tensorflow_ring_lib.extra_compile_args = COMPILE_FLAGS
    tensorflow_ring_lib.extra_link_args = LINK_FLAGS
    tensorflow_ring_lib.library_dirs = LIBRARY_DIRS
    tensorflow_ring_lib.libraries = LIBRARIES
def remove_gpl(pyfile, dry_run=0): """Remove the GPL license form a Python source file Raise DistutilsError when a problem is found. """ start = "you can redistribute it and/or modify" end = "MA 02110-1301 USA" intentionally_empty = "# Following empty comments are intentional.\n" pb_generated = "Generated by the protocol buffer compiler" # skip files on ignore list. for ignore in IGNORE_FILES: if pyfile.endswith(ignore): log.warn("Ignoring file %s from remove gpl license", pyfile) return result = [] removed = 0 num_lines = 0 try: fp = open(pyfile, "r") except IOError as exc: if exc.errno == errno.ENOENT: # file does not exists, nothing to do return raise line = fp.readline() num_lines += 1 done = False while line: if line.startswith(intentionally_empty): # We already removed the GPL license return if pb_generated in line: # Generated file by Protocol Buffer compiler return if line.strip().endswith(start) and not done: log.info("removing GPL license from %s", pyfile) result.append(intentionally_empty) removed += 1 line = fp.readline() num_lines += 1 while line: result.append("#\n") removed += 1 line = fp.readline() num_lines += 1 if line.strip().endswith(end): done = True line = fp.readline() num_lines += 1 result.append("# End empty comments.\n") removed += 1 break result.append(line) line = fp.readline() num_lines += 1 fp.close() result.append("\n") if removed != GPL_NOTICE_LINENR and num_lines > 2: msg = ("Problem removing GPL license. Removed %d lines from " "file %s" % (removed, pyfile)) raise DistutilsError(msg) elif removed != GPL_NOTICE_LINENR: log.warn("file %s does not have gpl license on header", pyfile) if not dry_run: fp = open(pyfile, "w") fp.writelines(result) fp.close()
def fetch_build_egg(dist, req): """Fetch an egg needed for building. Use pip/wheel to fetch/build a wheel.""" # Check pip is available. try: pkg_resources.get_distribution('pip') except pkg_resources.DistributionNotFound: dist.announce( 'WARNING: The pip package is not available, falling back ' 'to EasyInstall for handling setup_requires/test_requires; ' 'this is deprecated and will be removed in a future version.', log.WARN ) return _legacy_fetch_build_egg(dist, req) # Warn if wheel is not. try: pkg_resources.get_distribution('wheel') except pkg_resources.DistributionNotFound: dist.announce('WARNING: The wheel package is not available.', log.WARN) # Ignore environment markers; if supplied, it is required. req = strip_marker(req) # Take easy_install options into account, but do not override relevant # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll # take precedence. opts = dist.get_option_dict('easy_install') if 'allow_hosts' in opts: raise DistutilsError('the `allow-hosts` option is not supported ' 'when using pip to install requirements.') if 'PIP_QUIET' in os.environ or 'PIP_VERBOSE' in os.environ: quiet = False else: quiet = True if 'PIP_INDEX_URL' in os.environ: index_url = None elif 'index_url' in opts: index_url = opts['index_url'][1] else: index_url = None if 'find_links' in opts: find_links = _fixup_find_links(opts['find_links'][1])[:] else: find_links = [] if dist.dependency_links: find_links.extend(dist.dependency_links) eggs_dir = os.path.realpath(dist.get_egg_cache_dir()) environment = pkg_resources.Environment() for egg_dist in pkg_resources.find_distributions(eggs_dir): if egg_dist in req and environment.can_add(egg_dist): return egg_dist with TemporaryDirectory() as tmpdir: cmd = [ sys.executable, '-m', 'pip', '--disable-pip-version-check', 'wheel', '--no-deps', '-w', tmpdir, ] if quiet: cmd.append('--quiet') if index_url is not None: cmd.extend(('--index-url', index_url)) if find_links is not None: for link in find_links: 
cmd.extend(('--find-links', link)) # If requirement is a PEP 508 direct URL, directly pass # the URL to pip, as `req @ url` does not work on the # command line. if req.url: cmd.append(req.url) else: cmd.append(str(req)) try: subprocess.check_call(cmd) except subprocess.CalledProcessError as e: raise DistutilsError(str(e)) wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0]) dist_location = os.path.join(eggs_dir, wheel.egg_name()) wheel.install_as_egg(dist_location) dist_metadata = pkg_resources.PathMetadata( dist_location, os.path.join(dist_location, 'EGG-INFO')) dist = pkg_resources.Distribution.from_filename( dist_location, metadata=dist_metadata) return dist
https://packaging.python.org/en/latest/distributing.html https://github.com/pypa/sampleproject """ # Always prefer setuptools over distutils from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path import os import re import sys from distutils.errors import DistutilsError if sys.version_info.major > 2: raise DistutilsError("This package requires Python 2.7") here = path.abspath(path.dirname(__file__)) def get_requires(filename): requirements = [] with open(filename) as req_file: for line in req_file.read().splitlines(): if not line.strip().startswith("#"): requirements.append(line) return requirements project_requirements = get_requires("conans/requirements.txt") project_requirements.extend(get_requires("conans/requirements_server.txt"))
def run(self): import subprocess args = ['mypy', self.get_project_path()] result = subprocess.call(args) if result != 0: raise DistutilsError("mypy exited with status %d" % result)
def get_common_options(build_ext):
    """Collect build settings shared by all Horovod extension modules.

    Reads the HOROVOD_GPU_ALLREDUCE / _ALLGATHER / _BROADCAST environment
    variables to decide which GPU backends (CUDA, NCCL, DDL) to compile in,
    and adds the vendored third-party headers (Eigen, Boost, flatbuffers).

    Returns
    -------
    dict with keys MACROS, INCLUDES, SOURCES, COMPILE_FLAGS, LINK_FLAGS,
    LIBRARY_DIRS, LIBRARIES.

    Raises
    ------
    DistutilsError
        If an environment variable holds an unsupported value, or NCCL and
        MPI GPU implementations are mixed without explicit opt-in.
    """
    cpp_flags = get_cpp_flags(build_ext)
    link_flags = get_link_flags(build_ext)
    mpi_flags = get_mpi_flags()

    # Validate each HOROVOD_GPU_* variable against its supported backends.
    gpu_allreduce = os.environ.get('HOROVOD_GPU_ALLREDUCE')
    if gpu_allreduce and gpu_allreduce != 'MPI' and gpu_allreduce != 'NCCL' and \
            gpu_allreduce != 'DDL':
        raise DistutilsError('HOROVOD_GPU_ALLREDUCE=%s is invalid, supported '
                             'values are "", "MPI", "NCCL", "DDL".' % gpu_allreduce)

    gpu_allgather = os.environ.get('HOROVOD_GPU_ALLGATHER')
    if gpu_allgather and gpu_allgather != 'MPI':
        raise DistutilsError('HOROVOD_GPU_ALLGATHER=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_allgather)

    gpu_broadcast = os.environ.get('HOROVOD_GPU_BROADCAST')
    if gpu_broadcast and gpu_broadcast != 'MPI':
        raise DistutilsError('HOROVOD_GPU_BROADCAST=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_broadcast)

    # Any GPU op implies CUDA headers/libs are needed.
    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext, cpp_flags)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []

    if gpu_allreduce == 'NCCL':
        have_nccl = True
        nccl_include_dirs, nccl_lib_dirs, nccl_libs = get_nccl_vals(
            build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags)
    else:
        have_nccl = False
        nccl_include_dirs = nccl_lib_dirs = nccl_libs = []

    if gpu_allreduce == 'DDL':
        have_ddl = True
        ddl_include_dirs, ddl_lib_dirs = get_ddl_dirs(build_ext,
                                                      cuda_include_dirs,
                                                      cuda_lib_dirs, cpp_flags)
    else:
        have_ddl = False
        ddl_include_dirs = ddl_lib_dirs = []

    # Mixing NCCL allreduce with MPI allgather/broadcast can deadlock, so it
    # is rejected unless HOROVOD_ALLOW_MIXED_GPU_IMPL explicitly allows it.
    if (gpu_allreduce == 'NCCL' and (gpu_allgather == 'MPI' or gpu_broadcast == 'MPI')
            and not os.environ.get('HOROVOD_ALLOW_MIXED_GPU_IMPL')):
        raise DistutilsError('You should not mix NCCL and MPI GPU due to a possible deadlock.\n'
                             'If you\'re sure you want to mix them, set the '
                             'HOROVOD_ALLOW_MIXED_GPU_IMPL environment variable to \'1\'.')

    # Restrict vendored Eigen to its MPL2-licensed parts.
    MACROS = [('EIGEN_MPL2_ONLY', 1)]
    INCLUDES = ['third_party/eigen',
                'third_party/lbfgs/include',
                'third_party/boost/assert/include',
                'third_party/boost/config/include',
                'third_party/boost/core/include',
                'third_party/boost/detail/include',
                'third_party/boost/iterator/include',
                'third_party/boost/lockfree/include',
                'third_party/boost/mpl/include',
                'third_party/boost/parameter/include',
                'third_party/boost/predef/include',
                'third_party/boost/preprocessor/include',
                'third_party/boost/static_assert/include',
                'third_party/boost/type_traits/include',
                'third_party/boost/utility/include',
                'third_party/flatbuffers/include']
    SOURCES = ['horovod/common/common.cc',
               'horovod/common/fusion_buffer_manager.cc',
               'horovod/common/half.cc',
               'horovod/common/message.cc',
               'horovod/common/mpi_context.cc',
               'horovod/common/operations.cc',
               'horovod/common/parameter_manager.cc',
               'horovod/common/response_cache.cc',
               'horovod/common/timeline.cc',
               'horovod/common/ops/collective_operations.cc',
               'horovod/common/ops/mpi_operations.cc',
               'horovod/common/ops/operation_manager.cc',
               'horovod/common/optim/bayesian_optimization.cc',
               'horovod/common/optim/gaussian_process.cc',
               'horovod/common/logging.cc']
    COMPILE_FLAGS = cpp_flags + shlex.split(mpi_flags)
    LINK_FLAGS = link_flags + shlex.split(mpi_flags)
    LIBRARY_DIRS = []
    LIBRARIES = []

    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        SOURCES += ['horovod/common/ops/cuda_operations.cc',
                    'horovod/common/ops/mpi_cuda_operations.cc']
        LIBRARY_DIRS += cuda_lib_dirs
        LIBRARIES += ['cudart']

    if have_nccl:
        MACROS += [('HAVE_NCCL', '1')]
        INCLUDES += nccl_include_dirs
        SOURCES += ['horovod/common/ops/nccl_operations.cc']
        LIBRARY_DIRS += nccl_lib_dirs
        LIBRARIES += nccl_libs

    if have_ddl:
        MACROS += [('HAVE_DDL', '1')]
        INCLUDES += ddl_include_dirs
        SOURCES += ['horovod/common/ops/ddl_operations.cc']
        LIBRARY_DIRS += ddl_lib_dirs
        LIBRARIES += ['ddl', 'ddl_pack']

    # The macro value is the first character of the backend name
    # ('M', 'N' or 'D'), quoted so it compiles as a C char literal.
    if gpu_allreduce:
        MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]
    if gpu_allgather:
        MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]
    if gpu_broadcast:
        MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]

    return dict(MACROS=MACROS,
                INCLUDES=INCLUDES,
                SOURCES=SOURCES,
                COMPILE_FLAGS=COMPILE_FLAGS,
                LINK_FLAGS=LINK_FLAGS,
                LIBRARY_DIRS=LIBRARY_DIRS,
                LIBRARIES=LIBRARIES)
def run(self): raise DistutilsError("not supported on this version of python")
def run(self): """A command's raison d'etre: carry out the action it exists to perform, controlled by the options initialized in 'initialize_options()', customized by other commands, the setup script, the command-line, and config files, and finalized in 'finalize_options()'. All terminal output and filesystem interaction should be done by 'run()'. """ if not self.skip_build: # Make sure install requirements are actually installed if self.distribution.install_requires: self.announce('Installing *install_requires* packages') subprocess.check_call( [sys.executable, '-m', 'pip', 'install', '--user', '-q'] + self.distribution.install_requires) # Ensure metadata is up-to-date self.reinitialize_command('build_py', inplace=0) self.run_command('build_py') bpy_cmd = self.get_finalized_command("build_py") build_path = normalize_path(bpy_cmd.build_lib) # Build extensions self.reinitialize_command('egg_info', egg_base=build_path) self.run_command('egg_info') self.reinitialize_command('build_ext', inplace=0) self.run_command('build_ext') # Make sure test requirements are actually installed if self.distribution.tests_require: self.announce('Installing *test_require* packages') subprocess.check_call( [sys.executable, '-m', 'pip', 'install', '--user', '-q'] + self.distribution.tests_require) ei_cmd = self.get_finalized_command("egg_info") # Get actual package location if self.skip_build: # Retrieve installation directory package_path = normalize_path( get_distribution(PACKAGE_NAME).location) else: # Package was just built... 
package_path = normalize_path(ei_cmd.egg_base) old_path = sys.path[:] old_modules = sys.modules.copy() try: if self.skip_build: sys.path.pop(0) else: sys.path.insert(0, package_path) working_set.__init__() add_activation_listener(lambda dist: dist.activate()) require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) if not self._run_tests(package_path): raise DistutilsError("Tests failed!") finally: sys.path[:] = old_path sys.modules.clear() sys.modules.update(old_modules) working_set.__init__()
def upload_file(self, command, pyversion, filename):
    """Upload one distribution file to the configured repository.

    Builds a multipart/form-data POST (PyPI legacy upload API) containing
    the file content, its metadata, content digests and an optional GPG
    signature, and submits it with HTTP basic auth.

    Args:
        command: the distutils command that produced the file; sent as the
            'filetype' form field.
        pyversion: target Python version string for the 'pyversion' field.
        filename: path of the distribution file to upload.

    Raises:
        AssertionError: if the repository URL has params/query/fragments or
            a scheme other than http/https.
        DistutilsError: if the server responds with a non-200 status.
        OSError: network-level failures are re-raised after logging.
    """
    # Makes sure the repository URL is compliant
    schema, netloc, url, params, query, fragments = \
        urlparse(self.repository)
    if params or query or fragments:
        raise AssertionError("Incompatible url %s" % self.repository)

    if schema not in ('http', 'https'):
        raise AssertionError("unsupported schema " + schema)

    # Sign if requested: produces a detached ASCII-armored <file>.asc
    if self.sign:
        gpg_args = ["gpg", "--detach-sign", "-a", filename]
        if self.identity:
            # Splice --local-user right after "gpg" so it precedes the mode flags.
            gpg_args[2:2] = ["--local-user", self.identity]
        spawn(gpg_args, dry_run=self.dry_run)

    # Fill in the data - send all the meta-data in case we need to
    # register a new release
    f = open(filename, 'rb')
    try:
        content = f.read()
    finally:
        f.close()

    meta = self.distribution.metadata
    data = {
        # action
        ':action': 'file_upload',
        'protocol_version': '1',

        # identify release
        'name': meta.get_name(),
        'version': meta.get_version(),

        # file content
        'content': (os.path.basename(filename), content),
        'filetype': command,
        'pyversion': pyversion,

        # additional meta-data
        'metadata_version': '1.0',
        'summary': meta.get_description(),
        'home_page': meta.get_url(),
        'author': meta.get_contact(),
        'author_email': meta.get_contact_email(),
        'license': meta.get_licence(),
        'description': meta.get_long_description(),
        'keywords': meta.get_keywords(),
        'platform': meta.get_platforms(),
        'classifiers': meta.get_classifiers(),
        'download_url': meta.get_download_url(),
        # PEP 314
        'provides': meta.get_provides(),
        'requires': meta.get_requires(),
        'obsoletes': meta.get_obsoletes(),
    }

    data['comment'] = ''

    # file content digests; _FILE_CONTENT_DIGESTS maps field name ->
    # hash constructor (None when the algorithm is unavailable).
    for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
        if digest_cons is None:
            continue
        try:
            data[digest_name] = digest_cons(content).hexdigest()
        except ValueError:
            # hash digest not available or blocked by security policy
            pass

    if self.sign:
        with open(filename + ".asc", "rb") as f:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     f.read())

    # set up the authentication; PyPI only accepts ascii for both
    # username and password.
    user_pass = (self.username + ":" + self.password).encode('ascii')
    # The exact encoding of the authentication string is debated.
    # Anyway PyPI only accepts ascii for both username or password.
    auth = "Basic " + standard_b64encode(user_pass).decode('ascii')

    # Build up the MIME payload for the POST data
    boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
    sep_boundary = b'\r\n--' + boundary.encode('ascii')
    end_boundary = sep_boundary + b'--\r\n'
    body = io.BytesIO()
    for key, value in data.items():
        title = '\r\nContent-Disposition: form-data; name="%s"' % key
        # handle multiple entries for the same name
        if not isinstance(value, list):
            value = [value]
        for value in value:
            if type(value) is tuple:
                # (filename, bytes) pair: attach as a file part.
                title += '; filename="%s"' % value[0]
                value = value[1]
            else:
                value = str(value).encode('utf-8')
            body.write(sep_boundary)
            body.write(title.encode('utf-8'))
            body.write(b"\r\n\r\n")
            body.write(value)
    body.write(end_boundary)
    body = body.getvalue()

    msg = "Submitting %s to %s" % (filename, self.repository)
    self.announce(msg, log.INFO)

    # build the Request
    headers = {
        'Content-type': 'multipart/form-data; boundary=%s' % boundary,
        'Content-length': str(len(body)),
        'Authorization': auth,
    }

    request = Request(self.repository, data=body, headers=headers)
    # send the data
    try:
        result = urlopen(request)
        status = result.getcode()
        reason = result.msg
    except HTTPError as e:
        # HTTP-level errors still carry a status we can report below.
        status = e.code
        reason = e.msg
    except OSError as e:
        self.announce(str(e), log.ERROR)
        raise

    if status == 200:
        self.announce('Server response (%s): %s' % (status, reason),
                      log.INFO)
        if self.show_response:
            text = self._read_pypi_response(result)
            msg = '\n'.join(('-' * 75, text, '-' * 75))
            self.announce(msg, log.INFO)
    else:
        msg = 'Upload failed (%s): %s' % (status, reason)
        self.announce(msg, log.ERROR)
        raise DistutilsError(msg)
def run(self):
    """Configure and build the 'xmlsec' extension.

    On win32, or when PYXMLSEC_STATIC_DEPS is set, prepares a static build
    of the native dependencies under build/tmp; otherwise resolves xmlsec1
    via pkg-config. Then wires in lxml headers, module-identity macros and
    compiler flags before delegating to the base build_ext.run().

    Raises:
        DistutilsError: when pkg-config cannot be invoked, xmlsec1 is
            missing, or pkg-config returns an unusable result.
    """
    ext = self.ext_map['xmlsec']
    # Env toggles; note the values are the raw env strings (or False),
    # treated as booleans below.
    self.debug = os.environ.get('PYXMLSEC_ENABLE_DEBUG', False)
    self.static = os.environ.get('PYXMLSEC_STATIC_DEPS', False)

    if self.static or sys.platform == 'win32':
        # Static build: lay out prefix/libs scratch dirs under build/tmp.
        self.info('starting static build on {}'.format(sys.platform))
        buildroot = Path('build', 'tmp')

        self.prefix_dir = buildroot / 'prefix'
        self.prefix_dir.mkdir(parents=True, exist_ok=True)
        self.prefix_dir = self.prefix_dir.absolute()

        self.build_libs_dir = buildroot / 'libs'
        self.build_libs_dir.mkdir(exist_ok=True)

        self.libs_dir = Path(os.environ.get('PYXMLSEC_LIBS_DIR', 'libs'))
        self.libs_dir.mkdir(exist_ok=True)

        if sys.platform == 'win32':
            self.prepare_static_build_win()
        elif 'linux' in sys.platform:
            self.prepare_static_build_linux()
    else:
        # Dynamic build: ask pkg-config where xmlsec1 lives.
        import pkgconfig

        try:
            config = pkgconfig.parse('xmlsec1')
        except EnvironmentError:
            raise DistutilsError('Unable to invoke pkg-config.')
        except pkgconfig.PackageNotFoundError:
            raise DistutilsError('xmlsec1 is not installed or not in path.')

        if config is None or not config.get('libraries'):
            raise DistutilsError('Bad or incomplete result returned from pkg-config.')

        ext.define_macros.extend(config['define_macros'])
        ext.include_dirs.extend(config['include_dirs'])
        ext.library_dirs.extend(config['library_dirs'])
        ext.libraries.extend(config['libraries'])

    # The binding compiles against lxml's C headers.
    import lxml

    ext.include_dirs.extend(lxml.get_include())

    ext.define_macros.extend(
        [('MODULE_NAME', self.distribution.metadata.name), ('MODULE_VERSION', self.distribution.metadata.version)]
    )
    # escape the XMLSEC_CRYPTO macro value, see mehcode/python-xmlsec#141:
    # wrap the value in quotes once, if not already quoted.
    for (key, value) in ext.define_macros:
        if key == 'XMLSEC_CRYPTO' and not (value.startswith('"') and value.endswith('"')):
            ext.define_macros.remove((key, value))
            ext.define_macros.append((key, '"{0}"'.format(value)))
            break

    if sys.platform == 'win32':
        ext.extra_compile_args.append('/Zi')
    else:
        ext.extra_compile_args.extend(
            [
                '-g',
                '-std=c99',
                '-fPIC',
                '-fno-strict-aliasing',
                '-Wno-error=declaration-after-statement',
                '-Werror=implicit-function-declaration',
            ]
        )

    if self.debug:
        # Debug build: warnings on, optimization off, debug macro defined.
        ext.extra_compile_args.append('-Wall')
        ext.extra_compile_args.append('-O0')
        ext.define_macros.append(('PYXMLSEC_ENABLE_DEBUG', '1'))
    else:
        ext.extra_compile_args.append('-Os')

    super(build_ext, self).run()