Example #1
0
    def _setup_extensions(self):
        """Attach pkg-config dependencies, pycairo headers and compiler
        flags to the gi C extension modules."""
        by_name = {e.name: e for e in self.extensions}

        compiler = new_compiler(compiler=self.compiler)
        customize_compiler(compiler)

        def dep(extension, pkg):
            # Resolve the pkg-config package for the active compiler type.
            add_ext_pkg_config_dep(extension, compiler.compiler_type, pkg)

        gi_ext = by_name["gi._gi"]
        for pkg in ("glib-2.0", "gio-2.0",
                    "gobject-introspection-1.0", "libffi"):
            dep(gi_ext, pkg)
        add_ext_compiler_flags(gi_ext, compiler)

        if WITH_CAIRO:
            cairo_ext = by_name["gi._gi_cairo"]
            for pkg in ("glib-2.0", "gio-2.0",
                        "gobject-introspection-1.0", "libffi",
                        "cairo", "cairo-gobject"):
                dep(cairo_ext, pkg)
            # The cairo module additionally needs pycairo's headers.
            cairo_ext.include_dirs += [get_pycairo_include_dir()]
            add_ext_compiler_flags(cairo_ext, compiler)
Example #2
0
 def __init__(self, debug=False):
     """Create a customized distutils compiler and capture the library
     directories a default build_ext command would use."""
     self._compiler = new_compiler()
     threshold = log.DEBUG if debug else log.INFO
     log.set_threshold(threshold)
     customize_compiler(self._compiler)
     builder = build_ext(Distribution())
     builder.finalize_options()
     self._build_ext = builder
     self._py_lib_dirs = builder.library_dirs
Example #3
0
def _have_sqlite_extension_support():
    """Return True if a trivial C program linking against libsqlite3 builds.

    Writes a dummy source file into a fresh temp directory, compiles and
    links it against sqlite3, and reports success.  The temp directory is
    always removed, even when an unexpected exception escapes.
    """
    import shutil
    import tempfile
    from distutils.ccompiler import new_compiler
    from distutils.errors import (CCompilerError, DistutilsExecError,
                                  DistutilsPlatformError)
    from distutils.sysconfig import customize_compiler

    libraries = ['sqlite3']
    c_code = ('#include <sqlite3.h>\n\n'
              'int main(int argc, char **argv) { return 0; }')
    tmp_dir = tempfile.mkdtemp(prefix='tmp_pw_sqlite3_')
    bin_file = os.path.join(tmp_dir, 'test_pw_sqlite3')
    src_file = bin_file + '.c'
    with open(src_file, 'w') as fh:
        fh.write(c_code)

    compiler = new_compiler()
    customize_compiler(compiler)
    success = False
    try:
        compiler.link_executable(
            compiler.compile([src_file], output_dir=tmp_dir),
            bin_file,
            # FIX: use the declared `libraries` list instead of a
            # hard-coded duplicate literal.
            libraries=libraries)
    except CCompilerError:
        print('unable to compile sqlite3 C extensions - missing headers?')
    except DistutilsExecError:
        print('unable to compile sqlite3 C extensions - no c compiler?')
    except DistutilsPlatformError:
        print('unable to compile sqlite3 C extensions - platform error')
    else:
        success = True
    finally:
        # FIX: the original only removed tmp_dir on the fall-through path,
        # leaking it when an unexpected exception escaped the handlers.
        shutil.rmtree(tmp_dir)
    return success
Example #4
0
def make_compiler(compiler_cmd='g++'):
    """Return a distutils compiler with every tool set to *compiler_cmd*.

    The returned compiler searches common system include/library paths plus
    the Python and NumPy include directories, and links against libm and
    libblas by default.

    Args:
        compiler_cmd: command used for compiling and linking (default 'g++').
    """
    executables = {
        'preprocessor': None,
        'compiler': [compiler_cmd],
        'compiler_so': [compiler_cmd],
        'compiler_cxx': [compiler_cmd],
        'linker_so': [compiler_cmd, "-shared"],
        'linker_exe': [compiler_cmd],
        'archiver': ["ar", "-cr"],
        'ranlib': None,
    }

    compiler = new_compiler(verbose=1)
    compiler.set_executables(**executables)
    compiler.add_include_dir('/usr/local/include')
    compiler.add_include_dir('/usr/include')
    compiler.add_include_dir('/sw/include')
    compiler.add_include_dir(distutils.sysconfig.get_python_inc())
    # NOTE(review): '/sw/lib' is registered as an *include* dir here; it
    # looks like it was meant to be a library dir -- confirm before changing.
    compiler.add_include_dir('/sw/lib')
    compiler.add_library_dir('/usr/local/lib')
    compiler.add_library_dir('/usr/lib')
    compiler.add_library_dir('.')

    for inc_dir in numpy.distutils.misc_util.get_numpy_include_dirs():
        compiler.add_include_dir(inc_dir)

    # FIX: removed the unused local `output_dir = '.'`.
    compiler.add_library('m')
    compiler.add_library('blas')
    return compiler
Example #5
0
def check_sanity():
    """
    Test if development headers and library for rbd is available by compiling a dummy C program.
    """
    CEPH_SRC_DIR = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        '..',
        '..'
    )

    tmp_dir = tempfile.mkdtemp(dir=os.environ.get('TMPDIR', os.path.dirname(__file__)))
    tmp_file = os.path.join(tmp_dir, 'rbd_dummy.c')

    with open(tmp_file, 'w') as fp:
        dummy_prog = textwrap.dedent("""
        #include <stddef.h>
        #include <rbd/librbd.h>
        int main(void) {
            rados_t cluster;
            rados_create(&cluster, NULL);
            return 0;
        }
        """)
        fp.write(dummy_prog)

    compiler = new_compiler()
    distutils.sysconfig.customize_compiler(compiler)

    if {'MAKEFLAGS', 'MFLAGS', 'MAKELEVEL'}.issubset(set(os.environ.keys())):
        # The setup.py has been invoked by a top-level Ceph make.
        # Set the appropriate CFLAGS and LDFLAGS

        compiler.set_include_dirs([os.path.join(CEPH_SRC_DIR, 'include')])
        compiler.set_library_dirs([os.environ.get('CEPH_LIBDIR')])

    try:
        compiler.define_macro('_FILE_OFFSET_BITS', '64')

        link_objects = compiler.compile(
            sources=[tmp_file],
            output_dir=tmp_dir
        )

        compiler.link_executable(
            objects=link_objects,
            output_progname=os.path.join(tmp_dir, 'rbd_dummy'),
            libraries=['rbd', 'rados'],
            output_dir=tmp_dir,
        )

    except CompileError:
        print('\nCompile Error: RBD development headers not found', file=sys.stderr)
        return False
    except LinkError:
        print('\nLink Error: RBD library not found', file=sys.stderr)
        return False
    else:
        return True
    finally:
        shutil.rmtree(tmp_dir)
Example #6
0
    def run(self):
        """Build the Qt frontend when qmake-qt4 is available, run the normal
        build, then link each listed C source into an executable."""
        # This should always work when building a Debian package.
        if os.path.exists("/usr/bin/qmake-qt4"):
            retval = subprocess.call(
                "(cd qt/frontend; qmake-qt4; make)", shell=True)
            if retval:
                raise SystemExit(retval)

            # Ship the freshly built Qt service with the package data.
            DATA_FILES.append(("lib/checkbox/qt/",
              ["qt/frontend/checkbox-qt-service"]))

        super(checkbox_build, self).run()

        # Hand each C source straight to the linker driver (single-step
        # compile+link per source).
        cc = new_compiler()
        for source in self.sources:
            executable = os.path.splitext(source)[0]
            cc.link_executable(
                [source], executable,
                libraries=["rt", "pthread", "nl-3", "nl-genl-3"],
                # Enforce security with CFLAGS + LDFLAGS (see dpkg-buildflags)
                extra_preargs=[
                    "-O2", "-fstack-protector",
                    "--param=ssp-buffer-size=4", "-Wformat",
                    "-Werror=format-security",
                    "-Wl,-Bsymbolic-functions",
                    "-Wl,-z,relro",
                    "-I/usr/include/libnl3"])
Example #7
0
File: setup.py Project: mbr/githome
    def run(self):
        """Compile and link the gh_client helper, then perform the normal
        install step."""
        compiler = new_compiler()
        customize_compiler(compiler)
        objects = compiler.compile(['githome/gh_client.c'])
        compiler.link_executable(objects, 'githome/gh_client')

        # Continue with the standard install.
        install.run(self)
Example #8
0
File: setup.py Project: zjc5415/pyq
    def run(self):
        """Compile and link every executable declared on the distribution.

        Mirrors distutils' build_ext flow per executable: configure a fresh
        compiler, apply include dirs and macros, compile the sources into
        build_temp, then link the objects into build_lib.
        """
        for exe in self.distribution.executables:
            exe.include_dirs.append(get_python_inc())
            compiler = new_compiler(  # compiler=self.compiler,
                verbose=self.verbose,
                dry_run=self.dry_run,
                force=self.force)
            customize_compiler(compiler)
            compiler.set_include_dirs(exe.include_dirs)
            for (name, value) in exe.define_macros:
                compiler.define_macro(name, value)

            objects = compiler.compile(exe.sources, output_dir=self.build_temp)

            # This is a hack copied from distutils.commands.build_exe (where it is also called
            # a hack).
            self._build_objects = objects[:]

            # NOTE(review): assumes the Python library lives under
            # <exec_prefix>/lib -- not true on every platform; confirm.
            library_dirs = [os.path.join(sys.exec_prefix, 'lib')]

            # The executable is named after the last dotted name component.
            exe_path = join(self.build_lib, exe.name.split('.')[-1])

            compiler.link(CCompiler.EXECUTABLE,
                          objects=objects,
                          output_filename=exe_path,
                          library_dirs=library_dirs,
                          libraries=exe.libraries
                          )
Example #9
0
    def __init__(self, disable_openmp):
        # True when the default distutils compiler is MSVC; selects which
        # flag spellings (/...) vs (-...) are used below.
        self.msvc = new_compiler().compiler_type == 'msvc'

        if disable_openmp:
            self.openmp_enabled = False
        else:
            self.openmp_enabled, openmp_needs_gomp = self._detect_openmp()
        # NOTE(review): on MSVC the SSE3/SSE4.1 probes are skipped and support
        # is assumed True -- confirm that is intended.
        self.sse3_enabled = self._detect_sse3() if not self.msvc else True
        self.sse41_enabled = self._detect_sse41() if not self.msvc else True

        self.compiler_args_sse2  = ['-msse2'] if not self.msvc else ['/arch:SSE2']
        self.compiler_args_sse3  = ['-mssse3'] if (self.sse3_enabled and not self.msvc) else []

        self.compiler_args_sse41, self.define_macros_sse41 = [], []
        if self.sse41_enabled:
            self.define_macros_sse41 = [('__SSE4__', 1), ('__SSE4_1__', 1)]
            if not self.msvc:
                self.compiler_args_sse41 = ['-msse4']

        if self.openmp_enabled:
            # openmp_needs_gomp is only bound when the probe ran, which is
            # exactly the case in which openmp_enabled can be True here.
            self.compiler_libraries_openmp = ['gomp'] if openmp_needs_gomp else []

            if self.msvc:
                self.compiler_args_openmp = ['/openmp']
            else:
                self.compiler_args_openmp = ['-fopenmp']
        else:
            self.compiler_libraries_openmp = []
            self.compiler_args_openmp = []

        if self.msvc:
            self.compiler_args_opt = ['/O2']
        else:
            self.compiler_args_opt = ['-O3', '-funroll-loops']
Example #10
0
    def test_run(self):
        """End-to-end build_clib run: compile foo.c and check libfoo.a exists."""
        # can't test on windows
        if sys.platform == "win32":
            return

        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        foo_c = os.path.join(pkg_dir, "foo.c")
        self.write_file(foo_c, "int main(void) { return 1;}\n")
        cmd.libraries = [("foo", {"sources": [foo_c]})]

        build_temp = os.path.join(pkg_dir, "build")
        os.mkdir(build_temp)
        cmd.build_temp = build_temp
        cmd.build_clib = build_temp

        # before we run the command, we want to make sure
        # all commands are present on the system
        # by creating a compiler and checking its executables
        # NOTE(review): customize_compiler normally lives in
        # distutils.sysconfig, not distutils.ccompiler -- confirm this
        # import actually resolves in the targeted Python version.
        from distutils.ccompiler import new_compiler, customize_compiler

        compiler = new_compiler()
        customize_compiler(compiler)
        for ccmd in compiler.executables.values():
            if ccmd is None:
                continue
            if find_executable(ccmd[0]) is None:
                return  # can't test

        # this should work
        cmd.run()

        # let's check the result
        self.assertTrue("libfoo.a" in os.listdir(build_temp))
Example #11
0
def find_python_library():
    """Locate the libpython file for this interpreter; '' when not found."""
    version = sysconfig.get_python_version()
    short = version.replace(".", "")
    # Candidate basenames, most specific ABI-suffixed variants first.
    candidates = [
        "python%s" % short,
        "python%smu" % version,
        "python%sm" % version,
        "python%su" % version,
        "python%s" % version,
        ]
    config = sysconfig.get_config_vars()
    search_dirs = [
        "%s/lib" % os.environ.get("PYTHON_DIR", ""),
        "%s" % config.get("LIBDIR", ""),
        "/usr/lib/%s" % config.get("MULTIARCH", ""),
        "/usr/local/lib",
        "/opt/local/lib",
        "/usr/lib",
        "/usr/lib64",
        ]
    linker = new_compiler()
    for candidate in candidates:
        found = linker.find_library_file(search_dirs, candidate)
        if found is not None:
            return found
    return ""
Example #12
0
def cc(filenames, dest,
       library_dirs=None, include_dirs=None,
       libraries=None, compile_args=None,
       runtime_lib_dirs=None, target_lang="c++"):
    """Compile *filenames* and link each object into its own executable.

    Args:
        filenames: source files to compile.
        dest: output directory for objects and executables.
        library_dirs, include_dirs, libraries, compile_args,
        runtime_lib_dirs: optional settings forwarded to the compiler.
        target_lang: language driver used for the link step.  Defaults to
            "c++", matching the previously hard-coded behavior.

    Returns:
        True when everything compiled and linked, False otherwise.
    """
    compiler = ccompiler.new_compiler()

    distutils.sysconfig.customize_compiler(compiler)
    if library_dirs:
        compiler.set_library_dirs(library_dirs)
    if include_dirs:
        compiler.set_include_dirs(include_dirs)
    if libraries:
        compiler.set_libraries(libraries)
    if runtime_lib_dirs:
        compiler.set_runtime_library_dirs(runtime_lib_dirs)

    try:
        objnames = compiler.compile(filenames, output_dir=dest,
                                    extra_postargs=compile_args)
        for obj in objnames:
            # One executable per object, named after the source file.
            execname, ext = os.path.splitext(obj)
            compiler.link_executable(
                [obj], execname, output_dir=dest,
                target_lang=target_lang,
            )
    except (CompileError, LinkError):
        return False
    return True
Example #13
0
def rmsd_extension():
    """Build the Extension object for mdtraj._rmsd with SIMD/OpenMP flags."""
    openmp_enabled, needs_gomp = detect_openmp()
    # Prefer SSSE3 when the probe says it is available, else plain SSE2.
    compiler_args = ["-msse2" if not detect_sse3() else "-mssse3", "-O3", "-funroll-loops"]
    # NOTE(review): on MSVC this appends /arch:SSE2 *on top of* the
    # GCC-style flags above, which MSVC will not understand -- confirm.
    if new_compiler().compiler_type == "msvc":
        compiler_args.append("/arch:SSE2")

    if openmp_enabled:
        compiler_args.append("-fopenmp")
    # libgomp is only required by some toolchains (per the probe).
    compiler_libraries = ["gomp"] if needs_gomp else []
    # compiler_defs = [('USE_OPENMP', None)] if openmp_enabled else []

    rmsd = Extension(
        "mdtraj._rmsd",
        sources=[
            "MDTraj/rmsd/src/theobald_rmsd.c",
            "MDTraj/rmsd/src/rotation.c",
            "MDTraj/rmsd/src/center.c",
            "MDTraj/rmsd/_rmsd." + cython_extension,
        ],
        include_dirs=["MDTraj/rmsd/include", numpy.get_include()],
        extra_compile_args=compiler_args,
        # define_macros=compiler_defs,
        libraries=compiler_libraries,
    )
    return rmsd
Example #14
0
  def run(self):
    """Runs the build extension."""
    # Probe the toolchain type: MSVC and the configure-based unix path
    # need different preprocessor defines.
    compiler = new_compiler(compiler=self.compiler)
    if compiler.compiler_type == "msvc":
      self.define = [
          ("UNICODE", ""),
      ]

    else:
      # Run the project's configure script and echo its summary.
      command = "sh configure --disable-shared-libs"
      output = self._RunCommand(command)

      # Print everything from the first bare "configure:" line onwards.
      print_line = False
      for line in output.split("\n"):
        line = line.rstrip()
        if line == "configure:":
          print_line = True

        if print_line:
          print(line)

      self.define = [
          ("HAVE_CONFIG_H", ""),
          ("LOCALEDIR", "\"/usr/share/locale\""),
      ]

    build_ext.run(self)
Example #15
0
 def __init__(self):
     """Prepare a customized compiler and record the Python library dirs
     a default build_ext command would use."""
     self.debug = False
     compiler = new_compiler()
     customize_compiler(compiler)
     self._compiler = compiler
     ext_cmd = build_ext(Distribution())
     ext_cmd.finalize_options()
     self._build_ext = ext_cmd
     self._py_lib_dirs = ext_cmd.library_dirs
Example #16
0
    def run(self):
        """Set up self.compiler from the command options, then build all
        extensions."""
        from distutils.ccompiler import new_compiler
        if not self.extensions:
            return
        else:
            if self.distribution.has_c_libraries():
                # Make libraries built by build_clib visible to the linker.
                build_clib = self.get_finalized_command('build_clib')
                self.libraries.extend(build_clib.get_library_names() or [])
                self.library_dirs.append(build_clib.build_clib)
            self.compiler = new_compiler(compiler=self.compiler, verbose=self.verbose, dry_run=self.dry_run, force=self.force)
            customize_compiler(self.compiler)
            # Cross-compiling on Windows requires explicit initialization
            # for the target platform.
            if os.name == 'nt' and self.plat_name != get_platform():
                self.compiler.initialize(self.plat_name)
            # Push all command-line / setup-script options into the compiler
            # so they apply to every compile and link below.
            if self.include_dirs is not None:
                self.compiler.set_include_dirs(self.include_dirs)
            if self.define is not None:
                for name, value in self.define:
                    self.compiler.define_macro(name, value)

            if self.undef is not None:
                for macro in self.undef:
                    self.compiler.undefine_macro(macro)

            if self.libraries is not None:
                self.compiler.set_libraries(self.libraries)
            if self.library_dirs is not None:
                self.compiler.set_library_dirs(self.library_dirs)
            if self.rpath is not None:
                self.compiler.set_runtime_library_dirs(self.rpath)
            if self.link_objects is not None:
                self.compiler.set_link_objects(self.link_objects)
            self.build_extensions()
            return
Example #17
0
    def setup_shlib_compiler(self):
        """Create self.shlib_compiler, configured for building shared
        libraries and primed with this command's option overrides."""
        compiler = self.shlib_compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        _customize_compiler_for_shlib(compiler)

        if self.include_dirs is not None:
            compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                compiler.undefine_macro(macro)
        if self.libraries is not None:
            compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            compiler.set_link_objects(self.link_objects)

        # hack so distutils' build_extension() builds a library instead
        compiler.link_shared_object = link_shared_object.__get__(compiler)
Example #18
0
 def setUp(self):
     """
     Run gdb and have cygdb import the debug information from the code
     defined in TestParseTreeTransforms's setUp method
     """
     # Scratch area for the copied sources and the generated debug info.
     self.tempdir = tempfile.mkdtemp()
     self.destfile = os.path.join(self.tempdir, 'codefile.pyx')
     self.debug_dest = os.path.join(self.tempdir, 
                                   'cython_debug', 
                                   'cython_debug_info_codefile')
     self.cfuncs_destfile = os.path.join(self.tempdir, 'cfuncs')
     
     # Build inside the temp dir; presumably restored by a matching
     # tearDown (not visible here -- confirm).
     self.cwd = os.getcwd()
     os.chdir(self.tempdir)
     
     shutil.copy(codefile, self.destfile)
     shutil.copy(cfuncs_file, self.cfuncs_destfile + '.c')
     
     # Compile the helper C file with debug symbols so gdb can see it.
     compiler = ccompiler.new_compiler()
     compiler.compile(['cfuncs.c'], debug=True)
     
     ext = Cython.Distutils.extension.Extension(
         'codefile',
         ['codefile.pyx'], 
         pyrex_debug=True,
         extra_objects=['cfuncs.o'])
         
     # Build the extension in place so debug info lands in tempdir.
     distutils.core.setup(
         script_args=['build_ext', '--inplace'],
         ext_modules=[ext],
         cmdclass=dict(build_ext=Cython.Distutils.build_ext)
     )
    def setup_shlib_compiler(self):
        """Create self.shlib_compiler for building shared libraries; on
        macOS, temporarily patch the config vars so it produces .dylib
        output."""
        compiler = self.shlib_compiler = new_compiler(compiler=self.compiler, dry_run=self.dry_run, force=self.force)
        if sys.platform == 'darwin':
            # Swap in dylib-producing settings just for customize_compiler,
            # then restore the originals no matter what.
            tmp = _config_vars.copy()
            try:
                _config_vars['LDSHARED'] = 'gcc -Wl,-x -dynamiclib -undefined dynamic_lookup'
                _config_vars['CCSHARED'] = ' -dynamiclib'
                _config_vars['SO'] = '.dylib'
                customize_compiler(compiler)
            finally:
                _config_vars.clear()
                _config_vars.update(tmp)

        else:
            customize_compiler(compiler)
        # Push this command's option overrides into the compiler.
        if self.include_dirs is not None:
            compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            for name, value in self.define:
                compiler.define_macro(name, value)

        if self.undef is not None:
            for macro in self.undef:
                compiler.undefine_macro(macro)

        if self.libraries is not None:
            compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            compiler.set_link_objects(self.link_objects)
        # hack so distutils' build_extension() builds a library instead
        compiler.link_shared_object = link_shared_object.__get__(compiler)
        return
Example #20
0
    def test_run(self):
        """End-to-end build_clib run: compile foo.c and check libfoo.a exists."""
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        foo_c = os.path.join(pkg_dir, 'foo.c')
        self.write_file(foo_c, 'int main(void) { return 1;}\n')
        cmd.libraries = [('foo', {'sources': [foo_c]})]

        build_temp = os.path.join(pkg_dir, 'build')
        os.mkdir(build_temp)
        cmd.build_temp = build_temp
        cmd.build_clib = build_temp

        # before we run the command, we want to make sure
        # all commands are present on the system
        # by creating a compiler and checking its executables
        from distutils.ccompiler import new_compiler
        from distutils.sysconfig import customize_compiler

        compiler = new_compiler()
        customize_compiler(compiler)
        for ccmd in compiler.executables.values():
            if ccmd is None:
                continue
            if find_executable(ccmd[0]) is None:
                self.skipTest('The %r command is not found' % ccmd[0])

        # this should work
        cmd.run()

        # let's check the result
        self.assertIn('libfoo.a', os.listdir(build_temp))
Example #21
0
def test_compilation(cfile, compiler=None, **compiler_attrs):
    """Test simple compilation with given settings

    Compiles *cfile* and links it into an executable named after the
    source (extension stripped).  *compiler* may be None or a compiler
    name (a fresh customized compiler is created) or an existing compiler
    object.  Extra keyword attributes are set directly on the compiler.
    Returns the executable path.
    """
    if compiler is None or isinstance(compiler, str):
        cc = ccompiler.new_compiler(compiler=compiler)
        customize_compiler(cc)
        if cc.compiler_type == 'mingw32':
            customize_mingw(cc)
    else:
        cc = compiler

    for name, val in compiler_attrs.items():
        setattr(cc, name, val)

    efile, ext = os.path.splitext(cfile)

    cpreargs = lpreargs = None
    if sys.platform == 'darwin':
        # use appropriate arch for compiler
        if platform.architecture()[0]=='32bit':
            if platform.processor() == 'powerpc':
                cpu = 'ppc'
            else:
                cpu = 'i386'
            cpreargs = ['-arch', cpu]
            lpreargs = ['-arch', cpu, '-undefined', 'dynamic_lookup']
        else:
            # allow for missing UB arch, since it will still work:
            lpreargs = ['-undefined', 'dynamic_lookup']
    extra = compiler_attrs.get('extra_compile_args', None)

    objs = cc.compile([cfile],extra_preargs=cpreargs, extra_postargs=extra)
    cc.link_executable(objs, efile, extra_preargs=lpreargs)
    return efile
Example #22
0
def get_cxxflags():
    """Return the C++ flag list matching the detected host compiler.

    Adjusts the CXX_FLAGS table for the current platform, asks distutils
    for the customized compiler, and matches its command name against the
    table.  Raises UnsupportedCompilerException for unknown platforms or
    compilers.
    """
    from distutils.ccompiler import new_compiler
    from distutils.sysconfig import customize_compiler
    from distutils import sysconfig
    from platform import system
    if system() == DARWIN_KEY:
        CXX_FLAGS["gcc"] = CXX_FLAGS["gcc-mac"]
        CXX_FLAGS["cc"] = CXX_FLAGS["clang"]
        CXX_FLAGS["c++"] = CXX_FLAGS["clang"]
    elif system() == LINUX_KEY:
        CXX_FLAGS["gcc"] = CXX_FLAGS["gcc-linux"]
        CXX_FLAGS["cc"] = CXX_FLAGS["gcc"]
        CXX_FLAGS["c++"] = CXX_FLAGS["gcc"]
    else:
        raise UnsupportedCompilerException("System: %s is not supported by HOPE" % system())

    sysconfig.get_config_vars() #init vars
    compiler = new_compiler()
    customize_compiler(compiler)
    # e.g. '/usr/bin/gcc' -> 'gcc'.  NOTE(review): relies on the unix-style
    # .compiler attribute, which MSVC compiler objects lack -- confirm.
    compiler_name = compiler.compiler[0].split("/")[-1]

    _check_version(compiler_name, compiler.compiler[0])

    for name, flags in CXX_FLAGS.items():
        if compiler_name.startswith(name):
            return flags
    raise UnsupportedCompilerException("Unknown compiler: {0}".format(compiler_name))
Example #23
0
def find_ssl():
    """Locate OpenSSL headers and libraries.

    Returns (ssl_incs, ssl_libs, ['ssl', 'crypto']) when both headers and
    libraries are found; raises Exception otherwise.
    """

    # Detect SSL support for the socket module (via _ssl)
    from distutils.ccompiler import new_compiler

    compiler = new_compiler()
    inc_dirs = compiler.include_dirs + ['/usr/include']

    search_for_ssl_incs_in = [
                          '/usr/local/ssl/include',
                          '/usr/contrib/ssl/include/'
                         ]
    ssl_incs = find_file('openssl/ssl.h', inc_dirs,
                         search_for_ssl_incs_in
                         )
    if ssl_incs is not None:
        # Some OpenSSL builds also require the Kerberos headers.
        krb5_h = find_file('krb5.h', inc_dirs,
                           ['/usr/kerberos/include'])
        if krb5_h:
            ssl_incs += krb5_h

    ssl_libs = find_library_file(compiler, 'ssl',
                                 ['/usr/lib'],
                                 ['/usr/local/lib',
                                  '/usr/local/ssl/lib',
                                  '/usr/contrib/ssl/lib/'
                                 ] )

    if (ssl_incs is not None and ssl_libs is not None):
        return ssl_incs, ssl_libs, ['ssl', 'crypto']

    raise Exception("No SSL support found")
Example #24
0
def gecode_version():
    """Return the installed Gecode version string (cached in GECODE_VERSION).

    Preprocesses a tiny header that pastes the GECODE_VERSION macro after a
    unique "@@" marker, then scrapes the expanded value from the output.
    Scratch files are removed even when preprocessing fails.
    """
    global GECODE_VERSION
    if GECODE_VERSION is not None:
        return GECODE_VERSION
    from distutils.ccompiler import new_compiler
    try:
        from distutils.ccompiler import customize_compiler
    except ImportError:  # FIX: narrowed from a bare except
        from distutils.sysconfig import customize_compiler
    import os
    cxx = new_compiler()
    customize_compiler(cxx)
    pid = os.getpid()
    file_hh = "_gecode_version_%d.hh" % pid
    file_txt = "_gecode_version_%d.txt" % pid
    # FIX: the Python 2 builtin file() does not exist on Python 3; use
    # open() with a context manager for deterministic close.
    with open(file_hh, "w") as f:
        f.write("""#include "gecode/support/config.hpp"
@@GECODE_VERSION""")
    version = ""
    try:
        cxx.preprocess(file_hh, output_file=file_txt)
        with open(file_txt) as f:
            for line in f:
                if line.startswith("@@"):
                    # Expanded line looks like '@@"X.Y.Z"'; strip the
                    # '@@"' prefix and the trailing '"' + newline.
                    version = line[3:-2]
                    break
    finally:
        # FIX: the original leaked the scratch files when preprocessing
        # raised; always clean up whatever was produced.
        for path in (file_hh, file_txt):
            if os.path.exists(path):
                os.remove(path)
    GECODE_VERSION = version
    return version
Example #25
0
    def _build(self):
        """Compile every file in self.cfilenames, then link all objects into
        self.outputfilename."""
        from distutils.ccompiler import new_compiler

        compiler = new_compiler(force=1)
        if self.compiler_exe is not None:
            # Point every build tool at the explicitly requested binary.
            for c in """compiler compiler_so compiler_cxx
                        linker_exe linker_so""".split():
                compiler.executables[c][0] = self.compiler_exe
        compiler.spawn = log_spawned_cmd(compiler.spawn)
        objects = []
        for cfile in self.cfilenames:
            cfile = py.path.local(cfile)
            # Compile from inside the source's directory (distutils paths
            # are CWD-relative); restore the previous cwd afterwards.
            old = cfile.dirpath().chdir()
            try:
                res = compiler.compile(
                    [cfile.basename], include_dirs=self.eci.include_dirs, extra_preargs=self.compile_extra
                )
                assert len(res) == 1
                cobjfile = py.path.local(res[0])
                assert cobjfile.check()
                objects.append(str(cobjfile))
            finally:
                old.chdir()
        # Link once, after all objects have been produced.
        compiler.link_executable(
            objects,
            str(self.outputfilename),
            libraries=self.eci.libraries,
            extra_preargs=self.link_extra,
            library_dirs=self.eci.library_dirs,
        )
Example #26
0
    def _build(self):
        """Compile every file in self.cfilenames, then link all objects into
        self.outputfilename."""
        from distutils.ccompiler import new_compiler
        compiler = new_compiler(force=1)
        if self.compiler_exe is not None:
            # Point every build tool at the explicitly requested binary.
            for c in '''compiler compiler_so compiler_cxx
                        linker_exe linker_so'''.split():
                compiler.executables[c][0] = self.compiler_exe
        compiler.spawn = log_spawned_cmd(compiler.spawn)
        objects = []
        for cfile in self.cfilenames:
            # Compile from inside the source's directory (distutils paths
            # are CWD-relative); restore the previous cwd afterwards.
            old = os.getcwd()
            os.chdir(os.path.dirname(cfile))
            try:
                res = compiler.compile([os.path.basename(cfile)],
                                       include_dirs=self.eci.include_dirs,
                                       extra_preargs=self.compile_extra)
                assert len(res) == 1
                # FIX: store an absolute path -- the compile result is
                # relative to the source dir we chdir'd into, so it would
                # be wrong once the cwd is restored.
                cobjfile = os.path.abspath(res[0])
                assert os.path.exists(cobjfile)
                objects.append(cobjfile)
            finally:
                os.chdir(old)

        # FIX: the link step used to run *inside* the per-file loop while
        # chdir'd into each source directory, re-linking with an incomplete
        # object list on every iteration.  Link once, with all objects
        # (matching the sibling py.path-based _build implementation).
        compiler.link_executable(objects, str(self.outputfilename),
                                 libraries=self.eci.libraries,
                                 extra_preargs=self.link_extra,
                                 library_dirs=self.eci.library_dirs)
Example #27
0
    def run(self):
        """Configure the D compiler and build every extension, or just print
        the compiler's version flags when self.print_flags is set."""
        # mostly copied from distutils.command.build_ext
        from distutils.ccompiler import new_compiler
        if not self.extensions:
            return
        self.compiler = new_compiler(
                compiler=self.compiler or patch_distutils.get_default_compiler(),
                verbose=self.verbose,
                dry_run=self.dry_run,
                force=self.force)
        from pyd import dcompiler
        # This command only works with pyd's DCompiler subclasses.
        assert isinstance(self.compiler, dcompiler.DCompiler)
        self.compiler.build_exe = True
        self.compiler.optimize = self.optimize
        # irrelevant for D compilers?
        #customize_compiler(self.compiler)
        # Push this command's option overrides into the compiler.
        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.libraries is not None:
            self.compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            self.compiler.set_library_dirs(self.library_dirs)
        if self.link_objects is not None:
            self.compiler.set_link_objects(self.link_objects)

        if self.print_flags:
            print( ' '.join(self.compiler.versionOpts()) )
        else:
            for ext in self.extensions:
                self.per_ext(ext)
Example #28
0
    def run(self):
        """Generate po/POTFILES.in, run the normal build, then link the C
        helpers in provider_bin (requires libnl-genl-3)."""
        # Build our own POTFILES.in as DistUtilsExtra does not include rfc822
        # files automatically
        with open('po/POTFILES.in', 'w') as potfiles_in:
            potfiles_in.write('[encoding: UTF-8]\n')
            for f in glob("provider_jobs/*"):
                potfiles_in.write('[type: gettext/rfc822deb] ' + f + '\n')
            for f in glob("provider_bin/*"):
                potfiles_in.write(f + '\n')

        build_extra.build_extra.run(self)

        # Link each C source straight into an executable (single-step
        # compile+link per source).
        cc = new_compiler()
        for source in glob('provider_bin/*.c'):
            executable = os.path.splitext(source)[0]
            try:
                cc.link_executable(
                    [source], executable,
                    libraries=["rt", "pthread", "nl-3", "nl-genl-3"],
                    # Enforce security with CFLAGS + LDFLAGS
                    # See dpkg-buildflags
                    extra_preargs=[
                        "-O2", "-fstack-protector",
                        "--param=ssp-buffer-size=4", "-Wformat",
                        "-Werror=format-security",
                        "-Wl,-Bsymbolic-functions",
                        "-Wl,-z,relro",
                        "-I/usr/include/libnl3"])
            except LinkError as e:
                print('Please install libnl-genl-3-dev on Debian systems')
                raise

        # POTFILES.in was only needed during the build; drop it.
        os.unlink('po/POTFILES.in')
Example #29
0
    def detect_gmp(self):
        """Probe for GMP support; on failure, strip GMP from pythran configs.

        It's far from perfect, but we try to compile a code that uses
        Python long.  If it fails, _whatever the reason_ we just disable
        gmp by editing the installed pythran-*.cfg files.  Scratch files
        are removed afterwards.
        """
        print('Trying to compile GMP dependencies.')

        cc = ccompiler.new_compiler(verbose=False)
        # try to compile a code that requires gmp support
        with NamedTemporaryFile(suffix='.cpp', delete=False) as temp:
            temp.write('''
                #include <gmpxx.h>
                int main() {
                    mpz_class a(1);
                    return a == 0;
                };
            '''.encode('ascii'))
            srcs = [temp.name]
        exe = "a.out"
        objs = []  # FIX: bind before try so cleanup works if compile raises
        try:
            objs = cc.compile(srcs)
            cc.link(ccompiler.CCompiler.EXECUTABLE,
                    objs, exe,
                    libraries=['gmp', 'gmpxx'])
        except Exception:
            # failure: remove the gmp dependency
            print('Failed to compile GMP source, disabling long support.')
            for cfg_path in glob.glob(os.path.join(pythrandir, "pythran-*.cfg")):
                with open(cfg_path, "r+") as cfg:
                    content = cfg.read()
                    content = content.replace('USE_GMP', '')
                    content = content.replace('gmp gmpxx', '')
                    cfg.seek(0)
                    cfg.write(content)
                    # FIX: the rewritten content is shorter, so without
                    # truncate() the old tail would remain in the file.
                    cfg.truncate()
        # FIX: map() is lazy on Python 3, so the original
        # ``map(os.remove, ...)`` never deleted anything.  Also guard
        # against artifacts that were never produced.
        for path in objs + srcs + [exe]:
            if os.path.exists(path):
                os.remove(path)
Example #30
0
    def run(self):
        """Standard distutils build_ext flow: create and customize the
        compiler, apply this command's option overrides, then build all
        extensions."""
        from distutils.ccompiler import new_compiler
        if not self.extensions:
            return
        # Setup the CCompiler object that we'll use to do all the
        # compiling and linking
        self.compiler = new_compiler(compiler=self.compiler,
                                     verbose=self.verbose,
                                     dry_run=self.dry_run,
                                     force=self.force)
        customize_compiler(self.compiler)
        # And make sure that any compile/link-related options (which might
        # come from the command-line or from the setup script) are set in
        # that CCompiler object -- that way, they automatically apply to
        # all compiling and linking done here.
        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)
        if self.libraries is not None:
            self.compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            self.compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            self.compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            self.compiler.set_link_objects(self.link_objects)

        # Now actually compile and link everything.
        self.build_extensions()
Example #31
0
def mypycify(
        paths: List[str],
        *,
        only_compile_paths: Optional[Iterable[str]] = None,
        verbose: bool = False,
        opt_level: str = '3',
        strip_asserts: bool = False,
        multi_file: bool = False,
        separate: Union[bool, List[Tuple[List[str], Optional[str]]]] = False,
        skip_cgen_input: Optional[Any] = None,
        target_dir: Optional[str] = None,
        include_runtime_files: Optional[bool] = None) -> List['Extension']:
    """Main entry point to building using mypyc.

    This produces a list of Extension objects that should be passed as the
    ext_modules parameter to setup.

    Arguments:
        paths: A list of file paths to build. It may also contain mypy options.
        only_compile_paths: If not None, an iterable of paths that are to be
                            the only modules compiled, even if other modules
                            appear in the mypy command line given to paths.
                            (These modules must still be passed to paths.)

        verbose: Should mypyc be more verbose. Defaults to false.

        opt_level: The optimization level, as a string. Defaults to '3' (meaning '-O3').
        strip_asserts: Should asserts be stripped from the generated code.

        multi_file: Should each Python module be compiled into its own C source file.
                    This can reduce compile time and memory requirements at the likely
                    cost of runtime performance of compiled code. Defaults to false.
        separate: Should compiled modules be placed in separate extension modules.
                  If False, all modules are placed in a single shared library.
                  If True, every module is placed in its own library.
                  Otherwise separate should be a list of
                  (file name list, optional shared library name) pairs specifying
                  groups of files that should be placed in the same shared library
                  (while all other modules will be placed in its own library).

                  Each group can be compiled independently, which can
                  speed up compilation, but calls between groups can
                  be slower than calls within a group and can't be
                  inlined.
        skip_cgen_input: If not None, pre-generated codegen input used instead
                         of running C code generation (appears to be a hook for
                         the test harness -- confirm against callers).
        target_dir: The directory to write C output files. Defaults to 'build'.
        include_runtime_files: If not None, whether the mypyc runtime library
                               should be directly #include'd instead of linked
                               separately in order to reduce compiler invocations.
                               Defaults to False in multi_file mode, True otherwise.
    """

    # Figure out our configuration
    compiler_options = CompilerOptions(
        strip_asserts=strip_asserts,
        multi_file=multi_file,
        verbose=verbose,
        separate=separate is not False,
        target_dir=target_dir,
        include_runtime_files=include_runtime_files,
    )

    # Generate all the actual important C code
    groups, group_cfilenames = mypyc_build(
        paths,
        only_compile_paths=only_compile_paths,
        compiler_options=compiler_options,
        separate=separate,
        skip_cgen_input=skip_cgen_input,
    )

    # Mess around with setuptools and actually get the thing built
    setup_mypycify_vars()

    # Create a compiler object so we can make decisions based on what
    # compiler is being used. typeshed is missing some attribues on the
    # compiler object so we give it type Any
    compiler = ccompiler.new_compiler()  # type: Any
    sysconfig.customize_compiler(compiler)

    build_dir = compiler_options.target_dir

    # Per-compiler C flags: silence warnings that mypyc-generated code
    # routinely triggers while keeping -Werror / optimization.
    cflags = []  # type: List[str]
    if compiler.compiler_type == 'unix':
        cflags += [
            '-O{}'.format(opt_level),
            '-Werror',
            '-Wno-unused-function',
            '-Wno-unused-label',
            '-Wno-unreachable-code',
            '-Wno-unused-variable',
            '-Wno-unused-command-line-argument',
            '-Wno-unknown-warning-option',
        ]
        if 'gcc' in compiler.compiler[0]:
            # This flag is needed for gcc but does not exist on clang.
            cflags += ['-Wno-unused-but-set-variable']
    elif compiler.compiler_type == 'msvc':
        # MSVC has no /O3; cap at /O2.
        if opt_level == '3':
            opt_level = '2'
        cflags += [
            '/O{}'.format(opt_level),
            '/wd4102',  # unreferenced label
            '/wd4101',  # unreferenced local variable
            '/wd4146',  # negating unsigned int
        ]
        if multi_file:
            # Disable whole program optimization in multi-file mode so
            # that we actually get the compilation speed and memory
            # use wins that multi-file mode is intended for.
            cflags += [
                '/GL-',
                '/wd9025',  # warning about overriding /GL
            ]

    # If configured to (defaults to yes in multi-file mode), copy the
    # runtime library in. Otherwise it just gets #included to save on
    # compiler invocations.
    shared_cfilenames = []
    if not compiler_options.include_runtime_files:
        for name in RUNTIME_C_FILES:
            rt_file = os.path.join(build_dir, name)
            with open(os.path.join(include_dir(), name),
                      encoding='utf-8') as f:
                write_file(rt_file, f.read())
            shared_cfilenames.append(rt_file)

    # Build one Extension (or set of Extensions) per group: groups with a
    # lib_name share a library; the rest become standalone modules.
    extensions = []
    for (group_sources, lib_name), (cfilenames,
                                    deps) in zip(groups, group_cfilenames):
        if lib_name:
            extensions.extend(
                build_using_shared_lib(group_sources, lib_name,
                                       cfilenames + shared_cfilenames, deps,
                                       build_dir, cflags))
        else:
            extensions.extend(
                build_single_module(group_sources,
                                    cfilenames + shared_cfilenames, cflags))

    return extensions
Example #32
0
def setup_package():
    """Build/install spaCy: load metadata, assemble C++ extensions, run setup().

    With a single 'clean' argument, removes generated artifacts instead of
    building.
    """
    root = os.path.abspath(os.path.dirname(__file__))

    # 'python setup.py clean' short-circuits the whole build.
    if len(sys.argv) > 1 and sys.argv[1] == 'clean':
        return clean(root)

    with chdir(root):
        # Package metadata (version, author, ...) lives in spacy/about.py.
        with io.open(os.path.join(root, 'spacy', 'about.py'),
                     encoding='utf8') as f:
            about = {}
            exec(f.read(), about)

        with io.open(os.path.join(root, 'README.rst'), encoding='utf8') as f:
            readme = f.read()

        include_dirs = [
            get_python_inc(plat_specific=True),
            os.path.join(root, 'include')
        ]

        # MSVC 9 (VS 2008, used for Python 2.7 builds) lacks some headers;
        # ship replacements in include/msvc9.
        if (ccompiler.new_compiler().compiler_type == 'msvc'
                and msvccompiler.get_build_version() == 9):
            include_dirs.append(os.path.join(root, 'include', 'msvc9'))

        ext_modules = []
        for mod_name in MOD_NAMES:
            mod_path = mod_name.replace('.', '/') + '.cpp'
            extra_link_args = []
            # ???
            # Imported from patch from @mikepb
            # See Issue #267. Running blind here...
            if sys.platform == 'darwin':
                # Build an @loader_path-relative rpath so the extension can
                # find bundled dylibs regardless of install location.
                dylib_path = ['..' for _ in range(mod_name.count('.'))]
                dylib_path = '/'.join(dylib_path)
                dylib_path = '@loader_path/%s/spacy/platform/darwin/lib' % dylib_path
                extra_link_args.append('-Wl,-rpath,%s' % dylib_path)
            ext_modules.append(
                Extension(mod_name, [mod_path],
                          language='c++',
                          include_dirs=include_dirs,
                          extra_link_args=extra_link_args))

        # Source releases ship pre-generated .cpp files; otherwise run Cython.
        if not is_source_release(root):
            generate_cython(root, 'spacy')

        setup(
            name=about['__title__'],
            zip_safe=False,
            packages=PACKAGES,
            package_data=PACKAGE_DATA,
            description=about['__summary__'],
            long_description=readme,
            author=about['__author__'],
            author_email=about['__email__'],
            version=about['__version__'],
            url=about['__uri__'],
            license=about['__license__'],
            ext_modules=ext_modules,
            scripts=['bin/spacy'],
            install_requires=[
                'numpy>=1.7', 'murmurhash>=0.28,<0.29', 'cymem>=1.30,<1.32',
                'preshed>=1.0.0,<2.0.0', 'thinc>=6.10.1,<6.11.0',
                'plac<1.0.0,>=0.9.6', 'pathlib', 'ujson>=1.35',
                'dill>=0.2,<0.3', 'regex==2017.4.5', 'requests>=2.13.0,<3.0.0'
            ],
            classifiers=[
                'Development Status :: 5 - Production/Stable',
                'Environment :: Console', 'Intended Audience :: Developers',
                'Intended Audience :: Science/Research',
                'License :: OSI Approved :: MIT License',
                'Operating System :: POSIX :: Linux',
                'Operating System :: MacOS :: MacOS X',
                'Operating System :: Microsoft :: Windows',
                'Programming Language :: Cython',
                'Programming Language :: Python :: 2',
                'Programming Language :: Python :: 2.7',
                'Programming Language :: Python :: 3',
                'Programming Language :: Python :: 3.4',
                'Programming Language :: Python :: 3.5',
                'Programming Language :: Python :: 3.6',
                'Topic :: Scientific/Engineering'
            ],
            cmdclass={'build_ext': build_ext_subclass},
        )
Example #33
0
        yield
    finally:
        if oldstdchannel is not None:
            os.dup2(oldstdchannel, stdchannel.fileno())
        if dest_file is not None:
            dest_file.close()


def add_dir_if_exists(filtered_dirs, *dirs):
    """Append to *filtered_dirs* (in place) every path in *dirs* that exists."""
    filtered_dirs.extend(path for path in dirs if osp.exists(path))


# Shared compiler instance used for feature probing below.
compiler = ccompiler.new_compiler()

# Use the DB-Library (dblib) API of FreeTDS.
_extra_compile_args = ['-DMSDBLIB']

WINDOWS = False
SYSTEM = platform.system()

# Diagnostic output to help debug build reports from users.
print("setup.py: platform.system() => %r" % SYSTEM)
print("setup.py: platform.architecture() => %r" % (platform.architecture(), ))
# platform.linux_distribution() was deprecated in Python 3.5 and removed in
# 3.8; guard the call so this script still runs on modern interpreters.
if hasattr(platform, 'linux_distribution'):
    print("setup.py: platform.linux_distribution() => %r" %
          (platform.linux_distribution(), ))
print("setup.py: platform.libc_ver() => %r" % (platform.libc_ver(), ))

# 32 bit or 64 bit system?  (Pointer size in bits.)
BITNESS = struct.calcsize("P") * 8
Example #34
0
    def run(self):
        """Build all configured extension modules.

        Sets up and customizes the CCompiler, initializes it early when
        cross-compiling on Windows, copies every compile/link option from
        this command into it, then calls build_extensions().
        """
        from distutils.ccompiler import new_compiler

        # 'self.extensions', as supplied by setup.py, is a list of
        # Extension instances.  See the documentation for Extension (in
        # distutils.extension) for details.
        #
        # For backwards compatibility with Distutils 0.8.2 and earlier, we
        # also allow the 'extensions' list to be a list of tuples:
        #    (ext_name, build_info)
        # where build_info is a dictionary containing everything that
        # Extension instances do except the name, with a few things being
        # differently named.  We convert these 2-tuples to Extension
        # instances as needed.

        if not self.extensions:
            return

        # If we were asked to build any C/C++ libraries, make sure that the
        # directory where we put them is in the library search path for
        # linking extensions.
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.libraries.extend(build_clib.get_library_names() or [])
            self.library_dirs.append(build_clib.build_clib)

        # Setup the CCompiler object that we'll use to do all the
        # compiling and linking
        self.compiler = new_compiler(compiler=self.compiler,
                                     verbose=self.verbose,
                                     dry_run=self.dry_run,
                                     force=self.force)
        customize_compiler(self.compiler)
        # If we are cross-compiling, init the compiler now (if we are not
        # cross-compiling, init would not hurt, but people may rely on
        # late initialization of compiler even if they shouldn't...)
        if os.name == 'nt' and self.plat_name != get_platform():
            self.compiler.initialize(self.plat_name)

        # And make sure that any compile/link-related options (which might
        # come from the command-line or from the setup script) are set in
        # that CCompiler object -- that way, they automatically apply to
        # all compiling and linking done here.
        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)
        if self.libraries is not None:
            self.compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            self.compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            self.compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            self.compiler.set_link_objects(self.link_objects)

        # Now actually compile and link everything.
        self.build_extensions()
Example #35
0
 def has_configure(self):
     """Return True when a configure step is available (i.e. not MSVC)."""
     probe = new_compiler(compiler=self.compiler)
     is_msvc = probe.compiler_type == 'msvc'
     return not is_msvc
Example #36
0
class CommandDependency(Dependency):
    """Dependency on an external command-line executable.

    The dependency's version is determined by running the command with
    ``version_options`` and matching ``version_regexp`` against the stream
    named by ``version_stream`` ('stdout' or 'stderr').
    """

    # Platform executable suffix reported by distutils (e.g. '.exe' on
    # Windows; falsy on POSIX).
    exe_extension = _distutils_ccompiler.new_compiler().exe_extension

    def __init__(self,
                 command,
                 paths=None,
                 version_options=('--version', ),
                 stdin=None,
                 version_regexp=None,
                 version_stream='stdout',
                 **kwargs):
        if 'name' not in kwargs:
            kwargs['name'] = command
        super(CommandDependency, self).__init__(**kwargs)
        self.command = command
        self.paths = paths
        self.version_options = version_options
        # BUG FIX: the original assigned ``self.stdin = None`` here, silently
        # discarding any stdin payload supplied by the caller.
        self.stdin = stdin
        if not version_regexp:
            # Default: at least two digits separated by digits or the
            # version delimiter, e.g. '1.2.3'.
            regexp = r'([\d][\d{0}]*[\d])'.format(self.version_delimiter)
            version_regexp = _re.compile(regexp)
        self.version_regexp = version_regexp
        self.version_stream = version_stream

    def _get_command_version_stream(self,
                                    command=None,
                                    stdin=None,
                                    expect=(0, )):
        """Run the command with the version options and return its output.

        Returns the content of the configured version stream; raises
        DependencyError when the executable is missing, exits with a status
        not in ``expect``, or produces an empty version stream.
        """
        if command is None:
            command = self.command + (self.exe_extension or '')
        if not stdin:
            stdin = self.stdin
        if stdin:
            popen_stdin = _subprocess.PIPE
        else:
            popen_stdin = None
        try:
            p = _subprocess.Popen([command] + list(self.version_options),
                                  stdin=popen_stdin,
                                  stdout=_subprocess.PIPE,
                                  stderr=_subprocess.PIPE,
                                  universal_newlines=True)
        except OSError as e:
            raise DependencyError(
                checker=self,
                message="could not find '{0}' executable".format(command),
            )  # from e
        stdout, stderr = p.communicate(stdin)
        status = p.wait()
        if status not in expect:
            lines = [
                "failed to execute: {0} {1}".format(
                    command, ' '.join(
                        _shlex.quote(arg) for arg in self.version_options)),
                'status: {0}'.format(status),
            ]
            for name, string in [('stdout', stdout), ('stderr', stderr)]:
                if string:
                    lines.extend([name + ':', string])
            raise DependencyError(checker=self, message='\n'.join(lines))
        for name, string in [('stdout', stdout), ('stderr', stderr)]:
            if name == self.version_stream:
                if not string:
                    raise DependencyError(
                        checker=self,
                        message='empty version stream on {0} for {1}'.format(
                            self.version_stream, command))
                return string
        raise NotImplementedError(self.version_stream)

    def _get_version_stream(self, **kwargs):
        """Try each candidate path in turn; return the first version stream.

        Candidates: command + extension, the bare command (on platforms with
        an extension), then any explicitly-configured paths.
        """
        paths = [self.command + (self.exe_extension or '')]
        if self.exe_extension:
            paths.append(self.command)  # also look at the extension-less path
        if self.paths:
            paths.extend(self.paths)
        or_errors = []
        for path in paths:
            try:
                return self._get_command_version_stream(command=path, **kwargs)
            except DependencyError as e:
                or_errors.append(e)
        raise DependencyError(checker=self,
                              message='errors finding {0} version'.format(
                                  self.full_name()),
                              causes=or_errors)

    def _get_version(self):
        """Extract and return the version string from the command's output."""
        version_stream = self._get_version_stream()
        match = self.version_regexp.search(version_stream)
        if not match:
            raise DependencyError(
                checker=self,
                message='no version string in output:\n{0}'.format(
                    version_stream))
        return match.group(1)
Example #37
0
 def setUp(self):
     """Give each test a fresh compiler and the platform build settings."""
     fresh_compiler = ccompiler.new_compiler()
     self.compiler = fresh_compiler
     self.settings = build.get_compiler_setting()
Example #38
0
def get_compilation_flags():
    """Probe the toolchain for OpenMP support and return build flags.

    Compiles and links a tiny OpenMP test program in a temporary directory.
    Returns a ``(compile_flags, link_flags)`` tuple containing the OpenMP
    flags when the probe succeeds (empty OpenMP flags otherwise), plus some
    compiler-specific warning-suppression flags in either case.
    """
    import os
    import shutil
    import tempfile
    from distutils.ccompiler import new_compiler, CompileError, LinkError
    print("INFO: Checking for OpenMP support...")
    cc = new_compiler()
    if cc.compiler_type == "msvc":
        compile_flags = ["/openmp"]
        link_flags = []
    else:
        compile_flags = ["-fopenmp"]
        link_flags = ["-fopenmp"]

    # see http://openmp.org/wp/openmp-compilers/
    omp_test = \
    b"""
    #include <omp.h>\n
    #include <stdio.h>\n
    int main() {\n
    #pragma omp parallel\n
    printf("Hello from thread %d, nthreads %d\\n", omp_get_thread_num(), omp_get_num_threads());\n
    }\n
    """
    tmpdir = tempfile.mkdtemp()
    curdir = os.getcwd()
    os.chdir(tmpdir)

    try:
        # Write test.c
        filename = 'test.c'
        with open(filename, 'wb') as fp:
            fp.write(omp_test)

        try:
            buf = StringIO()
            with std_redirector(buf):
                objects = cc.compile([filename],
                                     output_dir=tmpdir,
                                     extra_postargs=compile_flags)
                cc.link_executable(objects,
                                   os.path.join(tmpdir, "a.out"),
                                   extra_postargs=link_flags)
        except (CompileError, LinkError):
            print("WARNING: Could not find OpenMP, parallelism will "
                  "not be available!")
            compile_flags = []
            link_flags = []
        else:
            print("INFO: OpenMP is present and will be used!")
    finally:
        # Always restore the working directory and remove the scratch tree,
        # even if an unexpected exception escapes the probe; the original
        # leaked both in that case.
        os.chdir(curdir)
        shutil.rmtree(tmpdir, ignore_errors=True)

    # Add generic flags
    if cc.compiler_type == "msvc":
        compile_flags += ["/wd4244", "/wd4267", "/wd4018", "/wd4996"]
    else:
        compile_flags += [
            "-Wno-maybe-uninitialized", "-Wno-unused-function", "-Wno-cpp",
            "-Wno-shorten-64-to-32", "-Wno-unneeded-internal-declaration"
        ]

    return compile_flags, link_flags
Example #39
0
def mypycify(paths: List[str],
             mypy_options: Optional[List[str]] = None,
             opt_level: str = '3',
             multi_file: bool = False,
             skip_cgen: bool = False,
             verbose: bool = False) -> List[MypycifyExtension]:
    """Main entry point to building using mypyc.

    This produces a list of Extension objects that should be passed as the
    ext_modules parameter to setup.

    Arguments:
      * paths: A list of file paths to build. It may contain globs.
      * mypy_options: Optionally, a list of command line flags to pass to mypy.
                      (This can also contain additional files, for compatibility reasons.)
      * opt_level: The optimization level, as a string. Defaults to '3' (meaning '-O3').
      * multi_file: Whether each module gets its own C file. Defaults to False.
      * skip_cgen: Skip C code generation and reuse build/*.c (test-harness hook).
      * verbose: Whether to produce more verbose output. Defaults to False.
    """

    setup_mypycify_vars()

    # Create a compiler object so we can make decisions based on what
    # compiler is being used. typeshed is missing some attribues on the
    # compiler object so we give it type Any
    compiler = ccompiler.new_compiler()  # type: Any
    sysconfig.customize_compiler(compiler)

    # Expand any globs in the input paths; non-matching patterns simply
    # contribute nothing.
    expanded_paths = []
    for path in paths:
        expanded_paths.extend(glob.glob(path))

    build_dir = 'build'  # TODO: can this be overridden??
    try:
        os.mkdir(build_dir)
    except FileExistsError:
        pass

    sources, options = get_mypy_config(expanded_paths, mypy_options)
    # We generate a shared lib if there are multiple modules or if any
    # of the modules are in package. (Because I didn't want to fuss
    # around with making the single module code handle packages.)
    use_shared_lib = len(sources) > 1 or any('.' in x.module for x in sources)

    lib_name = shared_lib_name([source.module for source in sources
                                ]) if use_shared_lib else None

    # We let the test harness make us skip doing the full compilation
    # so that it can do a corner-cutting version without full stubs.
    # TODO: Be able to do this based on file mtimes?
    if not skip_cgen:
        cfiles, ops_text = generate_c(sources, options, multi_file, lib_name,
                                      verbose)
        # TODO: unique names?
        with open(os.path.join(build_dir, 'ops.txt'), 'w') as f:
            f.write(ops_text)
        cfilenames = []
        for cfile, ctext in cfiles:
            cfile = os.path.join(build_dir, cfile)
            with open(cfile, 'w', encoding='utf-8') as f:
                f.write(ctext)
            # Only .c files get compiled; headers are written but skipped.
            if os.path.splitext(cfile)[1] == '.c':
                cfilenames.append(cfile)
    else:
        cfilenames = glob.glob(os.path.join(build_dir, '*.c'))

    # Per-compiler C flags: silence warnings that generated code routinely
    # triggers while keeping -Werror / optimization.
    cflags = []  # type: List[str]
    if compiler.compiler_type == 'unix':
        cflags += [
            '-O{}'.format(opt_level), '-Werror', '-Wno-unused-function',
            '-Wno-unused-label', '-Wno-unreachable-code',
            '-Wno-unused-variable', '-Wno-trigraphs',
            '-Wno-unused-command-line-argument'
        ]
        if 'gcc' in compiler.compiler[0]:
            # This flag is needed for gcc but does not exist on clang.
            cflags += ['-Wno-unused-but-set-variable']
    elif compiler.compiler_type == 'msvc':
        # MSVC has no /O3; cap at /O2.
        if opt_level == '3':
            opt_level = '2'
        cflags += [
            '/O{}'.format(opt_level),
            '/wd4102',  # unreferenced label
            '/wd4101',  # unreferenced local variable
            '/wd4146',  # negating unsigned int
        ]
        if multi_file:
            # Disable whole program optimization in multi-file mode so
            # that we actually get the compilation speed and memory
            # use wins that multi-file mode is intended for.
            cflags += [
                '/GL-',
                '/wd9025',  # warning about overriding /GL
            ]

    # Copy the runtime library in
    rt_file = os.path.join(build_dir, 'CPy.c')
    shutil.copyfile(os.path.join(include_dir(), 'CPy.c'), rt_file)
    cfilenames.append(rt_file)

    if use_shared_lib:
        assert lib_name
        extensions = build_using_shared_lib(sources, lib_name, cfilenames,
                                            build_dir, cflags)
    else:
        extensions = build_single_module(sources, cfilenames, cflags)

    return extensions
Example #40
0
# Decide whether to cythonize: fall back to shipped C sources when Cython
# is unavailable (a hard error only for dev builds).
if use_cython and not has_cython:
    if is_dev:
        raise RuntimeError('Cython required to build dev version of cyksuid.')
    print('WARNING: Cython not installed.  Building without Cython.')
    use_cython = False


suffix = '.pyx' if use_cython else '.c'


include_dirs = []
# MSVC 9 (VS 2008) lacks some standard headers; ship replacements.
if (ccompiler.new_compiler().compiler_type == 'msvc'
        and msvccompiler.get_build_version() == 9):
    root = os.path.abspath(os.path.dirname(__file__))
    include_dirs.append(os.path.join(root, 'include', 'msvc9'))


# One extension per module, built from either the .pyx or the pre-generated .c.
ext_modules = [
    Extension('cyksuid.' + modname.replace('/', '.'),
              ['cyksuid/' + modname + suffix],
              include_dirs=include_dirs)
    for modname in ['fast_base62', 'ksuid']
]


if use_cython:
    try:
        from Cython.Compiler.Options import get_directive_defaults
        directive_defaults = get_directive_defaults()
def check_openmp_support(openmp_flags=None):
    """
    Check whether OpenMP test code can be compiled and run.

    Parameters
    ----------
    openmp_flags : dict, optional
        This should be a dictionary with keys ``compiler_flags`` and
        ``linker_flags`` giving the compilation and linking flags respectively.
        These are passed as `extra_postargs` to `compile()` and
        `link_executable()` respectively. If this is not set, the flags will
        be automatically determined using environment variables.

    Returns
    -------
    result : bool
        `True` if the test passed, `False` otherwise.
    """
    import shutil

    ccompiler = new_compiler()
    customize_compiler(ccompiler)

    if not openmp_flags:
        # customize_compiler() extracts info from os.environ. If certain keys
        # exist it uses these plus those from sysconfig.get_config_vars().
        # If the key is missing in os.environ it is not extracted from
        # sysconfig.get_config_var(). E.g. 'LDFLAGS' get left out, preventing
        # clang from finding libomp.dylib because -L<path> is not passed to
        # linker. Call get_openmp_flags() to get flags missed by
        # customize_compiler().
        openmp_flags = get_openmp_flags()

    compile_flags = openmp_flags.get('compiler_flags')
    link_flags = openmp_flags.get('linker_flags')

    tmp_dir = tempfile.mkdtemp()
    start_dir = os.path.abspath('.')

    try:
        os.chdir(tmp_dir)

        # Write test program
        with open('test_openmp.c', 'w') as f:
            f.write(CCODE)

        os.mkdir('objects')

        # Compile, test program
        ccompiler.compile(['test_openmp.c'],
                          output_dir='objects',
                          extra_postargs=compile_flags)

        # Link test program
        objects = glob.glob(
            os.path.join('objects', '*' + ccompiler.obj_extension))
        ccompiler.link_executable(objects,
                                  'test_openmp',
                                  extra_postargs=link_flags)

        # Run test program
        output = subprocess.check_output('./test_openmp')
        output = output.decode(sys.stdout.encoding or 'utf-8').splitlines()

        # The checks below expect the first output line to carry
        # 'nthreads=N' and one additional line per spawned thread
        # (determined by CCODE); a matching count means OpenMP really
        # ran in parallel.
        if 'nthreads=' in output[0]:
            nthreads = int(output[0].strip().split('=')[1])
            if len(output) == nthreads:
                is_openmp_supported = True
            else:
                log.warn(
                    "Unexpected number of lines from output of test OpenMP "
                    "program (output was {0})".format(output))
                is_openmp_supported = False
        else:
            log.warn("Unexpected output from test OpenMP "
                     "program (output was {0})".format(output))
            is_openmp_supported = False
    except (CompileError, LinkError, subprocess.CalledProcessError):
        is_openmp_supported = False

    finally:
        os.chdir(start_dir)
        # Remove the temporary build tree; the original leaked it on
        # every call.
        shutil.rmtree(tmp_dir, ignore_errors=True)

    return is_openmp_supported
Example #42
0
def setup_package():
    """Build/install spaCy: load metadata, assemble C++ extensions, run setup().

    With a single 'clean' argument, removes generated artifacts instead of
    building.
    """
    root = os.path.abspath(os.path.dirname(__file__))

    # 'python setup.py clean' short-circuits the whole build.
    if len(sys.argv) > 1 and sys.argv[1] == "clean":
        return clean(root)

    with chdir(root):
        # Package metadata (version, author, ...) lives in spacy/about.py.
        with io.open(os.path.join(root, "spacy", "about.py"), encoding="utf8") as f:
            about = {}
            exec(f.read(), about)

        with io.open(os.path.join(root, "README.md"), encoding="utf8") as f:
            readme = f.read()

        include_dirs = [
            get_python_inc(plat_specific=True),
            os.path.join(root, "include"),
        ]

        # MSVC 9 (VS 2008, used for Python 2.7 builds) lacks some headers;
        # ship replacements in include/msvc9.
        if (
            ccompiler.new_compiler().compiler_type == "msvc"
            and msvccompiler.get_build_version() == 9
        ):
            include_dirs.append(os.path.join(root, "include", "msvc9"))

        ext_modules = []
        for mod_name in MOD_NAMES:
            mod_path = mod_name.replace(".", "/") + ".cpp"
            extra_link_args = []
            extra_compile_args = []
            # ???
            # Imported from patch from @mikepb
            # See Issue #267. Running blind here...
            if sys.platform == "darwin":
                # Build an @loader_path-relative rpath so the extension can
                # find bundled dylibs regardless of install location.
                dylib_path = [".." for _ in range(mod_name.count("."))]
                dylib_path = "/".join(dylib_path)
                dylib_path = "@loader_path/%s/spacy/platform/darwin/lib" % dylib_path
                extra_link_args.append("-Wl,-rpath,%s" % dylib_path)
            ext_modules.append(
                Extension(
                    mod_name,
                    [mod_path],
                    language="c++",
                    include_dirs=include_dirs,
                    extra_link_args=extra_link_args,
                )
            )

        # Source releases ship pre-generated .cpp files; otherwise run Cython.
        if not is_source_release(root):
            generate_cython(root, "spacy")

        setup(
            name="spacy",
            zip_safe=False,
            packages=PACKAGES,
            package_data=PACKAGE_DATA,
            description=about["__summary__"],
            long_description=readme,
            long_description_content_type="text/markdown",
            author=about["__author__"],
            author_email=about["__email__"],
            version=about["__version__"],
            url=about["__uri__"],
            license=about["__license__"],
            ext_modules=ext_modules,
            scripts=["bin/spacy"],
            install_requires=[
                "numpy>=1.15.0",
                "murmurhash>=0.28.0,<1.1.0",
                "cymem>=2.0.2,<2.1.0",
                "preshed>=2.0.1,<2.1.0",
                "thinc>=7.0.8,<7.1.0",
                "blis>=0.2.2,<0.3.0",
                "plac<1.0.0,>=0.9.6",
                "requests>=2.13.0,<3.0.0",
                "wasabi>=0.2.0,<1.1.0",
                "srsly>=0.0.6,<1.1.0",
                'pathlib==1.0.1; python_version < "3.4"',
            ],
            setup_requires=["wheel"],
            extras_require={
                "cuda": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy>=5.0.0b4"],
                "cuda80": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda80>=5.0.0b4"],
                "cuda90": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda90>=5.0.0b4"],
                "cuda91": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda91>=5.0.0b4"],
                "cuda92": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda92>=5.0.0b4"],
                "cuda100": ["thinc_gpu_ops>=0.0.1,<0.1.0", "cupy-cuda100>=5.0.0b4"],
                # Language tokenizers with external dependencies
                "ja": ["mecab-python3==0.7"],
                "ko": ["natto-py==0.9.0"],
            },
            python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
            classifiers=[
                "Development Status :: 5 - Production/Stable",
                "Environment :: Console",
                "Intended Audience :: Developers",
                "Intended Audience :: Science/Research",
                "License :: OSI Approved :: MIT License",
                "Operating System :: POSIX :: Linux",
                "Operating System :: MacOS :: MacOS X",
                "Operating System :: Microsoft :: Windows",
                "Programming Language :: Cython",
                "Programming Language :: Python :: 2",
                "Programming Language :: Python :: 2.7",
                "Programming Language :: Python :: 3",
                "Programming Language :: Python :: 3.4",
                "Programming Language :: Python :: 3.5",
                "Programming Language :: Python :: 3.6",
                "Programming Language :: Python :: 3.7",
                "Topic :: Scientific/Engineering",
            ],
            cmdclass={"build_ext": build_ext_subclass},
        )
Example #43
0
        "-DH5Tarray_create_vers=2",
        # "-DH5Tcommit_vers=2",
        "-DH5Tget_array_dims_vers=2",
        # "-DH5Topen_vers=2",
        "-DH5Z_class_t_vers=2",
    ])
    # H5Oget_info_by_name seems to have performance issues (see gh-402), so we
    # need to use teh deprecated H5Gget_objinfo function
    # CFLAGS.append("-DH5_NO_DEPRECATED_SYMBOLS")

    # Do not use numpy deprecated API
    # CFLAGS.append("-DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION")

    # Try to locate the compulsory and optional libraries.
    lzo2_enabled = False
    compiler = new_compiler()
    for (package, location) in [(hdf5_package, HDF5_DIR),
                                (lzo2_package, LZO_DIR),
                                (lzo1_package, LZO_DIR),
                                (bzip2_package, BZIP2_DIR),
                                (blosc_package, BLOSC_DIR)]:

        if package.tag == 'LZO' and lzo2_enabled:
            print(
                "* Skipping detection of %s since %s has already been found." %
                (lzo1_package.name, lzo2_package.name))
            continue  # do not use LZO 1 if LZO 2 is available

        # if a package location is not specified, try to find it in conda env
        if not location and CONDA_PREFIX:
            location = CONDA_PREFIX
Example #44
0
def get_device_extensions_and_extras(library_dirs=None):
    """Collect native device extensions plus extra build metadata.

    For every entry in ``DEVICES`` the extension is skipped when
    ``--without-<dev>`` was passed, forced when ``--with-<dev>`` was
    passed, and otherwise auto-detected by probing for the device
    library's test function.  API versions may be pinned through
    ``<DEV>_API_VERSION`` environment variables or auto-detected.

    :param library_dirs: optional extra library search paths; a fresh
        list is used when None (the list passed in is mutated).
    :return: tuple ``(extensions, device_extras)``.
    """
    if library_dirs is None:
        library_dirs = []

    base_dir = os.path.dirname(os.path.realpath(__file__))
    include_dirs = []
    device_extras = dict()

    shared_dir = os.path.join(base_dir, "lib/shared")
    if os.path.isdir(shared_dir):
        # Device libs are packaged, so we are in release mode
        include_dirs.insert(
            0, os.path.realpath(os.path.join(base_dir, "lib/shared/include")))
        library_dirs.insert(0, os.path.realpath(shared_dir))

    if sys.platform == "darwin":
        # On Mac OS X clang is by default not smart enough to search in the lib dir
        # see: https://github.com/jopohl/urh/issues/173
        if os.path.isdir("/opt/local/lib"):
            library_dirs.append("/opt/local/lib")
        library_dirs.append("/usr/local/lib")
        if os.path.isdir("/opt/local/include"):
            include_dirs.append("/opt/local/include")

    # Per-device override parsed from the command line:
    # missing = automatic (depending on lib is installed)
    # 1 = install extension always
    # 0 = Do not install extension
    forced = {}
    for dev_name in DEVICES:
        enable_flag = "--with-" + dev_name
        disable_flag = "--without-" + dev_name
        enable_given = enable_flag in sys.argv
        disable_given = disable_flag in sys.argv

        if enable_given and disable_given:
            print("ambiguous options for " + dev_name)
            sys.exit(1)
        if disable_given:
            forced[dev_name] = 0
            sys.argv.remove(disable_flag)
        elif enable_given:
            forced[dev_name] = 1
            sys.argv.remove(enable_flag)

    sys.path.append(os.path.realpath(os.path.join(base_dir, "lib")))

    compiler = ccompiler.new_compiler()
    extensions = []
    for dev_name, params in DEVICES.items():
        mode = forced.get(dev_name)
        if mode == 0:
            print("Skipping native {0} support".format(dev_name))
            continue

        if mode == 1:
            print("Enforcing native {0} support".format(dev_name))
        elif compiler_has_function(compiler, params["test_function"],
                                   (params["lib"], ), library_dirs,
                                   include_dirs):
            print("Found {0} lib. Will compile with native {1} support".format(
                params["lib"], dev_name))
        else:
            print("Skipping native support for {0}".format(dev_name))
            continue

        device_extras.update(
            get_device_extras(compiler, dev_name, [params["lib"]],
                              library_dirs, include_dirs))

        if "api_version_check_code" in params:
            env_name = dev_name.upper() + "_API_VERSION"
            ver = os.getenv(env_name)
            if ver is None:
                print(
                    "    Environment variable {} is unset, try to automatically detect API version"
                    .format(env_name))
            else:
                try:
                    ver = float(ver)
                except Exception as e:
                    print("    Could not convert content of {} to float: {}".
                          format(env_name, e))
                    print(
                        "    Will now try to automatically detect API version."
                    )
                    ver = None

            if ver is None:
                ver = check_api_version(compiler,
                                        params["api_version_check_code"],
                                        (params["lib"], ), library_dirs,
                                        include_dirs)
            device_extras[env_name] = ver
            print("    Using {}={}".format(env_name, ver))

        extensions.append(
            get_device_extension(dev_name, [params["lib"]], library_dirs,
                                 include_dirs))

    return extensions, device_extras
Example #45
0
def pre_build_check():
    """
    Try to verify build tools.

    Returns False only when a problem is positively identified: missing
    'Python.h', a compiler that fails to initialize, or compiler/linker
    executables that cannot be found.  Any unexpected error during the
    check itself is reported to stderr and the build proceeds (True).
    Set CASS_DRIVER_NO_PRE_BUILD_CHECK to skip the check entirely.
    """
    if os.environ.get('CASS_DRIVER_NO_PRE_BUILD_CHECK'):
        return True

    try:
        from distutils.ccompiler import new_compiler
        from distutils.sysconfig import customize_compiler
        from distutils.dist import Distribution

        # base build_ext just to emulate compiler option setup
        be = build_ext(Distribution())
        be.initialize_options()
        be.finalize_options()

        # First, make sure we have a Python include directory
        have_python_include = any(
            os.path.isfile(os.path.join(p, 'Python.h'))
            for p in be.include_dirs)
        if not have_python_include:
            sys.stderr.write("Did not find 'Python.h' in %s.\n" %
                             (be.include_dirs, ))
            return False

        compiler = new_compiler(compiler=be.compiler)
        customize_compiler(compiler)

        try:
            # We must be able to initialize the compiler if it has that method
            if hasattr(compiler, "initialize"):
                compiler.initialize()
        except Exception:
            # Fixed: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt and reported a spurious failure.
            return False

        # Determine which executables to sanity-check, per compiler type.
        executables = []
        if compiler.compiler_type in ('unix', 'cygwin'):
            executables = [
                compiler.executables[exe][0]
                for exe in ('compiler_so', 'linker_so')
            ]
        elif compiler.compiler_type == 'nt':
            executables = [getattr(compiler, exe) for exe in ('cc', 'linker')]

        if executables:
            from distutils.spawn import find_executable
            for exe in executables:
                if not find_executable(exe):
                    sys.stderr.write(
                        "Failed to find %s for compiler type %s.\n" %
                        (exe, compiler.compiler_type))
                    return False

    except Exception as exc:
        sys.stderr.write('%s\n' % str(exc))
        sys.stderr.write("Failed pre-build check. Attempting anyway.\n")

    # if we are unable to positively id the compiler type, or one of these assumptions fails,
    # just proceed as we would have without the check
    return True
Example #46
0
    def run(self):
        """Build the C/C++/Fortran libraries listed in ``self.libraries``.

        Configures a C compiler (and a Fortran compiler when Fortran
        sources are present), optionally sets up CPU-dispatch optimization
        state, builds the libraries, and for in-place builds copies the
        results into their target directories.
        """
        if not self.libraries:
            return

        # Make sure that library sources are complete.
        languages = []

        # Make sure that extension sources are complete.
        self.run_command('build_src')

        # Collect the distinct source languages across all libraries so we
        # know whether e.g. f90 support is required below.
        for (lib_name, build_info) in self.libraries:
            l = build_info.get('language', None)
            if l and l not in languages:
                languages.append(l)

        from distutils.ccompiler import new_compiler
        self.compiler = new_compiler(compiler=self.compiler,
                                     dry_run=self.dry_run,
                                     force=self.force)
        self.compiler.customize(self.distribution,
                                need_cxx=self.have_cxx_sources())

        if self.warn_error:
            # Promote compiler warnings to errors in both compile stages.
            self.compiler.compiler.append('-Werror')
            self.compiler.compiler_so.append('-Werror')

        # NOTE(review): self.libraries is cleared around customize_cmd —
        # presumably so the command's libraries are not mistaken for link
        # libraries during customization; confirm against numpy.distutils.
        libraries = self.libraries
        self.libraries = None
        self.compiler.customize_cmd(self)
        self.libraries = libraries

        self.compiler.show_customization()

        if not self.disable_optimization:
            # Location of the generated CPU dispatch configuration header.
            dispatch_hpath = os.path.join("numpy", "distutils", "include",
                                          "npy_cpu_dispatch_config.h")
            dispatch_hpath = os.path.join(
                self.get_finalized_command("build_src").build_src,
                dispatch_hpath)
            opt_cache_path = os.path.abspath(
                os.path.join(self.build_temp, 'ccompiler_opt_cache_clib.py'))
            if hasattr(self, "compiler_opt"):
                # By default `CCompilerOpt` update the cache at the exit of
                # the process, which may lead to duplicate building
                # (see build_extension()/force_rebuild) if run() called
                # multiple times within the same os process/thread without
                # giving the chance the previous instances of `CCompilerOpt`
                # to update the cache.
                self.compiler_opt.cache_flush()

            self.compiler_opt = new_ccompiler_opt(
                compiler=self.compiler,
                dispatch_hpath=dispatch_hpath,
                cpu_baseline=self.cpu_baseline,
                cpu_dispatch=self.cpu_dispatch,
                cache_path=opt_cache_path)

            def report(copt):
                # Emit the full optimization report when the process exits.
                log.info(
                    "\n########### CLIB COMPILER OPTIMIZATION ###########")
                log.info(copt.report(full=True))

            import atexit
            atexit.register(report, self.compiler_opt)

        if self.have_f_sources():
            from numpy.distutils.fcompiler import new_fcompiler
            self._f_compiler = new_fcompiler(compiler=self.fcompiler,
                                             verbose=self.verbose,
                                             dry_run=self.dry_run,
                                             force=self.force,
                                             requiref90='f90' in languages,
                                             c_compiler=self.compiler)
            if self._f_compiler is not None:
                self._f_compiler.customize(self.distribution)

                # Same libraries hide/restore dance as for the C compiler.
                libraries = self.libraries
                self.libraries = None
                self._f_compiler.customize_cmd(self)
                self.libraries = libraries

                self._f_compiler.show_customization()
        else:
            self._f_compiler = None

        self.build_libraries(self.libraries)

        if self.inplace:
            # Copy each built library next to its in-tree target directory.
            for l in self.distribution.installed_libraries:
                libname = self.compiler.library_filename(l.name)
                source = os.path.join(self.build_clib, libname)
                target = os.path.join(l.target_dir, libname)
                self.mkpath(l.target_dir)
                shutil.copy(source, target)
Example #47
0
    def bundle_libzmq_extension(self):
        """Register a bundled libzmq build as the ``zmq.libzmq`` extension.

        Fetches the libzmq sources into ``bundled/``, constructs an
        Extension from them, picks the polling backend and the
        platform-specific defines/libraries, bundles libsodium, and
        records the bundled configuration.  Idempotent: returns early if
        ``zmq.libzmq`` is already among the distribution's ext_modules.
        """
        bundledir = "bundled"
        ext_modules = self.distribution.ext_modules
        if ext_modules and any(m.name == 'zmq.libzmq' for m in ext_modules):
            # I've already been run
            return

        line()
        info("Using bundled libzmq")

        # fetch sources for libzmq extension:
        if not os.path.exists(bundledir):
            os.makedirs(bundledir)

        fetch_libzmq(bundledir)

        stage_platform_hpp(pjoin(bundledir, 'zeromq'))

        # construct the Extensions:
        libzmq = Extension(
            'zmq.libzmq',
            sources=[pjoin('buildutils', 'initlibzmq.c')] +
            glob(pjoin(bundledir, 'zeromq', 'src', '*.cpp')),
            include_dirs=[
                pjoin(bundledir, 'zeromq', 'include'),
            ],
        )

        # register the extension:
        self.distribution.ext_modules.insert(0, libzmq)

        # select polling subsystem based on platform:
        # kqueue on macOS/BSD, epoll on Linux, select on Windows,
        # poll everywhere else.
        if sys.platform == 'darwin' or 'bsd' in sys.platform:
            libzmq.define_macros.append(('ZMQ_USE_KQUEUE', 1))
        elif 'linux' in sys.platform:
            libzmq.define_macros.append(('ZMQ_USE_EPOLL', 1))
        elif sys.platform.startswith('win'):
            libzmq.define_macros.append(('ZMQ_USE_SELECT', 1))
        else:
            # this may not be sufficiently precise
            libzmq.define_macros.append(('ZMQ_USE_POLL', 1))

        if sys.platform.startswith('win'):
            # include defines from zeromq msvc project:
            libzmq.define_macros.append(('FD_SETSIZE', 1024))
            libzmq.define_macros.append(('DLL_EXPORT', 1))
            libzmq.define_macros.append(('_CRT_SECURE_NO_WARNINGS', 1))

            # When compiling the C++ code inside of libzmq itself, we want to
            # avoid "warning C4530: C++ exception handler used, but unwind
            # semantics are not enabled. Specify /EHsc".
            if self.compiler_type == 'msvc':
                libzmq.extra_compile_args.append('/EHsc')
            elif self.compiler_type == 'mingw32':
                libzmq.define_macros.append(('ZMQ_HAVE_MINGW32', 1))

            # And things like sockets come from libraries that must be named.
            libzmq.libraries.extend(['rpcrt4', 'ws2_32', 'advapi32'])

            # link against libsodium in build dir:
            suffix = ''
            if sys.version_info >= (3, 5):
                # Python 3.5 adds EXT_SUFFIX to libs
                ext_suffix = distutils.sysconfig.get_config_var('EXT_SUFFIX')
                suffix = os.path.splitext(ext_suffix)[0]
            if self.debug:
                # Debug builds of the lib carry a '_d' name suffix.
                suffix = '_d' + suffix
            libzmq.libraries.append('libsodium' + suffix)
            libzmq.library_dirs.append(pjoin(self.build_temp, 'buildutils'))

        else:
            libzmq.include_dirs.append(bundledir)

            # check if we need to link against Realtime Extensions library
            cc = new_compiler(compiler=self.compiler_type)
            cc.output_dir = self.build_temp
            if not sys.platform.startswith(('darwin', 'freebsd')):
                line()
                info("checking for timer_create")
                # timer_create lives in librt on older glibc; link it in
                # only when the symbol is not already available.
                if not cc.has_function('timer_create'):
                    info("no timer_create, linking librt")
                    libzmq.libraries.append('rt')
                else:
                    info("ok")

                if pypy:
                    # seem to need explicit libstdc++ on linux + pypy
                    # not sure why
                    libzmq.libraries.append("stdc++")

        # Also bundle libsodium, even on Windows.
        self.bundle_libsodium_extension(libzmq)

        # update other extensions, with bundled settings
        self.config['libzmq_extension'] = True
        self.init_settings_from_config()
        self.save_config('config', self.config)
Example #48
0
def get_device_extensions(use_cython: bool, library_dirs=None):
    """Return the list of native device Extension objects to build.

    On 64-bit Windows all device extensions are built against the bundled
    ``lib/win`` libraries.  Elsewhere, each entry in ``DEVICES`` is forced
    with ``--with-<dev>``, skipped with ``--without-<dev>``, or
    auto-detected by test-linking the device library's test function,
    with a second attempt via ``FALLBACKS`` when available.

    :param use_cython: forwarded to ``get_device_extension`` (build from
        Cython sources rather than pre-generated C files — assumed; confirm
        against get_device_extension).
    :param library_dirs: optional extra library search paths; a new list
        is used when None.
    """
    library_dirs = [] if library_dirs is None else library_dirs

    cur_dir = os.path.dirname(os.path.realpath(__file__))
    include_dirs = [os.path.realpath(os.path.join(cur_dir, "includes"))]

    if sys.platform == "win32":
        if platform.architecture()[0] != "64bit":
            return [
            ]  # only 64 bit python supported for native device backends

        # Release path on Windows: link against the shipped libraries,
        # no feature detection needed.
        result = []
        lib_dir = os.path.realpath(os.path.join(cur_dir, "lib/win"))
        for dev_name, params in DEVICES.items():
            result.append(
                get_device_extension(dev_name, [params["lib"]], [lib_dir],
                                     include_dirs))

        return result

    if sys.platform == "darwin":
        # On Mac OS X clang is by default not smart enough to search in the lib dir
        # see: https://github.com/jopohl/urh/issues/173
        library_dirs.append("/usr/local/lib")

    result = []

    # None = automatic (depending on lib is installed)
    # 1 = install extension always
    # 0 = Do not install extension
    build_device_extensions = defaultdict(lambda: None)

    # Parse (and consume) --with-<dev> / --without-<dev> command line flags.
    for dev_name in DEVICES:
        with_option = "--with-" + dev_name
        without_option = "--without-" + dev_name

        if with_option in sys.argv and without_option in sys.argv:
            print("ambiguous options for " + dev_name)
            sys.exit(1)
        elif without_option in sys.argv:
            build_device_extensions[dev_name] = 0
            sys.argv.remove(without_option)
        elif with_option in sys.argv:
            build_device_extensions[dev_name] = 1
            sys.argv.remove(with_option)

    sys.path.append(os.path.realpath(os.path.join(cur_dir, "lib")))

    compiler = ccompiler.new_compiler()
    for dev_name, params in DEVICES.items():
        if build_device_extensions[dev_name] == 0:
            print("\nSkipping native {0} support\n".format(dev_name))
            continue
        if build_device_extensions[dev_name] == 1:
            print("\nEnforcing native {0} support\n".format(dev_name))
            result.append(
                get_device_extension(dev_name, [params["lib"]], library_dirs,
                                     include_dirs, use_cython))
            continue

        # Auto-detection: try to compile/link a probe calling the device
        # library's test function.
        if compiler.has_function(params["test_function"],
                                 libraries=(params["lib"], ),
                                 library_dirs=library_dirs,
                                 include_dirs=include_dirs):
            print("\nFound {0} lib. Will compile with native {1} support\n".
                  format(params["lib"], dev_name))
            result.append(
                get_device_extension(dev_name, [params["lib"]], library_dirs,
                                     include_dirs, use_cython))
        elif dev_name in FALLBACKS:
            # Retry with the fallback library; note dev_name is rebound to
            # "<dev>_fallback" for the rest of this iteration.
            print("Trying fallback for {0}".format(dev_name))
            params = FALLBACKS[dev_name]
            dev_name += "_fallback"
            if compiler.has_function(params["test_function"],
                                     libraries=(params["lib"], ),
                                     library_dirs=library_dirs,
                                     include_dirs=include_dirs):
                print(
                    "\nFound fallback. Will compile with native {0} support\n".
                    format(dev_name))
                result.append(
                    get_device_extension(dev_name, [params["lib"]],
                                         library_dirs, include_dirs,
                                         use_cython))
        else:
            # NOTE(review): this format string only uses argument {1}
            # (dev_name); params["lib"] is passed but never printed.
            print("\nSkipping native support for {1}\n".format(
                params["lib"], dev_name))

        # remove Temp file for checking
        try:
            os.remove("a.out")
        except OSError:
            pass

        # Clean up the probe .c files that has_function left in the temp
        # directory (named after the test functions probed above).
        for filename in os.listdir(tempfile.gettempdir()):
            dev_name = dev_name.replace("_fallback", "")
            func_names = [DEVICES[dev_name]["test_function"]]
            if dev_name in FALLBACKS:
                func_names.append(FALLBACKS[dev_name]["test_function"])

            if any(filename.startswith(func_name)
                   for func_name in func_names) and filename.endswith(".c"):
                os.remove(os.path.join(tempfile.gettempdir(), filename))

    return result
Example #49
0
    def do_custom_build(self):
        """Download (if necessary) and build the bundled FreeType library.

        No-op when the ``system_freetype`` option is set or when the
        static library is already present.  On non-Windows platforms this
        runs ``./configure`` + ``make``; on Windows it patches the
        upstream MSVC solution and builds it with msbuild, then copies the
        result to the Unix-style output path.
        """
        # We're using a system freetype
        if options.get('system_freetype'):
            return

        src_path = Path('build', f'freetype-{LOCAL_FREETYPE_VERSION}')

        # We've already built freetype
        if sys.platform == 'win32':
            libfreetype = 'libfreetype.lib'
        else:
            libfreetype = 'libfreetype.a'

        # bailing because it is already built
        if (src_path / 'objs' / '.libs' / libfreetype).is_file():
            return

        # do we need to download / load the source from cache?
        if not src_path.exists():
            os.makedirs('build', exist_ok=True)

            tarball = f'freetype-{LOCAL_FREETYPE_VERSION}.tar.gz'
            # Primary mirror first (SourceForge), then the Savannah mirror.
            target_urls = [
                (f'https://downloads.sourceforge.net/project/freetype'
                 f'/freetype2/{LOCAL_FREETYPE_VERSION}/{tarball}'),
                (f'https://download.savannah.gnu.org/releases/freetype'
                 f'/{tarball}')
            ]

            for tarball_url in target_urls:
                try:
                    tar_contents = download_or_cache(tarball_url,
                                                     LOCAL_FREETYPE_HASH)
                    break
                except Exception:
                    pass
            else:
                # All mirrors failed; ask the user to fetch it manually.
                raise IOError(
                    f"Failed to download FreeType. Please download one of "
                    f"{target_urls} and extract it into {src_path} at the "
                    f"top-level of the source repository.")

            print(f"Extracting {tarball}")
            with tarfile.open(fileobj=tar_contents, mode="r:gz") as tgz:
                tgz.extractall("build")

        print(f"Building freetype in {src_path}")
        if sys.platform != 'win32':  # compilation on non-windows
            # Build with -fPIC so the static lib can link into a shared ext.
            env = {
                **os.environ, "CFLAGS":
                "{} -fPIC".format(os.environ.get("CFLAGS", ""))
            }
            subprocess.check_call([
                "./configure", "--with-zlib=no", "--with-bzip2=no",
                "--with-png=no", "--with-harfbuzz=no"
            ],
                                  env=env,
                                  cwd=src_path)
            subprocess.check_call(["make"], env=env, cwd=src_path)
        else:  # compilation on windows
            shutil.rmtree(src_path / "objs", ignore_errors=True)
            msbuild_platform = ('x64' if platform.architecture()[0] == '64bit'
                                else 'Win32')
            base_path = Path("build/freetype-2.6.1/builds/windows")
            vc = 'vc2010'
            sln_path = (base_path / vc / "freetype.sln")
            # https://developercommunity.visualstudio.com/comments/190992/view.html
            (sln_path.parent / "Directory.Build.props").write_text("""
<Project>
 <PropertyGroup>
  <!-- The following line *cannot* be split over multiple lines. -->
  <WindowsTargetPlatformVersion>$([Microsoft.Build.Utilities.ToolLocationHelper]::GetLatestSDKTargetPlatformVersion('Windows', '10.0'))</WindowsTargetPlatformVersion>
 </PropertyGroup>
</Project>
""")
            # It is not a trivial task to determine PlatformToolset to plug it
            # into msbuild command, and Directory.Build.props will not override
            # the value in the project file.
            # The DefaultPlatformToolset is from Microsoft.Cpp.Default.props
            with open(base_path / vc / "freetype.vcxproj", 'r+b') as f:
                toolset_repl = b'PlatformToolset>$(DefaultPlatformToolset)<'
                vcxproj = f.read().replace(b'PlatformToolset>v100<',
                                           toolset_repl)
                assert toolset_repl in vcxproj, (
                    'Upgrading Freetype might break this')
                f.seek(0)
                f.truncate()
                f.write(vcxproj)

            cc = ccompiler.new_compiler()
            cc.initialize()  # Get msbuild in the %PATH% of cc.spawn.
            cc.spawn([
                "msbuild",
                str(sln_path), "/t:Clean;Build",
                f"/p:Configuration=Release;Platform={msbuild_platform}"
            ])
            # Move to the corresponding Unix build path.
            (src_path / "objs" / ".libs").mkdir()
            # Be robust against change of FreeType version.
            lib_path, = (src_path / "objs" / vc /
                         msbuild_platform).glob("freetype*.lib")
            shutil.copy2(lib_path, src_path / "objs/.libs/libfreetype.lib")
Example #50
0
    def finalize_options(self):
        """Finalize build options and probe the compiler for optional features.

        Each optional feature flag (``cpp11``, ``sse2``, ``avx2``,
        ``openmp``) is kept only if the active compiler accepts the
        corresponding flag (MSVC support is inferred from the Python
        version instead).  With ``native``, SSE2/AVX2 are additionally
        gated on host CPU support.  Libraries and extensions that require
        C++11 are filtered out when C++11 is unavailable.
        """
        build.finalize_options(self)

        # Check that build options are available
        compiler = ccompiler.new_compiler(compiler=self.compiler, force=True)
        sysconfig.customize_compiler(compiler)

        if self.cpp11:
            if compiler.compiler_type == 'msvc':
                self.cpp11 = sys.version_info[:2] >= (3, 5)
            else:
                # Bug fix: this previously assigned to `self.cpp1` (typo),
                # so the probe result never reached self.cpp11 and C++11
                # was never disabled on non-MSVC compilers.
                self.cpp11 = check_compile_flag(compiler,
                                                '-std=c++11',
                                                extension='.cc')
            if not self.cpp11:
                logger.warning("C++11 disabled: not available")

        if self.sse2:
            if compiler.compiler_type == 'msvc':
                self.sse2 = sys.version_info[0] >= 3
            else:
                self.sse2 = check_compile_flag(compiler, '-msse2')
            if not self.sse2:
                logger.warning("SSE2 disabled: not available")

        if self.avx2:
            if compiler.compiler_type == 'msvc':
                self.avx2 = sys.version_info[:2] >= (3, 5)
            else:
                self.avx2 = check_compile_flag(compiler, '-mavx2')
            if not self.avx2:
                logger.warning("AVX2 disabled: not available")

        if self.openmp:
            # MSVC spells the flag /openmp, GCC/Clang spell it -fopenmp.
            prefix = '/' if compiler.compiler_type == 'msvc' else '-f'
            self.openmp = check_compile_flag(compiler, prefix + 'openmp')
            if not self.openmp:
                logger.warning("OpenMP disabled: not available")

        if self.native:
            # Restrict SIMD flags to what the build host CPU supports.
            is_cpu_sse2, is_cpu_avx2 = get_cpu_sse2_avx2()
            self.sse2 = self.sse2 and is_cpu_sse2
            self.avx2 = self.avx2 and is_cpu_avx2

        logger.info("Building with C++11: %r", bool(self.cpp11))
        logger.info('Building with native option: %r', bool(self.native))
        logger.info("Building with SSE2: %r", bool(self.sse2))
        logger.info("Building with AVX2: %r", bool(self.avx2))
        logger.info("Building with OpenMP: %r", bool(self.openmp))

        # Filter out C++11 libraries if cpp11 option is False
        self.distribution.libraries = [
            (name, info) for name, info in self.distribution.libraries
            if self.cpp11 or '-std=c++11' not in info.get('cflags', [])
        ]

        # Filter out C++11-only extensions if cpp11 option is False
        self.distribution.ext_modules = [
            ext for ext in self.distribution.ext_modules if self.cpp11 or not (
                isinstance(ext, HDF5PluginExtension) and ext.cpp11_required)
        ]
Example #51
0
    def do_custom_build(self, env):
        """Download (if necessary) and build the bundled FreeType library.

        No-op when the ``system_freetype`` option is set or when the
        static library is already present.  On non-Windows platforms this
        runs ``./configure`` followed by GNU make (detected via the
        GNUMAKE/MAKE environment variables or by probing ``make -v``);
        on Windows it patches the upstream MSVC solution and builds it
        with msbuild, then copies the result to the Unix-style path.

        :param env: environment mapping used for the configure/make calls.
        """
        # We're using a system freetype
        if options.get('system_freetype'):
            return

        tarball = f'freetype-{LOCAL_FREETYPE_VERSION}.tar.gz'
        # Primary mirror first (SourceForge), then the Savannah mirror.
        src_path = get_and_extract_tarball(
            urls=[(f'https://downloads.sourceforge.net/project/freetype'
                   f'/freetype2/{LOCAL_FREETYPE_VERSION}/{tarball}'),
                  (f'https://download.savannah.gnu.org/releases/freetype'
                   f'/{tarball}')],
            sha=LOCAL_FREETYPE_HASH,
            dirname=f'freetype-{LOCAL_FREETYPE_VERSION}',
        )

        if sys.platform == 'win32':
            libfreetype = 'libfreetype.lib'
        else:
            libfreetype = 'libfreetype.a'
        if (src_path / 'objs' / '.libs' / libfreetype).is_file():
            return  # Bail out because we have already built FreeType.

        print(f"Building freetype in {src_path}")
        if sys.platform != 'win32':  # compilation on non-windows
            # Build with -fPIC so the static lib can link into a shared ext.
            env = {**env, "CFLAGS": "{} -fPIC".format(env.get("CFLAGS", ""))}
            subprocess.check_call([
                "./configure", "--with-zlib=no", "--with-bzip2=no",
                "--with-png=no", "--with-harfbuzz=no", "--enable-static",
                "--disable-shared"
            ],
                                  env=env,
                                  cwd=src_path)
            # Pick a GNU make: explicit env override, else probe `make -v`
            # and fall back to `gmake` when make is neither GNU nor makepp.
            if 'GNUMAKE' in env:
                make = env['GNUMAKE']
            elif 'MAKE' in env:
                make = env['MAKE']
            else:
                try:
                    output = subprocess.check_output(['make', '-v'],
                                                     stderr=subprocess.DEVNULL)
                except subprocess.CalledProcessError:
                    output = b''
                if b'GNU' not in output and b'makepp' not in output:
                    make = 'gmake'
                else:
                    make = 'make'
            subprocess.check_call([make], env=env, cwd=src_path)
        else:  # compilation on windows
            shutil.rmtree(src_path / "objs", ignore_errors=True)
            msbuild_platform = ('x64' if platform.architecture()[0] == '64bit'
                                else 'Win32')
            base_path = Path("build/freetype-2.6.1/builds/windows")
            vc = 'vc2010'
            sln_path = (base_path / vc / "freetype.sln")
            # https://developercommunity.visualstudio.com/comments/190992/view.html
            (sln_path.parent / "Directory.Build.props").write_text("""
<Project>
 <PropertyGroup>
  <!-- The following line *cannot* be split over multiple lines. -->
  <WindowsTargetPlatformVersion>$([Microsoft.Build.Utilities.ToolLocationHelper]::GetLatestSDKTargetPlatformVersion('Windows', '10.0'))</WindowsTargetPlatformVersion>
 </PropertyGroup>
</Project>
""")
            # It is not a trivial task to determine PlatformToolset to plug it
            # into msbuild command, and Directory.Build.props will not override
            # the value in the project file.
            # The DefaultPlatformToolset is from Microsoft.Cpp.Default.props
            with open(base_path / vc / "freetype.vcxproj", 'r+b') as f:
                toolset_repl = b'PlatformToolset>$(DefaultPlatformToolset)<'
                vcxproj = f.read().replace(b'PlatformToolset>v100<',
                                           toolset_repl)
                assert toolset_repl in vcxproj, (
                    'Upgrading Freetype might break this')
                f.seek(0)
                f.truncate()
                f.write(vcxproj)

            cc = ccompiler.new_compiler()
            cc.initialize()  # Get msbuild in the %PATH% of cc.spawn.
            cc.spawn([
                "msbuild",
                str(sln_path), "/t:Clean;Build",
                f"/p:Configuration=Release;Platform={msbuild_platform}"
            ])
            # Move to the corresponding Unix build path.
            (src_path / "objs" / ".libs").mkdir()
            # Be robust against change of FreeType version.
            lib_path, = (src_path / "objs" / vc /
                         msbuild_platform).glob("freetype*.lib")
            shutil.copy2(lib_path, src_path / "objs/.libs/libfreetype.lib")
Example #52
0
    def run(self):
        if not self.extensions:
            return

        # Make sure that extension sources are complete.
        self.run_command('build_src')

        if self.distribution.has_c_libraries():
            if self.inplace:
                if self.distribution.have_run.get('build_clib'):
                    log.warn('build_clib already run, it is too late to '
                             'ensure in-place build of build_clib')
                    build_clib = self.distribution.get_command_obj(
                        'build_clib')
                else:
                    build_clib = self.distribution.get_command_obj(
                        'build_clib')
                    build_clib.inplace = 1
                    build_clib.ensure_finalized()
                    build_clib.run()
                    self.distribution.have_run['build_clib'] = 1

            else:
                self.run_command('build_clib')
                build_clib = self.get_finalized_command('build_clib')
            self.library_dirs.append(build_clib.build_clib)
        else:
            build_clib = None

        # Not including C libraries to the list of
        # extension libraries automatically to prevent
        # bogus linking commands. Extensions must
        # explicitly specify the C libraries that they use.

        from distutils.ccompiler import new_compiler
        from numpy.distutils.fcompiler import new_fcompiler

        compiler_type = self.compiler
        # Initialize C compiler:
        self.compiler = new_compiler(compiler=compiler_type,
                                     verbose=self.verbose,
                                     dry_run=self.dry_run,
                                     force=self.force)
        self.compiler.customize(self.distribution)
        self.compiler.customize_cmd(self)

        if self.warn_error:
            self.compiler.compiler.append('-Werror')
            self.compiler.compiler_so.append('-Werror')

        self.compiler.show_customization()

        if not self.disable_optimization:
            dispatch_hpath = os.path.join("numpy", "distutils", "include",
                                          "npy_cpu_dispatch_config.h")
            dispatch_hpath = os.path.join(
                self.get_finalized_command("build_src").build_src,
                dispatch_hpath)
            opt_cache_path = os.path.abspath(
                os.path.join(self.build_temp, 'ccompiler_opt_cache_ext.py'))
            if hasattr(self, "compiler_opt"):
                # By default `CCompilerOpt` update the cache at the exit of
                # the process, which may lead to duplicate building
                # (see build_extension()/force_rebuild) if run() called
                # multiple times within the same os process/thread without
                # giving the chance the previous instances of `CCompilerOpt`
                # to update the cache.
                self.compiler_opt.cache_flush()

            self.compiler_opt = new_ccompiler_opt(
                compiler=self.compiler,
                dispatch_hpath=dispatch_hpath,
                cpu_baseline=self.cpu_baseline,
                cpu_dispatch=self.cpu_dispatch,
                cache_path=opt_cache_path)

            def report(copt):
                log.info("\n########### EXT COMPILER OPTIMIZATION ###########")
                log.info(copt.report(full=True))

            import atexit
            atexit.register(report, self.compiler_opt)

        # Setup directory for storing generated extra DLL files on Windows
        self.extra_dll_dir = os.path.join(self.build_temp, '.libs')
        if not os.path.isdir(self.extra_dll_dir):
            os.makedirs(self.extra_dll_dir)

        # Create mapping of libraries built by build_clib:
        clibs = {}
        if build_clib is not None:
            for libname, build_info in build_clib.libraries or []:
                if libname in clibs and clibs[libname] != build_info:
                    log.warn('library %r defined more than once,'
                             ' overwriting build_info\n%s... \nwith\n%s...' %
                             (libname, repr(clibs[libname])[:300],
                              repr(build_info)[:300]))
                clibs[libname] = build_info
        # .. and distribution libraries:
        for libname, build_info in self.distribution.libraries or []:
            if libname in clibs:
                # build_clib libraries have a precedence before distribution ones
                continue
            clibs[libname] = build_info

        # Determine if C++/Fortran 77/Fortran 90 compilers are needed.
        # Update extension libraries, library_dirs, and macros.
        all_languages = set()
        for ext in self.extensions:
            ext_languages = set()
            c_libs = []
            c_lib_dirs = []
            macros = []
            for libname in ext.libraries:
                if libname in clibs:
                    binfo = clibs[libname]
                    c_libs += binfo.get('libraries', [])
                    c_lib_dirs += binfo.get('library_dirs', [])
                    for m in binfo.get('macros', []):
                        if m not in macros:
                            macros.append(m)

                for l in clibs.get(libname, {}).get('source_languages', []):
                    ext_languages.add(l)
            if c_libs:
                new_c_libs = ext.libraries + c_libs
                log.info('updating extension %r libraries from %r to %r' %
                         (ext.name, ext.libraries, new_c_libs))
                ext.libraries = new_c_libs
                ext.library_dirs = ext.library_dirs + c_lib_dirs
            if macros:
                log.info('extending extension %r defined_macros with %r' %
                         (ext.name, macros))
                ext.define_macros = ext.define_macros + macros

            # determine extension languages
            if has_f_sources(ext.sources):
                ext_languages.add('f77')
            if has_cxx_sources(ext.sources):
                ext_languages.add('c++')
            l = ext.language or self.compiler.detect_language(ext.sources)
            if l:
                ext_languages.add(l)
            # reset language attribute for choosing proper linker
            if 'c++' in ext_languages:
                ext_language = 'c++'
            elif 'f90' in ext_languages:
                ext_language = 'f90'
            elif 'f77' in ext_languages:
                ext_language = 'f77'
            else:
                ext_language = 'c'  # default
            if l and l != ext_language and ext.language:
                log.warn('resetting extension %r language from %r to %r.' %
                         (ext.name, l, ext_language))
            ext.language = ext_language
            # global language
            all_languages.update(ext_languages)

        need_f90_compiler = 'f90' in all_languages
        need_f77_compiler = 'f77' in all_languages
        need_cxx_compiler = 'c++' in all_languages

        # Initialize C++ compiler:
        if need_cxx_compiler:
            self._cxx_compiler = new_compiler(compiler=compiler_type,
                                              verbose=self.verbose,
                                              dry_run=self.dry_run,
                                              force=self.force)
            compiler = self._cxx_compiler
            compiler.customize(self.distribution, need_cxx=need_cxx_compiler)
            compiler.customize_cmd(self)
            compiler.show_customization()
            self._cxx_compiler = compiler.cxx_compiler()
        else:
            self._cxx_compiler = None

        # Initialize Fortran 77 compiler:
        if need_f77_compiler:
            ctype = self.fcompiler
            self._f77_compiler = new_fcompiler(compiler=self.fcompiler,
                                               verbose=self.verbose,
                                               dry_run=self.dry_run,
                                               force=self.force,
                                               requiref90=False,
                                               c_compiler=self.compiler)
            fcompiler = self._f77_compiler
            if fcompiler:
                ctype = fcompiler.compiler_type
                fcompiler.customize(self.distribution)
            if fcompiler and fcompiler.get_version():
                fcompiler.customize_cmd(self)
                fcompiler.show_customization()
            else:
                self.warn('f77_compiler=%s is not available.' % (ctype))
                self._f77_compiler = None
        else:
            self._f77_compiler = None

        # Initialize Fortran 90 compiler:
        if need_f90_compiler:
            ctype = self.fcompiler
            self._f90_compiler = new_fcompiler(compiler=self.fcompiler,
                                               verbose=self.verbose,
                                               dry_run=self.dry_run,
                                               force=self.force,
                                               requiref90=True,
                                               c_compiler=self.compiler)
            fcompiler = self._f90_compiler
            if fcompiler:
                ctype = fcompiler.compiler_type
                fcompiler.customize(self.distribution)
            if fcompiler and fcompiler.get_version():
                fcompiler.customize_cmd(self)
                fcompiler.show_customization()
            else:
                self.warn('f90_compiler=%s is not available.' % (ctype))
                self._f90_compiler = None
        else:
            self._f90_compiler = None

        # Build extensions
        self.build_extensions()

        # Copy over any extra DLL files
        # FIXME: In the case where there are more than two packages,
        # we blindly assume that both packages need all of the libraries,
        # resulting in a larger wheel than is required. This should be fixed,
        # but it's so rare that I won't bother to handle it.
        pkg_roots = {
            self.get_ext_fullname(ext.name).split('.')[0]
            for ext in self.extensions
        }
        for pkg_root in pkg_roots:
            shared_lib_dir = os.path.join(pkg_root, '.libs')
            if not self.inplace:
                shared_lib_dir = os.path.join(self.build_lib, shared_lib_dir)
            for fn in os.listdir(self.extra_dll_dir):
                if not os.path.isdir(shared_lib_dir):
                    os.makedirs(shared_lib_dir)
                if not fn.lower().endswith('.dll'):
                    continue
                runtime_lib = os.path.join(self.extra_dll_dir, fn)
                copy_file(runtime_lib, shared_lib_dir)
Example #53
0
    def bundle_libqpid_proton_extension(self):
        """The proper version of libqpid-proton is not present on the system,
        so attempt to retrieve the proper libqpid-proton sources and
        include them in the extension.
        """
        setup_path = os.path.dirname(os.path.realpath(__file__))
        base = self.get_finalized_command('build').build_base
        build_include = os.path.join(base, 'include')

        log.info("Bundling qpid-proton into the extension")

        # QPID_PROTON_SRC - (optional) pathname to the Proton C sources.  Can
        # be used to override where this setup gets the Proton C sources from
        # (see bundle.fetch_libqpid_proton())
        if 'QPID_PROTON_SRC' not in os.environ:
            if not os.path.exists(os.path.join(setup_path, 'tox.ini')):
                bundledir = os.path.join(base, "bundled")
                if not os.path.exists(bundledir):
                    os.makedirs(bundledir)
                bundle.fetch_libqpid_proton(bundledir)
                libqpid_proton_dir = os.path.abspath(os.path.join(bundledir, 'qpid-proton'))
            else:
                # This should happen just in **dev** environemnts since
                # tox.ini is not shipped with the driver. It should only
                # be triggered when calling `setup.py`. This can happen either
                # manually or when calling `tox` in the **sdist** step. Tox will
                # defined the `QPID_PROTON_SRC` itself.
                proton_c = os.path.join(setup_path, 'qpid-proton')
                libqpid_proton_dir = os.path.abspath(proton_c)
        else:
            libqpid_proton_dir = os.path.abspath(os.environ['QPID_PROTON_SRC'])

        log.debug("Using libqpid-proton src: %s" % libqpid_proton_dir)

        proton_base = os.path.join(libqpid_proton_dir, 'proton-c')
        proton_src = os.path.join(proton_base, 'src')
        proton_include = os.path.join(proton_base, 'include')

        #
        # Create any generated header files, and put them in build_include:
        #
        if not os.path.exists(build_include):
            os.makedirs(build_include)
            os.mkdir(os.path.join(build_include, 'proton'))

        # Generate `protocol.h` by calling the python
        # script found in the source dir.
        with open(os.path.join(build_include, 'protocol.h'), 'wb') as header:
            subprocess.Popen([sys.executable, os.path.join(proton_src, 'protocol.h.py')],
                              env={'PYTHONPATH': proton_base}, stdout=header)

        # Generate `encodings.h` by calling the python
        # script found in the source dir.
        with open(os.path.join(build_include, 'encodings.h'), 'wb') as header:
            subprocess.Popen([sys.executable,
                              os.path.join(proton_src, 'codec', 'encodings.h.py')],
                              env={'PYTHONPATH': proton_base}, stdout=header)

        # Create a custom, temporary, version.h file mapping the
        # major and minor versions from the downloaded tarbal. This version should
        # match the ones in the bundle module
        with open(os.path.join(build_include, 'proton', 'version.h'), "wb") as ver:
            version_text = """
#ifndef _PROTON_VERSION_H
#define _PROTON_VERSION_H 1
#define PN_VERSION_MAJOR %i
#define PN_VERSION_MINOR %i
#endif /* version.h */
""" % bundle.min_qpid_proton
            ver.write(version_text.encode('utf-8'))

        # Collect all the Proton C files that need to be built.
        # we could've used `glob(.., '*', '*.c')` but I preferred going
        # with an explicit list of subdirs that we can control and expand
        # depending on the version. Specifically, lets avoid adding things
        # we don't need.

        sources = []
        for subdir in ['object', 'framing', 'codec', 'dispatcher',
                       'engine', 'events', 'transport',
                       'message', 'reactor', 'messenger',
                       'handlers', 'posix']:

            sources.extend(glob.glob(os.path.join(proton_src, subdir, '*.c')))

        sources.extend(filter(lambda x: not x.endswith('dump.c'),
                       glob.iglob(os.path.join(proton_src, '*.c'))))

        # Look for any optional libraries that proton needs, and adjust the
        # source list as necessary.
        libraries = []

        # Check whether openssl is installed by poking
        # pkg-config for a minimum version 0. If it's installed, it should
        # return True and we'll use it. Otherwise, we'll use the stub.
        if misc.pkg_config_version(atleast='0', module='openssl'):
            libraries += ['ssl', 'crypto']
            sources.append(os.path.join(proton_src, 'ssl', 'openssl.c'))
        else:
            sources.append(os.path.join(proton_src, 'ssl', 'ssl_stub.c'))

        # create a temp compiler to check for optional compile-time features
        cc = new_compiler(compiler=self.compiler_type)
        cc.output_dir = self.build_temp

        # Some systems need to link to
        # `rt`. Check whether `clock_getttime` is around
        # and if not, link on rt.
        if not cc.has_function('clock_getttime'):
            libraries.append('rt')

        # 0.10 added an implementation for cyrus. Check
        # if it is available before adding the implementation to the sources
        # list. Eventually, `sasl.c` will be added and one of the existing
        # implementations will be used.
        if cc.has_function('sasl_client_done', includes=['sasl/sasl.h'],
                           libraries=['sasl2']):
            libraries.append('sasl2')
            sources.append(os.path.join(proton_src, 'sasl', 'cyrus_sasl.c'))
        else:
            sources.append(os.path.join(proton_src, 'sasl', 'none_sasl.c'))

        sources.append(os.path.join(proton_src, 'sasl', 'sasl.c'))

        # compile all the proton sources.  We'll add the resulting list of
        # objects to the _cproton extension as 'extra objects'.  We do this
        # instead of just lumping all the sources into the extension to prevent
        # any proton-specific compilation flags from affecting the compilation
        # of the generated swig code

        cc = new_compiler(compiler=self.compiler_type)
        ds_sys.customize_compiler(cc)

        objects = cc.compile(sources,
                             # -D flags (None means no value, just define)
                             macros=[('qpid_proton_EXPORTS', None),
                                     ('USE_ATOLL', None),
                                     ('USE_CLOCK_GETTIME', None),
                                     ('USE_STRERROR_R', None)],
                             include_dirs=[build_include,
                                           proton_include,
                                           proton_src],
                             # compiler command line options:
                             extra_postargs=['-std=gnu99'],
                             output_dir=self.build_temp)

        #
        # Now update the _cproton extension instance to include the objects and
        # libraries
        #
        _cproton = self.distribution.ext_modules[-1]
        _cproton.extra_objects = objects
        _cproton.include_dirs.append(build_include)
        _cproton.include_dirs.append(proton_include)

        # swig will need to access the proton headers:
        _cproton.swig_opts.append('-I%s' % build_include)
        _cproton.swig_opts.append('-I%s' % proton_include)

        # lastly replace the libqpid-proton dependency with libraries required
        # by the Proton objects:
        _cproton.libraries=libraries
Example #54
0
    Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
    Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
    Extension('mercurial.parsers', ['mercurial/parsers.c']),
]

# osutil.c cannot be built under windows + python 2.4 (issue1364);
# fall back to the pure-Python implementation there, otherwise build
# the C extension.
_osutil_broken = (sys.platform == 'win32'
                  and sys.version_info < (2, 5, 0, 'final'))
if _osutil_broken:
    pymodules.append('mercurial.pure.osutil')
else:
    extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))

if sys.platform.startswith('linux') and os.uname()[2] > '2.6':
    # The inotify extension is only usable with Linux 2.6 kernels.
    # You also need a reasonably recent C library.
    # In any case, if it fails to build the error will be skipped ('optional').
    # NOTE(review): the kernel-version check is a plain *string* comparison
    # of os.uname()[2], not a numeric compare — confirm this is intentional
    # for the kernel versions being targeted.
    cc = new_compiler()
    if hasfunction(cc, 'inotify_add_watch'):
        inotify = Extension('hgext.inotify.linux._inotify',
                            ['hgext/inotify/linux/_inotify.c'], ['mercurial'])
        # Build failures of this extension must never abort the whole build.
        inotify.optional = True
        extmodules.append(inotify)
        packages.extend(['hgext.inotify', 'hgext.inotify.linux'])

# Non-Python data files shipped inside the mercurial package:
# compiled message catalogs (*.mo) and the built-in help topic texts.
packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo', 'help/*.txt']}


def ordinarypath(p):
    """Return a truthy value for "ordinary" paths.

    A path is ordinary unless it is empty, hidden (leading '.'),
    or an editor backup (trailing '~').  An empty/falsy input is
    returned unchanged, matching short-circuit `and` semantics.
    """
    if not p:
        return p
    return p[0] != '.' and p[-1] != '~'


for root in ('templates', ):
Example #55
0
def test_compilation(program,
                     extra_cc_options=None,
                     extra_libraries=None,
                     msg=''):
    """Test if a certain C program can be compiled.

    :param program: C source code to compile, as a string.
    :param extra_cc_options: extra command-line options for the compile step.
    :param extra_libraries: libraries to link the test executable against.
    :param msg: human-readable feature name; when non-empty, progress is
        printed to stdout.
    :return: True if the program compiled and linked, False otherwise.
    """

    # Create a temporary file with the C program
    if not os.path.exists("build"):
        os.makedirs("build")
    fname = os.path.join("build", "test1.c")
    with open(fname, 'w') as src:
        src.write(program)

    # Name for the temporary executable
    oname = os.path.join("build", "test1.out")

    debug = False
    if msg:
        print("Testing support for %s" % msg)

    # Mute the compiler and the linker by pointing the process-level
    # stdout/stderr at os.devnull (spawned compiler children inherit them).
    mute = not (debug or os.name == 'nt')
    old_stdout = old_stderr = dev_null = None
    if mute:
        old_stdout = os.dup(sys.stdout.fileno())
        old_stderr = os.dup(sys.stderr.fileno())
        dev_null = open(os.devnull, "w")
        os.dup2(dev_null.fileno(), sys.stdout.fileno())
        os.dup2(dev_null.fileno(), sys.stderr.fileno())

    objects = []
    result = False
    try:
        compiler = ccompiler.new_compiler()
        distutils.sysconfig.customize_compiler(compiler)

        if compiler.compiler_type in ['msvc']:
            # Force creation of the manifest file (http://bugs.python.org/issue16296)
            # as needed by VS2010
            extra_linker_options = ["/MANIFEST"]
        else:
            extra_linker_options = []

        # In Unix, force the linker step to use CFLAGS and not CC alone (see GH#180)
        if compiler.compiler_type in ['unix']:
            compiler.set_executables(linker_exe=compiler.compiler)

        objects = compiler.compile([fname], extra_postargs=extra_cc_options)
        compiler.link_executable(objects,
                                 oname,
                                 libraries=extra_libraries,
                                 extra_preargs=extra_linker_options)
        result = True
    except CCompilerError:
        result = False
    finally:
        # Cleanup and stdio restoration now run even when an unexpected
        # exception escapes the compile/link step; previously that left
        # stdout/stderr permanently redirected to /dev/null and leaked
        # the temporary files.
        for leftover in objects + [fname, oname]:
            try:
                os.remove(leftover)
            except OSError:
                pass

        # Restore stdout and stderr (and close the duped fds, which were
        # previously leaked on every call).
        if mute:
            if old_stdout is not None:
                os.dup2(old_stdout, sys.stdout.fileno())
                os.close(old_stdout)
            if old_stderr is not None:
                os.dup2(old_stderr, sys.stderr.fileno())
                os.close(old_stderr)
            if dev_null is not None:
                dev_null.close()

    if msg:
        if result:
            x = ""
        else:
            x = " not"
        print("Target does%s support %s" % (x, msg))

    return result
Example #56
0
def using_clang():
    """Will we be using a clang compiler?"""
    cc = new_compiler()
    customize_compiler(cc)
    # Ask the configured C compiler driver for its version banner and
    # look for the clang marker in it.
    banner = getoutput("{0} -v".format(cc.compiler[0]))
    return 'clang' in banner
Example #57
0
def check_sanity():
    """
    Test if development headers and library for cephfs is available by compiling a dummy C program.
    """
    ceph_src_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                '..', '..')

    scratch_dir = tempfile.mkdtemp(
        dir=os.environ.get('TMPDIR', os.path.dirname(__file__)))
    probe_src = os.path.join(scratch_dir, 'cephfs_dummy.c')

    # Minimal program touching the cephfs API; if this compiles and links,
    # the headers and library are usable.
    dummy_prog = textwrap.dedent("""
    #include <stddef.h>
    #include "cephfs/libcephfs.h"

    int main(void) {
        struct ceph_mount_info *cmount = NULL;
        ceph_init(cmount);
        return 0;
    }
    """)
    with open(probe_src, 'w') as fp:
        fp.write(dummy_prog)

    compiler = new_compiler()
    distutils.sysconfig.customize_compiler(compiler)

    if 'CEPH_LIBDIR' in os.environ:
        # The setup.py has been invoked by a top-level Ceph make.
        # Set the appropriate CFLAGS and LDFLAGS
        compiler.set_library_dirs([os.environ.get('CEPH_LIBDIR')])

    try:
        compiler.define_macro('_FILE_OFFSET_BITS', '64')

        objs = compiler.compile(
            sources=[probe_src],
            output_dir=scratch_dir,
            extra_preargs=['-iquote{path}'.format(
                path=os.path.join(ceph_src_dir, 'include'))])

        compiler.link_executable(
            objects=objs,
            output_progname=os.path.join(scratch_dir, 'cephfs_dummy'),
            libraries=['cephfs'],
            output_dir=scratch_dir,
        )
    except CompileError:
        print('\nCompile Error: Ceph FS development headers not found',
              file=sys.stderr)
        return False
    except LinkError:
        print('\nLink Error: Ceph FS library not found', file=sys.stderr)
        return False
    else:
        return True
    finally:
        # Always discard the scratch directory, whatever the outcome.
        shutil.rmtree(scratch_dir)
Example #58
0
    def run(self):
        """Build the C/C++/Fortran libraries declared in ``self.libraries``."""
        # Nothing declared — nothing to build.
        if not self.libraries:
            return

        # Make sure that library sources are complete.
        languages = []

        # Make sure that extension sources are complete.
        self.run_command('build_src')

        # Record each library's declared source language; used below to
        # decide whether an f90-capable Fortran compiler must be requested.
        for (lib_name, build_info) in self.libraries:
            l = build_info.get('language', None)
            if l and l not in languages:
                languages.append(l)

        # Replace the compiler *name* held in self.compiler with an actual
        # compiler instance, then let it pick up distribution-level settings.
        from distutils.ccompiler import new_compiler
        self.compiler = new_compiler(compiler=self.compiler,
                                     dry_run=self.dry_run,
                                     force=self.force)
        self.compiler.customize(self.distribution,
                                need_cxx=self.have_cxx_sources())

        if self.warn_error:
            # Promote all compiler warnings to errors.
            self.compiler.compiler.append('-Werror')
            self.compiler.compiler_so.append('-Werror')

        # Temporarily hide self.libraries while the compiler inspects this
        # command object.  NOTE(review): presumably customize_cmd would
        # misread build_clib's `libraries` attribute — a list of
        # (name, build_info) tuples — as plain link libraries; confirm
        # against distutils.
        libraries = self.libraries
        self.libraries = None
        self.compiler.customize_cmd(self)
        self.libraries = libraries

        self.compiler.show_customization()

        if self.have_f_sources():
            from numpy.distutils.fcompiler import new_fcompiler
            # Request an f90-capable Fortran compiler only when some
            # library declared 'f90' as its language.
            self._f_compiler = new_fcompiler(compiler=self.fcompiler,
                                             verbose=self.verbose,
                                             dry_run=self.dry_run,
                                             force=self.force,
                                             requiref90='f90' in languages,
                                             c_compiler=self.compiler)
            if self._f_compiler is not None:
                self._f_compiler.customize(self.distribution)

                # Same hide-and-restore dance as for the C compiler above.
                libraries = self.libraries
                self.libraries = None
                self._f_compiler.customize_cmd(self)
                self.libraries = libraries

                self._f_compiler.show_customization()
        else:
            # No Fortran sources anywhere: skip Fortran compiler setup.
            self._f_compiler = None

        self.build_libraries(self.libraries)

        # For in-place builds, copy each built library file next to its
        # declared target directory.
        if self.inplace:
            for l in self.distribution.installed_libraries:
                libname = self.compiler.library_filename(l.name)
                source = os.path.join(self.build_clib, libname)
                target = os.path.join(l.target_dir, libname)
                self.mkpath(l.target_dir)
                shutil.copy(source, target)
Example #59
0
def add_openmp_flags_if_available(extension):
    """
    Add OpenMP compilation flags, if available (if not a warning will be
    printed to the console and no flags will be added)

    Parameters
    ----------
    extension : setuptools Extension
        The extension whose ``extra_compile_args`` / ``extra_link_args``
        are extended in place when OpenMP works.

    Returns `True` if the flags were added, `False` otherwise.
    """
    import shutil

    ccompiler = new_compiler()
    customize_compiler(ccompiler)

    # Build the probe program in a scratch directory; it is removed again
    # in the finally block below (the previous version leaked one temp
    # directory per call).
    tmp_dir = tempfile.mkdtemp()

    start_dir = os.path.abspath('.')

    # MSVC spells the OpenMP switch differently and needs no link flag.
    if get_compiler_option() == 'msvc':
        compile_flag = '-openmp'
        link_flag = ''
    else:
        compile_flag = '-fopenmp'
        link_flag = '-fopenmp'

    try:

        os.chdir(tmp_dir)

        with open('test_openmp.c', 'w') as f:
            f.write(CCODE)

        os.mkdir('objects')

        # Compile, link, and run test program
        ccompiler.compile(['test_openmp.c'],
                          output_dir='objects',
                          extra_postargs=[compile_flag])
        ccompiler.link_executable(glob.glob(
            os.path.join('objects', '*' + ccompiler.obj_extension)),
                                  'test_openmp',
                                  extra_postargs=[link_flag])
        output = subprocess.check_output('./test_openmp').decode(
            sys.stdout.encoding or 'utf-8').splitlines()

        # The probe is expected to report 'nthreads=<N>' on its first line
        # and then emit len(output) == N lines in total — presumably one
        # per OpenMP thread (CCODE is defined elsewhere; confirm).  Guard
        # against an entirely empty output, which previously raised
        # IndexError instead of reporting failure.
        if output and 'nthreads=' in output[0]:
            nthreads = int(output[0].strip().split('=')[1])
            if len(output) == nthreads:
                using_openmp = True
            else:
                log.warn(
                    "Unexpected number of lines from output of test OpenMP "
                    "program (output was {0})".format(output))
                using_openmp = False
        else:
            log.warn("Unexpected output from test OpenMP "
                     "program (output was {0})".format(output))
            using_openmp = False

    except (CompileError, LinkError):

        using_openmp = False

    finally:

        os.chdir(start_dir)
        # Best-effort cleanup of the scratch directory so a cleanup failure
        # can never mask the probe result.
        shutil.rmtree(tmp_dir, ignore_errors=True)

    if using_openmp:
        log.info("Compiling Cython extension with OpenMP support")
        extension.extra_compile_args.append(compile_flag)
        extension.extra_link_args.append(link_flag)
    else:
        log.warn(
            "Cannot compile Cython extension with OpenMP, reverting to non-parallel code"
        )

    return using_openmp
Example #60
0
def mypycify(
    paths: List[str],
    mypy_options: Optional[List[str]] = None,
    *,
    verbose: bool = False,
    opt_level: str = '3',
    strip_asserts: bool = False,
    multi_file: bool = False,
    separate: Union[bool, List[Tuple[List[str], Optional[str]]]] = False,
    skip_cgen_input: Optional[Any] = None
) -> List['Extension']:
    """Main entry point to building using mypyc.

    This produces a list of Extension objects that should be passed as the
    ext_modules parameter to setup.

    Arguments:
        paths: A list of file paths to build. It may contain globs.
        mypy_options: Optionally, a list of command line flags to pass to mypy.
                      (This can also contain additional files, for compatibility reasons.)
        verbose: Should mypyc be more verbose. Defaults to false.

        opt_level: The optimization level, as a string. Defaults to '3' (meaning '-O3').
        strip_asserts: Should asserts be stripped from the generated code.

        multi_file: Should each Python module be compiled into its own C source file.
                    This can reduce compile time and memory requirements at the likely
                    cost of runtime performance of compiled code. Defaults to false.
        separate: Should compiled modules be placed in separate extension modules.
                  If False, all modules are placed in a single shared library.
                  If True, every module is placed in its own library.
                  Otherwise separate should be a list of
                  (file name list, optional shared library name) pairs specifying
                  groups of files that should be placed in the same shared library
                  (while all other modules will be placed in their own libraries).

                  Each group can be compiled independently, which can
                  speed up compilation, but calls between groups can
                  be slower than calls within a group and can't be
                  inlined.
        skip_cgen_input: If provided (used by the test harness), skip running the
                         C code generator and use this value directly as the
                         per-group C file contents.
    """

    setup_mypycify_vars()
    compiler_options = CompilerOptions(strip_asserts=strip_asserts,
                                       multi_file=multi_file, verbose=verbose)

    # Create a compiler object so we can make decisions based on what
    # compiler is being used. typeshed is missing some attributes on the
    # compiler object so we give it type Any
    compiler = ccompiler.new_compiler()  # type: Any
    sysconfig.customize_compiler(compiler)

    # Expand globs so callers can pass patterns like 'pkg/*.py'.
    expanded_paths = []
    for path in paths:
        expanded_paths.extend(glob.glob(path))

    build_dir = 'build'  # TODO: can this be overridden??
    # Idempotent: fine if the directory already exists from a previous build.
    os.makedirs(build_dir, exist_ok=True)

    sources, options = get_mypy_config(expanded_paths, mypy_options)
    # We generate a shared lib if there are multiple modules or if any
    # of the modules are in package. (Because I didn't want to fuss
    # around with making the single module code handle packages.)
    use_shared_lib = len(sources) > 1 or any('.' in x.module for x in sources)

    groups = construct_groups(sources, separate, use_shared_lib)

    # We let the test harness just pass in the c file contents instead
    # so that it can do a corner-cutting version without full stubs.
    if not skip_cgen_input:
        group_cfiles, ops_text = generate_c(sources, options, groups,
                                            compiler_options=compiler_options)
        # TODO: unique names?
        with open(os.path.join(build_dir, 'ops.txt'), 'w') as f:
            f.write(ops_text)
    else:
        group_cfiles = skip_cgen_input

    # Write out the generated C and collect the files for each group.
    # Each entry is (C source filenames to compile, header dependency paths).
    group_cfilenames = []  # type: List[Tuple[List[str], List[str]]]
    for cfiles in group_cfiles:
        cfilenames = []
        for cfile, ctext in cfiles:
            cfile = os.path.join(build_dir, cfile)
            write_file(cfile, ctext)
            # Headers are written out too, but only .c files get compiled.
            if os.path.splitext(cfile)[1] == '.c':
                cfilenames.append(cfile)

        deps = [os.path.join(build_dir, dep) for dep in get_header_deps(cfiles)]
        group_cfilenames.append((cfilenames, deps))

    cflags = []  # type: List[str]
    if compiler.compiler_type == 'unix':
        cflags += [
            '-O{}'.format(opt_level), '-Werror', '-Wno-unused-function', '-Wno-unused-label',
            '-Wno-unreachable-code', '-Wno-unused-variable', '-Wno-trigraphs',
            '-Wno-unused-command-line-argument', '-Wno-unknown-warning-option',
        ]
        if 'gcc' in compiler.compiler[0]:
            # This flag is needed for gcc but does not exist on clang.
            cflags += ['-Wno-unused-but-set-variable']
    elif compiler.compiler_type == 'msvc':
        # MSVC has no /O3; map the default to its highest level, /O2.
        if opt_level == '3':
            opt_level = '2'
        cflags += [
            '/O{}'.format(opt_level),
            '/wd4102',  # unreferenced label
            '/wd4101',  # unreferenced local variable
            '/wd4146',  # negating unsigned int
        ]
        if multi_file:
            # Disable whole program optimization in multi-file mode so
            # that we actually get the compilation speed and memory
            # use wins that multi-file mode is intended for.
            cflags += [
                '/GL-',
                '/wd9025',  # warning about overriding /GL
            ]

    # In multi-file mode, copy the runtime library in.
    # Otherwise it just gets #included to save on compiler invocations
    shared_cfilenames = []
    if multi_file:
        for name in ['CPy.c', 'getargs.c']:
            rt_file = os.path.join(build_dir, name)
            with open(os.path.join(include_dir(), name), encoding='utf-8') as f:
                write_file(rt_file, f.read())
            shared_cfilenames.append(rt_file)

    extensions = []
    for (group_sources, lib_name), (cfilenames, deps) in zip(groups, group_cfilenames):
        if use_shared_lib:
            assert lib_name
            extensions.extend(build_using_shared_lib(
                group_sources, lib_name, cfilenames + shared_cfilenames, deps, build_dir, cflags))
        else:
            extensions.extend(build_single_module(
                group_sources, cfilenames + shared_cfilenames, cflags))

    return extensions