Example #1
    def build_extension(self, ext):
        # Make sure that if pythonXX-sys is used, it builds against the current
        # executing python interpreter.
        bindir = os.path.dirname(sys.executable)

        env = os.environ.copy()
        env.update({
            # disables rust's pkg-config seeking for specified packages,
            # which causes pythonXX-sys to fall back to detecting the
            # interpreter from the path.
            "PATH": bindir + os.pathsep + os.environ.get("PATH", "")
        })

        if not os.path.exists(ext.path):
            raise DistutilsFileError(
                "Can not file rust extension project file: %s" % ext.path)

        features = set(ext.features)

        features.update(
            cpython_feature(pyo3=ext.pyo3, no_binding=ext.no_binding))

        if ext.debug is None:
            debug_build = self.inplace
        else:
            debug_build = ext.debug

        debug_build = self.debug if self.debug is not None else debug_build
        if self.release:
            debug_build = False

        quiet = self.qbuild or ext.quiet

        # build cargo command
        feature_args = ["--features" + " ".join(features)] if features else []
        args = (["cargo", "rustc", "--lib", "--manifest-path", ext.path] +
                feature_args + list(ext.args or []))
        if not debug_build:
            args.append("--release")
        if quiet:
            args.append("-q")

        args.extend(["--", '--crate-type', 'cdylib'])

        # OSX requires special linker argument
        if sys.platform == "darwin":
            args.extend(
                ["-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup"])

        if not quiet:
            print(" ".join(args), file=sys.stderr)

        # Execute cargo
        try:
            output = subprocess.check_output(args, env=env)
        except subprocess.CalledProcessError as e:
            raise CompileError("cargo failed with code: %d\n%s" %
                               (e.returncode, e.output))
        except OSError:
            raise DistutilsExecError(
                "Unable to execute 'cargo' - this package "
                "requires rust to be installed and cargo to be on the PATH")

        if not quiet:
            if isinstance(output, bytes):
                output = output.decode('latin-1')
            if output:
                print(output, file=sys.stderr)

        # Find the shared library that cargo hopefully produced and copy
        # it into the build directory as if it were produced by build_ext.
        if debug_build:
            suffix = "debug"
        else:
            suffix = "release"

        target_dir = os.path.join(os.path.dirname(ext.path), "target/", suffix)

        if sys.platform == "win32":
            wildcard_so = "*.dll"
        elif sys.platform == "darwin":
            wildcard_so = "*.dylib"
        else:
            wildcard_so = "*.so"

        try:
            dylib_path = glob.glob(os.path.join(target_dir, wildcard_so))[0]
        except IndexError:
            raise DistutilsExecError(
                "rust build failed; unable to find any %s in %s" %
                (wildcard_so, target_dir))

        # Ask build_ext where the shared library would go if it had built it,
        # then copy it there.
        build_ext = self.get_finalized_command('build_ext')
        build_ext.inplace = self.inplace
        target_fname = ext.name
        if target_fname is None:
            target_fname = os.path.basename(
                os.path.splitext(os.path.basename(dylib_path)[3:])[0])

        ext_path = build_ext.get_ext_fullpath(target_fname)
        try:
            os.makedirs(os.path.dirname(ext_path))
        except OSError:
            pass
        shutil.copyfile(dylib_path, ext_path)
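
For context, a minimal setup.py sketch that would exercise a build command like the one above; the package and module names are made up, and the RustExtension/Binding imports assume the setuptools-rust package this command belongs to.

from setuptools import setup
from setuptools_rust import Binding, RustExtension

setup(
    name="hello-rust",  # hypothetical distribution name
    version="0.1.0",
    packages=["hello_rust"],
    rust_extensions=[
        # Cargo.toml path is relative to setup.py; the binding selects PyO3
        RustExtension("hello_rust._native", "Cargo.toml", binding=Binding.PyO3),
    ],
    zip_safe=False,  # Rust extensions cannot be loaded from a zip file
)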
Example #2
def pyx2obj(pyxpath,
            objpath=None,
            destdir=None,
            cwd=None,
            include_dirs=None,
            cy_kwargs=None,
            cplus=None,
            **kwargs):
    """
    Convenience function

    If cwd is specified, pyxpath and objpath are taken to be relative to it.
    If only_update is set to `True` the modification time is checked
    and compilation is only run if the source is newer than the
    destination

    Parameters
    ==========

    pyxpath: str
        Path to Cython source file.
    objpath: str (optional)
        Path to object file to generate.
    destdir: str (optional)
        Directory to put generated C file. When ``None``: directory of ``objpath``.
    cwd: str (optional)
        Working directory and root of relative paths.
    include_dirs: iterable of path strings (optional)
        Passed onto src2obj and via cy_kwargs['include_path']
        to simple_cythonize.
    cy_kwargs: dict (optional)
        Keyword arguments passed onto `simple_cythonize`
    cplus: bool (optional)
        Indicate whether C++ is used. default: auto-detect using ``.util.pyx_is_cplus``.
    compile_kwargs: dict
        keyword arguments passed onto src2obj

    Returns
    =======

    Absolute path of generated object file.

    """
    assert pyxpath.endswith('.pyx')
    cwd = cwd or '.'
    objpath = objpath or '.'
    destdir = destdir or os.path.dirname(objpath)

    abs_objpath = get_abspath(objpath, cwd=cwd)

    if os.path.isdir(abs_objpath):
        pyx_fname = os.path.basename(pyxpath)
        name, ext = os.path.splitext(pyx_fname)
        objpath = os.path.join(objpath, name + objext)

    cy_kwargs = cy_kwargs or {}
    cy_kwargs['output_dir'] = cwd
    if cplus is None:
        cplus = pyx_is_cplus(pyxpath)
    cy_kwargs['cplus'] = cplus

    interm_c_file = simple_cythonize(pyxpath,
                                     destdir=destdir,
                                     cwd=cwd,
                                     **cy_kwargs)

    include_dirs = include_dirs or []
    flags = kwargs.pop('flags', [])
    needed_flags = ('-fwrapv', '-pthread', '-fPIC')
    for flag in needed_flags:
        if flag not in flags:
            flags.append(flag)

    options = kwargs.pop('options', [])

    if kwargs.pop('strict_aliasing', False):
        raise CompileError("Cython requires strict aliasing to be disabled.")

    # Let's be explicit about standard
    if cplus:
        std = kwargs.pop('std', 'c++98')
    else:
        std = kwargs.pop('std', 'c99')

    return src2obj(interm_c_file,
                   objpath=objpath,
                   cwd=cwd,
                   include_dirs=include_dirs,
                   flags=flags,
                   std=std,
                   options=options,
                   inc_py=True,
                   strict_aliasing=False,
                   **kwargs)
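
A hypothetical call to the convenience function above; the file names, directories and flags are illustrative only and assume a Cython source next to the build script.

obj_file = pyx2obj(
    'fastmod.pyx',
    objpath='./build/',   # a directory: the object name is derived from the .pyx name
    cwd='.',
    include_dirs=['include'],
    cplus=False,          # skip auto-detection via pyx_is_cplus
    flags=['-O2'],        # merged with the required -fwrapv, -pthread, -fPIC
)
print(obj_file)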
Example #3
    def compile(self,
                sources,
                output_dir=None,
                macros=None,
                include_dirs=None,
                debug=0,
                extra_preargs=None,
                extra_postargs=None,
                depends=None):

        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts + [output_opt] +
                               [input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] + ["/fo" + obj] + [rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile %s to %s" %
                                   (src, obj))

            output_opt = "/Fo" + obj
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] + extra_postargs)
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects
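
For context, a short sketch of how a distutils compiler such as the one above is usually driven from a build script; the source file name and macro are made up.

from distutils.ccompiler import new_compiler
from distutils.errors import CompileError

cc = new_compiler()  # picks the MSVC-based compiler class on Windows
try:
    objects = cc.compile(['hello.c'], output_dir='build',
                         macros=[('NDEBUG', '1')], debug=0)
except CompileError as exc:
    print('compilation failed:', exc)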
Example #4
def check_openmp_support():
    """Check whether OpenMP test code can be compiled and run"""
    ccompiler = new_compiler()
    customize_compiler(ccompiler)

    if os.getenv('SKTIME_NO_OPENMP'):
        # Build explicitly without OpenMP support
        return False

    start_dir = os.path.abspath('.')

    with tempfile.TemporaryDirectory() as tmp_dir:
        try:
            os.chdir(tmp_dir)

            # Write test program
            with open('test_openmp.c', 'w') as f:
                f.write(CCODE)

            os.mkdir('objects')

            # Compile, test program
            openmp_flags = get_openmp_flag(ccompiler)
            ccompiler.compile(['test_openmp.c'],
                              output_dir='objects',
                              extra_postargs=openmp_flags)

            # Link test program
            extra_preargs = os.getenv('LDFLAGS', None)
            extra_preargs = extra_preargs.split(
                " ") if extra_preargs is not None else []
            objects = glob.glob(
                os.path.join('objects', '*' + ccompiler.obj_extension))
            ccompiler.link_executable(objects,
                                      'test_openmp',
                                      extra_preargs=extra_preargs,
                                      extra_postargs=openmp_flags)

            # Run test program
            output = subprocess.check_output('./test_openmp')
            output = output.decode(sys.stdout.encoding or 'utf-8').splitlines()

            # Check test program output
            if 'nthreads=' in output[0]:
                nthreads = int(output[0].strip().split('=')[1])
                openmp_supported = (len(output) == nthreads)
            else:
                openmp_supported = False

        except (CompileError, LinkError, subprocess.CalledProcessError):
            openmp_supported = False

        finally:
            os.chdir(start_dir)

    err_message = textwrap.dedent("""
                            ***
        It seems that sktime cannot be built with OpenMP support.

        - If your compiler supports OpenMP but the build still fails, please
          submit a bug report at:
          'https://github.com/alan-turing-institute/sktime/issues'

        - If you want to build sktime without OpenMP support, you can set
          the environment variable SKTIME_NO_OPENMP and rerun the build
          command. Note however that some estimators will run in sequential
          mode and their `n_jobs` parameter will have no effect anymore.

        - See sktime advanced installation instructions for more info:
          https://alan-turing-institute.github.io/sktime/installation.html
                            ***
        """)

    if not openmp_supported:
        raise CompileError(err_message)

    return True
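
The CCODE constant written to test_openmp.c above is not part of this example; a plausible definition, modeled on the scikit-learn variant in Example #15 below, would be:

import textwrap

CCODE = textwrap.dedent("""\
    #include <omp.h>
    #include <stdio.h>
    int main(void) {
    #pragma omp parallel
    printf("nthreads=%d\\n", omp_get_num_threads());
    return 0;
    }
    """)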
Example #5
    def raise_compile_error(*args, **kwargs):  # type: ignore
        raise CompileError('Chalice blocked C extension compiling.')
Example #6
    def build_extension(self, ext):
        executable = ext.binding == Binding.Exec

        # Make sure that if pythonXX-sys is used, it builds against the current
        # executing python interpreter.
        bindir = os.path.dirname(sys.executable)

        env = os.environ.copy()
        env.update(
            {
                # disables rust's pkg-config seeking for specified packages,
                # which causes pythonXX-sys to fall back to detecting the
                # interpreter from the path.
                "PATH": os.path.join(bindir, os.environ.get("PATH", ""))
            }
        )

        # Find where to put the temporary build files created by `cargo`
        metadata_command = [
            "cargo",
            "metadata",
            "--manifest-path",
            ext.path,
            "--format-version",
            "1",
        ]
        # The decoding is needed for python 3.5 compatibility
        metadata = json.loads(check_output(metadata_command).decode("utf-8"))
        target_dir = metadata["target_directory"]

        if not os.path.exists(ext.path):
            raise DistutilsFileError(
                "Can not find rust extension project file: %s" % ext.path
            )

        features = set(ext.features)
        features.update(cpython_feature(binding=ext.binding))

        debug_build = ext.debug if ext.debug is not None else self.inplace
        debug_build = self.debug if self.debug is not None else debug_build
        if self.release:
            debug_build = False

        quiet = self.qbuild or ext.quiet

        # build cargo command
        feature_args = ["--features", " ".join(features)] if features else []

        if executable:
            args = (
                ["cargo", "build", "--manifest-path", ext.path]
                + feature_args
                + list(ext.args or [])
            )
            if not debug_build:
                args.append("--release")
            if quiet:
                args.append("-q")
            elif self.verbose:
                args.append("--verbose")

        else:
            args = (
                ["cargo", "rustc", "--lib", "--manifest-path", ext.path]
                + feature_args
                + list(ext.args or [])
            )
            if not debug_build:
                args.append("--release")
            if quiet:
                args.append("-q")
            elif self.verbose:
                args.append("--verbose")

            args.extend(["--", "--crate-type", "cdylib"])
            args.extend(ext.rustc_flags or [])

            # OSX requires special linker argument
            if sys.platform == "darwin":
                args.extend(
                    ["-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup"]
                )

        if not quiet:
            print(" ".join(args), file=sys.stderr)

        if ext.native:
            env["RUSTFLAGS"] = "-C target-cpu=native"

        # Execute cargo
        try:
            output = subprocess.check_output(args, env=env)
        except subprocess.CalledProcessError as e:
            output = e.output
            if isinstance(output, bytes):
                output = e.output.decode("latin-1").strip()
            raise CompileError(
                "cargo failed with code: %d\n%s" % (e.returncode, output)
            )

        except OSError:
            raise DistutilsExecError(
                "Unable to execute 'cargo' - this package "
                "requires rust to be installed and cargo to be on the PATH"
            )

        if not quiet:
            if isinstance(output, bytes):
                output = output.decode("latin-1")
            if output:
                print(output, file=sys.stderr)

        # Find the shared library that cargo hopefully produced and copy
        # it into the build directory as if it were produced by build_ext.
        if debug_build:
            suffix = "debug"
        else:
            suffix = "release"

        # location of cargo compiled files
        artifactsdir = os.path.join(target_dir, suffix)
        dylib_paths = []

        if executable:
            for name, dest in ext.target.items():
                if name:
                    path = os.path.join(artifactsdir, name)
                    if os.access(path, os.X_OK):
                        dylib_paths.append((dest, path))
                        continue
                    else:
                        raise DistutilsExecError(
                            "rust build failed; "
                            'unable to find executable "%s" in %s' % (name, target_dir)
                        )
                else:
                    # search executable
                    for name in os.listdir(artifactsdir):
                        path = os.path.join(artifactsdir, name)
                        if name.startswith(".") or not os.path.isfile(path):
                            continue

                        if os.access(path, os.X_OK):
                            dylib_paths.append((ext.name, path))
                            break

            if not dylib_paths:
                raise DistutilsExecError(
                    "rust build failed; unable to find executable in %s" % target_dir
                )
        else:
            if sys.platform == "win32":
                dylib_ext = "dll"
            elif sys.platform == "darwin":
                dylib_ext = "dylib"
            else:
                dylib_ext = "so"

            wildcard_so = "*{}.{}".format(ext.get_lib_name(), dylib_ext)

            try:
                dylib_paths.append(
                    (
                        ext.name,
                        next(glob.iglob(os.path.join(artifactsdir, wildcard_so))),
                    )
                )
            except StopIteration:
                raise DistutilsExecError(
                    "rust build failed; unable to find any %s in %s"
                    % (wildcard_so, artifactsdir)
                )

        # Ask build_ext where the shared library would go if it had built it,
        # then copy it there.
        build_ext = self.get_finalized_command("build_ext")
        build_ext.inplace = self.inplace

        for target_fname, dylib_path in dylib_paths:
            if not target_fname:
                target_fname = os.path.basename(
                    os.path.splitext(os.path.basename(dylib_path)[3:])[0]
                )

            if executable:
                ext_path = build_ext.get_ext_fullpath(target_fname)
                # remove .so extension
                ext_path, _ = os.path.splitext(ext_path)
                # remove python3 extension (i.e. cpython-36m)
                ext_path, _ = os.path.splitext(ext_path)

                ext.install_script(ext_path)
            else:
                ext_path = build_ext.get_ext_fullpath(target_fname)

            try:
                os.makedirs(os.path.dirname(ext_path))
            except OSError:
                pass

            shutil.copyfile(dylib_path, ext_path)

            if sys.platform != "win32" and not debug_build:
                args = []
                if ext.strip == Strip.All:
                    args.append("-x")
                elif ext.strip == Strip.Debug:
                    args.append("-S")

                if args:
                    args.insert(0, "strip")
                    args.append(ext_path)
                    try:
                        output = subprocess.check_output(args, env=env)
                    except subprocess.CalledProcessError:
                        pass  # stripping is best-effort; ignore failures

            if executable:
                mode = os.stat(ext_path).st_mode
                mode |= (mode & 0o444) >> 2  # copy R bits to X
                os.chmod(ext_path, mode)
Example #7
    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):

        macros, objects, extra_postargs, pp_opts, build = \
                self._setup_compile(output_dir, macros, include_dirs, sources,
                                    depends, extra_postargs)
        compile_opts = extra_preargs or []
        compile_opts.append ('-c')
        if debug:
            compile_opts.extend (self.compile_options_debug)
        else:
            compile_opts.extend (self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            # XXX why do the normpath here?
            src = os.path.normpath(src)
            obj = os.path.normpath(obj)
            # XXX _setup_compile() did a mkpath() too but before the normpath.
            # Is it possible to skip the normpath?
            self.mkpath(os.path.dirname(obj))

            if ext == '.res':
                # This is already a binary file -- skip it.
                continue # the 'for' loop
            if ext == '.rc':
                # This needs to be compiled to a .res file -- do it now.
                try:
                    self.spawn (["brcc32", "-fo", obj, src])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue # the 'for' loop

            # The next two are both for the real compiler.
            if ext in self._c_extensions:
                input_opt = ""
            elif ext in self._cpp_extensions:
                input_opt = "-P"
            else:
                # Unknown file type -- no extra options.  The compiler
                # will probably fail, but let it just in case this is a
                # file the compiler recognizes even if we don't.
                input_opt = ""

            output_opt = "-o" + obj

            # Compiler command line syntax is: "bcc32 [options] file(s)".
            # Note that the source file names must appear at the end of
            # the command line.
            try:
                self.spawn ([self.cc] + compile_opts + pp_opts +
                            [input_opt, output_opt] +
                            extra_postargs + [src])
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects
Example #8
    def run(self):
        CleanCommand.run(self)
        try:
            subprocess.check_call(['scons', '--remove'])
        except subprocess.CalledProcessError:
            raise CompileError("Error while cleaning Python Extensions")
Example #9
    def build_extension(
        self, ext: RustExtension, forced_target_triple: Optional[str]
    ) -> List["_BuiltModule"]:

        target_info = self._detect_rust_target(forced_target_triple)
        if target_info is not None:
            target_triple = target_info.triple
            cross_lib = target_info.cross_lib
            linker = target_info.linker
            # We're ignoring target_info.linker_args for now because we're not
            # sure if they will always do the right thing. Might help with some
            # of the OS-specific logic if it does.

        else:
            target_triple = None
            cross_lib = None
            linker = None

        rustc_cfgs = _get_rustc_cfgs(target_triple)

        env = _prepare_build_environment(cross_lib)

        if not os.path.exists(ext.path):
            raise DistutilsFileError(
                f"can't find Rust extension project file: {ext.path}"
            )

        # Find where to put the temporary build files created by `cargo`
        target_dir = _base_cargo_target_dir(ext)
        if target_triple is not None:
            target_dir = os.path.join(target_dir, target_triple)

        quiet = self.qbuild or ext.quiet
        debug = self._is_debug_build(ext)
        cargo_args = self._cargo_args(
            ext=ext, target_triple=target_triple, release=not debug, quiet=quiet
        )

        if ext._uses_exec_binding():
            command = [self.cargo, "build", "--manifest-path", ext.path, *cargo_args]

        else:
            rustc_args = [
                "--crate-type",
                "cdylib",
            ]

            if ext.rustc_flags is not None:
                rustc_args.extend(ext.rustc_flags)

            if linker is not None:
                rustc_args.extend(["-C", "linker=" + linker])

            # OSX requires special linker arguments
            if sys.platform == "darwin":
                ext_basename = os.path.basename(self.get_dylib_ext_path(ext, ext.name))
                rustc_args.extend(
                    [
                        "-C",
                        f"link-args=-undefined dynamic_lookup -Wl,-install_name,@rpath/{ext_basename}",
                    ]
                )

            if ext.native:
                rustc_args.extend(["-C", "target-cpu=native"])

            # Tell musl targets not to statically link libc. See
            # https://github.com/rust-lang/rust/issues/59302 for details.
            if rustc_cfgs.get("target_env") == "musl":
                # This must go in the env otherwise rustc will refuse to build
                # the cdylib, see https://github.com/rust-lang/cargo/issues/10143
                MUSL_FLAGS = "-C target-feature=-crt-static"
                rustflags = env.get("RUSTFLAGS")
                if rustflags is not None:
                    env["RUSTFLAGS"] = f"{rustflags} {MUSL_FLAGS}"
                else:
                    env["RUSTFLAGS"] = MUSL_FLAGS

                # Include this in the command-line anyway, so that when verbose
                # logging enabled the user will see that this flag is in use.
                rustc_args.extend(MUSL_FLAGS.split())

            command = [
                self.cargo,
                "rustc",
                "--lib",
                "--manifest-path",
                ext.path,
                *cargo_args,
                "--",
                *rustc_args,
            ]

        if not quiet:
            print(" ".join(command), file=sys.stderr)

        # Execute cargo
        try:
            output = subprocess.check_output(command, env=env, encoding="latin-1")
        except subprocess.CalledProcessError as e:
            raise CompileError(f"cargo failed with code: {e.returncode}\n{e.output}")

        except OSError:
            raise DistutilsExecError(
                "Unable to execute 'cargo' - this package "
                "requires Rust to be installed and cargo to be on the PATH"
            )

        if not quiet:
            if output:
                print(output, file=sys.stderr)

        # Find the shared library that cargo hopefully produced and copy
        # it into the build directory as if it were produced by build_ext.

        artifacts_dir = os.path.join(target_dir, "debug" if debug else "release")
        dylib_paths = []

        if ext._uses_exec_binding():
            for name, dest in ext.target.items():
                if not name:
                    name = dest.split(".")[-1]
                exe = sysconfig.get_config_var("EXE")
                if exe is not None:
                    name += exe

                path = os.path.join(artifacts_dir, name)
                if os.access(path, os.X_OK):
                    dylib_paths.append(_BuiltModule(dest, path))
                else:
                    raise DistutilsExecError(
                        "Rust build failed; "
                        f"unable to find executable '{name}' in '{artifacts_dir}'"
                    )
        else:
            if sys.platform == "win32" or sys.platform == "cygwin":
                dylib_ext = "dll"
            elif sys.platform == "darwin":
                dylib_ext = "dylib"
            else:
                dylib_ext = "so"

            wildcard_so = "*{}.{}".format(ext.get_lib_name(), dylib_ext)

            try:
                dylib_paths.append(
                    _BuiltModule(
                        ext.name,
                        next(glob.iglob(os.path.join(artifacts_dir, wildcard_so))),
                    )
                )
            except StopIteration:
                raise DistutilsExecError(
                    f"Rust build failed; unable to find any {wildcard_so} in {artifacts_dir}"
                )
        return dylib_paths
Example #10
        def compile(sources,
                    output_dir=None,
                    macros=None,
                    include_dirs=None,
                    debug=0,
                    extra_preargs=None,
                    extra_postargs=None,
                    depends=None):

            if not self.initialized:
                self.initialize()

            compile_info = self._setup_compile(output_dir, macros,
                                               include_dirs, sources, depends,
                                               extra_postargs)
            macros, objects, extra_postargs, pp_opts, build = compile_info

            compile_opts = extra_preargs or []
            compile_opts.append('/c')
            if debug:
                compile_opts.extend(self.compile_options_debug)
            else:
                compile_opts.extend(self.compile_options)

            add_cpp_opts = False

            for obj in objects:
                try:
                    src, ext = build[obj]
                except KeyError:
                    continue
                if debug:
                    # pass the full pathname to MSVC in debug mode,
                    # this allows the debugger to find the source file
                    # without asking the user to browse for it
                    src = os.path.abspath(src)

                if ext in self._c_extensions:
                    input_opt = "/Tc" + src
                elif ext in self._cpp_extensions:
                    input_opt = "/Tp" + src
                    add_cpp_opts = True
                elif ext in self._rc_extensions:
                    # compile .RC to .RES file
                    input_opt = src
                    output_opt = "/fo" + obj
                    try:
                        self.spawn([self.rc] + pp_opts +
                                   [output_opt, input_opt])
                    except DistutilsExecError as msg:
                        raise CompileError(msg)
                    continue
                elif ext in self._mc_extensions:
                    # Compile .MC to .RC file to .RES file.
                    #   * '-h dir' specifies the directory for the
                    #     generated include file
                    #   * '-r dir' specifies the target directory of the
                    #     generated RC file and the binary message resource
                    #     it includes
                    #
                    # For now (since there are no options to change this),
                    # we use the source-directory for the include file and
                    # the build directory for the RC file and message
                    # resources. This works at least for win32all.
                    h_dir = os.path.dirname(src)
                    rc_dir = os.path.dirname(obj)
                    try:
                        # first compile .MC to .RC and .H file
                        self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
                        base, _ = os.path.splitext(os.path.basename(src))
                        rc_file = os.path.join(rc_dir, base + '.rc')
                        # then compile .RC to .RES file
                        self.spawn([self.rc, "/fo" + obj, rc_file])

                    except DistutilsExecError as msg:
                        raise CompileError(msg)
                    continue
                elif ext in self._cuda_extensions:
                    input_opt = src
                else:
                    # how to handle this file?
                    raise CompileError(
                        "Don't know how to compile {} to {}, ext {}".format(
                            src, obj, ext))

                # release MT
                if "/MD" in compile_opts:
                    pass  #compile_opts[compile_opts.index("/MD")] = "/MT"
                # for cuda compiler
                if ext in self._cuda_extensions:
                    args = [CUDA['nvcc']]
                    args.append(input_opt)
                    args.append("-o=" + obj)
                    # suppress annoying unicode warnings
                    args.extend(["-Xcompiler", "/wd 4819"])
                    args.extend(
                        [_arg for _arg in pp_opts if _arg.startswith('-I')])
                    if isinstance(extra_postargs, dict):
                        args.extend(extra_postargs["nvcc"])
                    else:
                        args.extend(extra_postargs)
                else:
                    args = [self.cc] + compile_opts + pp_opts
                    if add_cpp_opts:
                        args.append('/EHsc')
                    args.append(input_opt)
                    args.append("/Fo" + obj)
                    if isinstance(extra_postargs, dict):
                        args.extend(extra_postargs["cc"])
                    else:
                        args.extend(extra_postargs)

                try:
                    # print('-----', args)
                    self.spawn(args)
                except DistutilsExecError as msg:
                    print("-----", args)
                    raise CompileError(msg)

            return objects
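
A hypothetical invocation showing the dict-valued extra_postargs that the code above splits between the host compiler and nvcc; compiler refers to an instance of the class defining this compile method, and all file names and flags are examples.

compiler.compile(
    ['kernel.cu', 'wrapper.cpp'],
    output_dir='build',
    include_dirs=['include'],
    extra_postargs={
        'cc': ['/O2'],                       # flags for the MSVC compiler
        'nvcc': ['-O3', '--use_fast_math'],  # flags for nvcc (.cu sources)
    },
)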
Example #11
    def run(self):
        try:
            subprocess.check_call(['scons'], shell=True)
        except subprocess.CalledProcessError:
            raise CompileError("Error while building Python Extensions")
        self.extensions = []
Example #12
    def run(self):
        print("[gymfc] starting Gazebo plugin build.")
        if self.build_plugin() != 0:
            raise CompileError("Failed to compile Gazebo plugin.")
        print("[gymfc] plugins built successfully.")
        DistutilsBuild.run(self)
Example #13
def createMakefile():

    say("==== Checking supported compiler options and available libraries ====\n"
        )
    LINK_FLAGS = LDFLAGS.split(
    )  # accumulate the linker flags that will be put to Makefile.local
    COMPILE_FLAGS = CFLAGS.split(
    )  # same for the compilation of the shared library only
    # default compilation flags for both the shared library and all example programs that use it
    CXXFLAGS = ['-fPIC', '-Wall', '-O2']

    # [1a]: check if a compiler exists at all
    if not runCompiler():
        raise CompileError(
            "Could not locate a compiler (set CXX=... environment variable to override)"
        )

    # [1b]: test if OpenMP is supported (optional)
    OMP_FLAG = '-fopenmp'
    OMP_CODE = "#include <omp.h>\nint main(){\n#pragma omp parallel for\nfor(int i=0; i<16; i++);\n}\n"
    if runCompiler(code=OMP_CODE,
                   flags=OMP_FLAG + ' -Werror -Wno-unknown-pragmas'):
        CXXFLAGS += [OMP_FLAG]
    else:
        if not ask(
                "Warning, OpenMP is not supported\n" +
                "If you're compiling on MacOS with clang, you'd better install another compiler such as GCC\n"
                + "Do you want to continue without OpenMP? [Y/N] "):
            exit(1)

    # [1c]: test if C++11 is supported (optional)
    CXX11_FLAG = '-std=c++11'
    if runCompiler(flags=CXX11_FLAG):
        CXXFLAGS += [CXX11_FLAG]

    # [1d]: test the -march flag (optional, allows architecture-dependent compiler optimizations)
    ARCH_FLAG = '-march=native'
    ARCH_CODE = 'int main(int c, char** v) { double x=c*3.14; return x==42; }\n'
    if runCompiler(code=ARCH_CODE, flags=ARCH_FLAG):
        CXXFLAGS += [ARCH_FLAG]
    else:
        ARCH_FLAG = '-march=core2'  # try a less ambitious option
        if runCompiler(code=ARCH_CODE, flags=ARCH_FLAG):
            CXXFLAGS += [ARCH_FLAG]

    # [1e]: special treatment for Intel compiler to restore determinism in OpenMP-parallelized loops
    INTEL_FLAG = '-qno-opt-dynamic-align'
    if runCompiler(flags=INTEL_FLAG):
        CXXFLAGS += [INTEL_FLAG]

    # [2a]: check that NumPy is present (required by the python interface)
    try:
        import numpy
        NUMPY_INC = '-I' + numpy.get_include()
    except ImportError:
        raise CompileError(
            "NumPy is not present - python extension cannot be compiled")

    # [2b]: find out the paths to Python.h and libpythonXX.{a,so} (this is rather tricky) and
    # all other relevant compilation/linking flags needed to build a shared library that uses Python
    PYTHON_INC = '-I' + sysconfig.get_python_inc()

    # various other system libraries that are needed at link time
    PYTHON_LIB_EXTRA = \
        sysconfig.get_config_var('LIBS').split() + \
        sysconfig.get_config_var('SYSLIBS').split() #+ ['-lz']

    # try compiling a test code with the provided link flags (in particular, the name of Python library):
    # check that a sample C++ program with embedded python compiles, links and runs properly
    numAttempts = 0

    def tryPythonCode(PYTHON_LIB):
        # test code for a shared library
        PYTEST_LIB_CODE = """
#include "Python.h"
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include "numpy/arrayobject.h"
void bla() {PyRun_SimpleString("from distutils import sysconfig;print(sysconfig.PREFIX);");}
void run() {Py_Initialize();bla();Py_Finalize();}
PyMODINIT_FUNC
"""
        if sys.version_info[0] == 2:  # Python 2.6-2.7
            PYTEST_LIB_CODE += """
initpytest42(void) {
    Py_InitModule3("pytest42", NULL, "doc");
    import_array();
    bla();
}
"""
        else:  # Python 3.x
            PYTEST_LIB_CODE += """
PyInit_pytest42(void) {
    static PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, "pytest42", "doc", -1, NULL};
    PyObject* mod = PyModule_Create(&moduledef);
    import_array1(mod);
    bla();
    return mod;
}
"""
        # test code for a program that loads this shared library
        PYTEST_EXE_CODE = 'extern void run();int main(){run();}\n'
        PYTEST_LIB_NAME = './pytest42.so'
        PYTEST_EXE_NAME = './pytest42.exe'
        # try compiling the test shared library
        if not runCompiler(code=PYTEST_LIB_CODE, \
            flags=' '.join([PYTHON_INC, NUMPY_INC, '-shared', '-fPIC'] + PYTHON_LIB), \
            dest=PYTEST_LIB_NAME):
            return False  # the program couldn't be compiled at all (try the next variant)
        # if succeeded, compile the test program that uses this library
        if not runCompiler(code=PYTEST_EXE_CODE, flags=PYTEST_LIB_NAME, dest=PYTEST_EXE_NAME) \
            or not os.path.isfile(PYTEST_LIB_NAME) or not os.path.isfile(PYTEST_EXE_NAME):
            return False  # can't find compiled test program
        resultexe = runProgram(PYTEST_EXE_NAME).rstrip()
        # also try loading this shared library as an extension module
        proc = subprocess.Popen(sys.executable+" -c 'import pytest42'", \
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
        resultpy = proc.communicate()[0].decode().rstrip()
        # clean up
        os.remove(PYTEST_EXE_NAME)
        os.remove(PYTEST_LIB_NAME)
        # check if the results (reported library path prefix) are the same as we have in this script
        if resultexe != sysconfig.PREFIX or resultpy != sysconfig.PREFIX:
            # return a warning, the user may still wish to continue
            return "Test program doesn't seem to use the same version of Python, "+\
                "or the library path is reported incorrectly: \n"+\
                "Expected: "+sysconfig.PREFIX+"\n"+\
                "Received: "+resultexe+"\n"+\
                "From py:  "+resultpy+"\n"+\
                "Should we continue the build (things may go wrong at a later stage)? [Y/N] "
        return True  # this combination of options seems reasonable...

    # explore various possible combinations of file name and path to the python library...
    def findPythonLib():
        PLANB_LIB = None
        # try linking against the static python library libpython**.a, if this does not succeed,
        # try the shared library libpython**.so**
        LIBNAMES = ['LIBRARY', 'LDLIBRARY', 'INSTSONAME']
        for PYTHON_LIB_FILE in [sysconfig.get_config_var(x) for x in LIBNAMES]:
            for PYTHON_LIB_PATH in [
                    sysconfig.get_config_var(x) for x in ['LIBPL', 'LIBDIR']
            ]:
                PYTHON_LIB_FILEPATH = os.path.join(PYTHON_LIB_PATH,
                                                   PYTHON_LIB_FILE)
                if os.path.isfile(PYTHON_LIB_FILEPATH):
                    # other libraries depend on whether this is a static or a shared python library
                    PYTHON_LIB = [PYTHON_LIB_FILEPATH] + PYTHON_LIB_EXTRA
                    if PYTHON_LIB_FILE.endswith(
                            '.a'
                    ) and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
                        PYTHON_LIB += sysconfig.get_config_var(
                            'LINKFORSHARED').split()
                    # the stack_size flag is problematic and needs to be removed
                    PYTHON_LIB = [
                        x for x in PYTHON_LIB
                        if not x.startswith('-Wl,-stack_size,')
                    ]
                    result = tryPythonCode(PYTHON_LIB)
                    if result is True:
                        return PYTHON_LIB  # successful compilation
                    if result:  # not True, but a warning string
                        # test compiled, but with a version mismatch warning, store it as a backup option
                        PLANB_LIB = PYTHON_LIB
                        PLANB_ASK = result
        if PLANB_LIB is not None and ask(PLANB_ASK):
            # the user wants to continue with the backup option
            return PLANB_LIB
        # if none of the above combinations worked, give up...
        raise CompileError(
            "Could not compile test program which uses libpython" +
            sysconfig.get_config_var('VERSION'))

    # [2c]: find the python library and other relevant linking flags
    PYTHON_LIB = findPythonLib()
    COMPILE_FLAGS += ['-DHAVE_PYTHON', PYTHON_INC, NUMPY_INC]
    LINK_FLAGS += PYTHON_LIB

    # [3]: check that GSL is present, and find out its version (required)
    # try compiling a snippet of code into a shared library (tests if GSL has been compiled with -fPIC)
    GSL_CODE = """#include <gsl/gsl_version.h>
    #if not defined(GSL_MAJOR_VERSION) || (GSL_MAJOR_VERSION == 1) && (GSL_MINOR_VERSION < 15)
    #error "GSL version is too old (need at least 1.15)"
    #endif
    void dummy(){}
    """
    if runCompiler(code=GSL_CODE, flags='-fPIC -lgsl -lgslcblas -shared'):
        # apparently the headers and libraries can be found in some standard location,
        LINK_FLAGS += ['-lgsl', '-lgslcblas']  # so we only list their names
    else:
        if not ask("GSL library (required) is not found\n" +
                   "Should we try to download and compile it now? [Y/N] "):
            exit(1)
        distutils.dir_util.mkpath(EXTRAS_DIR)
        os.chdir(EXTRAS_DIR)
        say('Downloading GSL\n')
        filename = 'gsl.tar.gz'
        dirname = 'gsl-2.4'
        try:
            urlretrieve('ftp://ftp.gnu.org/gnu/gsl/gsl-2.4.tar.gz', filename)
            if os.path.isfile(filename):
                subprocess.call(['tar', '-zxf',
                                 filename])  # unpack the archive
                os.remove(filename)  # remove the downloaded archive
                if not os.path.isdir(dirname):
                    raise Exception("Error unpacking GSL")
        except Exception as e:
            raise CompileError(
                str(e) + "\nError downloading GSL library, aborting...\n" +
                "You may try to manually compile GSL and install it to " +
                ROOT_DIR + "/" + EXTRAS_DIR + ", so that " +
                "the header files are in " + EXTRAS_DIR +
                "/include and library files - in " + EXTRAS_DIR + "/lib")
        say('Compiling GSL (may take a few minutes)\n')
        result = subprocess.call(
            '(cd ' + dirname + '; ./configure --prefix=' + os.getcwd() +
            ' CFLAGS="-fPIC -O2" --enable-shared=no; make; make install) > gsl-install.log',
            shell=True)
        if result != 0 or not os.path.isfile('lib/libgsl.a'):
            raise CompileError("GSL compilation failed (check " + EXTRAS_DIR +
                               "/gsl-install.log)")
        distutils.dir_util.remove_tree(
            dirname)  # clean up source and build directories
        COMPILE_FLAGS += ['-I' + EXTRAS_DIR + '/include']
        LINK_FLAGS += [
            EXTRAS_DIR + '/lib/libgsl.a', EXTRAS_DIR + '/lib/libgslcblas.a'
        ]
        os.chdir(ROOT_DIR)

    # [4]: test if Eigen library is present (optional)
    if runCompiler(code='#include <Eigen/Core>\nint main(){}\n'):
        COMPILE_FLAGS += ['-DHAVE_EIGEN']
    else:
        if ask("Eigen library (recommended) is not found\n" +
               "Should we try to download it now (no compilation needed)? [Y/N] "
               ):
            distutils.dir_util.mkpath(EXTRAS_DIR + '/include/unsupported')
            os.chdir(EXTRAS_DIR)
            say('Downloading Eigen\n')
            filename = 'Eigen.zip'
            dirname = 'eigen-git-mirror-3.3.4'
            try:
                urlretrieve(
                    'https://github.com/eigenteam/eigen-git-mirror/archive/3.3.4.zip',
                    filename)
                if os.path.isfile(filename):
                    subprocess.call('unzip ' + filename + ' >/dev/null',
                                    shell=True)  # unpack the archive
                    if os.path.isdir(dirname):
                        distutils.dir_util.copy_tree(dirname + '/Eigen',
                                                     'include/Eigen',
                                                     verbose=False)
                        distutils.dir_util.copy_tree(
                            dirname + '/unsupported/Eigen',
                            'include/unsupported/Eigen',
                            verbose=False)  # copy the headers
                        distutils.dir_util.remove_tree(
                            dirname)  # and delete the rest
                        COMPILE_FLAGS += [
                            '-DHAVE_EIGEN', '-I' + EXTRAS_DIR + '/include'
                        ]
                    os.remove(filename)  # remove the downloaded archive
            except:
                pass  # didn't succeed with Eigen
            os.chdir(ROOT_DIR)

    # [5a]: test if CVXOPT is present (optional)
    try:
        import cvxopt  # import the python module
    except:  # import error or some other problem, might be corrected
        if ask("CVXOPT library (needed only for Schwarzschild modelling) is not found\n"
               "Should we try to install it now? [Y/N] "):
            try:
                import pip
                pip.main(['install', '--user', 'cvxopt'])
            except Exception as e:
                say("Failed to install CVXOPT: " + str(e) + "\n")

    # [5b]: if the cvxopt module is available in Python, make sure that we also have C header files
    try:
        import cvxopt  # if this fails, skip cvxopt altogether
        if runCompiler(code='#include <cvxopt.h>\nint main(){}\n',
                       flags=' '.join(['-c', PYTHON_INC, NUMPY_INC])):
            COMPILE_FLAGS += ['-DHAVE_CVXOPT']
        else:
            # download the C header file if it does not appear to be present in a default location
            distutils.dir_util.mkpath(EXTRAS_DIR + '/include')
            say('Downloading CVXOPT header files\n')
            try:
                urlretrieve(
                    'https://raw.githubusercontent.com/cvxopt/cvxopt/master/src/C/cvxopt.h',
                    EXTRAS_DIR + '/include/cvxopt.h')
                urlretrieve(
                    'https://raw.githubusercontent.com/cvxopt/cvxopt/master/src/C/blas_redefines.h',
                    EXTRAS_DIR + '/include/blas_redefines.h')
            except:
                pass  # problems in downloading, skip it
            if  os.path.isfile(EXTRAS_DIR+'/include/cvxopt.h') and \
                os.path.isfile(EXTRAS_DIR+'/include/blas_redefines.h'):
                COMPILE_FLAGS += [
                    '-DHAVE_CVXOPT', '-I' + EXTRAS_DIR + '/include'
                ]
            else:
                say("Failed to download CVXOPT header files, this feature will not be available\n"
                    )
    except:
        pass  # cvxopt wasn't available

    # [6]: test if GLPK is present (optional)
    if runCompiler(code='#include <glpk.h>\nint main(){}\n', flags='-lglpk'):
        COMPILE_FLAGS += ['-DHAVE_GLPK']
        LINK_FLAGS += ['-lglpk']
    else:
        say("GLPK library (optional) is not found\n")

    # [7]: test if UNSIO is present (optional)
    if runCompiler(code='#include <uns.h>\nint main(){}\n',
                   flags='-lunsio -lnemo'):
        COMPILE_FLAGS += ['-DHAVE_UNSIO']
        LINK_FLAGS += ['-lunsio', '-lnemo']
    else:
        say("UNSIO library (optional) is not found\n")

    # [99]: put everything together and create Makefile.local
    with open('Makefile.local', 'w') as f:
        f.write(
            "# set the default compiler if no value is found in the environment variables or among command-line arguments\n"
            + "ifeq ($(origin CXX),default)\nCXX = g++\nendif\n" +
            "ifeq ($(origin FC), default)\nFC  = gfortran\nendif\nLINK = $(CXX)\n"
            +
            "# compilation/linking flags for both the shared library and any programs that use it\n"
            + "CXXFLAGS      += " + " ".join(compressList(CXXFLAGS)) + "\n" +
            "# compilation flags for the shared library only (files in src/)\n"
            + "COMPILE_FLAGS += " + " ".join(compressList(COMPILE_FLAGS)) +
            "\n" + "# linking flags for the shared library only\n" +
            "LINK_FLAGS    += " + " ".join(compressList(LINK_FLAGS)) + "\n")
Example #14
from distutils.errors import CompileError

from distutils.command.build_py import build_py as BuildPyCommand
from distutils.command.build_ext import build_ext as BuildExtCommand
from distutils.command.clean import clean as CleanCommand
from distutils.cmd import Command
import distutils.extension
import subprocess
import os.path
import os
import fnmatch

try:
    subprocess.check_call(['scons'], shell=True)
except subprocess.CalledProcessError:
    raise CompileError("Error while building Python Extensions")


def remove_head_directories(path, heads=1):
    def explode_path(path):
        head, tail = os.path.split(path)
        return explode_path(head) + [tail] \
            if head and head != path else [head or tail]

    exploded_path = explode_path(path)
    if len(exploded_path) < (heads + 1):
        return ''
    else:
        return os.path.join(*exploded_path[heads:])
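
A quick usage check for remove_head_directories; the path is made up.

assert remove_head_directories('build/temp/pkg/module.c', heads=2) == \
    os.path.join('pkg', 'module.c')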

Example #15
def check_openmp_support():
    """Check whether OpenMP test code can be compiled and run"""
    if "PYODIDE_PACKAGE_ABI" in os.environ:
        # Pyodide doesn't support OpenMP
        return False
    code = textwrap.dedent("""\
        #include <omp.h>
        #include <stdio.h>
        int main(void) {
        #pragma omp parallel
        printf("nthreads=%d\\n", omp_get_num_threads());
        return 0;
        }
        """)

    extra_preargs = os.getenv('LDFLAGS', None)
    if extra_preargs is not None:
        extra_preargs = extra_preargs.strip().split(" ")
        extra_preargs = [
            flag for flag in extra_preargs
            if flag.startswith(('-L', '-Wl,-rpath', '-l'))
        ]

    extra_postargs = get_openmp_flag

    try:
        output = compile_test_program(code,
                                      extra_preargs=extra_preargs,
                                      extra_postargs=extra_postargs)

        if output and 'nthreads=' in output[0]:
            nthreads = int(output[0].strip().split('=')[1])
            openmp_supported = len(output) == nthreads
        elif "PYTHON_CROSSENV" in os.environ:
            # Since we can't run the test program when cross-compiling
            # assume that openmp is supported if the program can be
            # compiled.
            openmp_supported = True
        else:
            openmp_supported = False

    except (CompileError, LinkError, subprocess.CalledProcessError):
        openmp_supported = False

    if not openmp_supported:
        if os.getenv("SKLEARN_FAIL_NO_OPENMP"):
            raise CompileError("Failed to build with OpenMP")
        else:
            message = textwrap.dedent("""

                                ***********
                                * WARNING *
                                ***********

                It seems that scikit-learn cannot be built with OpenMP.

                - Make sure you have followed the installation instructions:

                    https://scikit-learn.org/dev/developers/advanced_installation.html

                - If your compiler supports OpenMP but you still see this
                  message, please submit a bug report at:

                    https://github.com/scikit-learn/scikit-learn/issues

                - The build will continue with OpenMP-based parallelism
                  disabled. Note however that some estimators will run in
                  sequential mode instead of leveraging thread-based
                  parallelism.

                                    ***
                """)
            warnings.warn(message)

    return openmp_supported
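
compile_test_program and get_openmp_flag are helpers imported from elsewhere in the build tooling; a rough sketch of what compile_test_program might do (an assumption, closely mirroring the sktime variant in Example #4 above):

import glob
import os
import subprocess
import sys
import tempfile
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler


def compile_test_program(code, extra_preargs=None, extra_postargs=None):
    """Compile and run `code`, returning its captured output as a list of lines."""
    ccompiler = new_compiler()
    customize_compiler(ccompiler)

    if callable(extra_postargs):
        # the example above passes get_openmp_flag itself; assume it takes the
        # configured compiler and returns a list of flags
        extra_postargs = extra_postargs(ccompiler)

    start_dir = os.path.abspath('.')
    with tempfile.TemporaryDirectory() as tmp_dir:
        try:
            os.chdir(tmp_dir)
            with open('test_program.c', 'w') as f:
                f.write(code)
            os.mkdir('objects')
            ccompiler.compile(['test_program.c'], output_dir='objects',
                              extra_postargs=extra_postargs)
            objects = glob.glob(
                os.path.join('objects', '*' + ccompiler.obj_extension))
            ccompiler.link_executable(objects, 'test_program',
                                      extra_preargs=extra_preargs,
                                      extra_postargs=extra_postargs)
            output = subprocess.check_output('./test_program')
            output = output.decode(sys.stdout.encoding or 'utf-8').splitlines()
        finally:
            os.chdir(start_dir)
    return output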
Example #16
    def run(self):
        if 'sdist' in self.distribution.commands:
            return

        if not self.extensions:
            return

        all_optional = all(ext.optional for ext in self.extensions)
        try:
            version = get_rust_version()
        except DistutilsPlatformError as e:
            if not all_optional:
                raise
            else:
                print(str(e))
                return
        if version not in MIN_VERSION:
            print('Rust version mismatch: required rust%s got rust%s' %
                  (MIN_VERSION, version))
            return

        # Make sure that if pythonXX-sys is used, it builds against the current
        # executing python interpreter.
        bindir = os.path.dirname(sys.executable)

        env = os.environ.copy()
        env.update({
            # disables rust's pkg-config seeking for specified packages,
            # which causes pythonXX-sys to fall back to detecting the
            # interpreter from the path.
            "PYTHON_2.7_NO_PKG_CONFIG": "1",
            "PATH": bindir + os.pathsep + os.environ.get("PATH", "")
        })

        for ext in self.extensions:
            try:
                if not os.path.exists(ext.path):
                    raise DistutilsFileError(
                        "Can not file rust extension project file: %s" %
                        ext.path)

                features = set(ext.features)
                features.update(cpython_feature(binding=ext.binding))

                # check cargo command
                feature_args = ["--features", " ".join(features)
                                ] if features else []
                args = (["cargo", "check", "--manifest-path", ext.path] +
                        feature_args + list(ext.args or []))

                # Execute cargo command
                try:
                    subprocess.check_output(args)
                except subprocess.CalledProcessError as e:
                    raise CompileError(
                        "cargo failed with code: %d\n%s" %
                        (e.returncode, e.output.decode("utf-8")))
                except OSError:
                    raise DistutilsExecError(
                        "Unable to execute 'cargo' - this package "
                        "requires rust to be installed and "
                        "cargo to be on the PATH")
                else:
                    print("Extension '%s' checked" % ext.name)
            except (DistutilsFileError, DistutilsExecError, CompileError) as e:
                if not ext.optional:
                    raise
                else:
                    print('Check optional Rust extension %s failed.' %
                          ext.name)
                    print(str(e))
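get_rust_version() and MIN_VERSION are defined outside this excerpt. A minimal sketch of such a helper, assuming it only needs the version string reported by `rustc --version` (the helper name and error message below are illustrative, not the project's actual code):

import subprocess
from distutils.errors import DistutilsPlatformError

def get_rust_version_sketch():
    # `rustc --version` prints e.g. "rustc 1.70.0 (90c541806 2023-05-31)";
    # the second whitespace-separated token is the version string.
    try:
        out = subprocess.check_output(["rustc", "--version"]).decode("utf-8")
        return out.split()[1]
    except (OSError, subprocess.CalledProcessError):
        raise DistutilsPlatformError(
            "can not find Rust compiler: install rustc and make sure it is on the PATH")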
Beispiel #17
0
    def build_extension(self, ext):
        executable = ext.binding == Binding.Exec

        rust_target_info = get_rust_target_info()

        # Make sure that if pythonXX-sys is used, it builds against the current
        # executing python interpreter.
        bindir = os.path.dirname(sys.executable)

        env = os.environ.copy()
        env.update({
            # disables rust's pkg-config seeking for specified packages,
            # which causes pythonXX-sys to fall back to detecting the
            # interpreter from the path.
            "PATH": os.path.join(bindir, os.environ.get("PATH", "")),
            "PYTHON_SYS_EXECUTABLE": sys.executable,
            "PYO3_PYTHON": sys.executable,
        })
        rustflags = ""

        # If we are on a 64-bit machine, but running a 32-bit Python, then
        # we'll target a 32-bit Rust build.
        # TODO: include --target for all platforms so env vars can't break the build
        target_triple = None
        target_args = []
        if platform.machine() == "AMD64" and self.plat_name == "win32":
            target_triple = "i686-pc-windows-msvc"
            target_args = ["--target", target_triple]

        # Find where to put the temporary build files created by `cargo`
        metadata_command = [
            "cargo",
            "metadata",
            "--manifest-path",
            ext.path,
            "--format-version",
            "1",
        ]
        # The decoding is needed for python 3.5 compatibility
        metadata = json.loads(check_output(metadata_command).decode("utf-8"))
        target_dir = metadata["target_directory"]

        if not os.path.exists(ext.path):
            raise DistutilsFileError(
                "Can not find rust extension project file: %s" % ext.path)

        features = set(ext.features)
        features.update(cpython_feature(binding=ext.binding))

        debug_build = ext.debug if ext.debug is not None else self.inplace
        debug_build = self.debug if self.debug is not None else debug_build
        if self.release:
            debug_build = False

        quiet = self.qbuild or ext.quiet

        # build cargo command
        feature_args = ["--features", " ".join(features)] if features else []

        if executable:
            args = (["cargo", "build", "--manifest-path", ext.path] +
                    feature_args + target_args + list(ext.args or []))
            if not debug_build:
                args.append("--release")
            if quiet:
                args.append("-q")
            elif self.verbose:
                args.append("--verbose")

        else:
            args = (["cargo", "rustc", "--lib", "--manifest-path", ext.path] +
                    feature_args + target_args + list(ext.args or []))
            if not debug_build:
                args.append("--release")
            if quiet:
                args.append("-q")
            elif self.verbose:
                args.append("--verbose")

            args.extend(["--", "--crate-type", "cdylib"])
            args.extend(ext.rustc_flags or [])

            # OSX requires special linker argument
            if sys.platform == "darwin":
                args.extend([
                    "-C", "link-arg=-undefined", "-C",
                    "link-arg=dynamic_lookup"
                ])
            # Tell musl targets not to statically link libc. See
            # https://github.com/rust-lang/rust/issues/59302 for details.
            if b'target_env="musl"' in rust_target_info:
                rustflags += " -C target-feature=-crt-static"

        if not quiet:
            print(" ".join(args), file=sys.stderr)

        if ext.native:
            rustflags += " -C target-cpu=native"

        if rustflags:
            env["RUSTFLAGS"] = rustflags

        # Execute cargo
        try:
            output = subprocess.check_output(args, env=env)
        except subprocess.CalledProcessError as e:
            output = e.output
            if isinstance(output, bytes):
                output = e.output.decode("latin-1").strip()
            raise CompileError("cargo failed with code: %d\n%s" %
                               (e.returncode, output))

        except OSError:
            raise DistutilsExecError(
                "Unable to execute 'cargo' - this package "
                "requires rust to be installed and cargo to be on the PATH")

        if not quiet:
            if isinstance(output, bytes):
                output = output.decode("latin-1")
            if output:
                print(output, file=sys.stderr)

        # Find the shared library that cargo hopefully produced and copy
        # it into the build directory as if it were produced by build_ext.
        if debug_build:
            suffix = "debug"
        else:
            suffix = "release"

        # location of cargo compiled files
        artifactsdir = os.path.join(target_dir, target_triple or "", suffix)
        dylib_paths = []

        if executable:
            for name, dest in ext.target.items():
                if name:
                    path = os.path.join(artifactsdir, name)
                    if os.access(path, os.X_OK):
                        dylib_paths.append((dest, path))
                        continue
                    else:
                        raise DistutilsExecError(
                            "rust build failed; "
                            'unable to find executable "%s" in %s' %
                            (name, target_dir))
                else:
                    # search executable
                    for name in os.listdir(artifactsdir):
                        path = os.path.join(artifactsdir, name)
                        if name.startswith(".") or not os.path.isfile(path):
                            continue

                        if os.access(path, os.X_OK):
                            dylib_paths.append((ext.name, path))
                            break

            if not dylib_paths:
                raise DistutilsExecError(
                    "rust build failed; unable to find executable in %s" %
                    target_dir)
        else:
            if sys.platform == "win32" or sys.platform == "cygwin":
                dylib_ext = "dll"
            elif sys.platform == "darwin":
                dylib_ext = "dylib"
            else:
                dylib_ext = "so"

            wildcard_so = "*{}.{}".format(ext.get_lib_name(), dylib_ext)

            try:
                dylib_paths.append((
                    ext.name,
                    next(glob.iglob(os.path.join(artifactsdir, wildcard_so))),
                ))
            except StopIteration:
                raise DistutilsExecError(
                    "rust build failed; unable to find any %s in %s" %
                    (wildcard_so, artifactsdir))

        # Ask build_ext where the shared library would go if it had built it,
        # then copy it there.
        build_ext = self.get_finalized_command("build_ext")
        build_ext.inplace = self.inplace

        for target_fname, dylib_path in dylib_paths:
            if not target_fname:
                target_fname = os.path.basename(
                    os.path.splitext(os.path.basename(dylib_path)[3:])[0])

            if executable:
                ext_path = build_ext.get_ext_fullpath(target_fname)
                # remove .so extension
                ext_path, _ = os.path.splitext(ext_path)
                # remove python3 extension (i.e. cpython-36m)
                ext_path, _ = os.path.splitext(ext_path)

                ext.install_script(ext_path)
            else:
                # Technically it's supposed to contain a
                # `setuptools.Extension`, but in practice the only attribute it
                # checks is `ext.py_limited_api`.
                modpath = target_fname.split('.')[-1]
                assert modpath not in build_ext.ext_map
                build_ext.ext_map[modpath] = ext
                try:
                    ext_path = build_ext.get_ext_fullpath(target_fname)
                finally:
                    del build_ext.ext_map[modpath]

            try:
                os.makedirs(os.path.dirname(ext_path))
            except OSError:
                pass

            shutil.copyfile(dylib_path, ext_path)

            if sys.platform != "win32" and not debug_build:
                args = []
                if ext.strip == Strip.All:
                    args.append("-x")
                elif ext.strip == Strip.Debug:
                    args.append("-S")

                if args:
                    args.insert(0, "strip")
                    args.append(ext_path)
                    try:
                        output = subprocess.check_output(args, env=env)
                    except subprocess.CalledProcessError:
                        pass

            # executables and win32(cygwin)-dll's need X bits
            if executable or sys.platform == "win32" or sys.platform == "cygwin":
                mode = os.stat(ext_path).st_mode
                mode |= (mode & 0o444) >> 2  # copy R bits to X
                os.chmod(ext_path, mode)
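The final chmod above relies on a small bit trick: `(mode & 0o444) >> 2` copies each read bit onto the corresponding execute bit. A self-contained sketch of that step (the function name is illustrative):

import os

def make_executable(path):
    # Keep the existing permissions and add an execute bit wherever the
    # corresponding read bit is already set (0o444 >> 2 == 0o111), e.g.
    # 0o644 becomes 0o755 and 0o600 becomes 0o700.
    mode = os.stat(path).st_mode
    mode |= (mode & 0o444) >> 2
    os.chmod(path, mode)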
Beispiel #18
0
    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):

        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)


        add_cpp_opts = False

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            # Anaconda/conda-forge customisation, we want our pdbs to be
            # relocatable:
            # https://developercommunity.visualstudio.com/comments/623156/view.html
            d1trimfile_opts = []
            if 'SRC_DIR' in os.environ:
                d1trimfile_opts.append("/d1trimfile:" + os.environ['SRC_DIR'])

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
                add_cpp_opts = True
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts + [output_opt, input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
                    base, _ = os.path.splitext(os.path.basename (src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc, "/fo" + obj, rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile {} to {}"
                                   .format(src, obj))

            args = [self.cc] + compile_opts + pp_opts + d1trimfile_opts
            if add_cpp_opts:
                args.append('/EHsc')
            args.append(input_opt)
            args.append("/Fo" + obj)
            args.extend(extra_postargs)

            try:
                self.spawn(args)
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects
Beispiel #19
0
    def __init__(self,
                 sources,
                 out,
                 flags=None,
                 run_linker=True,
                 compiler=None,
                 cwd='.',
                 include_dirs=None,
                 libraries=None,
                 library_dirs=None,
                 std=None,
                 define=None,
                 undef=None,
                 strict_aliasing=None,
                 preferred_vendor=None,
                 **kwargs):
        if isinstance(sources, str):
            raise ValueError(
                "Expected argument sources to be a list of strings.")
        self.sources = list(sources)
        self.out = out
        self.flags = flags or []
        self.cwd = cwd
        if compiler:
            self.compiler_name, self.compiler_binary = compiler
        else:
            # Find a compiler
            if preferred_vendor is None:
                preferred_vendor = os.environ.get('SYMPY_COMPILER_VENDOR',
                                                  None)
            self.compiler_name, self.compiler_binary, self.compiler_vendor = self.find_compiler(
                preferred_vendor)
            if self.compiler_binary is None:
                raise ValueError("No compiler found (searched: {})".format(
                    ', '.join(self.compiler_dict.values())))
        self.define = define or []
        self.undef = undef or []
        self.include_dirs = include_dirs or []
        self.libraries = libraries or []
        self.library_dirs = library_dirs or []
        self.std = std or self.standards[0]
        self.run_linker = run_linker
        if self.run_linker:
            # both gnu and intel compilers use '-c' for disabling linker
            self.flags = list(filter(lambda x: x != '-c', self.flags))
        else:
            if '-c' not in self.flags:
                self.flags.append('-c')

        if self.std:
            self.flags.append(self.std_formater[self.compiler_name](self.std))

        self.linkline = []

        if strict_aliasing is not None:
            nsa_re = re.compile("no-strict-aliasing$")
            sa_re = re.compile("strict-aliasing$")
            if strict_aliasing is True:
                if any(map(nsa_re.match, flags)):
                    raise CompileError(
                        "Strict aliasing cannot be both enforced and disabled")
                elif any(map(sa_re.match, flags)):
                    pass  # already enforced
                else:
                    flags.append('-fstrict-aliasing')
            elif strict_aliasing is False:
                if any(map(nsa_re.match, flags)):
                    pass  # already disabled
                else:
                    if any(map(sa_re.match, flags)):
                        raise CompileError(
                            "Strict aliasing cannot be both enforced and disabled"
                        )
                    else:
                        flags.append('-fno-strict-aliasing')
            else:
                msg = "Expected argument strict_aliasing to be True/False, got {}"
                raise ValueError(msg.format(strict_aliasing))
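The strict-aliasing branch above mutates the caller's flag list in place and matches flags with regular expressions. A self-contained sketch of the same decision logic, simplified to exact flag matching and returning a new list (the helper name is illustrative):

from distutils.errors import CompileError

def resolve_strict_aliasing(flags, strict_aliasing):
    # Sketch of the decision logic above: refuse contradictory requests,
    # otherwise append the matching flag. Returns a new list.
    flags = list(flags)
    disabled = "-fno-strict-aliasing" in flags
    enabled = "-fstrict-aliasing" in flags
    if strict_aliasing is True:
        if disabled:
            raise CompileError(
                "Strict aliasing cannot be both enforced and disabled")
        if not enabled:
            flags.append("-fstrict-aliasing")
    elif strict_aliasing is False:
        if enabled:
            raise CompileError(
                "Strict aliasing cannot be both enforced and disabled")
        if not disabled:
            flags.append("-fno-strict-aliasing")
    elif strict_aliasing is not None:
        raise ValueError(
            "Expected argument strict_aliasing to be True/False, got {}".format(
                strict_aliasing))
    return flags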
Beispiel #20
0
def check_openmp_support():
    """Check whether OpenMP test code can be compiled and run"""
    code = textwrap.dedent("""\
        #include <omp.h>
        #include <stdio.h>
        int main(void) {
        #pragma omp parallel
        printf("nthreads=%d\\n", omp_get_num_threads());
        return 0;
        }
        """)

    extra_preargs = os.getenv("LDFLAGS", None)
    if extra_preargs is not None:
        extra_preargs = extra_preargs.strip().split(" ")
        extra_preargs = [
            flag for flag in extra_preargs
            if flag.startswith(("-L", "-Wl,-rpath", "-l"))
        ]

    extra_postargs = get_openmp_flag

    try:
        output = compile_test_program(code,
                                      extra_preargs=extra_preargs,
                                      extra_postargs=extra_postargs)

        if "nthreads=" in output[0]:
            nthreads = int(output[0].strip().split("=")[1])
            openmp_supported = len(output) == nthreads
        else:
            openmp_supported = False

    except (CompileError, LinkError, subprocess.CalledProcessError):
        openmp_supported = False

    if not openmp_supported:
        if os.getenv("NNETSAUCE_FAIL_NO_OPENMP"):
            raise CompileError("Failed to build with OpenMP")
        else:
            message = textwrap.dedent("""

                                ***********
                                * WARNING *
                                ***********

                It seems that nnetsauce cannot be built with OpenMP.

                - Make sure you have followed the installation instructions:

                    https://github.com/thierrymoudiki/nnetsauce

                - If your compiler supports OpenMP but you still see this
                  message, please submit a bug report at:

                    https://github.com/thierrymoudiki/nnetsauce/issues

                - The build will continue with OpenMP-based parallelism
                  disabled. Note however that some estimators will run in
                  sequential mode instead of leveraging thread-based
                  parallelism.

                                    ***
                """)
            warnings.warn(message)

    return openmp_supported
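compile_test_program() is defined elsewhere in the project. A hedged, POSIX-oriented sketch of what such a helper might do: write the C snippet to a temporary directory, compile and link it with the configured distutils compiler, run the binary and return its stdout lines. Unlike the call above, this simplification expects extra_postargs to already be a list of flags.

import os
import subprocess
import tempfile
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler

def compile_test_program_sketch(code, extra_preargs=None, extra_postargs=None):
    ccompiler = new_compiler()
    customize_compiler(ccompiler)
    start_dir = os.path.abspath(".")
    with tempfile.TemporaryDirectory() as tmp_dir:
        try:
            os.chdir(tmp_dir)
            with open("test_program.c", "w") as f:
                f.write(code)
            # compile and link in the temporary directory
            objects = ccompiler.compile(["test_program.c"],
                                        extra_postargs=extra_postargs)
            ccompiler.link_executable(objects, "test_program",
                                      extra_preargs=extra_preargs,
                                      extra_postargs=extra_postargs)
            # run the binary and capture the lines it prints
            output = subprocess.check_output("./test_program")
            return output.decode("utf-8").splitlines()
        finally:
            os.chdir(start_dir)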
Beispiel #21
0
    def __init__(self, compiler):
        log.debug("Compiler include_dirs: %s" % compiler.include_dirs)
        if hasattr(compiler, "initialize"):
            compiler.initialize()  # to set all variables
            log.debug("Compiler include_dirs after initialize: %s" %
                      compiler.include_dirs)
        self.compiler = compiler

        log.debug(
            sys.version)  # contains the compiler used to build this python

        # members with the info for the outside world
        self.include_dirs = get_include_dirs()
        self.objects = []
        self.libraries = []
        self.library_dirs = get_library_dirs()
        self.linker_flags = []
        self.compile_time_env = {}

        if self.compiler.compiler_type == 'msvc':
            if (sys.version_info.major, sys.version_info.minor) < (3, 3):
                # The check above is a nasty hack. We're using the python
                # version as a proxy for the MSVC version. MSVC 2008 doesn't
                # ship stdint.h, so our bundled copy is needed; 2010 does.
                #
                # We need to add the path to msvc includes
                msvc_2008_path = (os.path.join(os.getcwd(), 'include',
                                               'msvc_2008'))
                self.include_dirs.append(msvc_2008_path)
            elif (sys.version_info.major, sys.version_info.minor) < (3, 5):
                # Actually, it seems that appveyor doesn't have a stdint that
                # works, so even for 2010 we use our own (hacked) version
                # of stdint.
                # This should be pretty safe in whatever case.
                msvc_2010_path = (os.path.join(os.getcwd(), 'include',
                                               'msvc_2010'))
                self.include_dirs.append(msvc_2010_path)

                # To avoid http://bugs.python.org/issue4431
                #
                # C:\Program Files\Microsoft
                # SDKs\Windows\v7.1\Bin\x64\mt.exe -nologo -manifest
                # C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe.manifest
                # -outputresource:C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe;1
                # C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe.manifest
                # : general error c1010070: Failed to load and parse
                # the manifest. The system cannot find the file
                # specified.
                self.compiler.ldflags_shared.append('/MANIFEST')

        if get_platform().startswith('linux'):
            # needed at least libm for linker checks to succeed
            self.libraries.append('m')

        # main fftw3 header is required
        if not self.has_header(['fftw3.h'], include_dirs=self.include_dirs):
            raise CompileError("Could not find the FFTW header 'fftw3.h'")

        # mpi is optional
        # self.support_mpi = self.has_header(['mpi.h', 'fftw3-mpi.h'])
        # TODO enable check when wrappers are included in Pyfftw
        self.support_mpi = False

        if self.support_mpi:
            try:
                import mpi4py
                self.include_dirs.append(mpi4py.get_include())
            except ImportError:
                log.error(
                    "Could not import mpi4py. Skipping support for FFTW MPI.")
                self.support_mpi = False

        self.search_dependencies()
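has_header() is implemented elsewhere in this project. A minimal sketch of such a check, assuming it only needs to verify that a source file including the headers compiles with the configured distutils compiler (the helper name and temporary-file layout are illustrative):

import os
import shutil
import tempfile
from distutils.errors import CompileError

def has_header_sketch(compiler, headers, include_dirs=None):
    # Write a tiny C file that includes every requested header and see
    # whether the compiler can turn it into an object file.
    code = "".join("#include <%s>\n" % h for h in headers)
    code += "int main(void) { return 0; }\n"
    tmpdir = tempfile.mkdtemp()
    src = os.path.join(tmpdir, "probe.c")
    try:
        with open(src, "w") as fh:
            fh.write(code)
        compiler.compile([src], output_dir=tmpdir,
                         include_dirs=include_dirs or [])
        return True
    except CompileError:
        return False
    finally:
        shutil.rmtree(tmpdir, ignore_errors=True)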
Beispiel #22
0
class CygwinCCompiler (UnixCCompiler):

    compiler_type = 'cygwin'
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".dll"
    dylib_lib_extension = ".dll.a"
    static_lib_format = "lib%s%s"
    shared_lib_format = "%s%s"
    exe_extension = ".exe"

    def __init__ (self, verbose=0, dry_run=0, force=0):

        UnixCCompiler.__init__ (self, verbose, dry_run, force)

        (status, details) = check_config_h()
        self.debug_print("Python's GCC status: %s (details: %s)" %
                         (status, details))
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler. "
                "Reason: %s. "
                "Compiling may fail because of undefined preprocessor macros."
                % details)

        self.gcc_version, self.ld_version, self.dllwrap_version = \
            get_versions()
        self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" %
                         (self.gcc_version,
                          self.ld_version,
                          self.dllwrap_version) )

        # ld_version >= "2.10.90" and < "2.13" should also be able to use
        # gcc -mdll instead of dllwrap
        # Older dllwraps had own version numbers, newer ones use the
        # same as the rest of binutils ( also ld )
        # dllwrap 2.10.90 is buggy
        if self.ld_version >= "2.10.90":
            self.linker_dll = "gcc"
        else:
            self.linker_dll = "dllwrap"

        # ld_version >= "2.13" support -shared so use it instead of
        # -mdll -static
        if self.ld_version >= "2.13":
            shared_option = "-shared"
        else:
            shared_option = "-mdll -static"

        # Hard-code GCC because that's what this is all about.
        # XXX optimization, warnings etc. should be customizable.
        self.set_executables(compiler='gcc -mcygwin -O -Wall',
                             compiler_so='gcc -mcygwin -mdll -O -Wall',
                             compiler_cxx='g++ -mcygwin -O -Wall',
                             linker_exe='gcc -mcygwin',
                             linker_so=('%s -mcygwin %s' %
                                        (self.linker_dll, shared_option)))

        # cygwin and mingw32 need different sets of libraries
        if self.gcc_version == "2.91.57":
            # cygwin shouldn't need msvcrt, but without the dlls will crash
            # (gcc version 2.91.57) -- perhaps something about initialization
            self.dll_libraries=["msvcrt"]
            self.warn(
                "Consider upgrading to a newer version of gcc")
        else:
            # Include the appropriate MSVC runtime library if Python was built
            # with MSVC 7.0 or later.
            self.dll_libraries = get_msvcr()

    # __init__ ()


    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        if ext == '.rc' or ext == '.res':
            # gcc needs '.res' and '.rc' compiled to object files !!!
            try:
                self.spawn(["windres", "-i", src, "-o", obj])
            except DistutilsExecError as msg:
                raise CompileError(msg)
        elif ext == '.mc':
            # Adapted from msvc9compiler:
            #
            # Compile .MC to .RC file to .RES file.
            #   * '-h dir' specifies the directory for the generated include file
            #   * '-r dir' specifies the target directory of the generated RC file and the binary message resource it includes
            #
            # For now (since there are no options to change this),
            # we use the source-directory for the include file and
            # the build directory for the RC file and message
            # resources. This works at least for win32all.
            h_dir = os.path.dirname(src)
            rc_dir = os.path.dirname(obj)
            try:
                # first compile .MC to .RC and .H file
                self.spawn(['windmc'] + ['-h', h_dir, '-r', rc_dir] + [src])
                base, _ = os.path.splitext (os.path.basename (src))
                rc_file = os.path.join (rc_dir, base + '.rc')
                # then compile .RC to .RES file
                self.spawn(['windres', '-i', rc_file, '-o', obj])
            except DistutilsExecError as msg:
                raise CompileError(msg)
Beispiel #23
0
    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):

        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        add_cpp_opts = False

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
                add_cpp_opts = True
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts + [output_opt, input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc, "/fo" + obj, rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext == '.cu':
                # a trigger for cu compile
                try:
                    # use the cuda for .cu files
                    # self.set_executable('compiler_so', CUDA['nvcc'])
                    # use only a subset of the extra_postargs, which are 1-1 translated
                    # from the extra_compile_args in the Extension class
                    postargs = extra_postargs['nvcc']
                    arg = [CUDA['nvcc']] + sources + ['-odir', pjoin(output_dir,'nms')]
                    #arg = [CUDA['nvcc']] + sources
                    for include_dir in include_dirs:
                        arg.append('-I')
                        arg.append(include_dir)
                    arg += ['-I', py_include]
                    # arg += ['-lib', CUDA['lib64']]
                    arg += ['-Xcompiler', '/EHsc,/W3,/nologo,/Ox,/MD']
                    arg += postargs
                    self.spawn(arg)
                    continue
                except DistutilsExecError as msg:
                    # raise CompileError(msg)
                    continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile {} to {}"
                                   .format(src, obj))

            args = [self.cc] + compile_opts + pp_opts
            if add_cpp_opts:
                args.append('/EHsc')
            args.append(input_opt)
            args.append("/Fo" + obj)
            args.extend(extra_postargs)

            try:
                self.spawn(args)
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects
Beispiel #24
0
def raise_compile_error(*args, **kwargs):
    raise CompileError("Prevented C extension compiling.")
Beispiel #25
0
def src2obj(srcpath,
            Runner=None,
            objpath=None,
            cwd=None,
            inc_py=False,
            **kwargs):
    """ Compiles a source code file to an object file.

    Files ending with '.pyx' assumed to be cython files and
    are dispatched to pyx2obj.

    Parameters
    ==========

    srcpath: str
        Path to source file.
    Runner: CompilerRunner subclass (optional)
        If ``None``: deduced from extension of srcpath.
    objpath : str (optional)
        Path to generated object. If ``None``: deduced from ``srcpath``.
    cwd: str (optional)
        Working directory and root of relative paths. If ``None``: current dir.
    inc_py: bool
        Add Python include path to kwarg "include_dirs". Default: False
    \\*\\*kwargs: dict
        keyword arguments passed to Runner or pyx2obj

    """
    name, ext = os.path.splitext(os.path.basename(srcpath))
    if objpath is None:
        if os.path.isabs(srcpath):
            objpath = '.'
        else:
            objpath = os.path.dirname(srcpath)
            objpath = objpath or '.'  # avoid objpath == ''

    if os.path.isdir(objpath):
        objpath = os.path.join(objpath, name + objext)

    include_dirs = kwargs.pop('include_dirs', [])
    if inc_py:
        from distutils.sysconfig import get_python_inc
        py_inc_dir = get_python_inc()
        if py_inc_dir not in include_dirs:
            include_dirs.append(py_inc_dir)

    if ext.lower() == '.pyx':
        return pyx2obj(srcpath,
                       objpath=objpath,
                       include_dirs=include_dirs,
                       cwd=cwd,
                       **kwargs)

    if Runner is None:
        Runner, std = extension_mapping[ext.lower()]
        if 'std' not in kwargs:
            kwargs['std'] = std

    flags = kwargs.pop('flags', [])
    needed_flags = ('-fPIC', )
    for flag in needed_flags:
        if flag not in flags:
            flags.append(flag)

    # src2obj implies not running the linker...
    run_linker = kwargs.pop('run_linker', False)
    if run_linker:
        raise CompileError("src2obj called with run_linker=True")

    runner = Runner([srcpath],
                    objpath,
                    include_dirs=include_dirs,
                    run_linker=run_linker,
                    cwd=cwd,
                    flags=flags,
                    **kwargs)
    runner.run()
    return objpath
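A hypothetical invocation of src2obj, assuming a C source file named hello.c next to the build script:

# Sketch: compile hello.c to an object file in the same directory, adding the
# Python include path so the source may use Python.h.
obj_path = src2obj("hello.c", inc_py=True)
print("compiled object:", obj_path)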
Beispiel #26
0
        def _single_compile(obj):
            try:
                src, ext = build[obj]
            except KeyError:
                return

            add_cpp_opts = False

            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
                add_cpp_opts = True
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts +
                               [output_opt] + [input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                return
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] +
                               ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext (os.path.basename (src))
                    rc_file = os.path.join (rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] +
                               ["/fo" + obj] + [rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                return
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile %s to %s"
                                   % (src, obj))

            args = [self.cc] + compile_opts + pp_opts
            if add_cpp_opts:
                args.append('/EHsc')
            args.append(input_opt)
            args.append("/Fo" + obj)
            args.extend(extra_postargs)

            try:
                msvc_spawn_and_write_d_file(obj, src, args, env, self.dry_run)
            except DistutilsExecError as msg:
                raise CompileError(msg)
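Beispiel #26 only shows the per-object worker; a hedged sketch of how the enclosing compile() method commonly drives it, dispatching all objects to a thread pool so they build in parallel. The worker count is illustrative, and _single_compile and objects are the names from the excerpt above:

# Sketch (method-body fragment, not standalone): run _single_compile over
# every object file with a small thread pool.
from multiprocessing.pool import ThreadPool

N_JOBS = 4  # illustrative worker count
with ThreadPool(N_JOBS) as pool:
    list(pool.imap(_single_compile, objects))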
Beispiel #27
0
 def compile(self,
             sources,
             output_dir=None,
             macros=None,
             include_dirs=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             depends=None):
     if not self.initialized:
         self.initialize()
     compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                        sources, depends, extra_postargs)
     macros, objects, extra_postargs, pp_opts, build = compile_info
     compile_opts = extra_preargs or []
     compile_opts.append('/c')
     if debug:
         compile_opts.extend(self.compile_options_debug)
     else:
         compile_opts.extend(self.compile_options)
     for obj in objects:
         try:
             src, ext = build[obj]
         except KeyError:
             continue
         if debug:
             src = os.path.abspath(src)
         if ext in self._c_extensions:
             input_opt = '/Tc' + src
         elif ext in self._cpp_extensions:
             input_opt = '/Tp' + src
         elif ext in self._rc_extensions:
             input_opt = src
             output_opt = '/fo' + obj
             try:
                 self.spawn([self.rc] + pp_opts + [output_opt] +
                            [input_opt])
             except DistutilsExecError as msg:
                 raise CompileError(msg)
             continue
         elif ext in self._mc_extensions:
             h_dir = os.path.dirname(src)
             rc_dir = os.path.dirname(obj)
             try:
                 self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src])
                 base, _ = os.path.splitext(os.path.basename(src))
                 rc_file = os.path.join(rc_dir, base + '.rc')
                 self.spawn([self.rc] + ['/fo' + obj] + [rc_file])
             except DistutilsExecError as msg:
                 raise CompileError(msg)
             continue
         else:
             raise CompileError("Don't know how to compile %s to %s" %
                                (src, obj))
         output_opt = '/Fo' + obj
         try:
             self.spawn([self.cc] + compile_opts + pp_opts +
                        [input_opt, output_opt] + extra_postargs)
         except DistutilsExecError as msg:
             raise CompileError(msg)
     return objects
Beispiel #28
0
    def compile_single(self, obj, build, debug, pp_opts, compile_opts,
                       add_cpp_opts, extra_postargs):
        try:
            src, ext = build[obj]
        except KeyError:
            return
        if debug:
            # pass the full pathname to MSVC in debug mode,
            # this allows the debugger to find the source file
            # without asking the user to browse for it
            src = os.path.abspath(src)

        if ext in self._c_extensions:
            input_opt = "/Tc" + src
        elif ext in self._cpp_extensions:
            input_opt = "/Tp" + src
            add_cpp_opts = True
        elif ext in self._rc_extensions:
            # compile .RC to .RES file
            input_opt = src
            output_opt = "/fo" + obj
            try:
                self.spawn([self.rc] + pp_opts + [output_opt, input_opt])
            except DistutilsExecError as msg:
                raise CompileError(msg)
            return
        elif ext in self._mc_extensions:
            # Compile .MC to .RC file to .RES file.
            #   * '-h dir' specifies the directory for the
            #     generated include file
            #   * '-r dir' specifies the target directory of the
            #     generated RC file and the binary message resource
            #     it includes
            #
            # For now (since there are no options to change this),
            # we use the source-directory for the include file and
            # the build directory for the RC file and message
            # resources. This works at least for win32all.
            h_dir = os.path.dirname(src)
            rc_dir = os.path.dirname(obj)
            try:
                # first compile .MC to .RC and .H file
                self.spawn([self.mc, "-h", h_dir, "-r", rc_dir, src])
                base, _ = os.path.splitext(os.path.basename(src))
                rc_file = join(rc_dir, base + ".rc")
                # then compile .RC to .RES file
                self.spawn([self.rc, "/fo" + obj, rc_file])

            except DistutilsExecError as msg:
                raise CompileError(msg)
            return
        else:
            # how to handle this file?
            raise CompileError("Don't know how to compile {} to {}".format(
                src, obj))

        args = [self.cc] + compile_opts + pp_opts
        if add_cpp_opts:
            args.append("/EHsc")
        args.append(input_opt)
        args.append("/Fo" + obj)
        args.extend(extra_postargs)

        try:
            self.spawn(args)
        except DistutilsExecError as msg:
            raise CompileError(msg)
Beispiel #29
0
                        # first compile .MC to .RC and .H file
                        self.spawn ([self.mc] +
                                    ['-h', h_dir, '-r', rc_dir] + [src])
                        base, _ = os.path.splitext (os.path.basename (src))
                        rc_file = os.path.join (rc_dir, base + '.rc')
                        # then compile .RC to .RES file
                        self.spawn ([self.rc] +
                                    ["/fo" + obj] + [rc_file])

                    except DistutilsExecError as msg:
                        raise CompileError(msg)
                    continue
                else:
                    # how to handle this file?
                    raise CompileError (
                        "Don't know how to compile %s to %s" % \
                        (src, obj))

                output_opt = "/Fo" + obj
                try:
                    self.spawn ([self.cc] + compile_opts + pp_opts +
                                [input_opt, output_opt] +
                                extra_postargs)
                except DistutilsExecError as msg:
                    raise CompileError(msg)

        return objects

    # compile ()

Beispiel #30
0
def createMakefile():

    say("\n    ==== Checking supported compiler options and available libraries ====\n\n")
    LINK_FLAGS    = LDFLAGS.split()   # accumulate the linker flags that will be put to Makefile.local
    COMPILE_FLAGS = CFLAGS. split()   # same for the compilation of the shared library only
    # default compilation flags for both the shared library and all example programs that use it
    CXXFLAGS = ['-fPIC', '-Wall', '-O2']
    # additional compilation/linking flags for example programs
    EXE_FLAGS = []

    # [1a]: check if a compiler exists at all
    if not runCompiler():
        raise CompileError("Could not locate a compiler (set CXX=... environment variable to override)")

    # [1b]: test if OpenMP is supported (optional but highly recommended)
    OMP_FLAG = '-fopenmp'
    OMP_FLAGS= OMP_FLAG+' -Werror -Wno-unknown-pragmas'
    OMP_CODE = '#include <omp.h>\n#include <iostream>\nint main(){\nstd::cout << "Number of threads: ";\n'+\
        '#pragma omp parallel\n{std::cout<<\'*\';}\nstd::cout << "\\n";\n}\n'
    if runCompiler(code=OMP_CODE, flags=OMP_FLAGS):
        CXXFLAGS += [OMP_FLAG]
    else:
        # on MacOS the clang compiler pretends not to support OpenMP, but in fact it does so
        # if we insist (libomp.so/dylib or libgomp.so must be present in the system for this to work);
        # in some Anaconda installations, though, linking to the system-default libomp.dylib
        # leads to conflicts with libiomp5.dylib, so we first try to link to the latter explicitly.
        CONDA_EXE = os.environ.get('CONDA_EXE')
        if CONDA_EXE is not None and os.path.isfile(CONDA_EXE.replace('bin/conda', 'lib/libiomp5.dylib')) \
            and runCompiler(code=OMP_CODE, flags=CONDA_EXE.replace('bin/conda', 'lib/libiomp5.dylib') +
            ' -Xpreprocessor ' + OMP_FLAGS):
            CXXFLAGS   += ['-Xpreprocessor', OMP_FLAG]
            LINK_FLAGS += [CONDA_EXE.replace('bin/conda', 'lib/libiomp5.dylib')]
            EXE_FLAGS  += [CONDA_EXE.replace('bin/conda', 'lib/libiomp5.dylib')]
        elif runCompiler(code=OMP_CODE, flags='-lgomp -Xpreprocessor '+OMP_FLAGS):
            CXXFLAGS   += ['-Xpreprocessor', OMP_FLAG]
            LINK_FLAGS += ['-lgomp']
            EXE_FLAGS  += ['-lgomp']
        elif runCompiler(code=OMP_CODE, flags='-lomp -Xpreprocessor '+OMP_FLAGS):
            CXXFLAGS   += ['-Xpreprocessor', OMP_FLAG]
            LINK_FLAGS += ['-lomp']
            EXE_FLAGS  += ['-lomp']
        elif not ask("Warning, OpenMP is not supported\n"+
            "If you're compiling on MacOS with clang, you'd better install another compiler such as GCC\n"+
            "Do you want to continue without OpenMP? [Y/N] "): exit(1)

    # [1c]: test if C++11 is supported (optional)
    CXX11_FLAG = '-std=c++11'
    if runCompiler(flags=CXX11_FLAG):
        CXXFLAGS += [CXX11_FLAG]

    # [1d]: test the -march flag (optional, allows architecture-dependent compiler optimizations)
    ARCH_FLAG = '-march=native'
    ARCH_CODE = 'int main(int c, char** v) { double x=c*3.14; return x==42; }\n'
    if runCompiler(code=ARCH_CODE, flags=ARCH_FLAG):
        CXXFLAGS += [ARCH_FLAG]
    else:
        ARCH_FLAG = '-march=core2'  # try a less ambitious option
        if runCompiler(code=ARCH_CODE, flags=ARCH_FLAG):
            CXXFLAGS += [ARCH_FLAG]

    # [1e]: special treatment for Intel compiler to restore determinism in OpenMP-parallelized loops
    INTEL_FLAG = '-qno-opt-dynamic-align'
    if runCompiler(code='#ifndef __INTEL_COMPILER\n#error\n#endif\nint main(){}\n', flags=INTEL_FLAG):
        CXXFLAGS += [INTEL_FLAG]

    # [2a]: check that NumPy is present (required by the python interface)
    try:
        import numpy
        NUMPY_INC = '-I'+numpy.get_include()
    except ImportError:
        raise CompileError("NumPy is not present - python extension cannot be compiled")

    # [2b]: find out the paths to Python.h and libpythonXX.{a,so,dylib,...} (this is rather tricky)
    # and all other relevant compilation/linking flags needed to build a shared library that uses Python
    PYTHON_INC = '-I'+sysconfig.get_python_inc()

    # various other system libraries that are needed at link time
    PYTHON_LIB_EXTRA = compressList(
        get_config_var('LIBS').split() +
        get_config_var('SYSLIBS').split())

    # try compiling a test code with the provided link flags (in particular, the name of Python library):
    # check that a sample C++ program with embedded python compiles, links and runs properly
    def tryPythonCode(PYTHON_SO_FLAGS, PYTHON_EXE_FLAGS=[]):
        print("    **** Trying the following options for linking against Python library ****")
        # test code for a shared library
        PYTEST_LIB_CODE = """
#include "Python.h"
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include "numpy/arrayobject.h"
void bla() {PyRun_SimpleString("import sys;print(sys.prefix);");}
void run() {Py_Initialize();bla();Py_Finalize();}
PyMODINIT_FUNC
"""
        if sys.version_info[0]==2:  # Python 2.6-2.7
            PYTEST_LIB_CODE += """
initagamatest(void) {
    Py_InitModule3("agamatest", NULL, "doc");
    import_array();
    bla();
}
"""
        else:  # Python 3.x
            PYTEST_LIB_CODE += """
PyInit_agamatest(void) {
    static PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, "agamatest", "doc", -1, NULL};
    PyObject* mod = PyModule_Create(&moduledef);
    import_array1(mod);
    bla();
    return mod;
}
"""
        # test code for a program that loads this shared library
        PYTEST_EXE_CODE = 'extern void run();int main(){run();}\n'
        PYTEST_LIB_NAME = './agamatest.so'
        PYTEST_EXE_NAME = './agamatest.exe'
        # try compiling the test shared library
        if not runCompiler(code=PYTEST_LIB_CODE,
            flags=' '.join([PYTHON_INC, NUMPY_INC, '-shared', '-fPIC'] + PYTHON_SO_FLAGS),
            dest=PYTEST_LIB_NAME):
            return False  # the program couldn't be compiled at all (try the next variant)

        # if succeeded, compile the test program that uses this library
        if not runCompiler(code=PYTEST_EXE_CODE,
            flags=' '.join([PYTEST_LIB_NAME] + PYTHON_EXE_FLAGS),
            dest=PYTEST_EXE_NAME) \
            or not os.path.isfile(PYTEST_LIB_NAME) \
            or not os.path.isfile(PYTEST_EXE_NAME):
            return False  # can't find compiled test program
        resultexe = subprocess.Popen(PYTEST_EXE_NAME,
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().rstrip()
        # the test program might not be able to find the python home, in which case manually provide it
        if 'Could not find platform independent libraries <prefix>' in resultexe:
            resultexe = subprocess.Popen(PYTEST_EXE_NAME,
                env=dict(os.environ, PYTHONHOME=sys.prefix),
                stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].decode().rstrip()
        # also try loading this shared library as an extension module
        procpy = subprocess.Popen(sys.executable+" -c 'import agamatest'", shell=True, \
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        resultpy = procpy.communicate()[0].decode().rstrip()
        returnpy = procpy.returncode
        # clean up
        os.remove(PYTEST_EXE_NAME)
        os.remove(PYTEST_LIB_NAME)
        # check if the results (reported library path prefix) are the same as we have in this script
        sysprefix = os.path.realpath(sys.prefix)
        if os.path.realpath(resultexe) != sysprefix or os.path.realpath(resultpy) != sysprefix:
            print("Test program doesn't seem to use the same version of Python, "+\
                "or the library path is reported incorrectly: \n"+\
                "Expected: "+sysprefix+"\n"+\
                "Received: "+resultexe+"\n"+\
                "From py:  "+resultpy+('' if returnpy==0 else ' (crashed with error '+str(returnpy)+')'))
            return False
        print("    **** Successfully linked using these options ****")
        return True   # this combination of options seems reasonable...

    # explore various possible combinations of file name and path to the python library...
    def findPythonLib():
        # try linking against the static python library libpython**.a, if this does not succeed,
        # try the shared library libpython**.so** or libpython**.dylib
        for PYTHON_LIB_FILENAME in compressList([sysconfig.get_config_var(x) for x in ['LIBRARY', 'LDLIBRARY', 'INSTSONAME']]):
            for PYTHON_LIB_PATH in compressList([sysconfig.get_config_var(x) for x in ['LIBPL', 'LIBDIR']]):
                # obtain full path to the python library
                PYTHON_LIB_FILEPATH = os.path.join(PYTHON_LIB_PATH, PYTHON_LIB_FILENAME)
                # check if the file exists at all at the given location
                if os.path.isfile(PYTHON_LIB_FILEPATH):
                    # flags for compiling the shared library which serves as a Python extension module
                    PYTHON_SO_FLAGS = [PYTHON_LIB_FILEPATH] + PYTHON_LIB_EXTRA
                    # other libraries depend on whether this is a static or a shared python library
                    if PYTHON_LIB_FILENAME.endswith('.a') and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
                        PYTHON_SO_FLAGS += get_config_var('LINKFORSHARED').split()
                    # the stack_size flag is problematic and needs to be removed
                    PYTHON_SO_FLAGS = [x for x in PYTHON_SO_FLAGS if not x.startswith('-Wl,-stack_size,')]
                    if tryPythonCode(PYTHON_SO_FLAGS):
                        return PYTHON_SO_FLAGS, []   # successful compilation
                    elif not PYTHON_LIB_FILENAME.endswith('.a'):
                        # sometimes the python installation is so wrecked that the linker can find and use
                        # the shared library libpython***.so, but this library is not in LD_LIBRARY_PATH and
                        # cannot be found when loading the python extension module outside python itself.
                        # the (inelegant) fix is to hardcode the path to this libpython***.so as -rpath.
                        print("Trying rpath")
                        RPATH = ['-Wl,-rpath,'+PYTHON_LIB_PATH]  # extend the linker options and try again
                        if tryPythonCode(PYTHON_SO_FLAGS + RPATH):
                            return PYTHON_SO_FLAGS + RPATH, []
                        if "-undefined dynamic_lookup" in sysconfig.get_config_var('LDSHARED'):
                            print("Trying the last resort solution")
                            PYTHON_SO_FLAGS = ['-undefined dynamic_lookup'] + PYTHON_LIB_EXTRA
                            PYTHON_EXE_FLAGS = RPATH + [PYTHON_LIB_FILEPATH]
                            if tryPythonCode(PYTHON_SO_FLAGS, PYTHON_EXE_FLAGS):
                                return PYTHON_SO_FLAGS, PYTHON_EXE_FLAGS

        # if none of the above combinations worked, give up...
        raise CompileError("Could not compile test program which uses libpython" +
            sysconfig.get_config_var('VERSION'))

    # [2c]: find the python library and other relevant linking flags
    PYTHON_SO_FLAGS, PYTHON_EXE_FLAGS = findPythonLib()
    COMPILE_FLAGS += ['-DHAVE_PYTHON', PYTHON_INC, NUMPY_INC]
    LINK_FLAGS    += PYTHON_SO_FLAGS
    EXE_FLAGS     += PYTHON_EXE_FLAGS

    # [3]: check that GSL is present, and find out its version (required)
    # try compiling a snippet of code into a shared library (tests if GSL has been compiled with -fPIC),
    # then compiling a test program that loads this library (tests if the correct version of GSL is loaded at link time)
    GSL_TEST_CODE = """#include <gsl/gsl_version.h>
    #if not defined(GSL_MAJOR_VERSION) || (GSL_MAJOR_VERSION == 1) && (GSL_MINOR_VERSION < 15)
    #error "GSL version is too old (need at least 1.15)"
    #endif
    #include <gsl/gsl_integration.h>
    void run() { gsl_integration_cquad_workspace_alloc(10); }
    """
    if runCompileShared(GSL_TEST_CODE, '-lgsl -lgslcblas'):
        # apparently the headers and libraries can be found in some standard location,
        LINK_FLAGS += ['-lgsl', '-lgslcblas']   # so we only list their names
    else:
        if not ask("GSL library (required) is not found\n"+
            "Should we try to download and compile it now? [Y/N] "): exit(1)
        distutils.dir_util.mkpath(EXTRAS_DIR)
        os.chdir(EXTRAS_DIR)
        say('Downloading GSL\n')
        filename = 'gsl.tar.gz'
        dirname  = 'gsl-2.6'
        try:
            urlretrieve('ftp://ftp.gnu.org/gnu/gsl/gsl-2.6.tar.gz', filename)
            if os.path.isfile(filename):
                subprocess.call(['tar', '-zxf', filename])    # unpack the archive
                os.remove(filename)  # remove the downloaded archive
                if not os.path.isdir(dirname): raise Exception("Error unpacking GSL")
        except Exception as e:
            raise CompileError(str(e) + "\nError downloading GSL library, aborting...\n"+
            "You may try to manually compile GSL and install it to "+ROOT_DIR+"/"+EXTRAS_DIR+", so that "+
            "the header files are in "+EXTRAS_DIR+"/include and library files - in "+EXTRAS_DIR+"/lib")
        say('Compiling GSL (may take a few minutes)\n')
        result = subprocess.call('(cd '+dirname+'; ./configure --prefix='+os.getcwd()+
            ' CFLAGS="-fPIC -O2" --enable-shared=no; make; make install) > gsl-install.log', shell=True)
        if result != 0 or not os.path.isfile('lib/libgsl.a'):
             raise CompileError("GSL compilation failed (check "+EXTRAS_DIR+"/gsl-install.log)")
        distutils.dir_util.remove_tree(dirname)  # clean up source and build directories
        COMPILE_FLAGS += ['-I'+EXTRAS_DIR+'/include']
        LINK_FLAGS    += [EXTRAS_DIR+'/lib/libgsl.a', EXTRAS_DIR+'/lib/libgslcblas.a']
        os.chdir(ROOT_DIR)

    # [4]: test if Eigen library is present (optional)
    if runCompiler(code='#include <Eigen/Core>\nint main(){}\n'):
        COMPILE_FLAGS += ['-DHAVE_EIGEN']
    else:
        if ask("Eigen library (recommended) is not found\n"+
            "Should we try to download it now (no compilation needed)? [Y/N] "):
            distutils.dir_util.mkpath(EXTRAS_DIR+'/include/unsupported')
            os.chdir(EXTRAS_DIR)
            say('Downloading Eigen\n')
            filename = 'Eigen.zip'
            dirname  = 'eigen-git-mirror-3.3.7'
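            # Eigen is a header-only library, so it is enough to unpack the archive and copy the headers
            # into EXTRAS_DIR/include; nothing needs to be compiled or linked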
            try:
                urlretrieve('https://github.com/eigenteam/eigen-git-mirror/archive/3.3.7.zip', filename)
                if os.path.isfile(filename):
                    subprocess.call('unzip '+filename+' >/dev/null', shell=True)  # unpack the archive
                    if os.path.isdir(dirname):
                        distutils.dir_util.copy_tree(dirname+'/Eigen', 'include/Eigen', verbose=False)  # copy the headers
                        distutils.dir_util.copy_tree(dirname+'/unsupported/Eigen', 'include/unsupported/Eigen', verbose=False)
                        distutils.dir_util.remove_tree(dirname)  # and delete the rest
                        COMPILE_FLAGS += ['-DHAVE_EIGEN', '-I'+EXTRAS_DIR+'/include']
                    os.remove(filename)                          # remove the downloaded archive
            except Exception as e:
                say("Failed to install Eigen: "+str(e)+"\n")     # didn't succeed with Eigen
            os.chdir(ROOT_DIR)

    # [5a]: test if CVXOPT is present (optional); install if needed
    try:
        import cvxopt  # import the python module
    except Exception:  # import error or some other problem, which might be corrected by installing the module
        if ask("CVXOPT library (needed only for Schwarzschild modelling) is not found\n"
            "Should we try to install it now? [Y/N] "):
            try:
                subprocess.check_call([sys.executable, '-m', 'pip', 'install', '--user', 'cvxopt'])
            except Exception as e:
                say("Failed to install CVXOPT: "+str(e)+"\n")

    # [5b]: if the cvxopt module is available in Python, make sure that we also have C header files
    try:
        import cvxopt   # if this fails, skip cvxopt altogether
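        # compile-only test (note the '-c' flag): we only need the cvxopt.h header to be visible
        # in the include path together with the Python/NumPy headers; no linking is performed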
        if runCompiler(code='#include <cvxopt.h>\nint main(){import_cvxopt();}\n', flags=' '.join(['-c', PYTHON_INC, NUMPY_INC])):
            COMPILE_FLAGS += ['-DHAVE_CVXOPT']
        else:
            # download the C header file if it does not appear to be present in a default location
            distutils.dir_util.mkpath(EXTRAS_DIR+'/include')
            say('Downloading CVXOPT header files\n')
            try:
                urlretrieve('https://raw.githubusercontent.com/cvxopt/cvxopt/master/src/C/cvxopt.h',
                    EXTRAS_DIR+'/include/cvxopt.h')
                urlretrieve('https://raw.githubusercontent.com/cvxopt/cvxopt/master/src/C/blas_redefines.h',
                    EXTRAS_DIR+'/include/blas_redefines.h')
            except Exception: pass  # problems with downloading, skip it
            if  os.path.isfile(EXTRAS_DIR+'/include/cvxopt.h') and \
                os.path.isfile(EXTRAS_DIR+'/include/blas_redefines.h'):
                COMPILE_FLAGS += ['-DHAVE_CVXOPT', '-I'+EXTRAS_DIR+'/include']
            else:
                say("Failed to download CVXOPT header files, this feature will not be available\n")
    except Exception: pass  # cvxopt isn't available, skip this feature

    # [6]: test if GLPK is present (optional - ignored if not found)
    if runCompileShared('#include <glpk.h>\nvoid run() { glp_create_prob(); }\n', '-lglpk'):
        COMPILE_FLAGS += ['-DHAVE_GLPK']
        LINK_FLAGS    += ['-lglpk']
    else:
        say("GLPK library (optional) is not found\n")

    # [7]: test if UNSIO is present (optional), download and compile if needed
    if runCompileShared('#include <uns.h>\nvoid run() { }\n', '-lunsio -lnemo'):
        COMPILE_FLAGS += ['-DHAVE_UNSIO']
        LINK_FLAGS    += ['-lunsio', '-lnemo']
    else:
        if ask("UNSIO library (optional; used for input/output of N-body snapshots) is not found\n"+
            "Should we try to download and compile it now? [Y/N] "):
            distutils.dir_util.mkpath(EXTRAS_DIR)
            distutils.dir_util.mkpath(EXTRAS_DIR+'/include')
            distutils.dir_util.mkpath(EXTRAS_DIR+'/lib')
            say('Downloading UNSIO\n')
            filename = EXTRAS_DIR+'/unsio-master.zip'
            dirname  = EXTRAS_DIR+'/unsio-master'
            try:
                urlretrieve('https://github.com/GalacticDynamics-Oxford/unsio/archive/master.zip', filename)
                if os.path.isfile(filename):
                    subprocess.call('(cd '+EXTRAS_DIR+'; unzip ../'+filename+') >/dev/null', shell=True)  # unpack
                    os.remove(filename)  # remove the downloaded archive
                    say("Compiling UNSIO\n")
                    result = subprocess.call('(cd '+dirname+'; make) > '+EXTRAS_DIR+'/unsio-install.log', shell=True)
                    if result == 0 and os.path.isfile(dirname+'/libnemo.a') and os.path.isfile(dirname+'/libunsio.a'):
                        # successfully compiled: copy the header files to extras/include
                        for hfile in ['componentrange.h', 'ctools.h', 'snapshotinterface.h', 'uns.h', 'userselection.h']:
                            distutils.file_util.copy_file(dirname+'/unsio/'+hfile, EXTRAS_DIR+'/include')
                        # copy the static libraries to extras/lib
                        distutils.file_util.copy_file(dirname+'/libnemo.a',  EXTRAS_DIR+'/lib')
                        distutils.file_util.copy_file(dirname+'/libunsio.a', EXTRAS_DIR+'/lib')
                        # delete the compiled directory
                        distutils.dir_util.remove_tree(dirname)
                        UNSIO_COMPILE_FLAGS = ['-I'+EXTRAS_DIR+'/include']
                        UNSIO_LINK_FLAGS = [EXTRAS_DIR+'/lib/libunsio.a', EXTRAS_DIR+'/lib/libnemo.a']
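                        # the static libraries are linked by their full paths, so no -L search directories
                        # or runtime library paths are needed for UNSIO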
                        if runCompiler(code='#include <uns.h>\nint main(){}\n', flags=' '.join(UNSIO_COMPILE_FLAGS+UNSIO_LINK_FLAGS)):
                            COMPILE_FLAGS += ['-DHAVE_UNSIO'] + UNSIO_COMPILE_FLAGS
                            LINK_FLAGS    += UNSIO_LINK_FLAGS
                        else:
                            raise CompileError('Failed to link against the freshly compiled UNSIO library')
                    else: raise CompileError(
                        "Failed compiling UNSIO (check "+EXTRAS_DIR+"/unsio-install.log)")
            except Exception as e:  # didn't succeed with UNSIO
                say(str(e)+'\n')

    # [99]: put everything together and create Makefile.local
    with open('Makefile.local','w') as f: f.write(
        "# set the default compiler if no value is found in the environment variables or among command-line arguments\n" +
        "ifeq ($(origin CXX),default)\nCXX = " + CC + "\nendif\n" +
        "ifeq ($(origin FC), default)\nFC  = gfortran\nendif\nLINK = $(CXX)\n" +
        "# compilation/linking flags for both the shared library and any programs that use it\n" +
        "CXXFLAGS      += " + " ".join(compressList(CXXFLAGS)) + "\n" +
        "# compilation flags for the shared library only (files in src/)\n" +
        "COMPILE_FLAGS += " + " ".join(compressList(COMPILE_FLAGS)) + "\n" +
        "# linking flags for the shared library only\n" +
        "LINK_FLAGS    += " + " ".join(compressList(LINK_FLAGS)) + "\n" +
        ("# linking flags for the example/test programs\n" +
        "EXE_FLAGS     += " + " ".join(compressList(EXE_FLAGS)) + "\n" if EXE_FLAGS else "") )