Example #1
    def run(self):
        # Compile charmonizer.
        if newer_group([CHARMONIZER_C], CHARMONIZER_EXE_PATH):
            command = [compiler_name]
            if compiler_type == 'msvc':
                command.append('/Fe' + CHARMONIZER_EXE_PATH)
            else:
                command.extend(['-o', CHARMONIZER_EXE_PATH])
            command.append(CHARMONIZER_C)
            print(" ".join(command))
            subprocess.check_call(command)

        # Run charmonizer.
        if newer_group([CHARMONIZER_EXE_PATH], CHARMONY_H_PATH):
            command = [
                CHARMONIZER_EXE_PATH,
                '--cc=' + _quotify(compiler_name),
                '--enable-c',
                '--host=python',
                '--enable-makefile',
                '--',
                cflags
            ]
            if 'CHARM_VALGRIND' in os.environ:
                command[0:0] = "valgrind", "--leak-check=yes"
            print(" ".join(command))
            subprocess.check_call(command)
Example #2
    def test_newer_group(self):
        tmpdir = self.mkdtemp()
        sources = os.path.join(tmpdir, 'sources')
        os.mkdir(sources)
        one = os.path.join(sources, 'one')
        two = os.path.join(sources, 'two')
        three = os.path.join(sources, 'three')
        old_file = os.path.abspath(__file__)

        # return true if 'old_file' is out-of-date with respect to any file
        # listed in 'sources'.
        self.write_file(one)
        self.write_file(two)
        self.write_file(three)
        self.assertTrue(newer_group([one, two, three], old_file))
        self.assertFalse(newer_group([one, two, old_file], three))

        # missing handling
        os.remove(one)
        self.assertRaises(OSError, newer_group, [one, two, old_file], three)

        self.assertFalse(newer_group([one, two, old_file], three,
                                     missing='ignore'))

        self.assertTrue(newer_group([one, two, old_file], three,
                                    missing='newer'))
Example #3
 def _need_link(self, objects, output_file):
     if self.force:
         return 1
     else:
         if self.dry_run:
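             # Object files are not actually written during a dry run, so
             # treat missing objects as newer and report that a link is needed.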
             newer = newer_group(objects, output_file, missing='newer')
         else:
             newer = newer_group(objects, output_file)
         return newer
Example #4
def runswig(source,cppfile,pyfile,deps,cpp=1):
    if not os.path.isfile(cppfile) or not os.path.isfile(pyfile) \
           or newer_group(deps,cppfile) or newer_group(deps,pyfile):
        try:
            if cpp:
                spawn([swig_bin,'-w402','-c++','-python','-o',cppfile,source])
            else:
                spawn([swig_bin,'-w402','-python','-o',cppfile,source])
        except:
            print 'Sorry, I am unable to swig the modified ' + source
            sys.exit(1)
Example #5
 def _need_link(self, objects, output_file):
     """Return true if we need to relink the files listed in 'objects'
     to recreate 'output_file'.
     """
     if self.force:
         return 1
     else:
         if self.dry_run:
             newer = newer_group (objects, output_file, missing='newer')
         else:
             newer = newer_group (objects, output_file)
         return newer
Example #6
    def build_extension(self, ext):
        # only preprocess with esql if necessary
        fullname = self.get_ext_fullname(ext.name)
        ext_filename = os.path.join(self.build_lib,
                                        self.get_ext_filename(fullname))
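        # The positional 'newer' here is newer_group()'s missing= argument:
        # sources that do not exist yet are treated as newer, forcing a rebuild.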
        if not (self.force or newer_group(ext.sources, ext_filename, 'newer')):
            self.announce("skipping '%s' extension (up-to-date)" % ext.name)
            return

        # preprocess *.ec files with 'esql'
        for file in ext.sources:
            if file.endswith('.ec'):
                dir = os.path.dirname(file)
                f = os.path.basename(file)
                cmd = ' '.join(self.esql_parts + [ '-e', f ])
                print cmd

                curdir = os.getcwd()
                os.chdir(dir)
                os.system(cmd)
                os.chdir(curdir)

                ext.sources[ext.sources.index(file)] = file[:-3]+'.c'

        _build_ext.build_extension(self, ext)
Example #7
    def build_libraries(self, libraries):
        if not os.path.exists(self.build_bin):
            os.makedirs(self.build_bin)

        for (prog_name, build_info) in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError, \
                      ("in 'libraries' option ('%s'), " +
                       "'sources' must be present and must be " +
                       "a list of source filenames") % prog_name
            sources = list(sources)

            # Skip build, if program already built.
            prog_path = os.path.join(self.build_bin, prog_name)
            if not (self.force or newer_group(sources, prog_path, 'newer')):
                log.debug("skipping '%s' program (up-to-date)", prog_name)
                return

            log.info("building '%s' program", prog_name)

            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            objects = self.compiler.compile(sources,
                                            output_dir=self.build_temp,
                                            macros=macros,
                                            include_dirs=include_dirs,
                                            debug=self.debug)
            self.compiler.link_executable(objects, prog_name,
                                          output_dir=self.build_bin,
                                          debug=self.debug)
Example #8
 def run(self):
     self.run_command('charmony')
     subprocess.check_call([make_command, '-j', 'static'])
     # Touch Python binding file if the library has changed.
     cfc_c = os.path.join('src', 'cfc', '_cfc.c')
     if newer_group(['libcfc.a'], cfc_c):
         os.utime(cfc_c, None)
Example #9
    def generate_a_pyrex_source(self, base, ext_name, source, extension):
        ''' Monkey patch for numpy build_src.build_src method

        Uses Cython instead of Pyrex, iff source contains 'pymor'

        Assumes Cython is present
        '''
        if 'pymor' not in source:
            return _orig_generate_a_pyrex_source(self, base, ext_name, source, extension)

        if self.inplace:
            target_dir = dirname(base)
        else:
            target_dir = appendpath(self.build_src, dirname(base))
        target_file = pjoin(target_dir, ext_name + '.c')
        depends = [source] + extension.depends
        if self.force or newer_group(depends, target_file, 'newer'):
            import Cython.Compiler.Main
            log.info("cythonc:> %s" % (target_file))
            self.mkpath(target_dir)
            options = Cython.Compiler.Main.CompilationOptions(
                defaults=Cython.Compiler.Main.default_options,
                include_path=extension.include_dirs,
                output_file=target_file)
            cython_result = Cython.Compiler.Main.compile(source, options=options)
            if cython_result.num_errors != 0:
                raise DistutilsError("%d errors while compiling %r with Cython"
                                     % (cython_result.num_errors, source))
        return target_file
Example #10
def generate_a_pyrex_source(self, base, ext_name, source, extension):
    ''' Monkey patch for numpy build_src.build_src method

    Uses Cython instead of Pyrex.

    Assumes Cython is present
    '''
    if self.inplace:
        target_dir = dirname(base)
    else:
        target_dir = appendpath(self.build_src, dirname(base))
    target_file = pjoin(target_dir, ext_name + '.c')
    depends = [source] + extension.depends
    # add distribution (package-wide) include directories, in order to
    # pick up needed .pxd files for cython compilation
    incl_dirs = extension.include_dirs[:]
    dist_incl_dirs = self.distribution.include_dirs
    if not dist_incl_dirs is None:
        incl_dirs += dist_incl_dirs
    if self.force or newer_group(depends, target_file, 'newer'):
        import Cython.Compiler.Main
        log.info("cythonc:> %s" % (target_file))
        self.mkpath(target_dir)
        options = Cython.Compiler.Main.CompilationOptions(
            defaults=Cython.Compiler.Main.default_options,
            include_path=incl_dirs,
            output_file=target_file)
        cython_result = Cython.Compiler.Main.compile(source,
                                                   options=options)
        if cython_result.num_errors != 0:
            raise DistutilsError("%d errors while compiling %r with Cython" \
                  % (cython_result.num_errors, source))
    return target_file
Example #11
 def template_sources(self, sources, extension):
     new_sources = []
     if is_sequence(extension):
         depends = extension[1].get('depends')
         include_dirs = extension[1].get('include_dirs')
     else:
         depends = extension.depends
         include_dirs = extension.include_dirs
     for source in sources:
         (base, ext) = os.path.splitext(source)
         if ext == '.src':  # Template file
             if self.inplace:
                 target_dir = os.path.dirname(base)
             else:
                 target_dir = appendpath(self.build_src, os.path.dirname(base))
             self.mkpath(target_dir)
             target_file = os.path.join(target_dir, os.path.basename(base))
             if (self.force or newer_group([source] + depends, target_file)):
                 if _f_pyf_ext_match(base):
                     log.info("from_template:> %s" % (target_file))
                     outstr = process_f_file(source)
                 else:
                     log.info("conv_template:> %s" % (target_file))
                     outstr = process_c_file(source)
                 with open(target_file, 'w') as fid:
                     fid.write(outstr)
             if _header_ext_match(target_file):
                 d = os.path.dirname(target_file)
                 if d not in include_dirs:
                     log.info("  adding '%s' to include_dirs." % (d))
                     include_dirs.append(d)
             new_sources.append(target_file)
         else:
             new_sources.append(source)
     return new_sources
Example #12
    def build_extension(self, ext):
        sources = ext.sources
        if sources is None or type(sources) not in (ListType, TupleType):
            raise DistutilsSetupError, ("in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + 'a list of source filenames') % ext.name
        sources = list(sources)
        ext_path = self.get_ext_fullpath(ext.name)
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)
            sources = self.swig_sources(sources, ext)
            extra_args = ext.extra_compile_args or []
            macros = ext.define_macros[:]
            for undef in ext.undef_macros:
                macros.append((undef,))

            objects = self.compiler.compile(sources, output_dir=self.build_temp, macros=macros, include_dirs=ext.include_dirs, debug=self.debug, extra_postargs=extra_args, depends=ext.depends)
            self._built_objects = objects[:]
            if ext.extra_objects:
                objects.extend(ext.extra_objects)
            extra_args = ext.extra_link_args or []
            language = ext.language or self.compiler.detect_language(sources)
            self.compiler.link_shared_object(objects, ext_path, libraries=self.get_libraries(ext), library_dirs=ext.library_dirs, runtime_library_dirs=ext.runtime_library_dirs, extra_postargs=extra_args, export_symbols=self.get_export_symbols(ext), debug=self.debug, build_temp=self.build_temp, target_lang=language)
            return
Example #13
    def check_dates(self):
        # Return True if files are up-to-date
        files=self.register[:]
        files.append(self.override)
        files.append(self.defs)

        return not dep_util.newer_group(files, self.output)
Example #14
def run_cython(source, depends=(), includes=(),
               destdir_c=None, destdir_h=None,
               wdir=None, force=False, VERSION=None):
    from glob import glob
    from distutils import log
    from distutils import dep_util
    from distutils.errors import DistutilsError
    target = os.path.splitext(source)[0]+'.c'
    cwd = os.getcwd()
    try:
        if wdir: os.chdir(wdir)
        alldeps = [source]
        for dep in depends:
            alldeps += glob(dep)
        if not (force or dep_util.newer_group(alldeps, target)):
            log.debug("skipping '%s' -> '%s' (up-to-date)",
                      source, target)
            return
    finally:
        os.chdir(cwd)
    if not chk_cython(VERSION):
        raise DistutilsError("requires Cython>=%s" % VERSION)
    log.info("cythonizing '%s' -> '%s'", source, target)
    from cythonize import cythonize
    err = cythonize(source,
                    includes=includes,
                    destdir_c=destdir_c,
                    destdir_h=destdir_h,
                    wdir=wdir)
    if err:
        raise DistutilsError(
            "Cython failure: '%s' -> '%s'" % (source, target))
Example #15
    def generate_a_pyrex_source(self, base, ext_name, source, extension):
        if self.inplace or not have_pyrex():
            target_dir = os.path.dirname(base)
        else:
            target_dir = appendpath(self.build_src, os.path.dirname(base))
        target_file = os.path.join(target_dir, ext_name + ".c")
        depends = [source] + extension.depends
        if self.force or newer_group(depends, target_file, "newer"):
            if have_pyrex():
                import Pyrex.Compiler.Main

                log.info("pyrexc:> %s" % (target_file))
                self.mkpath(target_dir)
                options = Pyrex.Compiler.Main.CompilationOptions(
                    defaults=Pyrex.Compiler.Main.default_options,
                    include_path=extension.include_dirs,
                    output_file=target_file,
                )
                pyrex_result = Pyrex.Compiler.Main.compile(source, options=options)
                if pyrex_result.num_errors != 0:
                    raise DistutilsError("%d errors while compiling %r with Pyrex" % (pyrex_result.num_errors, source))
            elif os.path.isfile(target_file):
                log.warn(
                    "Pyrex required for compiling %r but not available," " using old target %r" % (source, target_file)
                )
            else:
                raise DistutilsError("Pyrex required for compiling %r" " but notavailable" % (source,))
        return target_file
Example #16
def generate_a_cython_source(self, base, ext_name, source, extension):
        if self.inplace or not have_cython():
            target_dir = os.path.dirname(base)
        else:
            target_dir = appendpath(self.build_src, os.path.dirname(base))
        target_file = os.path.join(target_dir, ext_name + '.c')
        depends = [source] + extension.depends
        if self.force or newer_group(depends, target_file, 'newer'):
            if have_cython():
                import Cython.Compiler.Main
                log.info("cythonc:> %s: %s " % (target_dir, target_file))
                log.info("cwd %s " % (os.getcwd()))
                self.mkpath(target_dir)
                options = Cython.Compiler.Main.CompilationOptions(
                    defaults=Cython.Compiler.Main.default_options,
                    include_path=extension.include_dirs,
                    output_file=target_file
                    )
                #log.info('\n'.join([s + ' ' + str(getattr(options, s)) for s in dir(options)]))
                # avoid calling compile_single, because it will give wrong module names.
                cython_result = Cython.Compiler.Main.compile([source],
                                                           options=options)
                if cython_result.num_errors != 0:
                    raise DistutilsError("%d errors while compiling %r with Cython" \
                          % (cython_result.num_errors, source))
            elif os.path.isfile(target_file):
                log.warn("Cython required for compiling %r but not available,"\
                         " using old target %r"\
                         % (source, target_file))
            else:
                raise DistutilsError("Cython required for compiling %r"\
                                     " but notavailable" % (source,))
        return target_file
Example #17
    def make_file(self, infiles, outfile, func, args,
                  exec_msg=None, skip_msg=None, level=1):
        """Special case of 'execute()' for operations that process one or
        more input files and generate one output file.  Works just like
        'execute()', except the operation is skipped and a different
        message printed if 'outfile' already exists and is newer than all
        files listed in 'infiles'.  If the command defined 'self.force',
        and it is true, then the command is unconditionally run -- does no
        timestamp checks.
        """
        if exec_msg is None:
            exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))
        if skip_msg is None:
            skip_msg = "skipping %s (inputs unchanged)" % outfile


        # Allow 'infiles' to be a single string
        if isinstance(infiles, str):
            infiles = (infiles,)
        elif not isinstance(infiles, (list, tuple)):
            raise TypeError(
                  "'infiles' must be a string, or a list or tuple of strings")

        # If 'outfile' must be regenerated (either because it doesn't
        # exist, is out-of-date, or the 'force' flag is true) then
        # perform the action that presumably regenerates it
        if self.force or dep_util.newer_group (infiles, outfile):
            self.execute(func, args, exec_msg, level)
        # Otherwise, print the "skip" message
        else:
            log.debug(skip_msg)
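At its core, make_file() above is a newer_group() gate around execute(); the following is a rough standalone equivalent, written purely as an illustration (the function name and messages are hypothetical, not distutils API):

from distutils import dep_util, log

def make_file_sketch(infiles, outfile, func, args, force=False):
    # Regenerate 'outfile' only if it is missing, is older than any input,
    # or 'force' is set; otherwise skip and log why.
    if force or dep_util.newer_group(infiles, outfile):
        log.info("generating %s from %s", outfile, ', '.join(infiles))
        func(*args)
    else:
        log.debug("skipping %s (inputs unchanged)", outfile)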
Example #18
    def swig_sources (self, sources, extension):
        """
        Run our ETG scripts to generate their .sip files, and adjust
        the sources list before passing on to the base class, which
        will then be responsible for running SIP and building the
        generated C++ files.
        """
        if not self.extensions:
            return

        cfg = Config()

        etg_sources = [s for s in sources if s.startswith('etg/')]
        other_sources = [s for s in sources if not s.startswith('etg/')]

        for etg in etg_sources:
            sipfile = etg2sip(etg)

            deps = [etg]
            ns = loadETG(etg)
            if hasattr(ns, 'OTHERDEPS'):
                deps += ns.OTHERDEPS
            if newer_group(deps, sipfile):
                cmd = [sys.executable, etg, '--sip']
                #if cfg.verbose:
                #    cmd.append('--verbose')
                self.spawn(cmd)

            if '%Module(' in file(sipfile).read():
                other_sources.append(sipfile)

        # now call the base class version of this method
        return build_ext.swig_sources(self, other_sources, extension)
Example #19
def generate_files():
    home = os.getcwd()
    dir = os.path.join(topdir, 'lib', 'nfs4')
    use_xdr(dir, 'nfs4.x')
    import ops_gen # this must be delayed until nfs4.x is parsed
    sources = [ os.path.join(topdir, 'lib', 'ops_gen.py'),
                'nfs4_const.py', 'nfs4_type.py' ]
    if newer_group(sources, 'nfs4_ops.py'):
        print "Generating nfs4_ops.py"
        ops_gen.run()
    dir = os.path.join(topdir, 'lib', 'rpc')
    use_xdr(dir, 'rpc.x')
    dir = os.path.join(topdir, 'lib', 'rpc', 'rpcsec')
    use_xdr(dir, 'gss.x')
    dir = os.path.join(topdir, 'lib', 'nlm')
    use_xdr(dir, 'nlm_prot.x')
    dir = os.path.join(topdir, 'lib', 'nsm')
    use_xdr(dir, 'nsm.x')

    # Handle NFS3
    dir = os.path.join(topdir, 'lib', 'nfs3')
    use_xdr(dir, 'nfs3.x')
    use_xdr(dir, 'mount.x')
    use_xdr(dir, 'rpcb.x')

    os.chdir(home)
Example #20
 def test_newer_group(self):
     tmpdir = self.mkdtemp()
     sources = os.path.join(tmpdir, 'sources')
     os.mkdir(sources)
     one = os.path.join(sources, 'one')
     two = os.path.join(sources, 'two')
     three = os.path.join(sources, 'three')
     old_file = os.path.abspath(__file__)
     self.write_file(one)
     self.write_file(two)
     self.write_file(three)
     self.assertTrue(newer_group([one, two, three], old_file))
     self.assertFalse(newer_group([one, two, old_file], three))
     os.remove(one)
     self.assertRaises(OSError, newer_group, [one, two, old_file], three)
     self.assertFalse(newer_group([one, two, old_file], three, missing='ignore'))
     self.assertTrue(newer_group([one, two, old_file], three, missing='newer'))
Example #21
 def test_newer_group(self):
     tmpdir = self.mkdtemp()
     sources = os.path.join(tmpdir, "sources")
     os.mkdir(sources)
     one = os.path.join(sources, "one")
     two = os.path.join(sources, "two")
     three = os.path.join(sources, "three")
     old_file = os.path.abspath(__file__)
     self.write_file(one)
     self.write_file(two)
     self.write_file(three)
     self.assertTrue(newer_group([one, two, three], old_file))
     self.assertFalse(newer_group([one, two, old_file], three))
     os.remove(one)
     self.assertRaises(OSError, newer_group, [one, two, old_file], three)
     self.assertFalse(newer_group([one, two, old_file], three, missing="ignore"))
     self.assertTrue(newer_group([one, two, old_file], three, missing="newer"))
Example #22
    def swig_sources (self, sources, extension=None):
        if not self.extensions:
            return

        cfg = PyQt4.pyqtconfig.Configuration()

        # add directory of input files as include path
        indirs = list(set([os.path.dirname(x) for x in sources]))

        # Add the SIP and Qt include directories to the include path
        extension.include_dirs += [
            cfg.sip_inc_dir,
            cfg.qt_inc_dir,
            ] + self.get_includes(cfg) + indirs

        # link against libraries
        if cfg.qt_framework:
            extension.extra_link_args = ['-framework', 'QtGui',
                                         '-framework', 'QtCore',
                                         '-framework', 'QtXml']
        elif sys.platform == 'win32':
            extension.libraries = ['QtGui4', 'QtCore4', 'QtXml4']
        else:
            extension.libraries = ['QtGui', 'QtCore', 'QtXml']
        extension.library_dirs = [cfg.qt_lib_dir]

        depends = extension.depends

        # Filter dependencies list: we are interested only in .sip files,
        # since the main .sip files can only depend on additional .sip
        # files. For instance, if a .h changes, there is no need to
        # run sip again.
        depends = [f for f in depends if os.path.splitext(f)[1] == '.sip']

        # Create the temporary directory if it does not exist already
        if not os.path.isdir(self.build_temp):
            os.makedirs(self.build_temp)

        # Collect the names of the source (.sip) files
        sip_sources = []
        sip_sources = [source for source in sources if source.endswith('.sip')]
        other_sources = [source for source in sources
                         if not source.endswith('.sip')]
        generated_sources = []

        sip_bin = self._find_sip()

        for sip in sip_sources:
            # Use the sbf file as dependency check
            sipbasename = os.path.basename(sip)
            sbf = os.path.join(self.build_temp,
                               replace_suffix(sipbasename, '.sbf'))
            if newer_group([sip]+depends, sbf) or self.force:
                self._sip_compile(sip_bin, sip, sbf)
            out = self._get_sip_output_list(sbf)
            generated_sources.extend(out)

        return generated_sources + other_sources
Example #23
	def togl_build(self, libs):
		"""Core functionality of the togl_build command"""
		# we subclass build_ext because we need an initialized
		# compiler object from build_ext, but we also need a pointer
		# to the build command instance
		build = self.get_finalized_command ('build')
		
		libs = libs + []
		
		extra_link_args = [] # ['-s']
		# Name for the shared lib: (distutils will change it to Togl.{so|dll})
		output_name = 'Togl'
		# where to put the built shared object (only for build process)
		output_dir = Togl + '-tk' + tk.getvar('tk_version')
		export_symbols = ['Togl_Init']
		sources = [os.path.join('src',Togl,'togl.c')]
		
		runtime_library_dirs = None


		# rest of this function was inspired by build_ext.py , build_extensions() 
		
		if not self.inplace:
			output_dir = os.path.join (build.build_base, output_dir)

		# what is the name of the resulting file
		output_filename = self.compiler.shared_object_filename(
				basename=output_name,
				output_dir=output_dir)

		if not (self.force or newer_group(sources, output_filename, 'newer')):
			self.announce ('skipping "%s" (up-to-date)' % output_name)  
		else:
			self.announce ('building "%s"' % output_name)

			# compile source files        
			objects = self.compiler.compile (sources,
								output_dir=self.build_temp,
								macros=[
									('USE_TCL_STUBS',1),
									('USE_TK_STUBS',1),
								],
								include_dirs=self.include_dirs,
								debug=self.debug,
								extra_postargs=self.extra_compile_args)

			# link all together
			self.compiler.link_shared_object (
					objects, 
					output_filename,
					'', # <= output_dir
					libraries=libs,
					library_dirs=self.library_dirs,
					runtime_library_dirs=runtime_library_dirs,
					extra_postargs=extra_link_args,
					export_symbols=export_symbols, 
					debug=self.debug,
					build_temp=self.build_temp)
Example #24
def build_extensions():
    '''Compile C and Cython files as needed'''
    import subprocess
    import cellprofiler.cpmath.setup
    import cellprofiler.utilities.setup
    from distutils.dep_util import newer_group
    #
    # Check for dependencies and compile if necessary
    #
    compile_scripts = [(os.path.join('cellprofiler', 'cpmath', 'setup.py'),
                        cellprofiler.cpmath.setup),
                       (os.path.join('cellprofiler', 'utilities', 'setup.py'),
                        cellprofiler.utilities.setup)]
    env = os.environ.copy()
    old_pythonpath = os.getenv('PYTHONPATH', None)

    # if we're using a local site_packages, the subprocesses will need
    # to be able to find it.
    
    if old_pythonpath:
        env['PYTHONPATH'] = site_packages + os.pathsep + old_pythonpath
    else:
        env['PYTHONPATH'] = site_packages

    use_mingw = (sys.platform == 'win32' and sys.version_info[0] <= 2 and
                 sys.version_info[1] <= 5)
    for key in list(env.keys()):
        value = env[key]
        if isinstance(key, unicode):
            key = key.encode("utf-8")
        if isinstance(value, unicode):
            value = value.encode("utf-8")
        env[key] = value
    for compile_script, my_module in compile_scripts:
        script_path, script_file = os.path.split(compile_script)
        script_path = os.path.join(root, script_path)
        configuration = my_module.configuration()
        needs_build = False
        for extension in configuration['ext_modules']:
            target = extension.name + '.pyd'
            if newer_group(extension.sources, target):
                needs_build = True
        if not needs_build:
            continue
        if use_mingw:
            p = subprocess.Popen([sys.executable,
                                  script_file,
                                  "build_ext", "-i",
                                  "--compiler=mingw32"],
                                 cwd=script_path,
                                 env=env)
        else:
            p = subprocess.Popen([sys.executable,
                                  script_file,
                                  "build_ext", "-i"],
                                 cwd=script_path,
                                 env=env)
        p.communicate()
Example #25
def should_rebuild(targets, source_files):
    from distutils.dep_util import newer_group
    for t in targets:
        if not os.path.exists(t):
            return True
    sources = API_FILES + list(source_files) + [__file__]
    if newer_group(sources, targets[0], missing='newer'):
        return True
    return False
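The same pattern can be written without the module-level API_FILES list; a self-contained sketch for illustration (names are hypothetical):

import os
from distutils.dep_util import newer_group

def should_rebuild_sketch(targets, sources):
    # Rebuild if any expected output is missing, or if the first output is
    # older than any source; missing sources also count as "rebuild needed".
    if any(not os.path.exists(t) for t in targets):
        return True
    return bool(newer_group(sources, targets[0], missing='newer'))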
Example #26
    def build_static_extension(self, ext):
        from distutils import log

        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                (
                    "in 'ext_modules' option (extension '%s'), "
                    + "'sources' must be present and must be "
                    + "a list of source filenames"
                )
                % ext.name
            )
        sources = list(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, "newer")):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        extra_args = ext.extra_compile_args or []
        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef,))
        objects = self.compiler.compile(
            sources,
            output_dir=self.build_temp,
            macros=macros,
            include_dirs=ext.include_dirs,
            debug=self.debug,
            extra_postargs=extra_args,
            depends=ext.depends,
        )
        self._built_objects = objects[:]
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        language = ext.language or self.compiler.detect_language(sources)

        libname = os.path.splitext(os.path.basename(ext_path))[0]
        output_dir = os.path.dirname(ext_path)
        if self.compiler.static_lib_format.startswith("lib") and libname.startswith("lib"):
            libname = libname[3:]

        if not os.path.exists(output_dir):
            # necessary for windows
            os.makedirs(output_dir)

        self.compiler.create_static_lib(objects, output_libname=libname, output_dir=output_dir, target_lang=language)

        for item in ext.export_include:
            shutil.copy(item, output_dir)
Example #27
def needs_updating(xdrfile):
    name_base = xdrfile[:xdrfile.rfind(".")]
    sources = [xdrfile]
    targets = [ name_base + "_const.py",
                name_base + "_type.py",
                name_base + "_pack.py" ]
    for t in targets:
        if newer_group(sources, t):
            return True
    return False
Example #28
    def prepare_extension(self, ext):
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(("in 'ext_modules' option (extension '%s'), " +
                   "'sources' must be present and must be " +
                   "a list of source filenames") % ext.name)
        sources = list(sources)

        fullname = self.get_ext_fullname(ext.name)
        if self.inplace:
            # ignore build-lib -- put the compiled extension into
            # the source tree along with pure Python modules

            modpath = fullname.split('.')
            package = '.'.join(modpath[0:-1])
            base = modpath[-1]

            build_py = self.get_finalized_command('build_py')
            package_dir = build_py.get_package_dir(package)
            ext_filename = os.path.join(package_dir,
                                        self.get_ext_filename(base))
            relative_ext_filename = self.get_ext_filename(base)
        else:
            ext_filename = os.path.join(self.build_lib,
                                        self.get_ext_filename(fullname))
            relative_ext_filename = self.get_ext_filename(fullname)

        # while dispatching the calls to gcc in parallel, we sometimes
        # hit a race condition where two separate build_ext objects
        # try to create a given directory at the same time; whoever
        # loses the race then seems to throw an error, saying that
        # the directory already exists. so, instead of fighting to
        # fix the race condition, we simply make sure the entire
        # directory tree exists now, while we're processing the
        # extensions in serial.
        relative_ext_dir = os.path.split(relative_ext_filename)[0]
        prefixes = ['', self.build_lib, self.build_temp]
        for prefix in prefixes:
            path = os.path.join(prefix, relative_ext_dir)
            try:
                os.makedirs(path)
            except OSError as e:
                assert e.errno==errno.EEXIST, 'Cannot create %s.' % path
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_filename, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            need_to_compile = False
        elif getattr(ext, "skip_build", False):
            log.debug("skipping '%s' extension (optional)", ext.name)
            need_to_compile = False
        else:
            log.info("building '%s' extension", ext.name)
            need_to_compile = True

        return need_to_compile, (sources, ext, ext_filename)
Example #29
def needs_updating(xdrfile):
    gen_path = os.path.join(topdir, 'lib', 'rpcgen.py')
    name_base = xdrfile[:xdrfile.rfind(".")]
    sources = [gen_path, xdrfile]
    targets = [ name_base + "_const.py",
                name_base + "_type.py",
                name_base + "_pack.py" ]
    for t in targets:
        if newer_group(sources, t):
            return True
    return False
Example #30
def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra):
    macros, objects, extra, pp_opts, build = \
          _orig_setup_compile(self, outdir, macros, incdirs, sources, depends, extra)

    # Remove items from the build collection that don't need to be built
    # because their obj file is newer than the source file and any other
    # dependencies.
    for obj in objects:
        src, ext = build[obj]
        if not newer_group([src] + depends, obj):
            del build[obj]
    return macros, objects, extra, pp_opts, build
Example #31
    def build_extension(self, ext):
        sources = ext.sources
        if sources is None or not is_sequence(sources):
            raise DistutilsSetupError(
                ("in 'ext_modules' option (extension '%s'), " +
                 "'sources' must be present and must be " +
                 "a list of source filenames") % ext.name)
        sources = list(sources)

        if not sources:
            return

        fullname = self.get_ext_fullname(ext.name)
        if self.inplace:
            modpath = fullname.split('.')
            package = '.'.join(modpath[0:-1])
            base = modpath[-1]
            build_py = self.get_finalized_command('build_py')
            package_dir = build_py.get_package_dir(package)
            ext_filename = os.path.join(package_dir,
                                        self.get_ext_filename(base))
        else:
            ext_filename = os.path.join(self.build_lib,
                                        self.get_ext_filename(fullname))
        depends = sources + ext.depends

        if not (self.force or newer_group(depends, ext_filename, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        extra_args = ext.extra_compile_args or []
        cxx_extra_args = ext.extra_compile_args or []
        extra_link_args = ext.extra_link_args or []

        c = os.path.basename(self.compiler.compiler[0])
        cxx = os.path.basename(self.compiler.compiler_cxx[0])
        if None in copt:
            extra_args += copt[None]
        if c in copt:
            extra_args += copt[c]
        if None in cxxopt:
            cxx_extra_args += cxxopt[None]
        if cxx in cxxopt:
            cxx_extra_args += cxxopt[cxx]
        if None in lopt:
            extra_link_args += lopt[None]
        if c in lopt:
            extra_link_args += lopt[c]
        if cxx in lopt:
            extra_link_args += lopt[cxx]

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef, ))

        c_sources, cxx_sources, f_sources, fmodule_sources = \
                   filter_sources(ext.sources)

        if self.compiler.compiler_type == 'msvc':
            if cxx_sources:
                # Needed to compile kiva.agg._agg extension.
                cxx_extra_args.append('/Zm1000')
            # this hack works around the msvc compiler attributes
            # problem, msvc uses its own convention :(
            c_sources += cxx_sources
            cxx_sources = []

        # Set Fortran/C++ compilers for compilation and linking.
        if ext.language == 'f90':
            fcompiler = self._f90_compiler
        elif ext.language == 'f77':
            fcompiler = self._f77_compiler
        else:  # in case ext.language is c++, for instance
            fcompiler = self._f90_compiler or self._f77_compiler
        if fcompiler is not None:
            fcompiler.extra_f77_compile_args = (
                ext.extra_f77_compile_args or []) if hasattr(
                    ext, 'extra_f77_compile_args') else []
            fcompiler.extra_f90_compile_args = (
                ext.extra_f90_compile_args or []) if hasattr(
                    ext, 'extra_f90_compile_args') else []
        cxx_compiler = self._cxx_compiler

        # check for the availability of required compilers
        if cxx_sources and cxx_compiler is None:
            raise DistutilsError("extension %r has C++ sources" \
                  "but no C++ compiler found" % (ext.name))
        if (f_sources or fmodule_sources) and fcompiler is None:
            raise DistutilsError("extension %r has Fortran sources " \
                  "but no Fortran compiler found" % (ext.name))
        if ext.language in ['f77', 'f90'] and fcompiler is None:
            self.warn("extension %r has Fortran libraries " \
                  "but no Fortran linker found, using default linker" % (ext.name))
        if ext.language == 'c++' and cxx_compiler is None:
            self.warn("extension %r has C++ libraries " \
                  "but no C++ linker found, using default linker" % (ext.name))

        kws = {'depends': ext.depends}
        output_dir = self.build_temp

        include_dirs = ext.include_dirs + get_numpy_include_dirs()

        c_objects = []
        if c_sources:
            log.info("compiling C sources with arguments %r", extra_args)
            c_objects = self.compiler.compile(c_sources,
                                              output_dir=output_dir,
                                              macros=macros,
                                              include_dirs=include_dirs,
                                              debug=self.debug,
                                              extra_postargs=extra_args,
                                              **kws)

        if cxx_sources:
            log.info("compiling C++ sources with arguments %r", cxx_extra_args)
            c_objects += cxx_compiler.compile(cxx_sources,
                                              output_dir=output_dir,
                                              macros=macros,
                                              include_dirs=include_dirs,
                                              debug=self.debug,
                                              extra_postargs=cxx_extra_args,
                                              **kws)

        extra_postargs = []
        f_objects = []
        if fmodule_sources:
            log.info("compiling Fortran 90 module sources")
            module_dirs = ext.module_dirs[:]
            module_build_dir = os.path.join(
                self.build_temp,
                os.path.dirname(self.get_ext_filename(fullname)))

            self.mkpath(module_build_dir)
            if fcompiler.module_dir_switch is None:
                existing_modules = glob('*.mod')
            extra_postargs += fcompiler.module_options(module_dirs,
                                                       module_build_dir)
            f_objects += fcompiler.compile(fmodule_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs,
                                           depends=ext.depends)

            if fcompiler.module_dir_switch is None:
                for f in glob('*.mod'):
                    if f in existing_modules:
                        continue
                    t = os.path.join(module_build_dir, f)
                    if os.path.abspath(f) == os.path.abspath(t):
                        continue
                    if os.path.isfile(t):
                        os.remove(t)
                    try:
                        self.move_file(f, module_build_dir)
                    except DistutilsFileError:
                        log.warn('failed to move %r to %r' %
                                 (f, module_build_dir))
        if f_sources:
            log.info("compiling Fortran sources")
            f_objects += fcompiler.compile(f_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs,
                                           depends=ext.depends)

        objects = c_objects + f_objects

        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        libraries = self.get_libraries(ext)[:]
        library_dirs = ext.library_dirs[:]

        linker = self.compiler.link_shared_object
        # Always use system linker when using MSVC compiler.
        if self.compiler.compiler_type == 'msvc':
            # expand libraries with fcompiler libraries as we are
            # not using fcompiler linker
            self._libs_with_msvc_and_fortran(fcompiler, libraries,
                                             library_dirs)

        elif ext.language in ['f77', 'f90'] and fcompiler is not None:
            linker = fcompiler.link_shared_object
        if ext.language == 'c++' and cxx_compiler is not None:
            linker = cxx_compiler.link_shared_object

        if sys.version[:3] >= '2.3':
            kws = {'target_lang': ext.language}
        else:
            kws = {}

        linker(objects,
               ext_filename,
               libraries=libraries,
               library_dirs=library_dirs,
               runtime_library_dirs=ext.runtime_library_dirs,
               extra_postargs=extra_link_args,
               export_symbols=self.get_export_symbols(ext),
               debug=self.debug,
               build_temp=self.build_temp,
               **kws)
Example #32
    def build_extension(self, ext):
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                "in 'ext_modules' option (extension '%s'), "
                "'sources' must be present and must be "
                "a list of source filenames" % ext.name)
        sources = list(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        # do compiler specific customizations
        compiler_type = self.compiler.compiler_type

        # strip compile flags that are not valid for C++ to avoid warnings
        if compiler_type == "unix" and language == "c++":
            if "-Wstrict-prototypes" in self.compiler.compiler_so:
                self.compiler.compiler_so.remove("-Wstrict-prototypes")

        if isinstance(ext.extra_compile_args, dict):
            if compiler_type in ext.extra_compile_args:
                extra_compile_args = ext.extra_compile_args[compiler_type]
            else:
                extra_compile_args = ext.extra_compile_args.get("", [])
        else:
            extra_compile_args = ext.extra_compile_args or []

        if isinstance(ext.extra_link_args, dict):
            if compiler_type in ext.extra_link_args:
                extra_link_args = ext.extra_link_args[compiler_type]
            else:
                extra_link_args = ext.extra_link_args.get("", [])
        else:
            extra_link_args = ext.extra_link_args or []

        if isinstance(ext.define_macros, dict):
            if compiler_type in ext.define_macros:
                macros = ext.define_macros[compiler_type]
            else:
                macros = ext.define_macros.get("", [])
        else:
            macros = ext.define_macros or []

        if isinstance(ext.undef_macros, dict):
            for tp, undef in ext.undef_macros.items():
                if tp == compiler_type:
                    macros.append((undef, ))
        else:
            for undef in ext.undef_macros:
                macros.append((undef, ))

        if os.environ.get("CYTHON_TRACE") == "1":
            log.debug("adding -DCYTHON_TRACE to preprocessor macros")
            macros.append(("CYTHON_TRACE", 1))

        # compile the source code to object files.
        objects = self.compiler.compile(sources,
                                        output_dir=self.build_temp,
                                        macros=macros,
                                        include_dirs=ext.include_dirs,
                                        debug=self.debug,
                                        extra_postargs=extra_compile_args,
                                        depends=ext.depends)

        # Now link the object files together into a "shared object"
        if ext.extra_objects:
            objects.extend(ext.extra_objects)

        self.compiler.link_shared_object(
            objects,
            ext_path,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_link_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language)
Example #33
    def f2py_sources(self, sources, extension):
        new_sources = []
        f2py_sources = []
        f_sources = []
        f2py_targets = {}
        target_dirs = []
        ext_name = extension.name.split('.')[-1]
        skip_f2py = 0

        for source in sources:
            (base, ext) = os.path.splitext(source)
            if ext == '.pyf':  # F2PY interface file
                if self.inplace:
                    target_dir = os.path.dirname(base)
                else:
                    target_dir = appendpath(self.build_src,
                                            os.path.dirname(base))
                if os.path.isfile(source):
                    name = get_f2py_modulename(source)
                    if name != ext_name:
                        raise DistutilsSetupError(
                            'mismatch of extension names: %s '
                            'provides %r but expected %r' %
                            (source, name, ext_name))
                    target_file = os.path.join(target_dir, name + 'module.c')
                else:
                    log.debug('  source %s does not exist: skipping f2py\'ing.' \
                              % (source))
                    name = ext_name
                    skip_f2py = 1
                    target_file = os.path.join(target_dir, name + 'module.c')
                    if not os.path.isfile(target_file):
                        log.warn('  target %s does not exist:\n   '\
                                 'Assuming %smodule.c was generated with '\
                                 '"build_src --inplace" command.' \
                                 % (target_file, name))
                        target_dir = os.path.dirname(base)
                        target_file = os.path.join(target_dir,
                                                   name + 'module.c')
                        if not os.path.isfile(target_file):
                            raise DistutilsSetupError("%r missing" %
                                                      (target_file, ))
                        log.info('   Yes! Using %r as up-to-date target.' \
                                 % (target_file))
                target_dirs.append(target_dir)
                f2py_sources.append(source)
                f2py_targets[source] = target_file
                new_sources.append(target_file)
            elif fortran_ext_match(ext):
                f_sources.append(source)
            else:
                new_sources.append(source)

        if not (f2py_sources or f_sources):
            return new_sources

        for d in target_dirs:
            self.mkpath(d)

        f2py_options = extension.f2py_options + self.f2py_opts

        if self.distribution.libraries:
            for name, build_info in self.distribution.libraries:
                if name in extension.libraries:
                    f2py_options.extend(build_info.get('f2py_options', []))

        log.info("f2py options: %s" % (f2py_options))

        if f2py_sources:
            if len(f2py_sources) != 1:
                raise DistutilsSetupError(
                    'only one .pyf file is allowed per extension module but got'\
                    ' more: %r' % (f2py_sources,))
            source = f2py_sources[0]
            target_file = f2py_targets[source]
            target_dir = os.path.dirname(target_file) or '.'
            depends = [source] + extension.depends
            if (self.force or newer_group(depends, target_file, 'newer')) \
                   and not skip_f2py:
                log.info("f2py: %s" % (source))
                import numpy.f2py
                numpy.f2py.run_main(f2py_options +
                                    ['--build-dir', target_dir, source])
            else:
                log.debug("  skipping '%s' f2py interface (up-to-date)" %
                          (source))
        else:
            #XXX TODO: --inplace support for sdist command
            if is_sequence(extension):
                name = extension[0]
            else:
                name = extension.name
            target_dir = os.path.join(*([self.build_src] +
                                        name.split('.')[:-1]))
            target_file = os.path.join(target_dir, ext_name + 'module.c')
            new_sources.append(target_file)
            depends = f_sources + extension.depends
            if (self.force or newer_group(depends, target_file, 'newer')) \
                   and not skip_f2py:
                log.info("f2py:> %s" % (target_file))
                self.mkpath(target_dir)
                import numpy.f2py
                numpy.f2py.run_main(f2py_options + ['--lower',
                                                '--build-dir', target_dir]+\
                                ['-m', ext_name]+f_sources)
            else:
                log.debug("  skipping f2py fortran files for '%s' (up-to-date)"\
                          % (target_file))

        if not os.path.isfile(target_file):
            raise DistutilsError("f2py target file %r not generated" %
                                 (target_file, ))

        build_dir = os.path.join(self.build_src, target_dir)
        target_c = os.path.join(build_dir, 'fortranobject.c')
        target_h = os.path.join(build_dir, 'fortranobject.h')
        log.info("  adding '%s' to sources." % (target_c))
        new_sources.append(target_c)
        if build_dir not in extension.include_dirs:
            log.info("  adding '%s' to include_dirs." % (build_dir))
            extension.include_dirs.append(build_dir)

        if not skip_f2py:
            import numpy.f2py
            d = os.path.dirname(numpy.f2py.__file__)
            source_c = os.path.join(d, 'src', 'fortranobject.c')
            source_h = os.path.join(d, 'src', 'fortranobject.h')
            if newer(source_c, target_c) or newer(source_h, target_h):
                self.mkpath(os.path.dirname(target_c))
                self.copy_file(source_c, target_c)
                self.copy_file(source_h, target_h)
        else:
            if not os.path.isfile(target_c):
                raise DistutilsSetupError("f2py target_c file %r not found" %
                                          (target_c, ))
            if not os.path.isfile(target_h):
                raise DistutilsSetupError("f2py target_h file %r not found" %
                                          (target_h, ))

        for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']:
            filename = os.path.join(target_dir, ext_name + name_ext)
            if os.path.isfile(filename):
                log.info("  adding '%s' to sources." % (filename))
                f_sources.append(filename)

        return new_sources + f_sources
Example #34
    def build_a_library(self, build_info, lib_name, libraries):
        # default compilers
        compiler = self.compiler
        fcompiler = self.fcompiler

        sources = build_info.get('sources')
        if sources is None or not is_sequence(sources):
            raise DistutilsSetupError, \
                  ("in 'libraries' option (library '%s'), " +
                   "'sources' must be present and must be " +
                   "a list of source filenames") % lib_name
        sources = list(sources)

        c_sources, cxx_sources, f_sources, fmodule_sources \
                   = filter_sources(sources)
        requiref90 = not not fmodule_sources or \
                     build_info.get('language','c')=='f90'

        # save source type information so that build_ext can use it.
        source_languages = []
        if c_sources: source_languages.append('c')
        if cxx_sources: source_languages.append('c++')
        if requiref90: source_languages.append('f90')
        elif f_sources: source_languages.append('f77')
        build_info['source_languages'] = source_languages

        lib_file = compiler.library_filename(lib_name,
                                             output_dir=self.build_clib)
        depends = sources + build_info.get('depends', [])
        if not (self.force or newer_group(depends, lib_file, 'newer')):
            log.debug("skipping '%s' library (up-to-date)", lib_name)
            return
        else:
            log.info("building '%s' library", lib_name)

        config_fc = build_info.get('config_fc', {})
        if fcompiler is not None and config_fc:
            log.info('using additional config_fc from setup script '\
                     'for fortran compiler: %s' \
                     % (config_fc,))
            from numpy.distutils.fcompiler import new_fcompiler
            fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
                                      verbose=self.verbose,
                                      dry_run=self.dry_run,
                                      force=self.force,
                                      requiref90=requiref90,
                                      c_compiler=self.compiler)
            if fcompiler is not None:
                dist = self.distribution
                base_config_fc = dist.get_option_dict('config_fc').copy()
                base_config_fc.update(config_fc)
                fcompiler.customize(base_config_fc)

        # check availability of Fortran compilers
        if (f_sources or fmodule_sources) and fcompiler is None:
            raise DistutilsError, "library %s has Fortran sources"\
                  " but no Fortran compiler found" % (lib_name)

        macros = build_info.get('macros')
        include_dirs = build_info.get('include_dirs')
        if include_dirs is None:
            include_dirs = []
        extra_postargs = build_info.get('extra_compiler_args') or []

        include_dirs.extend(get_numpy_include_dirs())
        # where compiled F90 module files are:
        module_dirs = build_info.get('module_dirs') or []
        module_build_dir = os.path.dirname(lib_file)
        if requiref90: self.mkpath(module_build_dir)

        if compiler.compiler_type == 'msvc':
            # this hack works around the msvc compiler attributes
            # problem, msvc uses its own convention :(
            c_sources += cxx_sources
            cxx_sources = []

        objects = []
        if c_sources:
            log.info("compiling C sources")
            objects = compiler.compile(c_sources,
                                       output_dir=self.build_temp,
                                       macros=macros,
                                       include_dirs=include_dirs,
                                       debug=self.debug,
                                       extra_postargs=extra_postargs)

        if cxx_sources:
            log.info("compiling C++ sources")
            cxx_compiler = compiler.cxx_compiler()
            cxx_objects = cxx_compiler.compile(cxx_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)
            objects.extend(cxx_objects)

        if f_sources or fmodule_sources:
            extra_postargs = []
            f_objects = []

            if requiref90:
                if fcompiler.module_dir_switch is None:
                    existing_modules = glob('*.mod')
                extra_postargs += fcompiler.module_options(\
                    module_dirs,module_build_dir)

            if fmodule_sources:
                log.info("compiling Fortran 90 module sources")
                f_objects += fcompiler.compile(fmodule_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)

            if requiref90 and self.fcompiler.module_dir_switch is None:
                # move new compiled F90 module files to module_build_dir
                for f in glob('*.mod'):
                    if f in existing_modules:
                        continue
                    t = os.path.join(module_build_dir, f)
                    if os.path.abspath(f) == os.path.abspath(t):
                        continue
                    if os.path.isfile(t):
                        os.remove(t)
                    try:
                        self.move_file(f, module_build_dir)
                    except DistutilsFileError:
                        log.warn('failed to move %r to %r' \
                                 % (f, module_build_dir))

            if f_sources:
                log.info("compiling Fortran sources")
                f_objects += fcompiler.compile(f_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)
        else:
            f_objects = []

        objects.extend(f_objects)

        # assume that default linker is suitable for
        # linking Fortran object files
        compiler.create_static_lib(objects,
                                   lib_name,
                                   output_dir=self.build_clib,
                                   debug=self.debug)

        # fix library dependencies
        clib_libraries = build_info.get('libraries', [])
        for lname, binfo in libraries:
            if lname in clib_libraries:
                clib_libraries.extend(binfo.get('libraries', []))
        if clib_libraries:
            build_info['libraries'] = clib_libraries
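
For reference, a minimal, hypothetical illustration of the up-to-date check that gates this method: the library is rebuilt only when some dependency is newer than the archive, and missing files count as newer.

    from distutils.dep_util import newer_group

    sources = ['src/alpha.c', 'src/beta.c']        # hypothetical inputs
    depends = sources + ['include/alpha.h']
    lib_file = 'build/temp/libdemo.a'

    if newer_group(depends, lib_file, missing='newer'):
        print("building 'demo' library")
    else:
        print("skipping 'demo' library (up-to-date)")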
Exemple #35
0
    def build_extension(self, ext):
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                "in 'ext_modules' option (extension '%s'), "
                "'sources' must be present and must be "
                "a list of source filenames" % ext.name)
        # sort to make the resulting .so file build reproducible
        sources = sorted(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources, ext)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef, ))

        objects = self.compiler.compile(
            sources,
            output_dir=self.build_temp,
            macros=macros,
            include_dirs=ext.include_dirs,
            debug=self.debug,
            extra_postargs=extra_args,
            depends=ext.depends,
        )

        # XXX outdated variable, kept here in case third-party code
        # needs it.
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        self.compiler.link_shared_object(
            objects,
            ext_path,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language,
        )
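
The 1-tuples appended for undef_macros above follow the CCompiler convention for the 'macros' argument: a (name, value) pair defines a macro, while a 1-tuple (name,) undefines it. A small hypothetical illustration:

    # Hypothetical 'macros' list as accepted by CCompiler.compile():
    macros = [
        ('NDEBUG', '1'),         # becomes -DNDEBUG=1
        ('WITH_THREADS', None),  # becomes -DWITH_THREADS
        ('DEBUG_TRACE',),        # 1-tuple: becomes -UDEBUG_TRACE
    ]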
Exemple #36
0
# yes, this was almost entirely copy-pasted from
# 'newer_pairwise()', this is just another convenience
# function.
def newer_pairwise_group(sources_groups, targets):
    """Walk both arguments in parallel, testing if each source group is newer
    than its corresponding target. Returns a pair of lists (sources_groups,
    targets) where sources is newer than target, according to the semantics
    of 'newer_group()'.
    """
    if len(sources_groups) != len(targets):
        raise ValueError(
            "'sources_groups' and 'targets' must be the same length")

    # build a pair of lists (sources_groups, targets) where source is newer
    n_sources = []
    n_targets = []
    for i in range(len(sources_groups)):
        if newer_group(sources_groups[i], targets[i]):
            n_sources.append(sources_groups[i])
            n_targets.append(targets[i])

    return n_sources, n_targets
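
A short usage sketch for this helper, with hypothetical file names (all of which are assumed to exist): only the pairs whose target is out of date are returned.

    # Hypothetical inputs: one source group per generated target.
    groups  = [['mod1.pyx', 'mod1.pxd'], ['mod2.pyx']]
    targets = ['mod1.c', 'mod2.c']

    stale_groups, stale_targets = newer_pairwise_group(groups, targets)
    for group, target in zip(stale_groups, stale_targets):
        print("regenerating %s from %s" % (target, group))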
Exemple #37
0
    def stale_win_go(self):
        existing_archive = 'pykubectl/lib/libgokubectl.dll'
        go_package = pathlib.Path('main')
        go_sources = list(str(f.resolve()) for f in go_package.rglob('*.go'))
        return newer_group(go_sources, existing_archive)
Exemple #38
0
    def stale_go(self):
        installed_lib = os.path.join(LIB_DIR, 'libgokubectl.so')
        go_package = pathlib.Path('main')
        go_sources = list(str(f.resolve()) for f in go_package.rglob('*.go'))
        return newer_group(go_sources, installed_lib)
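
A hedged sketch of how a staleness check like the two above is typically wired into a build step; the command line and paths are assumptions, not taken from the project.

    import pathlib
    import subprocess
    from distutils.dep_util import newer_group

    def maybe_build_go(target='pykubectl/lib/libgokubectl.so'):
        go_sources = [str(f.resolve()) for f in pathlib.Path('main').rglob('*.go')]
        # Rebuild only when a .go source is newer than the artifact,
        # or the artifact does not exist yet.
        if newer_group(go_sources, target, missing='newer'):
            subprocess.check_call(
                ['go', 'build', '-buildmode=c-shared', '-o', target, './main'])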
Exemple #39
0
    def cython_sources(self, sources, extension):
        """
        Walk the list of source files in 'sources', looking for Cython
        source files (.pyx and .py).  Run Cython on all that are
        found, and return a modified 'sources' list with Cython source
        files replaced by the generated C (or C++) files.
        """
        new_sources = []
        cython_sources = []
        cython_targets = {}

        # Setup create_list and cplus from the extension options if
        # Cython.Distutils.extension.Extension is used, otherwise just
        # use what was parsed from the command-line or the configuration file.
        # cplus will also be set to true if extension.language is equal to
        # 'C++' or 'c++'.
        #try:
        #    create_listing = self.cython_create_listing or \
        #                        extension.cython_create_listing
        #    cplus = self.cython_cplus or \
        #                extension.cython_cplus or \
        #                (extension.language != None and \
        #                    extension.language.lower() == 'c++')
        #except AttributeError:
        #    create_listing = self.cython_create_listing
        #    cplus = self.cython_cplus or \
        #                (extension.language != None and \
        #                    extension.language.lower() == 'c++')

        create_listing = self.cython_create_listing or \
            getattr(extension, 'cython_create_listing', 0)
        line_directives = self.cython_line_directives or \
            getattr(extension, 'cython_line_directives', 0)
        no_c_in_traceback = self.no_c_in_traceback or \
            getattr(extension, 'no_c_in_traceback', 0)
        cplus = self.cython_cplus or getattr(extension, 'cython_cplus', 0) or \
                (extension.language and extension.language.lower() == 'c++')
        cython_gen_pxi = self.cython_gen_pxi or getattr(extension, 'cython_gen_pxi', 0)
        cython_gdb = self.cython_gdb or getattr(extension, 'cython_gdb', False)
        cython_compile_time_env = self.cython_compile_time_env or \
            getattr(extension, 'cython_compile_time_env', None)

        # Set up the include_path for the Cython compiler:
        #    1.    Start with the command line option.
        #    2.    Add in any (unique) paths from the extension
        #        cython_include_dirs (if Cython.Distutils.extension is used).
        #    3.    Add in any (unique) paths from the extension include_dirs
        includes = list(self.cython_include_dirs)
        try:
            for i in extension.cython_include_dirs:
                if not i in includes:
                    includes.append(i)
        except AttributeError:
            pass

        # In case extension.include_dirs is a generator, evaluate it and keep
        # result
        extension.include_dirs = list(extension.include_dirs)
        for i in extension.include_dirs:
            if not i in includes:
                includes.append(i)

        # Set up Cython compiler directives:
        #    1. Start with the command line option.
        #    2. Add in any (unique) entries from the extension
        #         cython_directives (if Cython.Distutils.extension is used).
        directives = dict(self.cython_directives)
        if hasattr(extension, "cython_directives"):
            directives.update(extension.cython_directives)

        # Set the target file extension for C/C++ mode.
        if cplus:
            target_ext = '.cpp'
        else:
            target_ext = '.c'

        # Decide whether to drop the generated C files into the temp dir
        # or the source tree.

        if not self.inplace and (self.cython_c_in_temp
                or getattr(extension, 'cython_c_in_temp', 0)):
            target_dir = os.path.join(self.build_temp, "pyrex")
            for package_name in extension.name.split('.')[:-1]:
                target_dir = os.path.join(target_dir, package_name)
        else:
            target_dir = None

        newest_dependency = None
        for source in sources:
            (base, ext) = os.path.splitext(os.path.basename(source))
            if ext == ".py":
                # FIXME: we might want to special case this some more
                ext = '.pyx'
            if ext == ".pyx":              # Cython source file
                output_dir = target_dir or os.path.dirname(source)
                new_sources.append(os.path.join(output_dir, base + target_ext))
                cython_sources.append(source)
                cython_targets[source] = new_sources[-1]
            elif ext == '.pxi' or ext == '.pxd':
                if newest_dependency is None \
                        or newer(source, newest_dependency):
                    newest_dependency = source
            else:
                new_sources.append(source)

        if not cython_sources:
            return new_sources

        try:
            from Cython.Compiler.Main \
                import CompilationOptions, \
                       default_options as cython_default_options, \
                       compile as cython_compile
            from Cython.Compiler.Errors import PyrexError
        except ImportError:
            e = sys.exc_info()[1]
            print("failed to import Cython: %s" % e)
            raise DistutilsPlatformError("Cython does not appear to be installed")

        module_name = extension.name

        for source in cython_sources:
            target = cython_targets[source]
            depends = [source] + list(extension.depends or ())
            if(source[-4:].lower()==".pyx" and os.path.isfile(source[:-3]+"pxd")):
                depends += [source[:-3]+"pxd"]
            rebuild = self.force or newer_group(depends, target, 'newer')
            if not rebuild and newest_dependency is not None:
                rebuild = newer(newest_dependency, target)
            if rebuild:
                log.info("cythoning %s to %s", source, target)
                self.mkpath(os.path.dirname(target))
                if self.inplace:
                    output_dir = os.curdir
                else:
                    output_dir = self.build_lib
                options = CompilationOptions(cython_default_options,
                    use_listing_file = create_listing,
                    include_path = includes,
                    compiler_directives = directives,
                    output_file = target,
                    cplus = cplus,
                    emit_linenums = line_directives,
                    c_line_in_traceback = not no_c_in_traceback,
                    generate_pxi = cython_gen_pxi,
                    output_dir = output_dir,
                    gdb_debug = cython_gdb,
                    compile_time_env = cython_compile_time_env)
                result = cython_compile(source, options=options,
                                        full_module_name=module_name)
            else:
                log.info("skipping '%s' Cython extension (up-to-date)", target)

        return new_sources
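
A compact, hypothetical restatement of the rebuild test used above: a .pyx file is re-cythonized when it, its sibling .pxd, or the newest shared .pxi/.pxd include is newer than the generated C file.

    import os
    from distutils.dep_util import newer, newer_group

    def needs_recython(pyx, target, shared_includes=(), force=False):
        # Hypothetical helper mirroring the check in cython_sources().
        depends = [pyx]
        pxd = pyx[:-4] + '.pxd'
        if os.path.isfile(pxd):
            depends.append(pxd)
        if force or newer_group(depends, target, 'newer'):
            return True
        # A change to any shared include also triggers a rebuild.
        return any(newer(inc, target) for inc in shared_includes)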
Exemple #40
0
    def build_extension(self, ext):
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                "in 'ext_modules' option (extension '%s'), "
                "'sources' must be present and must be "
                "a list of source filenames" % ext.name)
        sources = list(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # do compiler specific customizations
        compiler_type = self.compiler.compiler_type

        if isinstance(ext.extra_compile_args, dict):
            extra_args_dict = ext.extra_compile_args or {}
            if compiler_type in extra_args_dict:
                extra_args = extra_args_dict[compiler_type]
            else:
                extra_args = extra_args_dict.get("default", [])
        else:
            extra_args = ext.extra_compile_args or []

        if isinstance(ext.define_macros, dict):
            macros_dict = ext.define_macros or {}
            if compiler_type in macros_dict:
                macros = macros_dict[compiler_type]
            else:
                macros = macros_dict.get("default", [])
        else:
            macros = ext.define_macros or []

        if isinstance(ext.undef_macros, dict):
            undef_macros_dict = ext.undef_macros
            for tp, undef in undef_macros_dict.items():
                if tp == compiler_type:
                    macros.append((undef, ))
        else:
            for undef in ext.undef_macros:
                macros.append((undef, ))

        # compile the source code to object files.
        objects = self.compiler.compile(sources,
                                        output_dir=self.build_temp,
                                        macros=macros,
                                        include_dirs=ext.include_dirs,
                                        debug=self.debug,
                                        extra_postargs=extra_args,
                                        depends=ext.depends)

        # Now link the object files together into a "shared object"
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        # TODO: do compiler-specific extra link args?
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        self.compiler.link_shared_object(
            objects,
            ext_path,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language)
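
A hypothetical setup.py fragment showing the per-compiler dictionaries this subclass understands; the keys are distutils compiler types, with 'default' as the fallback.

    from distutils.core import Extension

    # Assumes the customized build_extension() above is installed via cmdclass.
    ext = Extension(
        'fastmod',
        sources=['fastmod.c'],
        extra_compile_args={
            'msvc': ['/O2'],
            'unix': ['-O3'],
            'default': ['-O2'],
        },
        define_macros={
            'msvc': [('WIN32_LEAN_AND_MEAN', '1')],
            'default': [('NDEBUG', '1')],
        },
    )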
Exemple #41
0
    def swig_sources(self, sources, extension=None):
        if not self.extensions:
            return

        # add directory of input files as include path
        indirs = list(set([os.path.dirname(x) for x in sources]))

        # Add the SIP and Qt include directories to the include path
        extension.include_dirs += [
            SIP_INC_DIR,
            QT_INC_DIR,
        ] + self.get_includes() + indirs

        # link against libraries
        if QT_IS_FRAMEWORK:
            extension.extra_link_args = [
                '-F',
                os.path.join(QT_LIB_DIR),
                '-framework',
                'QtGui',
                '-framework',
                'QtCore',
                '-framework',
                'QtXml',
                '-framework',
                'QtWidgets',
            ]
        elif sys.platform == 'win32':
            extension.libraries = ['QtGui5', 'QtCore5', 'QtXml5', 'QtWidgets5']
        else:
            extension.libraries = ['Qt5Gui', 'Qt5Core', 'Qt5Xml', 'Qt5Widgets']
        extension.library_dirs = [QT_LIB_DIR]

        depends = extension.depends

        # Filter dependencies list: we are interested only in .sip files,
        # since the main .sip files can only depend on additional .sip
        # files. For instance, if a .h changes, there is no need to
        # run sip again.
        depends = [f for f in depends if os.path.splitext(f)[1] == '.sip']

        # Create the temporary directory if it does not exist already
        if not os.path.isdir(self.build_temp):
            os.makedirs(self.build_temp)

        # Collect the names of the source (.sip) files
        sip_sources = []
        sip_sources = [source for source in sources if source.endswith('.sip')]
        other_sources = [
            source for source in sources if not source.endswith('.sip')
        ]
        generated_sources = []

        for sip in sip_sources:
            # Use the sbf file as dependency check
            sipbasename = os.path.basename(sip)
            sbf = os.path.join(self.build_temp,
                               replace_suffix(sipbasename, '.sbf'))
            if newer_group([sip] + depends, sbf) or self.force:
                self._sip_compile(sip, sbf)
            out = self._get_sip_output_list(sbf)
            generated_sources.extend(out)

        return generated_sources + other_sources
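
replace_suffix() is a project helper that is not shown here; a plausible stand-in with the behaviour the loop above relies on (swap the file extension, e.g. 'widget.sip' to 'widget.sbf') would be:

    import os

    def replace_suffix(path, new_suffix):
        # Hypothetical implementation, for illustration only.
        return os.path.splitext(path)[0] + new_suffix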
Exemple #42
0
    def build_static(self, ext):
        ## mostly copied from build_extension, changed
        sources = ext.sources
        if sources is None or type(sources) not in (types.ListType,
                                                    types.TupleType):
            raise DistutilsSetupError, \
                  ("in 'ext_modules' option (extension '%s'), " +
                   "'sources' must be present and must be " +
                   "a list of source filenames") % ext.name
        sources = list(sources)

        # Static libs get built in the build_temp directory
        output_dir = self.build_temp
        if not os.path.exists(
                output_dir):  #VSC fails if the dir does not exist
            os.makedirs(output_dir)

        lib_filename = self.compiler.library_filename(ext.name,
                                                      lib_type='static',
                                                      output_dir=output_dir)

        depends = sources + ext.depends
        if not (self.force or newer_group(depends, lib_filename, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources, ext)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef, ))

        objects = self.compiler.compile(sources,
                                        output_dir=self.build_temp,
                                        macros=macros,
                                        include_dirs=ext.include_dirs,
                                        debug=self.debug,
                                        extra_postargs=extra_args,
                                        depends=ext.depends)

        # XXX -- this is a Vile HACK!
        #
        # The setup.py script for Python on Unix needs to be able to
        # get this list so it can perform all the clean up needed to
        # avoid keeping object files around when cleaning out a failed
        # build of an extension module.  Since Distutils does not
        # track dependencies, we have to get rid of intermediates to
        # ensure all the intermediates will be properly re-built.
        #
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        #first remove old library (ar only appends the contents if archive already exists)
        try:
            os.remove(lib_filename)
        except OSError, ex:
            log.debug("failed to remove obsolete static library %s: %s" %
                      (ext.name, str(ex)))
Exemple #43
0
    def _build_extension(self, ext):
        sources = ext.sources
        if sources is None or type(sources) not in (ListType, TupleType):
            raise DistutilsSetupError, \
                ("in 'ext_modules' option (extension '%s'), " +
                 "'sources' must be present and must be " +
                 "a list of source filenames") % ext.name
        sources = list(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources, ext)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef, ))

        # XXX debug
        #objects = []
        objects = self.compiler.compile(sources,
                                        output_dir=self.build_temp,
                                        macros=macros,
                                        include_dirs=ext.include_dirs,
                                        debug=self.debug,
                                        extra_postargs=extra_args,
                                        depends=ext.depends)

        # XXX -- this is a Vile HACK!
        #
        # The setup.py script for Python on Unix needs to be able to
        # get this list so it can perform all the clean up needed to
        # avoid keeping object files around when cleaning out a failed
        # build of an extension module.  Since Distutils does not
        # track dependencies, we have to get rid of intermediates to
        # ensure all the intermediates will be properly re-built.
        #
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        #self.compiler.link_shared_object(
        #objects, ext_path,
        #libraries=self.get_libraries(ext),
        #library_dirs=ext.library_dirs,
        #runtime_library_dirs=ext.runtime_library_dirs,
        #extra_postargs=extra_args,
        #export_symbols=self.get_export_symbols(ext),
        #debug=self.debug,
        #build_temp=self.build_temp,
        #target_lang=language)

        # XXX may I have a static lib please?
        # hmm but then I cannot load that extension, or can I?
        output_dir = os.path.sep.join(ext_path.split(os.path.sep)[:-1])

        self.compiler.create_static_lib(
            objects,
            #XXX get library name ... splitting ext_path?
            "sqlite",
            output_dir=output_dir,
            target_lang=language)
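
The hard-coded "sqlite" name above is flagged with an XXX; a hypothetical way to derive the base name from ext_path instead:

    import os

    def lib_name_from_ext_path(ext_path):
        # e.g. 'build/lib/pkg/_sqlite.cpython-39-x86_64-linux-gnu.so' -> '_sqlite'
        return os.path.basename(ext_path).split('.')[0]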
Exemple #44
0
"""distutils.command.build_ext
    def build_a_library(self, build_info, lib_name, libraries):
        # default compilers
        compiler = self.compiler
        fcompiler = self._f_compiler

        sources = build_info.get("sources")
        if sources is None or not is_sequence(sources):
            raise DistutilsSetupError(
                ("in 'libraries' option (library '%s'), " +
                 "'sources' must be present and must be " +
                 "a list of source filenames") % lib_name)
        sources = list(sources)

        c_sources, cxx_sources, f_sources, fmodule_sources = filter_sources(
            sources)
        requiref90 = not not fmodule_sources or build_info.get(
            "language", "c") == "f90"

        # save source type information so that build_ext can use it.
        source_languages = []
        if c_sources:
            source_languages.append("c")
        if cxx_sources:
            source_languages.append("c++")
        if requiref90:
            source_languages.append("f90")
        elif f_sources:
            source_languages.append("f77")
        build_info["source_languages"] = source_languages

        lib_file = compiler.library_filename(lib_name,
                                             output_dir=self.build_clib)
        depends = sources + build_info.get("depends", [])
        if not (self.force or newer_group(depends, lib_file, "newer")):
            log.debug("skipping '%s' library (up-to-date)", lib_name)
            return
        else:
            log.info("building '%s' library", lib_name)

        config_fc = build_info.get("config_fc", {})
        if fcompiler is not None and config_fc:
            log.info(
                "using additional config_fc from setup script for fortran compiler: %s"
                % (config_fc, ))
            from numpy.distutils.fcompiler import new_fcompiler

            fcompiler = new_fcompiler(
                compiler=fcompiler.compiler_type,
                verbose=self.verbose,
                dry_run=self.dry_run,
                force=self.force,
                requiref90=requiref90,
                c_compiler=self.compiler,
            )
            if fcompiler is not None:
                dist = self.distribution
                base_config_fc = dist.get_option_dict("config_fc").copy()
                base_config_fc.update(config_fc)
                fcompiler.customize(base_config_fc)

        # check availability of Fortran compilers
        if (f_sources or fmodule_sources) and fcompiler is None:
            raise DistutilsError(
                "library %s has Fortran sources but no Fortran compiler found"
                % (lib_name))

        if fcompiler is not None:
            fcompiler.extra_f77_compile_args = (
                build_info.get("extra_f77_compile_args") or [])
            fcompiler.extra_f90_compile_args = (
                build_info.get("extra_f90_compile_args") or [])

        macros = build_info.get("macros")
        include_dirs = build_info.get("include_dirs")
        if include_dirs is None:
            include_dirs = []
        extra_postargs = build_info.get("extra_compiler_args") or []

        include_dirs.extend(get_numpy_include_dirs())
        # where compiled F90 module files are:
        module_dirs = build_info.get("module_dirs") or []
        module_build_dir = os.path.dirname(lib_file)
        if requiref90:
            self.mkpath(module_build_dir)

        if compiler.compiler_type == "msvc":
            # this hack works around the msvc compiler attributes
            # problem, msvc uses its own convention :(
            c_sources += cxx_sources
            cxx_sources = []

        objects = []
        if c_sources:
            log.info("compiling C sources")
            objects = compiler.compile(
                c_sources,
                output_dir=self.build_temp,
                macros=macros,
                include_dirs=include_dirs,
                debug=self.debug,
                extra_postargs=extra_postargs,
            )

        if cxx_sources:
            log.info("compiling C++ sources")
            cxx_compiler = compiler.cxx_compiler()
            cxx_objects = cxx_compiler.compile(
                cxx_sources,
                output_dir=self.build_temp,
                macros=macros,
                include_dirs=include_dirs,
                debug=self.debug,
                extra_postargs=extra_postargs,
            )
            objects.extend(cxx_objects)

        if f_sources or fmodule_sources:
            extra_postargs = []
            f_objects = []

            if requiref90:
                if fcompiler.module_dir_switch is None:
                    existing_modules = glob("*.mod")
                extra_postargs += fcompiler.module_options(
                    module_dirs, module_build_dir)

            if fmodule_sources:
                log.info("compiling Fortran 90 module sources")
                f_objects += fcompiler.compile(
                    fmodule_sources,
                    output_dir=self.build_temp,
                    macros=macros,
                    include_dirs=include_dirs,
                    debug=self.debug,
                    extra_postargs=extra_postargs,
                )

            if requiref90 and self._f_compiler.module_dir_switch is None:
                # move new compiled F90 module files to module_build_dir
                for f in glob("*.mod"):
                    if f in existing_modules:
                        continue
                    t = os.path.join(module_build_dir, f)
                    if os.path.abspath(f) == os.path.abspath(t):
                        continue
                    if os.path.isfile(t):
                        os.remove(t)
                    try:
                        self.move_file(f, module_build_dir)
                    except DistutilsFileError:
                        log.warn("failed to move %r to %r" %
                                 (f, module_build_dir))

            if f_sources:
                log.info("compiling Fortran sources")
                f_objects += fcompiler.compile(
                    f_sources,
                    output_dir=self.build_temp,
                    macros=macros,
                    include_dirs=include_dirs,
                    debug=self.debug,
                    extra_postargs=extra_postargs,
                )
        else:
            f_objects = []

        if f_objects and not fcompiler.can_ccompiler_link(compiler):
            # Default linker cannot link Fortran object files, and results
            # need to be wrapped later. Instead of creating a real static
            # library, just keep track of the object files.
            listfn = os.path.join(self.build_clib, lib_name + ".fobjects")
            with open(listfn, "w") as f:
                f.write("\n".join(os.path.abspath(obj) for obj in f_objects))

            listfn = os.path.join(self.build_clib, lib_name + ".cobjects")
            with open(listfn, "w") as f:
                f.write("\n".join(os.path.abspath(obj) for obj in objects))

            # create empty "library" file for dependency tracking
            lib_fname = os.path.join(self.build_clib,
                                     lib_name + compiler.static_lib_extension)
            with open(lib_fname, "wb") as f:
                pass
        else:
            # assume that default linker is suitable for
            # linking Fortran object files
            objects.extend(f_objects)
            compiler.create_static_lib(objects,
                                       lib_name,
                                       output_dir=self.build_clib,
                                       debug=self.debug)

        # fix library dependencies
        clib_libraries = build_info.get("libraries", [])
        for lname, binfo in libraries:
            if lname in clib_libraries:
                clib_libraries.extend(binfo.get("libraries", []))
        if clib_libraries:
            build_info["libraries"] = clib_libraries
Exemple #46
0
    def prepare_extension(self, ext):
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                ("in 'ext_modules' option (extension '%s'), " +
                 "'sources' must be present and must be " +
                 "a list of source filenames") % ext.name)
        sources = list(sources)

        fullname = self.get_ext_fullname(ext.name)
        if self.inplace:
            # ignore build-lib -- put the compiled extension into
            # the source tree along with pure Python modules

            modpath = string.split(fullname, '.')
            package = string.join(modpath[0:-1], '.')
            base = modpath[-1]

            build_py = self.get_finalized_command('build_py')
            package_dir = build_py.get_package_dir(package)
            ext_filename = os.path.join(package_dir,
                                        self.get_ext_filename(base))
            relative_ext_filename = self.get_ext_filename(base)
        else:
            ext_filename = os.path.join(self.build_lib,
                                        self.get_ext_filename(fullname))
            relative_ext_filename = self.get_ext_filename(fullname)

        # while dispatching the calls to gcc in parallel, we sometimes
        # hit a race condition where two separate build_ext objects
        # try to create a given directory at the same time; whoever
        # loses the race then seems to throw an error, saying that
        # the directory already exists. so, instead of fighting to
        # fix the race condition, we simply make sure the entire
        # directory tree exists now, while we're processing the
        # extensions in serial.
        relative_ext_dir = os.path.split(relative_ext_filename)[0]
        prefixes = ['', self.build_lib, self.build_temp]
        for prefix in prefixes:
            path = os.path.join(prefix, relative_ext_dir)
            try:
                os.makedirs(path)
            except OSError as e:
                assert e.errno == errno.EEXIST, 'Cannot create %s.' % path
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_filename, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            need_to_compile = False
        elif getattr(ext, "skip_build", False):
            log.debug("skipping '%s' extension (optional)", ext.name)
            need_to_compile = False
        else:
            log.info("building '%s' extension", ext.name)
            need_to_compile = True

        # If we need to compile, adjust the given extension
        if need_to_compile:
            libs = ext.libraries
            if ext.language == 'c++' and 'stdc++' not in libs:
                libs = libs + ['stdc++']

            # Sort libraries according to library_order
            ext.libraries = sorted(libs, key=lambda x: library_order.get(x, 0))

        return need_to_compile, (sources, ext, ext_filename)
    def build_extension(self, ext):
        sources = ext.sources
        if sources is None or not is_sequence(sources):
            raise DistutilsSetupError(
                ("in 'ext_modules' option (extension '%s'), " +
                 "'sources' must be present and must be " +
                 "a list of source filenames") % ext.name)
        sources = list(sources)

        if not sources:
            return

        fullname = self.get_ext_fullname(ext.name)
        if self.inplace:
            modpath = fullname.split('.')
            package = '.'.join(modpath[0:-1])
            base = modpath[-1]
            build_py = self.get_finalized_command('build_py')
            package_dir = build_py.get_package_dir(package)
            ext_filename = os.path.join(package_dir,
                                        self.get_ext_filename(base))
        else:
            ext_filename = os.path.join(self.build_lib,
                                        self.get_ext_filename(fullname))
        depends = sources + ext.depends

        force_rebuild = self.force
        if not self.disable_optimization and not self.compiler_opt.is_cached():
            log.debug("Detected changes on compiler optimizations")
            force_rebuild = True
        if not (force_rebuild or newer_group(depends, ext_filename, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        extra_args = ext.extra_compile_args or []
        extra_cflags = getattr(ext, 'extra_c_compile_args', None) or []
        extra_cxxflags = getattr(ext, 'extra_cxx_compile_args', None) or []

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef, ))

        c_sources, cxx_sources, f_sources, fmodule_sources = \
            filter_sources(ext.sources)

        if self.compiler.compiler_type == 'msvc':
            if cxx_sources:
                # Needed to compile kiva.agg._agg extension.
                extra_args.append('/Zm1000')
            # this hack works around the msvc compiler attributes
            # problem, msvc uses its own convention :(
            c_sources += cxx_sources
            cxx_sources = []

        # Set Fortran/C++ compilers for compilation and linking.
        if ext.language == 'f90':
            fcompiler = self._f90_compiler
        elif ext.language == 'f77':
            fcompiler = self._f77_compiler
        else:  # in case ext.language is c++, for instance
            fcompiler = self._f90_compiler or self._f77_compiler
        if fcompiler is not None:
            fcompiler.extra_f77_compile_args = (
                ext.extra_f77_compile_args or []) if hasattr(
                    ext, 'extra_f77_compile_args') else []
            fcompiler.extra_f90_compile_args = (
                ext.extra_f90_compile_args or []) if hasattr(
                    ext, 'extra_f90_compile_args') else []
        cxx_compiler = self._cxx_compiler

        # check for the availability of required compilers
        if cxx_sources and cxx_compiler is None:
            raise DistutilsError("extension %r has C++ sources"
                                 "but no C++ compiler found" % (ext.name))
        if (f_sources or fmodule_sources) and fcompiler is None:
            raise DistutilsError("extension %r has Fortran sources "
                                 "but no Fortran compiler found" % (ext.name))
        if ext.language in ['f77', 'f90'] and fcompiler is None:
            self.warn("extension %r has Fortran libraries "
                      "but no Fortran linker found, using default linker" %
                      (ext.name))
        if ext.language == 'c++' and cxx_compiler is None:
            self.warn("extension %r has C++ libraries "
                      "but no C++ linker found, using default linker" %
                      (ext.name))

        kws = {'depends': ext.depends}
        output_dir = self.build_temp

        include_dirs = ext.include_dirs + get_numpy_include_dirs()

        # Filter out C dispatch-table sources when optimization is enabled;
        # otherwise they are treated as normal sources.
        copt_c_sources = []
        copt_cxx_sources = []
        copt_baseline_flags = []
        copt_macros = []
        if not self.disable_optimization:
            bsrc_dir = self.get_finalized_command("build_src").build_src
            dispatch_hpath = os.path.join("numpy", "distutils", "include")
            dispatch_hpath = os.path.join(bsrc_dir, dispatch_hpath)
            include_dirs.append(dispatch_hpath)

            copt_build_src = None if self.inplace else bsrc_dir
            for _srcs, _dst, _ext in (((c_sources, ), copt_c_sources,
                                       ('.dispatch.c', )),
                                      ((c_sources, cxx_sources),
                                       copt_cxx_sources, ('.dispatch.cpp',
                                                          '.dispatch.cxx'))):
                for _src in _srcs:
                    _dst += [
                        _src.pop(_src.index(s)) for s in _src[:]
                        if s.endswith(_ext)
                    ]
            copt_baseline_flags = self.compiler_opt.cpu_baseline_flags()
        else:
            copt_macros.append(("NPY_DISABLE_OPTIMIZATION", 1))

        c_objects = []
        if copt_cxx_sources:
            log.info("compiling C++ dispatch-able sources")
            c_objects += self.compiler_opt.try_dispatch(
                copt_cxx_sources,
                output_dir=output_dir,
                src_dir=copt_build_src,
                macros=macros + copt_macros,
                include_dirs=include_dirs,
                debug=self.debug,
                extra_postargs=extra_args + extra_cxxflags,
                ccompiler=cxx_compiler,
                **kws)
        if copt_c_sources:
            log.info("compiling C dispatch-able sources")
            c_objects += self.compiler_opt.try_dispatch(
                copt_c_sources,
                output_dir=output_dir,
                src_dir=copt_build_src,
                macros=macros + copt_macros,
                include_dirs=include_dirs,
                debug=self.debug,
                extra_postargs=extra_args + extra_cflags,
                **kws)
        if c_sources:
            log.info("compiling C sources")
            c_objects += self.compiler.compile(
                c_sources,
                output_dir=output_dir,
                macros=macros + copt_macros,
                include_dirs=include_dirs,
                debug=self.debug,
                extra_postargs=(extra_args + copt_baseline_flags +
                                extra_cflags),
                **kws)
        if cxx_sources:
            log.info("compiling C++ sources")
            c_objects += cxx_compiler.compile(
                cxx_sources,
                output_dir=output_dir,
                macros=macros + copt_macros,
                include_dirs=include_dirs,
                debug=self.debug,
                extra_postargs=(extra_args + copt_baseline_flags +
                                extra_cxxflags),
                **kws)

        extra_postargs = []
        f_objects = []
        if fmodule_sources:
            log.info("compiling Fortran 90 module sources")
            module_dirs = ext.module_dirs[:]
            module_build_dir = os.path.join(
                self.build_temp,
                os.path.dirname(self.get_ext_filename(fullname)))

            self.mkpath(module_build_dir)
            if fcompiler.module_dir_switch is None:
                existing_modules = glob('*.mod')
            extra_postargs += fcompiler.module_options(module_dirs,
                                                       module_build_dir)
            f_objects += fcompiler.compile(fmodule_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs,
                                           depends=ext.depends)

            if fcompiler.module_dir_switch is None:
                for f in glob('*.mod'):
                    if f in existing_modules:
                        continue
                    t = os.path.join(module_build_dir, f)
                    if os.path.abspath(f) == os.path.abspath(t):
                        continue
                    if os.path.isfile(t):
                        os.remove(t)
                    try:
                        self.move_file(f, module_build_dir)
                    except DistutilsFileError:
                        log.warn('failed to move %r to %r' %
                                 (f, module_build_dir))
        if f_sources:
            log.info("compiling Fortran sources")
            f_objects += fcompiler.compile(f_sources,
                                           output_dir=self.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=self.debug,
                                           extra_postargs=extra_postargs,
                                           depends=ext.depends)

        if f_objects and not fcompiler.can_ccompiler_link(self.compiler):
            unlinkable_fobjects = f_objects
            objects = c_objects
        else:
            unlinkable_fobjects = []
            objects = c_objects + f_objects

        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []
        libraries = self.get_libraries(ext)[:]
        library_dirs = ext.library_dirs[:]

        linker = self.compiler.link_shared_object
        # Always use system linker when using MSVC compiler.
        if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'):
            # expand libraries with fcompiler libraries as we are
            # not using fcompiler linker
            self._libs_with_msvc_and_fortran(fcompiler, libraries,
                                             library_dirs)

        elif ext.language in ['f77', 'f90'] and fcompiler is not None:
            linker = fcompiler.link_shared_object
        if ext.language == 'c++' and cxx_compiler is not None:
            linker = cxx_compiler.link_shared_object

        if fcompiler is not None:
            objects, libraries = self._process_unlinkable_fobjects(
                objects, libraries, fcompiler, library_dirs,
                unlinkable_fobjects)

        linker(objects,
               ext_filename,
               libraries=libraries,
               library_dirs=library_dirs,
               runtime_library_dirs=ext.runtime_library_dirs,
               extra_postargs=extra_args,
               export_symbols=self.get_export_symbols(ext),
               debug=self.debug,
               build_temp=self.build_temp,
               target_lang=ext.language)
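
On Python 3 the race-tolerant directory creation in prepare_extension() above can be written without catching EEXIST; a minimal sketch:

    import os

    def ensure_dirs(*paths):
        # exist_ok=True makes the mkdir race between parallel builds harmless.
        for path in paths:
            if path:
                os.makedirs(path, exist_ok=True)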
Exemple #48
0
def build_extension(self, ext):
    """Modified version of build_extension method from distutils.
       Can handle compiler args for different files"""

    sources = ext.sources
    if sources is None or not isinstance(sources, (list, tuple)):
        raise DistutilsSetupError(
              "in 'ext_modules' option (extension '%s'), "
              "'sources' must be present and must be "
              "a list of source filenames" % ext.name)

    sources = list(sources)
    ext_path = self.get_ext_fullpath(ext.name)
    depends = sources + ext.depends
    if not (self.force or newer_group(depends, ext_path, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    sources = self.swig_sources(sources, ext)

    extra_args = ext.extra_compile_args or []
    extra_c_args = getattr(ext, "extra_compile_c_args", [])
    extra_cpp_args = getattr(ext, "extra_compile_cpp_args", [])
    extra_objc_args = getattr(ext, "extra_compile_objc_args", [])
    macros = ext.define_macros[:]
    for undef in ext.undef_macros:
        macros.append((undef,))

    c_sources, cpp_sources, objc_sources, other_sources = filter_sources(sources)

    def _compile(src, args):
        return self.compiler.compile(src,
                                     output_dir=self.build_temp,
                                     macros=macros,
                                     include_dirs=ext.include_dirs,
                                     debug=self.debug,
                                     extra_postargs=extra_args + args,
                                     depends=ext.depends)

    objects = []
    objects += _compile(c_sources, extra_c_args)
    objects += _compile(cpp_sources, extra_cpp_args)
    objects += _compile(objc_sources, extra_objc_args)
    objects += _compile(other_sources, [])

    self._built_objects = objects[:]
    if ext.extra_objects:
        objects.extend(ext.extra_objects)

    extra_args = ext.extra_link_args or []

    language = ext.language or self.compiler.detect_language(sources)
    self.compiler.link_shared_object(
        objects, ext_path,
        libraries=self.get_libraries(ext),
        library_dirs=ext.library_dirs,
        runtime_library_dirs=ext.runtime_library_dirs,
        extra_postargs=extra_args,
        export_symbols=self.get_export_symbols(ext),
        debug=self.debug,
        build_temp=self.build_temp,
        target_lang=language)
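
filter_sources() here is a project helper (note it differs from the numpy.distutils function of the same name used in earlier examples); a plausible stand-in with the four-way split this code expects:

    import os

    def filter_sources(sources):
        # Hypothetical: split sources into C, C++, Objective-C and the rest.
        c, cpp, objc, other = [], [], [], []
        for src in sources:
            ext = os.path.splitext(src)[1].lower()
            if ext == '.c':
                c.append(src)
            elif ext in ('.cpp', '.cxx', '.cc'):
                cpp.append(src)
            elif ext == '.m':
                objc.append(src)
            else:
                other.append(src)
        return c, cpp, objc, other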
Exemple #49
0
    def func(extension, build_dir):

        # first ensure libraries are up to date
        for (dir, target) in targets:
            command = "cd %s && make %s" % (dir, target)
            print 'Rebuilding target %s with command "%s"' % (target, command)
            if os.system(command) != 0:
                raise SystemExit('Command "%s" failed' % command)

        # Have any of wrap_sources changed since we last scanned source files?
        f90doc_file = os.path.join(build_dir, '../../%s.f90doc' % modname)
        if newer_group(wrap_sources, f90doc_file):

            # Rebuild .f90doc file containing interfaces of wrapped routines
            tmp_wrap_sources = []
            cpp_opt = ' '.join(gen_preprocess_options(macros, include_dirs))
            for src in wrap_sources:
                tmp_file = os.path.join(build_dir.replace('src', 'temp'),
                                        os.path.basename(src))
                if not os.path.exists(os.path.dirname(tmp_file)):
                    os.makedirs(os.path.dirname(tmp_file))
                command = "%s %s %s | grep -v '^#' > %s" % (
                    ' '.join(cpp), cpp_opt, src, tmp_file)
                print 'Executing command %s' % command
                os.system(command)
                if os.path.exists(src[:-4] + '.s'): os.remove(src[:-4] + '.s')
                tmp_wrap_sources.append(tmp_file)

            programs, modules, functs, subts = f90doc.read_files(
                tmp_wrap_sources)
            cPickle.dump((programs, modules, functs, subts),
                         open(f90doc_file, 'w'))
        else:
            # Read previous .f90doc file
            (programs, modules, functs,
             subts) = cPickle.load(open(f90doc_file))

        # Update map from type names to module in which they are defined
        for mod, name in modules:
            for n in [t.name for t in mod.types]:
                type_map[n.lower()] = mod.name

        for item in dep_type_maps:
            if hasattr(item, '__getitem__') and hasattr(item,
                                                        'keys'):  # dictionary
                type_map.update(item)
            else:  # assume it's a string
                type_map.update(cPickle.load(open('%s.type' % item)))

        # Try to load previous .spec file
        res = []
        fortran_spec = {}
        spec_file = os.path.join(build_dir, '../../%s.spec' % modname)
        if os.path.exists(spec_file):
            fortran_spec = cPickle.load(open(spec_file))

        # Write new wrapper files and update .spec file
        wrap_modules = []
        for file in wrap_sources:

            for mod, name in modules:
                if os.path.basename(name) == os.path.basename(file):
                    break
            else:
                raise ValueError(
                    "Can't find Fortran module corresponding to file %s" %
                    file)

            wrap_mod_name = mod.name.lower()[:-7]
            rel_filename = file[len(quip_root):]
            if rel_filename.startswith('/'):
                rel_filename = rel_filename[1:]
            wrap_modules.append((wrap_mod_name, rel_filename))

            wrapper = '%s/%s_%s_wrap.f90' % (build_dir, modname, wrap_mod_name)

            if not newer(name, wrapper):
                res.append(wrapper)
                continue

            public_symbols = f2py_wrapper_gen.find_public_symbols(file)

            print 'public_symbols = ', public_symbols

            tmpf = StringIO.StringIO()
            new_spec = f2py_wrapper_gen.wrap_mod(
                mod,
                type_map,
                tmpf,
                kindlines=kindlines,
                initlines=initlines,
                filtertypes=filtertypes,
                prefix=prefix,
                callback_routines=callback_routines,
                public_symbols=public_symbols,
                sizeof_fortran_t=sizeof_fortran_t)

            if (not os.path.exists(wrapper)
                    or new_spec[wrap_mod_name] != fortran_spec.get(
                        wrap_mod_name, None)):
                #                (not f2py_wrapper_gen.cmp_nested_dict(new_spec[wrap_mod_name],
                #                                       fortran_spec.get(wrap_mod_name, None)))):
                print 'Interface for module %s has changed. Rewriting wrapper file' % mod.name
                wrapperf = open(wrapper, 'w')
                wrapperf.write(tmpf.getvalue())
                wrapperf.close()
            else:
                print 'Interface for module %s unchanged' % mod.name

            fortran_spec.update(new_spec)
            tmpf.close()
            res.append(wrapper)

        fortran_spec['wrap_modules'] = wrap_modules
        fortran_spec['short_names'] = short_names
        fortran_spec['quip_root'] = quip_root
        fortran_spec['quip_arch'] = quip_arch
        fortran_spec['quip_makefile'] = makefile
        cPickle.dump(
            fortran_spec,
            open(os.path.join(build_dir, '../../%s.spec' % modname), 'w'))

        import pprint
        spec_py_name = '%s/spec.py' % build_dir
        spec_py = open(spec_py_name, 'w')
        spec_py.write('spec = %s\n' % pprint.pformat(fortran_spec))
        spec_py.close()
        res.append(spec_py_name)

        return res
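
The f90doc step above also shows the second recurring use of newer_group(): caching the result of an expensive scan and redoing it only when an input file changed. A condensed sketch of that idiom, with hypothetical names (load_or_rescan, scan):

import pickle
from distutils.dep_util import newer_group

def load_or_rescan(sources, cache_file, scan):
    """Return scan(sources), reusing the pickled result until a source changes."""
    if newer_group(sources, cache_file, missing='newer'):
        result = scan(sources)
        with open(cache_file, 'wb') as f:
            pickle.dump(result, f)
        return result
    with open(cache_file, 'rb') as f:
        return pickle.load(f)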
Example #50
def build_extension(self, ext):
    """Modified version of build_extension method from distutils.
       Can handle compiler args for different files"""

    sources = ext.sources
    if sources is None or not isinstance(sources, (list, tuple)):
        raise DistutilsSetupError(
              "in 'ext_modules' option (extension '%s'), "
              "'sources' must be present and must be "
              "a list of source filenames" % ext.name)

    sources = list(sources)
    ext_path = self.get_ext_fullpath(ext.name)
    depends = sources + ext.depends
    if not (self.force or newer_group(depends, ext_path, 'newer')):
        log.debug("skipping '%s' extension (up-to-date)", ext.name)
        return
    else:
        log.info("building '%s' extension", ext.name)

    sources = self.swig_sources(sources, ext)

    extra_args = ext.extra_compile_args or []
    extra_c_args = getattr(ext, "extra_compile_c_args", [])
    extra_cpp_args = getattr(ext, "extra_compile_cpp_args", [])
    extra_objc_args = getattr(ext, "extra_compile_objc_args", [])
    extra_asm_args = getattr(ext, "extra_compile_asm_args", [])
    file_specific_definitions = getattr(ext, "file_specific_definitions", {})
    asm_include = getattr(ext, "asm_include", [])

    macros = ext.define_macros[:]
    for undef in ext.undef_macros:
        macros.append((undef,))

    c_sources, cpp_sources, objc_sources, asm_sources, other_sources = filter_sources(sources)

    self.compiler.src_extensions += ['.asm']

    self.compiler.set_executable('assembler', ['nasm'])

    def _compile(src, args):
        obj = []
        for s in src:
            additional_macros = []
            if s in file_specific_definitions.keys():
                additional_macros += file_specific_definitions[s]
            obj += self.compiler.compile([s],
                                         output_dir=self.build_temp,
                                         macros=macros + additional_macros,
                                         include_dirs=ext.include_dirs,
                                         debug=self.debug,
                                         extra_postargs=extra_args + args,
                                         depends=ext.depends)
        return obj

    def _compile_asm(src):
        obj = []
        for s in src:
            additional_macros = []
            if s in file_specific_definitions.keys():
                additional_macros += file_specific_definitions[s]
            macros_, objects, extra_postargs, asm_args, build = \
                self.compiler._setup_compile(self.build_temp, macros + additional_macros, asm_include, [s],
                                             depends, extra_asm_args)

            for o in objects:
                try:
                    asm_src, _ = build[o]
                except KeyError:
                    continue
                try:
                    self.spawn(self.compiler.assembler + extra_postargs + asm_args + ['-o', o, asm_src])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
            obj += objects

        return obj

    objects = []
    objects += _compile_asm(asm_sources)
    objects += _compile(c_sources, extra_c_args)
    objects += _compile(cpp_sources, extra_cpp_args)
    objects += _compile(objc_sources, extra_objc_args)
    objects += _compile(other_sources, [])

    self._built_objects = objects[:]
    if ext.extra_objects:
        objects.extend(ext.extra_objects)

    extra_args = ext.extra_link_args or []

    language = ext.language or self.compiler.detect_language(sources)
    self.compiler.link_shared_object(
        objects, ext_path,
        libraries=self.get_libraries(ext),
        library_dirs=ext.library_dirs,
        runtime_library_dirs=ext.runtime_library_dirs,
        extra_postargs=extra_args,
        export_symbols=self.get_export_symbols(ext),
        debug=self.debug,
        build_temp=self.build_temp,
        target_lang=language)
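
This variant relies on a filter_sources() helper that is not shown in the excerpt. A plausible sketch, offered as an assumption rather than the project's actual implementation, matching the five-way unpacking used above:

import os

def filter_sources(sources):
    """Split 'sources' into (c, cpp, objc, asm, other) lists by file extension."""
    buckets = {'c': [], 'cpp': [], 'objc': [], 'asm': [], 'other': []}
    ext_map = {'.c': 'c', '.cc': 'cpp', '.cpp': 'cpp', '.cxx': 'cpp',
               '.m': 'objc', '.mm': 'objc', '.asm': 'asm', '.s': 'asm'}
    for src in sources:
        ext = os.path.splitext(src)[1].lower()
        buckets[ext_map.get(ext, 'other')].append(src)
    return (buckets['c'], buckets['cpp'], buckets['objc'],
            buckets['asm'], buckets['other'])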
Example #51
    def swig_sources(self, sources, extension):
        # Assuming SWIG 1.3.14 or later. See compatibility note in
        #   http://www.swig.org/Doc1.3/Python.html#Python_nn6

        new_sources = []
        swig_sources = []
        swig_targets = {}
        target_dirs = []
        py_files = []  # swig generated .py files
        target_ext = '.c'
        if '-c++' in extension.swig_opts:
            typ = 'c++'
            is_cpp = True
            extension.swig_opts.remove('-c++')
        elif self.swig_cpp:
            typ = 'c++'
            is_cpp = True
        else:
            typ = None
            is_cpp = False
        skip_swig = 0
        ext_name = extension.name.split('.')[-1]

        for source in sources:
            (base, ext) = os.path.splitext(source)
            if ext == '.i':  # SWIG interface file
                # the code below assumes that the sources list
                # contains not more than one .i SWIG interface file
                if self.inplace:
                    target_dir = os.path.dirname(base)
                    py_target_dir = self.ext_target_dir
                else:
                    target_dir = appendpath(self.build_src,
                                            os.path.dirname(base))
                    py_target_dir = target_dir
                if os.path.isfile(source):
                    name = get_swig_modulename(source)
                    if name != ext_name[1:]:
                        raise DistutilsSetupError(
                            'mismatch of extension names: %s provides %r'
                            ' but expected %r' % (source, name, ext_name[1:]))
                    if typ is None:
                        typ = get_swig_target(source)
                        is_cpp = typ == 'c++'
                    else:
                        typ2 = get_swig_target(source)
                        if typ2 is None:
                            log.warn('source %r does not define swig target, assuming %s swig target' \
                                     % (source, typ))
                        elif typ != typ2:
                            log.warn('expected %r but source %r defines %r swig target' \
                                     % (typ, source, typ2))
                            if typ2 == 'c++':
                                log.warn(
                                    'resetting swig target to c++ (some targets may have .c extension)'
                                )
                                is_cpp = True
                            else:
                                log.warn(
                                    'assuming that %r has c++ swig target' %
                                    (source))
                    if is_cpp:
                        target_ext = '.cpp'
                    target_file = os.path.join(target_dir, '%s_wrap%s' \
                                               % (name, target_ext))
                else:
                    log.warn('  source %s does not exist: skipping swig\'ing.' \
                             % (source))
                    name = ext_name[1:]
                    skip_swig = 1
                    target_file = _find_swig_target(target_dir, name)
                    if not os.path.isfile(target_file):
                        log.warn('  target %s does not exist:\n   '\
                                 'Assuming %s_wrap.{c,cpp} was generated with '\
                                 '"build_src --inplace" command.' \
                                 % (target_file, name))
                        target_dir = os.path.dirname(base)
                        target_file = _find_swig_target(target_dir, name)
                        if not os.path.isfile(target_file):
                            raise DistutilsSetupError("%r missing" %
                                                      (target_file, ))
                        log.warn('   Yes! Using %r as up-to-date target.' \
                                 % (target_file))
                target_dirs.append(target_dir)
                new_sources.append(target_file)
                py_files.append(os.path.join(py_target_dir, name + '.py'))
                swig_sources.append(source)
                swig_targets[source] = new_sources[-1]
            else:
                new_sources.append(source)

        if not swig_sources:
            return new_sources

        if skip_swig:
            return new_sources + py_files

        for d in target_dirs:
            self.mkpath(d)

        swig = self.swig or self.find_swig()
        swig_cmd = [swig, "-python"] + extension.swig_opts
        if is_cpp:
            swig_cmd.append('-c++')
        for d in extension.include_dirs:
            swig_cmd.append('-I' + d)
        for source in swig_sources:
            target = swig_targets[source]
            depends = [source] + extension.depends
            if self.force or newer_group(depends, target, 'newer'):
                log.info("%s: %s" % (os.path.basename(swig) \
                                     + (is_cpp and '++' or ''), source))
                self.spawn(swig_cmd + self.swig_opts \
                           + ["-o", target, '-outdir', py_target_dir, source])
            else:
                log.debug("  skipping '%s' swig interface (up-to-date)" \
                         % (source))

        return new_sources + py_files
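
Outside a command class, the same staleness check can guard a plain SWIG run. A minimal sketch assuming swig is on PATH; the file names and the maybe_swig helper are hypothetical:

import subprocess
from distutils.dep_util import newer_group

def maybe_swig(interface, wrapper, depends=(), cpp=False):
    """Regenerate 'wrapper' from 'interface' only when it is out of date."""
    if not newer_group([interface] + list(depends), wrapper, missing='newer'):
        return False
    cmd = ['swig', '-python']
    if cpp:
        cmd.append('-c++')
    cmd += ['-o', wrapper, interface]
    subprocess.check_call(cmd)
    return True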
Example #52
    def build_extension(self, ext):
        if ext.sources is None or not isinstance(ext.sources, (list, tuple)):
            raise errors.DistutilsSetupError(
                "in 'ext_modules' option (extension '%s'), "
                "'sources' must be present and must be "
                "a list of source filenames" % ext.name)

        ext_path = self.get_ext_fullpath(ext.name)
        depends = ext.sources + ext.depends
        if not (self.force or dep_util.newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        c_sources = []
        cxx_sources = []
        for source in ext.sources:
            if source.endswith('.c'):
                c_sources.append(source)
            else:
                cxx_sources.append(source)
        extra_args = ext.extra_compile_args or []

        objects = []
        for lang, sources in (('c', c_sources), ('c++', cxx_sources)):
            if lang == 'c++':
                if self.compiler.compiler_type == 'msvc':
                    extra_args.append('/EHsc')

            macros = ext.define_macros[:]
            if platform.system() == 'Darwin':
                macros.append(('OS_MACOSX', '1'))
            elif self.compiler.compiler_type == 'mingw32':
                # On Windows Python 2.7, pyconfig.h defines "hypot" as "_hypot",
                # This clashes with GCC's cmath, and causes compilation errors when
                # building under MinGW: http://bugs.python.org/issue11566
                macros.append(('_hypot', 'hypot'))
            for undef in ext.undef_macros:
                macros.append((undef,))

            objs = self.compiler.compile(
                sources,
                output_dir=self.build_temp,
                macros=macros,
                include_dirs=ext.include_dirs,
                debug=self.debug,
                extra_postargs=extra_args,
                depends=ext.depends)
            objects.extend(objs)

        self._built_objects = objects[:]
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []
        # when using GCC on Windows, we statically link libgcc and libstdc++,
        # so that we don't need to package extra DLLs
        if self.compiler.compiler_type == 'mingw32':
            extra_args.extend(['-static-libgcc', '-static-libstdc++'])

        ext_path = self.get_ext_fullpath(ext.name)
        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        self.compiler.link_shared_object(
            objects,
            ext_path,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language)
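
When the toolchain is known in advance, the MinGW-specific pieces above can instead be declared on the Extension itself; a hedged sketch with hypothetical module and source names. The subclass exists precisely because these flags must normally be chosen per compiler at build time:

from distutils.core import Extension

ext = Extension(
    'mypkg._native',                      # hypothetical extension name
    sources=['src/native.cpp'],           # hypothetical source file
    define_macros=[('_hypot', 'hypot')],  # MinGW pyconfig.h 'hypot' clash
    extra_link_args=['-static-libgcc', '-static-libstdc++'],
)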
Example #53
    def build_extension(self, ext):
        sources = ext.sources
        if sources is None or type(sources) not in (list, tuple):
            raise DistutilsSetupError(
                ("in 'ext_modules' option (extension '%s'), " +
                 "'sources' must be present and must be " +
                 "a list of source filenames") % ext.name)
        sources = list(sources)

        fullname = self.get_ext_fullname(ext.name)
        if self.inplace:
            # ignore build-lib -- put the compiled extension into
            # the source tree along with pure Python modules

            modpath = string.split(fullname, '.')
            package = string.join(modpath[0:-1], '.')
            base = modpath[-1]

            build_py = self.get_finalized_command('build_py')
            package_dir = build_py.get_package_dir(package)
            ext_filename = os.path.join(package_dir,
                                        self.get_ext_filename(base))
        else:
            ext_filename = os.path.join(self.build_lib,
                                        self.get_ext_filename(fullname))
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_filename, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources, ext)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        # Insert stop at preprocessor output
        if re.compile("^linux").match(sys.platform):
            extra_args[0:0] = ['-E']
            spawn(['rm', '-rf', self.build_temp])

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef, ))

        #log.info("JUST ABOUT TO COMPILE (V2)...\n")
        objects = self.compiler.compile(sources,
                                        output_dir=self.build_temp,
                                        macros=macros,
                                        include_dirs=ext.include_dirs,
                                        debug=self.debug,
                                        extra_postargs=extra_args,
                                        depends=ext.depends)

        if re.compile("^linux").match(sys.platform):
            log.info("DONE PREPROCESSOR COMPILE...\n")
            log.info("objects: " + repr(objects) + "\n")
            for obj in objects:
                spawn(['ls', '-latr', obj])
                res = re.compile("^(.*?)\.o$").search(obj)
                obji = res.group(1) + ".i"
                objii = res.group(1) + ".ii"
                log.info("target: " + objii + "\n")
                spawn(['rm', '-f', objii])
                spawn(['mv', '-f', obj, objii])
                spawn(['ls', '-latr', objii])
                spawn(['rm', '-f', obji])
                spawn(['perl', './tools/u16lit.pl', '-le', objii])
                spawn(['ls', '-latr', obji])
                cmdargs = []
                for cargs in self.compiler.compiler:
                    cmdargs.append(cargs)
                for incdir in self.include_dirs:
                    cmdargs.append("-I" + incdir)
                for incdir in ext.include_dirs:
                    cmdargs.append("-I" + incdir)
                for cflg in extra_args:
                    if cflg != "-E":
                        cmdargs.append(cflg)
                cmdargs.append('-c')
                cmdargs.append(obji)
                cmdargs.append('-o')
                cmdargs.append(obj)
                log.info("cmdargs: " + repr(cmdargs) + "\n")
                spawn(['rm', '-f', obj])
                spawn(cmdargs)

        # XXX -- this is a Vile HACK!
        #
        # The setup.py script for Python on Unix needs to be able to
        # get this list so it can perform all the clean up needed to
        # avoid keeping object files around when cleaning out a failed
        # build of an extension module.  Since Distutils does not
        # track dependencies, we have to get rid of intermediates to
        # ensure all the intermediates will be properly re-built.
        #
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        self.compiler.link_shared_object(
            objects,
            ext_filename,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language)
Example #54
    def build_interp(self, ext):
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                  "in 'interpreters' option (extension '%s'), "
                  "'sources' must be present and must be "
                  "a list of source filenames" % ext.name)
        sources = list(sources)

        ext_path = self.get_ext_fullpath(ext.name)

        if ext.target_desc == "executable":
            ext_path += ".exe"
        else:
            ext_path += ".dll"

        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # First, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef,))

        objects = self.compiler.compile(sources,
                                         output_dir=self.build_temp,
                                         macros=macros,
                                         include_dirs=ext.include_dirs,
                                         debug=self.debug,
                                         extra_postargs=extra_args,
                                         depends=ext.depends)

        # XXX -- this is a Vile HACK!
        #
        # The setup.py script for Python on Unix needs to be able to
        # get this list so it can perform all the clean up needed to
        # avoid keeping object files around when cleaning out a failed
        # build of an extension module.  Since Distutils does not
        # track dependencies, we have to get rid of intermediates to
        # ensure all the intermediates will be properly re-built.
        #
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
##        language = ext.language or self.compiler.detect_language(sources)

        ## self.compiler.link_shared_object(
        ##     objects, ext_path,
        ##     libraries=self.get_libraries(ext),
        ##     library_dirs=ext.library_dirs,
        ##     runtime_library_dirs=ext.runtime_library_dirs,
        ##     extra_postargs=extra_args,
        ##     export_symbols=self.get_export_symbols(ext),
        ##     debug=self.debug,
        ##     build_temp=self.build_temp,
        ##     target_lang=language)

        # Hm, for Python 3.5 to link a shared library (instead of exe
        # or pyd) we need to add /DLL to the linker arguments.
        # Currently this is done in the setup script; should we do it
        # here?

        self.compiler.link(ext.target_desc,
                           objects, ext_path,
                           libraries=self.get_libraries(ext),
                           library_dirs=ext.library_dirs,
                           runtime_library_dirs=ext.runtime_library_dirs,
                           export_symbols=ext.export_symbols,
                           extra_postargs=extra_args,
                           debug=self.debug)
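
On the /DLL remark in the comments above, one hedged way to keep that choice next to the link call is a tiny helper (the name msvc_dll_link_args is ours) that derives the extra MSVC switch from the target description:

def msvc_dll_link_args(target_desc):
    """Extra MSVC linker switches for the given target kind."""
    # link.exe produces a DLL (plus import library) only when /DLL is passed;
    # executables need no extra switch.
    return [] if target_desc == "executable" else ['/DLL']

The result could then be appended to extra_args before self.compiler.link(), assuming an MSVC toolchain.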
Example #55
    def build_a_library(self, build_info, lib_name, libraries):
        # default compilers
        compiler = self.compiler
        fcompiler = self._f_compiler

        sources = build_info.get('sources')
        if sources is None or not is_sequence(sources):
            raise DistutilsSetupError(
                ("in 'libraries' option (library '%s'), " +
                 "'sources' must be present and must be " +
                 "a list of source filenames") % lib_name)
        sources = list(sources)

        c_sources, cxx_sources, f_sources, fmodule_sources \
            = filter_sources(sources)
        requiref90 = not not fmodule_sources or \
            build_info.get('language', 'c') == 'f90'

        # save source type information so that build_ext can use it.
        source_languages = []
        if c_sources:
            source_languages.append('c')
        if cxx_sources:
            source_languages.append('c++')
        if requiref90:
            source_languages.append('f90')
        elif f_sources:
            source_languages.append('f77')
        build_info['source_languages'] = source_languages

        lib_file = compiler.library_filename(lib_name,
                                             output_dir=self.build_clib)
        depends = sources + build_info.get('depends', [])
        if not (self.force or newer_group(depends, lib_file, 'newer')):
            log.debug("skipping '%s' library (up-to-date)", lib_name)
            return
        else:
            log.info("building '%s' library", lib_name)

        config_fc = build_info.get('config_fc', {})
        if fcompiler is not None and config_fc:
            log.info('using additional config_fc from setup script '
                     'for fortran compiler: %s' % (config_fc, ))
            from numpy.distutils.fcompiler import new_fcompiler
            fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
                                      verbose=self.verbose,
                                      dry_run=self.dry_run,
                                      force=self.force,
                                      requiref90=requiref90,
                                      c_compiler=self.compiler)
            if fcompiler is not None:
                dist = self.distribution
                base_config_fc = dist.get_option_dict('config_fc').copy()
                base_config_fc.update(config_fc)
                fcompiler.customize(base_config_fc)

        # check availability of Fortran compilers
        if (f_sources or fmodule_sources) and fcompiler is None:
            raise DistutilsError("library %s has Fortran sources"
                                 " but no Fortran compiler found" % (lib_name))

        if fcompiler is not None:
            fcompiler.extra_f77_compile_args = build_info.get(
                'extra_f77_compile_args') or []
            fcompiler.extra_f90_compile_args = build_info.get(
                'extra_f90_compile_args') or []

        macros = build_info.get('macros')
        if macros is None:
            macros = []
        include_dirs = build_info.get('include_dirs')
        if include_dirs is None:
            include_dirs = []
        extra_postargs = build_info.get('extra_compiler_args') or []

        include_dirs.extend(get_numpy_include_dirs())
        # where compiled F90 module files are:
        module_dirs = build_info.get('module_dirs') or []
        module_build_dir = os.path.dirname(lib_file)
        if requiref90:
            self.mkpath(module_build_dir)

        if compiler.compiler_type == 'msvc':
            # this hack works around the msvc compiler attributes
            # problem, msvc uses its own convention :(
            c_sources += cxx_sources
            cxx_sources = []

        # filtering C dispatch-table sources when optimization is not disabled,
        # otherwise treated as normal sources.
        copt_c_sources = []
        copt_cxx_sources = []
        copt_baseline_flags = []
        copt_macros = []
        if not self.disable_optimization:
            bsrc_dir = self.get_finalized_command("build_src").build_src
            dispatch_hpath = os.path.join("numpy", "distutils", "include")
            dispatch_hpath = os.path.join(bsrc_dir, dispatch_hpath)
            include_dirs.append(dispatch_hpath)

            copt_build_src = None if self.inplace else bsrc_dir
            for _srcs, _dst, _ext in (((c_sources, ), copt_c_sources,
                                       ('.dispatch.c', )),
                                      ((c_sources, cxx_sources),
                                       copt_cxx_sources, ('.dispatch.cpp',
                                                          '.dispatch.cxx'))):
                for _src in _srcs:
                    _dst += [
                        _src.pop(_src.index(s)) for s in _src[:]
                        if s.endswith(_ext)
                    ]
            copt_baseline_flags = self.compiler_opt.cpu_baseline_flags()
        else:
            copt_macros.append(("NPY_DISABLE_OPTIMIZATION", 1))

        objects = []
        if copt_cxx_sources:
            log.info("compiling C++ dispatch-able sources")
            # the C++ dispatch-able sources need the C++ compiler
            cxx_compiler = compiler.cxx_compiler()
            objects += self.compiler_opt.try_dispatch(
                copt_cxx_sources,
                output_dir=self.build_temp,
                src_dir=copt_build_src,
                macros=macros + copt_macros,
                include_dirs=include_dirs,
                debug=self.debug,
                extra_postargs=extra_postargs,
                ccompiler=cxx_compiler)

        if copt_c_sources:
            log.info("compiling C dispatch-able sources")
            objects += self.compiler_opt.try_dispatch(
                copt_c_sources,
                output_dir=self.build_temp,
                src_dir=copt_build_src,
                macros=macros + copt_macros,
                include_dirs=include_dirs,
                debug=self.debug,
                extra_postargs=extra_postargs)

        if c_sources:
            log.info("compiling C sources")
            objects += compiler.compile(c_sources,
                                        output_dir=self.build_temp,
                                        macros=macros + copt_macros,
                                        include_dirs=include_dirs,
                                        debug=self.debug,
                                        extra_postargs=extra_postargs +
                                        copt_baseline_flags)

        if cxx_sources:
            log.info("compiling C++ sources")
            cxx_compiler = compiler.cxx_compiler()
            cxx_objects = cxx_compiler.compile(cxx_sources,
                                               output_dir=self.build_temp,
                                               macros=macros + copt_macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs +
                                               copt_baseline_flags)
            objects.extend(cxx_objects)

        if f_sources or fmodule_sources:
            extra_postargs = []
            f_objects = []

            if requiref90:
                if fcompiler.module_dir_switch is None:
                    existing_modules = glob('*.mod')
                extra_postargs += fcompiler.module_options(
                    module_dirs, module_build_dir)

            if fmodule_sources:
                log.info("compiling Fortran 90 module sources")
                f_objects += fcompiler.compile(fmodule_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)

            if requiref90 and self._f_compiler.module_dir_switch is None:
                # move new compiled F90 module files to module_build_dir
                for f in glob('*.mod'):
                    if f in existing_modules:
                        continue
                    t = os.path.join(module_build_dir, f)
                    if os.path.abspath(f) == os.path.abspath(t):
                        continue
                    if os.path.isfile(t):
                        os.remove(t)
                    try:
                        self.move_file(f, module_build_dir)
                    except DistutilsFileError:
                        log.warn('failed to move %r to %r' %
                                 (f, module_build_dir))

            if f_sources:
                log.info("compiling Fortran sources")
                f_objects += fcompiler.compile(f_sources,
                                               output_dir=self.build_temp,
                                               macros=macros,
                                               include_dirs=include_dirs,
                                               debug=self.debug,
                                               extra_postargs=extra_postargs)
        else:
            f_objects = []

        if f_objects and not fcompiler.can_ccompiler_link(compiler):
            # Default linker cannot link Fortran object files, and results
            # need to be wrapped later. Instead of creating a real static
            # library, just keep track of the object files.
            listfn = os.path.join(self.build_clib, lib_name + '.fobjects')
            with open(listfn, 'w') as f:
                f.write("\n".join(os.path.abspath(obj) for obj in f_objects))

            listfn = os.path.join(self.build_clib, lib_name + '.cobjects')
            with open(listfn, 'w') as f:
                f.write("\n".join(os.path.abspath(obj) for obj in objects))

            # create empty "library" file for dependency tracking
            lib_fname = os.path.join(self.build_clib,
                                     lib_name + compiler.static_lib_extension)
            with open(lib_fname, 'wb') as f:
                pass
        else:
            # assume that default linker is suitable for
            # linking Fortran object files
            objects.extend(f_objects)
            compiler.create_static_lib(objects,
                                       lib_name,
                                       output_dir=self.build_clib,
                                       debug=self.debug)

        # fix library dependencies
        clib_libraries = build_info.get('libraries', [])
        for lname, binfo in libraries:
            if lname in clib_libraries:
                clib_libraries.extend(binfo.get('libraries', []))
        if clib_libraries:
            build_info['libraries'] = clib_libraries
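
The '.fobjects'/'.cobjects' list files written above are intended for a later wrapping step. A hedged sketch, not numpy.distutils API, of how such a step could read them back:

import os

def read_object_lists(build_clib, lib_name):
    """Collect object paths recorded in '<lib_name>.fobjects'/'.cobjects'."""
    objs = []
    for suffix in ('.fobjects', '.cobjects'):
        listfn = os.path.join(build_clib, lib_name + suffix)
        if os.path.exists(listfn):
            with open(listfn) as f:
                objs.extend(line.strip() for line in f if line.strip())
    return objs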
Example #56
    def build_extension(self, ext):

        sources = ext.sources
        if sources is None or type(sources) not in (ListType, TupleType):
            raise DistutilsSetupError, \
                  ("in 'ext_modules' option (extension '%s'), " +
                   "'sources' must be present and must be " +
                   "a list of source filenames") % ext.name
        sources = list(sources)

        fullname = self.get_ext_fullname(ext.name)
        if self.inplace:
            # ignore build-lib -- put the compiled extension into
            # the source tree along with pure Python modules

            modpath = string.split(fullname, '.')
            package = string.join(modpath[0:-1], '.')
            base = modpath[-1]

            build_py = self.get_finalized_command('build_py')
            package_dir = build_py.get_package_dir(package)
            ext_filename = os.path.join(package_dir,
                                        self.get_ext_filename(base))
        else:
            ext_filename = os.path.join(self.build_lib,
                                        self.get_ext_filename(fullname))

        if not (self.force or newer_group(sources, ext_filename, 'newer')):
            self.announce("skipping '%s' extension (up-to-date)" % ext.name)
            return
        else:
            self.announce("building '%s' extension" % ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef, ))

        # XXX and if we support CFLAGS, why not CC (compiler
        # executable), CPPFLAGS (pre-processor options), and LDFLAGS
        # (linker options) too?
        # XXX should we use shlex to properly parse CFLAGS?

        if os.environ.has_key('CFLAGS'):
            extra_args.extend(string.split(os.environ['CFLAGS']))

        objects = self.compiler.compile(sources,
                                        output_dir=self.build_temp,
                                        macros=macros,
                                        include_dirs=ext.include_dirs,
                                        debug=self.debug,
                                        extra_postargs=extra_args)

        # XXX -- this is a Vile HACK!
        #
        # The setup.py script for Python on Unix needs to be able to
        # get this list so it can perform all the clean up needed to
        # avoid keeping object files around when cleaning out a failed
        # build of an extension module.  Since Distutils does not
        # track dependencies, we have to get rid of intermediates to
        # ensure all the intermediates will be properly re-built.
        #
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        self.compiler.link_shared_object(
            objects,
            ext_filename,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp)
Example #57
def swig_sources(self, sources, extension):
    # Assuming SWIG 1.3.14 or later. See compatibility note in
    #   http://www.swig.org/Doc1.3/Python.html#Python_nn6

    new_sources = []
    swig_sources = []
    swig_targets = {}
    target_dirs = []
    py_files = []  # swig generated .py files
    target_ext = ".c"
    if self.swig_cpp:
        typ = "c++"
        is_cpp = True
    else:
        typ = None
        is_cpp = False
    skip_swig = 0
    ext_name = extension.name.split(".")[-1]

    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == ".i":  # SWIG interface file
            if self.inplace:
                target_dir = os.path.dirname(base)
                py_target_dir = self.ext_target_dir
            else:
                target_dir = appendpath(self.build_src, os.path.dirname(base))
                py_target_dir = target_dir
            if os.path.isfile(source):
                name = get_swig_modulename(source)
                # 				if name != ext_name:
                # 					raise DistutilsSetupError(
                # 						'mismatch of extension names: %s provides %r'
                # 						' but expected %r' % (source, name, ext_name))
                if typ is None:
                    typ = get_swig_target(source)
                    is_cpp = typ == "c++"
                    if is_cpp:
                        target_ext = ".cpp"
                else:
                    typ2 = get_swig_target(source)
                    if typ != typ2:
                        log.warn(
                            "expected %r but source %r defines %r swig target"
                            % (typ, source, typ2))
                        if typ2 == "c++":
                            log.warn(
                                "resetting swig target to c++ (some targets may have .c extension)"
                            )
                            is_cpp = True
                            target_ext = ".cpp"
                        else:
                            log.warn("assuming that %r has c++ swig target" %
                                     (source))
                target_file = os.path.join(target_dir,
                                           "%s_wrap%s" % (name, target_ext))
            else:
                log.warn("  source %s does not exist: skipping swig'ing." %
                         (source))
                name = ext_name
                skip_swig = 1
                target_file = _find_swig_target(target_dir, name)
                if not os.path.isfile(target_file):
                    log.warn((
                        "target {} does not exist:\n" +
                        "Assuming {}_wrap.{{c,cpp}} was generated with 'build_src --inplace' command."
                    ).format(target_file, name))
                    target_dir = os.path.dirname(base)
                    target_file = _find_swig_target(target_dir, name)
                    if not os.path.isfile(target_file):
                        raise DistutilsSetupError("%r missing" %
                                                  (target_file, ))
                    log.warn("   Yes! Using %r as up-to-date target." %
                             (target_file))
            target_dirs.append(target_dir)
            new_sources.append(target_file)
            # py_files.append(os.path.join(py_target_dir, name+'.py'))
            swig_sources.append(source)
            swig_targets[source] = new_sources[-1]
        else:
            new_sources.append(source)

    if not swig_sources:
        return new_sources

    if skip_swig:
        return new_sources + py_files

    for d in target_dirs:
        self.mkpath(d)

    swig = self.swig or self.find_swig()
    swig_cmd = [swig, "-python"] + extension.swig_opts
    if is_cpp:
        swig_cmd.append("-c++")
    for d in extension.include_dirs:
        swig_cmd.append("-I" + d)
    for source in swig_sources:
        target = swig_targets[source]
        depends = [source] + extension.depends
        if self.force or newer_group(depends, target, "newer"):
            log.info("%s: %s" % (os.path.basename(swig) +
                                 (is_cpp and "++" or ""), source))
            self.spawn(swig_cmd + self.swig_opts +
                       ["-o", target, "-outdir", py_target_dir, source])
        else:
            log.debug("  skipping '%s' swig interface (up-to-date)" % (source))

    return new_sources + py_files
Example #58
"""distutils.cmd
Example #59
    def generate_defs(self):
        for (target, sources) in self.built_defs:
            if dep_util.newer_group(sources, target):
                # createdefs is mostly called from the CLI!
                args = ['dummy', target] + sources
                codegen.createdefs.main(args)
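
The call above uses newer_group() with its default missing handling, which raises OSError when a listed source is absent. A short sketch of the three modes, with hypothetical paths:

from distutils.dep_util import newer_group

sources = ['gen/widgets.defs', 'gen/extras.defs']  # hypothetical inputs
target = 'build/widgets_codegen.c'                 # hypothetical output

# missing='error' (default): a missing source raises OSError
# missing='ignore': missing sources are silently dropped from the comparison
# missing='newer': missing sources count as newer than the target, forcing a rebuild
stale = newer_group(sources, target, missing='newer')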
Example #60
    def build_dso(self, dso):
        self.dso2lib_pre(dso)
        expand_sources(self, dso.sources)
        expand_sources(self, dso.depends)

        baselib = self._name2file(dso)
        solib = self._name2file(dso, so=True)

        outbaselib = os.path.join(self.build_lib, baselib)
        outlib = os.path.join(self.build_lib, solib)
        sources = list(dso.sources)

        depends = sources + dso.depends
        if not (self.force or newer_group(depends, outlib, 'newer')):
            log.debug("skipping '%s' DSO (up-to-date)", dso.name)
            return
        else:
            log.info("building '%s' DSO as %s", dso.name, outlib)

        macros = dso.define_macros[:]
        for undef in dso.undef_macros:
            macros.append((undef,))

        extra_args = dso.extra_compile_args or []

        include_dirs = massage_dir_list([self.build_temp, self.build_lib], dso.include_dirs or [])

        SRC = defaultdict(list)

        # sort by language
        for src in sources:
            SRC[self.compiler.language_map[os.path.splitext(src)[-1]]].append(src)

        # do the actual compiling
        objects = []

        if 'NUM_JOBS' in os.environ: # because it is so very cumbersome to pass extra build args through pip and setuptools ...
            nworkers = int(os.environ['NUM_JOBS'])
        elif hasattr(os, 'cpu_count'): # py3
            nworkers = os.cpu_count()
        else:
            nworkers = 2 # why not?

        with Pool(nworkers) as P:
            jobs = []
            for lang, srcs in SRC.items():

                # submit jobs
                # allocate every n-th object to the n-th worker.
                # Load not well balanced, but easy to do.
                for inputs in [srcs[n::nworkers] for n in range(nworkers)]:
                    jobs.append(P.apply_async(self.compiler.compile, [inputs], {
                        'output_dir':self.build_temp,
                        'macros':macros,
                        'include_dirs':include_dirs,
                        'extra_postargs':extra_args + (dso.lang_compile_args.get(lang) or []),
                        'depends':dso.depends,
                    }))

            # wait for completion
            [objects.extend(job.get()) for job in jobs]

        library_dirs = massage_dir_list([self.build_lib], dso.library_dirs or [])

        # the Darwin linker errors if given non-existent -L directories :(
        [self.mkpath(D) for D in library_dirs]

        if dso.extra_objects:
            objects.extend(dso.extra_objects)

        extra_args = dso.extra_link_args or []
        solibbase = os.path.basename(solib)

        if sys.platform == 'darwin':
            # we always want to produce relocatable (movable) binaries
            # this install_name will be replaced below (cf. 'install_name_tool')
            extra_args.extend(['-install_name', '@rpath/%s'%solibbase])

        elif sys.platform == "win32":
            # The .lib is considered "temporary" for extensions, but not for us
            # so we pass export_symbols=None and put it along side the .dll
            extra_args.append('/IMPLIB:%s.lib'%(os.path.splitext(outlib)[0]))

        elif baselib!=solib: # ELF
            extra_args.extend(['-Wl,-h,%s'%solibbase])

        language = dso.language or self.compiler.detect_language(sources)

        self.compiler.link_shared_object(
            objects, outlib,
            libraries=dso.libraries,
            library_dirs=library_dirs,
            runtime_library_dirs=dso.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=None,
            #debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language)

        self.dso2lib_post(outlib)

        if baselib!=solib:
            # we make best effort here, even though zipfiles (.whl or .egg) will contain copies
            log.info("symlink %s <- %s", solibbase, outbaselib)
            if not self.dry_run:
                if os.path.exists(outbaselib):
                    os.unlink(outbaselib)
                os.symlink(solibbase, outbaselib)
            #self.copy_file(outlib, outbaselib) # link="sym" seem to get the target path wrong

        if self.inplace:
            build_py = self.get_finalized_command('build_py')
            pkg = '.'.join(dso.name.split('.')[:-1])    # path.to.dso -> path.to
            pkgdir = build_py.get_package_dir(pkg)      # path.to -> src/path/to

            solib_dst   = os.path.join(pkgdir, os.path.basename(solib))     # path/to/dso.so -> src/path/to/dso.so
            baselib_dst = os.path.join(pkgdir, os.path.basename(baselib))

            self.mkpath(os.path.dirname(solib_dst))
            self.copy_file(outlib, solib_dst)
            if baselib!=solib:
                self.copy_file(outbaselib, baselib_dst)