def run(self):
    """Update the gettext .pot and any out-of-date .po files.

    With --lang given, force-update the pot and that single po;
    otherwise rebuild only catalogs older than their inputs.
    (Python 2: uses the print statement.)
    """
    try:
        gettextutil.check_version()
    except gettextutil.GettextError as e:
        raise SystemExit(e)
    # if lang is given, force update pot and the specific po
    if self.lang is not None:
        po = os.path.join(self.po_directory, self.lang + ".po")
        if po not in self.po_files:
            raise SystemExit("Error: %r not found" % po)
        self._update_pot()
        self._update_po(po)
        return
    infilename = os.path.join(self.po_directory, "POTFILES.in")
    with open(infilename, "rb") as h:
        infiles = h.read().splitlines()
    # if any of the in files is newer than the pot, update the pot
    for filename in infiles:
        if newer(filename, self.pot_file):
            self._update_pot()
            break
    else:
        # for/else branch: no source file was newer than the pot
        print "not pot update"
    # if the pot file is newer than any of the po files, update that po
    for po in self.po_files:
        if newer(self.pot_file, po):
            self._update_po(po)
def run(self):
    """Compile Qt Designer ui/*.ui files into picard/ui/ui_*.py and
    regenerate picard/resources.py from the Qt resource file when stale.

    Fix: the generated .py module is now written through a context
    manager, so the file handle is closed even if the write raises.
    """
    from PyQt4 import uic
    # rewrites Qt's translate() calls into picard's _() helper
    _translate_re = re.compile(
        r'QtGui\.QApplication.translate\(.*?, (.*?), None, '
        r'QtGui\.QApplication\.UnicodeUTF8\)')
    for uifile in glob.glob("ui/*.ui"):
        pyfile = "ui_%s.py" % os.path.splitext(os.path.basename(uifile))[0]
        pyfile = os.path.join("picard", "ui", pyfile)
        if newer(uifile, pyfile):
            log.info("compiling %s -> %s", uifile, pyfile)
            tmp = StringIO()
            uic.compileUi(uifile, tmp)
            source = _translate_re.sub(r'_(\1)', tmp.getvalue())
            with open(pyfile, "w") as f:
                f.write(source)
    qrcfile = os.path.join("resources", "picard.qrc")
    pyfile = os.path.join("picard", "resources.py")
    # rebuild resources.py if the qrc file or any image is newer
    build_resources = False
    if newer("resources/picard.qrc", pyfile):
        build_resources = True
    for datafile in glob.glob("resources/images/*.*"):
        if newer(datafile, pyfile):
            build_resources = True
            break
    if build_resources:
        log.info("compiling %s -> %s", qrcfile, pyfile)
        os.system("pyrcc4 %s -o %s" % (qrcfile, pyfile))
def run(self):
    """Run the standard build, gzip the man page if stale, then compile
    each po/*.po catalog to a .mo (Python 2 raise/except syntax)."""
    build.run(self)
    cherrytree_man_file = "linux/cherrytree.1"
    cherrytree_man_file_gz = cherrytree_man_file + ".gz"
    if newer(cherrytree_man_file, cherrytree_man_file_gz):
        if os.path.isfile(cherrytree_man_file_gz):
            os.remove(cherrytree_man_file_gz)
        import gzip
        f_in = open(cherrytree_man_file, 'rb')
        f_out = gzip.open(cherrytree_man_file_gz, 'wb')
        f_out.writelines(f_in)
        f_out.close()
        f_in.close()
    if self.distribution.without_gettext:
        return
    for po in glob.glob(os.path.join(PO_DIR, '*.po')):
        # po basename without the ".po" suffix is the language code
        lang = os.path.basename(po[:-3])
        mo = os.path.join(MO_DIR, lang, 'cherrytree.mo')
        directory = os.path.dirname(mo)
        if not os.path.exists(directory):
            info('creating %s' % directory)
            os.makedirs(directory)
        if newer(po, mo):
            info('compiling %s -> %s' % (po, mo))
            try:
                rc = subprocess.call(['msgfmt', '-o', mo, po])
                if rc != 0:
                    raise Warning, "msgfmt returned %d" % rc
            except Exception, e:
                error("Building gettext files failed. Try setup.py --without-gettext [build|install]")
                error("Error: %s" % str(e))
                sys.exit(1)
def run(self):
    """Refresh gettext catalogs via intltool-update, then compile each
    po file to build_base/share/locale/<lang>/LC_MESSAGES/<pkg>.mo."""
    if find_executable("intltool-update") is None:
        raise SystemExit("Error: 'intltool' not found.")
    if find_executable("msgfmt") is None:
        raise SystemExit("Error: 'gettext' not found.")
    basepath = os.path.join(self.build_base, 'share', 'locale')
    infilename = os.path.join(self.po_directory, "POTFILES.in")
    # Python 2 file() builtin; the handle is never explicitly closed.
    infiles = file(infilename).read().splitlines()
    pot_name = os.path.join(
        self.po_directory, self.po_package + ".pot")
    # NOTE(review): no break -- the pot/po refresh re-runs for every
    # source file newer than the pot; presumably harmless but wasteful.
    for filename in infiles:
        if newer(filename, pot_name):
            oldpath = os.getcwd()
            # intltool-update must run from inside the po directory
            os.chdir(self.po_directory)
            self.spawn(["intltool-update", "--pot",
                        "--gettext-package", self.po_package])
            for po in self.po_files:
                self.spawn(["intltool-update", "--dist",
                            "--gettext-package", self.po_package,
                            os.path.basename(po[:-3])])
            os.chdir(oldpath)
    for po in self.po_files:
        language = os.path.basename(po).split(".")[0]
        fullpath = os.path.join(basepath, language, "LC_MESSAGES")
        destpath = os.path.join(fullpath, self.po_package + ".mo")
        if newer(po, destpath):
            self.mkpath(fullpath)
            self.spawn(["msgfmt", "-o", destpath, po])
def test_newer(self):
    """newer() raises for a missing source and orders files by mtime."""
    work_dir = self.mkdtemp()
    fresh = os.path.join(work_dir, 'new')
    stale = os.path.abspath(__file__)
    # A nonexistent source file is an error, not merely "not newer".
    self.assertRaises(DistutilsFileError, newer, fresh, stale)
    self.write_file(fresh)
    # A missing target means the source always counts as newer.
    self.assertTrue(newer(fresh, 'I_dont_exist'))
    self.assertTrue(newer(fresh, stale))
    self.assertFalse(newer(stale, fresh))
def swig_sources(self, sources, extension):
    """Replace SWIG .i files in `sources` with the generated wrapper
    C/C++ files, running SWIG on any that are out of date.

    Local modification (marked "# AN"): also regenerate when the
    SWIG-produced .py wrapper is stale, not just the C wrapper.
    """
    ### Copied from build_ext.py : swig_sources, with small mods marked "# AN"
    new_sources = []
    swig_sources = []
    swig_targets = {}
    # AN
    swig_py_targets = {}
    if self.swig_cpp:
        log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")
    if self.swig_cpp or ('-c++' in self.swig_opts) or \
       ('-c++' in extension.swig_opts):
        target_ext = '.cpp'
    else:
        target_ext = '.c'
    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == ".i":
            # SWIG interface file
            new_sources.append(base + '_wrap' + target_ext)
            swig_sources.append(source)
            swig_targets[source] = new_sources[-1]
            # AN
            swig_py_targets[source] = base + '.py'
        else:
            new_sources.append(source)
    if not swig_sources:
        return new_sources
    swig = self.swig or self.find_swig()
    swig_cmd = [swig, "-python"]
    swig_cmd.extend(self.swig_opts)
    if self.swig_cpp:
        swig_cmd.append("-c++")
    # Do not override commandline arguments
    if not self.swig_opts:
        for o in extension.swig_opts:
            swig_cmd.append(o)
    for source in swig_sources:
        target = swig_targets[source]
        log.info("swigging %s to %s", source, target)
        # AN
        py_target = swig_py_targets[source]
        if not (self.force or newer(source, target)
                or newer(source, py_target)):
            log.debug("skipping swig of %s (older than %s, %s)"
                      % (source, target, py_target))
            continue
        # AN
        self.spawn(swig_cmd + ["-o", target, source])
    return new_sources
def isNewer(nfile, ofile):
    """Return 1 if `ofile` (a file, or any file directly inside the
    directory `ofile`) is strictly newer than `nfile`, else 0."""
    # TODO: use of newer_group for the second part of the test.
    if path.isfile(ofile):
        return 1 if newer(ofile, nfile) else 0
    if path.isdir(ofile):
        for entry in listdir(ofile):
            candidate = path.join(ofile, entry)
            if path.isfile(candidate) and newer(candidate, nfile):
                return 1
    return 0
def pre_swig_hook(self, sources, ext): """Extra hook to build cl_lib.h2 and sized_struct.i""" # crude detection of V5 vs v6 cl_lib = os.path.join(dtk, 'include', 'cl_lib.h') if os.path.exists(cl_lib): # This is V6 # Create patched versions for cl_lib.h and acu_type.h # To generate a patch, (for example cl_lib.patch), copy the # original cl_lib.h to cl_lib.h2, edit it, and then do: # diff -u $(DTK)/include/cl_lib.h cl_lib.h2 > cl_lib.patch if newer('cl_lib.patch', 'cl_lib.h2'): self.spawn(['patch', '-o', 'cl_lib.h2', cl_lib, 'cl_lib.patch']) self.spawn(['patch', '-o', 'acu_type.h2', os.path.join(dtk, 'include', 'acu_type.h'), 'acu_type.patch']) swig = self.swig or self.find_swig() swig_cmd = [swig, '-xml', '-xmllite'] swig_cmd.extend(self.swig_opts) if newer('lowlevel.i', 'sized_struct.i'): # Do not override commandline arguments if not self.swig_opts: for o in ext.swig_opts: # More ugly hacks. # Remove Python specific swig args if not o in ['-modern', '-new_repr']: swig_cmd.append(o) self.spawn(swig_cmd + ['-o', 'lowlevel.xml', 'lowlevel.i']) of = open('sized_struct.i', 'w') parser = xml.sax.make_parser() handler = FindStruct(of, u'ACU_SNAPSHOT_PARMS') parser.setContentHandler(handler) parser.parse('lowlevel.xml') of.close() else: # Create patched version for smosintf.h if newer('smosintf.patch', 'smosintf.h2'): self.spawn(['patch', '-o', 'smosintf.h2', os.path.join(dtk, 'speech', 'include', 'smosintf.h'), 'smosintf.patch'])
def run(self):
    """Regenerate the example databases and the docs, then run sdist."""
    from pybtex.database.convert import convert
    source_bib = os.path.join('examples', 'xampl.bib')
    # Derive the YAML and BibTeXML variants from the master .bib file,
    # in that order, whenever they are missing or stale.
    for target in (os.path.join('examples', 'xampl.yaml'),
                   os.path.join('examples', 'xampl.bibtexml')):
        if not os.path.exists(target) or newer(source_bib, target):
            convert(source_bib, target)
    from pybtex.docgen import generate_docs
    generate_docs(os.path.join(ROOT, 'docs'), ('html', 'manpages'))
    sdist.run(self)
def build_ref_manual(self):
    """Build the API reference manual."""
    self.announce("building API reference manual")
    tmp_man_dir = os.path.abspath(os.path.join(self.build_temp, 'doc/Manual'))
    make = os.environ.get('MAKE', 'make')
    # NOTE(review): srcdir and cwd are computed but never used below.
    srcdir = os.path.abspath('doc/Manual/')
    cwd = os.getcwd()
    # Python 2 octal literal (0777); this block predates Python 3.
    mkpath(tmp_man_dir, 0777, self.verbose, self.dry_run)
    if self.html:
        spawn([make, '-C', tmp_man_dir, 'html'])
    if self.printable:
        spawn([make, '-C', tmp_man_dir, 'latexpdf'])
    if self.sxr:
        spawn([make, '-C', tmp_man_dir, 'sxr', 'sxr=%s'%self.sxr])
    builddir = os.path.abspath(os.path.join(self.build_lib,
                                            'share/doc/synopsis/html/Manual'))
    if self.html:
        src = os.path.join(tmp_man_dir, 'html', 'python')
        dest = os.path.join(builddir, 'python')
        # replace the installed tree wholesale when the build is newer
        if newer(src, dest):
            rmtree(dest, True)
            copy_tree(src, dest)
    if self.sxr:
        src = os.path.join(tmp_man_dir, 'html', 'sxr')
        builddir = os.path.abspath(os.path.join(self.build_lib,
                                                'share/doc/synopsis/html/'))
        dest = os.path.join(builddir, 'SXR')
        if newer(src, dest):
            rmtree(dest, True)
            copy_tree(src, dest)
    if self.printable:
        builddir = os.path.abspath(os.path.join(self.build_lib,
                                                'share/doc/synopsis/print'))
        mkpath(builddir, 0777, self.verbose, self.dry_run)
        copy_file(os.path.join(tmp_man_dir, 'Manual.pdf'),
                  os.path.join(builddir, 'Manual.pdf'))
def preprocess(self, source, output_file=None, macros=None,
               include_dirs=None, extra_preargs=None, extra_postargs=None):
    """Preprocess `source` to `output_file` (stdout when None).

    Mirrors distutils CCompiler.preprocess; Python 2 except/raise syntax.
    """
    ignore, macros, include_dirs = \
        self._fix_compile_args(None, macros, include_dirs)
    pp_opts = gen_preprocess_options(macros, include_dirs)
    pp_args = self.preprocessor + pp_opts
    if output_file:
        pp_args.extend(['-o', output_file])
    if extra_preargs:
        pp_args[:0] = extra_preargs
    if extra_postargs:
        pp_args.extend(extra_postargs)
    pp_args.append(source)
    # We need to preprocess: either we're being forced to, or we're
    # generating output to stdout, or there's a target output file and
    # the source file is newer than the target (or the target doesn't
    # exist).
    if self.force or output_file is None or newer(source, output_file):
        if output_file:
            self.mkpath(os.path.dirname(output_file))
        try:
            self.spawn(pp_args)
        except DistutilsExecError, msg:
            raise CompileError, msg
def build_libraries(self, libraries):
    """Compile and link each (lib_name, build_info) pair into a shared
    .dll under self.build_clib, skipping libraries whose target is
    already newer than every source.

    Fix: the up-to-date check now uses a generator expression instead
    of materializing a throwaway list for any().
    """
    for (lib_name, build_info) in libraries:
        compiler = self.prepare_compiler(build_info.get('plat'))
        sources = build_info.get('sources')
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                "in 'libraries' option (library '%s'), "
                "'sources' must be present and must be "
                "a list of source filenames" % lib_name)
        sources = list(sources)
        target = os.path.join(self.build_clib, lib_name + ".dll")
        # Skip when not forced and no source is newer than the dll.
        if not self.force and not any(newer(source, target)
                                      for source in sources):
            continue
        macros = build_info.get('macros')
        include_dirs = build_info.get('include_dirs')
        objects = compiler.compile(sources,
                                   output_dir=self.build_temp,
                                   macros=macros,
                                   include_dirs=include_dirs,
                                   debug=self.debug,
                                   extra_postargs=self.compile_args)
        link_args = self.link_args[:]
        # def_file is required here; a missing key raises KeyError.
        link_args.append('/DEF:{}'.format(build_info['def_file']))
        link_args.append('/pdb:{}.pdb'.format(lib_name))
        compiler.link_shared_lib(objects, lib_name,
                                 output_dir=self.build_clib,
                                 debug=self.debug,
                                 libraries=self.link_libs,
                                 extra_postargs=link_args)
def run(self):
    """Compile ui/*.ui into picard/ui modules -- rewriting both the old
    QApplication.translate() form and the new _translate() form to the
    _() helper -- then regenerate the Qt resources.

    Fixes: the output file is written via a context manager (the handle
    was previously leaked on write failure), and the needless list()
    wrap around the regex tuple is gone.
    """
    from PyQt4 import uic
    _translate_re = (
        re.compile(
            r'QtGui\.QApplication.translate\(.*?, (.*?), None, '
            r'QtGui\.QApplication\.UnicodeUTF8\)'),
        re.compile(
            r'\b_translate\(.*?, (.*?), None\)')
    )
    for uifile in glob.glob("ui/*.ui"):
        pyfile = "ui_%s.py" % os.path.splitext(os.path.basename(uifile))[0]
        pyfile = os.path.join("picard", "ui", pyfile)
        if newer(uifile, pyfile):
            log.info("compiling %s -> %s", uifile, pyfile)
            tmp = StringIO()
            uic.compileUi(uifile, tmp)
            source = tmp.getvalue()
            for r in _translate_re:
                source = r.sub(r'_(\1)', source)
            with open(pyfile, "w") as f:
                f.write(source)
    from resources import compile, makeqrc
    makeqrc.main()
    compile.main()
def preprocess (self, source, output_file=None, macros=None, include_dirs=None, extra_preargs=None, extra_postargs=None): (_, macros, include_dirs) = \ self._fix_compile_args(None, macros, include_dirs) pp_opts = gen_preprocess_options(macros, include_dirs) pp_args = ['cpp32.exe'] + pp_opts if output_file is not None: pp_args.append('-o' + output_file) if extra_preargs: pp_args[:0] = extra_preargs if extra_postargs: pp_args.extend(extra_postargs) pp_args.append(source) # We need to preprocess: either we're being forced to, or the # source file is newer than the target (or the target doesn't # exist). if self.force or output_file is None or newer(source, output_file): if output_file: self.mkpath(os.path.dirname(output_file)) try: self.spawn(pp_args) except DistutilsExecError, msg: shout msg raise CompileError, msg
def compile_po_files(domain, dirname='locale'):
    """Compile each po/*.po into <dirname>/<lang>/LC_MESSAGES/<domain>.mo.

    Note. this function depends on gettext utilities being installed;
    if msgfmt cannot be launched the function warns and returns [].

    :param domain: gettext domain
    :param dirname: base directory
    :returns: a list of (dest_dir, [mo_path]) data-file tuples
    """
    result = []
    for po_path in listfiles('po', '*.po'):
        language = os.path.basename(po_path[:-3])
        mo_path = os.path.join(dirname, language, 'LC_MESSAGES',
                               domain + '.mo')
        if not os.path.exists(mo_path) or newer(po_path, mo_path):
            mo_dir = os.path.dirname(mo_path)
            if not os.path.exists(mo_dir):
                info("creating %s" % mo_dir)
                os.makedirs(mo_dir)
            try:
                proc = subprocess.Popen(['msgfmt', '-o', mo_path, po_path],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
            except OSError:
                warn('msgfmt is missing, not installing translations')
                return []
            info('compiled %s -> %s' % (po_path, mo_path))
            proc.communicate()
        result.append((os.path.dirname(os.path.join('share', mo_path)),
                       [mo_path]))
    return result
def run(self):
    """Compile translation catalogs into build_base/share/locale.

    Refreshes the po files first (unless skip_po_update is set), then
    runs msgfmt on every catalog whose .mo is out of date.  The
    POT-Creation-Date header is stripped from a temp copy before
    compiling so builds stay reproducible.
    """
    if find_executable("msgfmt") is None:
        raise SystemExit("Error: 'gettext' not found.")
    # It's OK to skip po update for building release tarballs, since
    # things are updated right before release...
    if not self.skip_po_update:
        self.run_command("update_po")
    locale_root = os.path.join(self.build_base, 'share', 'locale')
    for po in self.po_files:
        lang = os.path.basename(po).split(".")[0]
        msg_dir = os.path.join(locale_root, lang, "LC_MESSAGES")
        mo_path = os.path.join(msg_dir, self.po_package + ".mo")
        if not newer(po, mo_path):
            continue
        self.mkpath(msg_dir)
        # work on a temp copy so the source po stays untouched
        fd, temp_path = mkstemp(".po")
        try:
            os.close(fd)
            shutil.copy(po, temp_path)
            strip_pot_date(temp_path)
            self.spawn(["msgfmt", "-o", mo_path, temp_path])
        finally:
            os.remove(temp_path)
def run(self):
    """Compile ../data/po/*.po into ../data/translations/*.mo when stale."""
    msgfmt_cmd = find_command('msgfmt')
    for po_path in glob.glob(os.path.join('..', 'data', 'po', '*.po')):
        mo_name = os.path.basename(po_path)[:-3] + '.mo'
        mo_path = os.path.join('..', 'data', 'translations', mo_name)
        if not newer(po_path, mo_path):
            continue
        self.mkpath(os.path.dirname(mo_path))
        # -c runs msgfmt's consistency checks on the catalog
        self.spawn([msgfmt_cmd, '-c', '-o', mo_path, po_path])
def build_extensions(self):
    """Regenerate docstrings.h if stale, wire boost/timbl/libxml2 paths
    into every extension, pick the matching boost_python library, then
    delegate to build_ext.

    Fixes:
    - pyversion was built by slicing sys.version ("3.1" on Python
      3.10+, yielding "31"); it is now derived from sys.version_info.
    - the static-link branch applied .split() to only the last string
      literal, producing str + list (a TypeError at runtime); the full
      string is now parenthesized before splitting.
    """
    if newer("src/docstrings.h.in", "src/docstrings.h"):
        updateDocHeader("src/docstrings.h.in", "src/docstrings.h")
    for ext in self.extensions:
        ext.include_dirs.append(self.boost_include_dir)
        ext.include_dirs.append(self.timbl_include_dir)
        ext.include_dirs.append(self.libxml2_include_dir)
        ext.library_dirs.append(self.timbl_library_dir)
        ext.library_dirs.append(self.boost_library_dir)
        ext.library_dirs.append(self.libxml2_library_dir)
        # e.g. "32" for Python 3.2, "310" for Python 3.10
        pyversion = "%d%d" % sys.version_info[:2]
        if os.path.exists(self.boost_library_dir + "/libboost_python-py" + pyversion + ".so"):
            boostlib = "boost_python-py" + pyversion
        elif os.path.exists(self.boost_library_dir + "/libboost_python3.so"):
            boostlib = "boost_python3"
        elif os.path.exists(self.boost_library_dir + "/libboost_python.so"):
            # probably goes wrong if this is for python 2!
            boostlib = "boost_python"
        elif os.path.exists(self.boost_library_dir + "/libboost_python3.dylib"):
            # Mac OS X
            boostlib = "boost_python3"
        elif os.path.exists(self.boost_library_dir + "/libboost_python.dylib"):
            # Mac OS X -- probably goes wrong if this is for python 2!
            boostlib = "boost_python"
        else:
            print("Unable to find boost library", file=sys.stderr)
            sys.exit(65)
        if isinstance(self.compiler, UnixCCompiler) and self.static_boost_python:
            ext.extra_link_args.extend(
                ("-Wl,-Bstatic -l" + boostlib + " -Wl,-Bdynamic").split())
        else:
            ext.libraries.append(boostlib)
    build_ext.build_extensions(self)
def cythonize():
    """Regenerate the .c file for every carray/*.pyx that is out of date."""
    for pyx_path in glob.glob("carray/*.pyx"):
        # NOTE: split(".") mirrors the original naming quirk (first dot)
        c_path = pyx_path.split(".")[0] + ".c"
        if not newer(pyx_path, c_path):
            continue
        if not cython:
            exit_with_error("Need Cython >= %s to generate extensions."
                            % min_cython_version)
        sh("cython " + pyx_path)
def run (self):
    """Run the standard build, then compile each po/*.po catalog into
    its terminator.mo (Python 2 raise/except syntax)."""
    build.run (self)
    if self.distribution.without_gettext:
        return
    for po in glob.glob (os.path.join (PO_DIR, '*.po')):
        # po basename minus ".po" is the language code
        lang = os.path.basename(po[:-3])
        mo = os.path.join(MO_DIR, lang, 'terminator.mo')
        directory = os.path.dirname(mo)
        if not os.path.exists(directory):
            info('creating %s' % directory)
            os.makedirs(directory)
        if newer(po, mo):
            info('compiling %s -> %s' % (po, mo))
            try:
                rc = subprocess.call(['msgfmt', '-o', mo, po])
                if rc != 0:
                    raise Warning, "msgfmt returned %d" % rc
            except Exception, e:
                error("Building gettext files failed. Try setup.py --without-gettext [build|install]")
                error("Error: %s" % str(e))
                sys.exit(1)
def _compile_po_files(self):
    """Compile src/openmolar/locale/*.po to .mo and return data-file
    tuples targeting /usr/share/locale/<lang>/LC_MESSAGES.
    (Python 2 print statements.)"""
    print "COMPILING PO FILES"
    i18nfiles = []
    if not os.path.isdir("src/openmolar/locale/"):
        print "WARNING - language files are missing!"
    for po in glob.glob("src/openmolar/locale/*.po"):
        directory, file = os.path.split(po)
        lang = file.replace(".po","")
        mo = os.path.join(directory, lang)
        try:
            os.mkdir(mo)
        except OSError:
            # directory already exists
            pass
        mo = os.path.join(mo, "openmolar.mo")
        if not os.path.exists(mo) or newer(po, mo):
            cmd = 'msgfmt -o %s %s' % (mo, po)
            info ('compiling %s -> %s' % (po, mo))
            if os.system(cmd) != 0:
                # best-effort: log and continue with remaining catalogs
                info('Error while running msgfmt on %s'% po)
        destdir = os.path.join ("/usr","share", "locale", lang, "LC_MESSAGES")
        i18nfiles.append((destdir, [mo]))
    return i18nfiles
def _compile_po_files(self):
    """Compile the po catalogs listed in po/availables into
    build/mo/<lang>/gespeaker.mo and return data-file tuples.

    Fix: the 'availables' file handle was opened inline and never
    closed; it is now read inside a context manager.
    """
    data_files = []
    # Don't install language files on win32
    if sys.platform == 'win32':
        return data_files
    PO_DIR = 'po'
    with open(os.path.join(PO_DIR, 'availables'), 'r') as available:
        langs = available.readlines()
    for lang in langs:
        lang = lang.strip()
        if lang:
            po = os.path.join(PO_DIR, '%s.po' % lang)
            mo = os.path.join('build', 'mo', lang, 'gespeaker.mo')
            directory = os.path.dirname(mo)
            if not os.path.exists(directory):
                info('creating %s' % directory)
                os.makedirs(directory)
            if newer(po, mo):
                # True if mo doesn't exist
                cmd = 'msgfmt -o %s %s' % (mo, po)
                info('compiling %s -> %s' % (po, mo))
                if os.system(cmd) != 0:
                    raise SystemExit('Error while running msgfmt')
            dest = os.path.dirname(os.path.join('share', 'locale', lang,
                                                'LC_MESSAGES',
                                                'gespeaker.mo'))
            data_files.append((dest, [mo]))
    return data_files
def run (self):
    """Substitute @PREFIX@ into the .in templates, run the standard
    build, then compile po catalogs (Python 2 raise/except syntax)."""
    # Gen .in files with @PREFIX@ replaced
    for filename in ['udev-discover']:
        infile = open(filename + '.in', 'r')
        data = infile.read().replace('@PREFIX@', self.prefix)
        infile.close()
        outfile = open(filename, 'w')
        outfile.write(data)
        outfile.close()
    build.run (self)
    for po in glob.glob (os.path.join (PO_DIR, '*.po')):
        lang = os.path.basename(po[:-3])
        mo = os.path.join(MO_DIR, lang, 'udevdiscover.mo')
        directory = os.path.dirname(mo)
        if not os.path.exists(directory):
            info('creating %s' % directory)
            os.makedirs(directory)
        if newer(po, mo):
            info('compiling %s -> %s' % (po, mo))
            try:
                rc = subprocess.call(['msgfmt', '-o', mo, po])
                if rc != 0:
                    raise Warning, "msgfmt returned %d" % rc
            except Exception, e:
                error("Building gettext files failed. Try setup.py --without-gettext [build|install]")
                error("Error: %s" % str(e))
                sys.exit(1)
def build_extensions(self):
    """Regenerate docstrings.h if stale, add boost/timbl/libxml2 paths
    and C++11 flags to every extension, then delegate to build_ext.

    Fix: .split() previously bound only to the last string literal in
    the static-link args expression, so the extend() received
    str + list and raised TypeError; the full string is parenthesized
    before splitting.
    """
    if newer("src/docstrings.h.in", "src/docstrings.h"):
        updateDocHeader("src/docstrings.h.in", "src/docstrings.h")
    for ext in self.extensions:
        ext.include_dirs.append(self.boost_include_dir)
        ext.include_dirs.append(self.timbl_include_dir)
        ext.include_dirs.append(self.libxml2_include_dir)
        ext.library_dirs.append(self.timbl_library_dir)
        ext.library_dirs.append(self.boost_library_dir)
        ext.library_dirs.append(self.libxml2_library_dir)
        compile_args = ["-std=c++11"]
        if platform.system() == "Darwin":
            # clang on macOS needs the libc++ runtime selected explicitly
            compile_args.append("-stdlib=libc++")
        ext.extra_compile_args.extend(compile_args)
        if isinstance(self.compiler, UnixCCompiler) and self.static_boost_python:
            ext.extra_link_args.extend(
                ("-Wl,-Bstatic -l" + self.boostlib + " -Wl,-Bdynamic").split())
        else:
            ext.libraries.append(self.boostlib)
    build_ext.build_extensions(self)
def compile_po_files(domain, dirname='locale'):
    """
    Compiles po files to mo files.

    Note. this function depends on gettext utilities being installed

    @param domain: gettext domain
    @param dirname: base directory
    @returns: a list of po files
    """
    # Probe for msgfmt: invoked with no arguments it exits with status 1
    # (256 in the raw pre-WEXITSTATUS encoding); any other status is
    # taken to mean the tool is absent.
    if os.system('msgfmt 2> /dev/null') not in [1, 256]:
        warn('msgfmt is missing, not installing translations')
        return []
    data_files = []
    for po in listfiles('po', '*.po'):
        lang = os.path.basename(po[:-3])
        mo = os.path.join(dirname, lang, 'LC_MESSAGES', domain + '.mo')
        if not os.path.exists(mo) or newer(po, mo):
            directory = os.path.dirname(mo)
            if not os.path.exists(directory):
                info("creating %s" % directory)
                os.makedirs(directory)
            cmd = 'msgfmt -o %s %s' % (mo, po)
            info('compiling %s -> %s' % (po, mo))
            if os.system(cmd) != 0:
                raise SystemExit("Error while running msgfmt")
        dest = os.path.dirname(os.path.join('share', mo))
        data_files.append((dest, [mo]))
    return data_files
def copy_scripts(self):
    """
    Override the default distutils copy_scripts to call replace_tags
    if it's marked as a Python script.
    """
    self.mkpath(self.build_dir)
    outfiles = []
    for script in self.scripts:
        # NOTE(review): adjust is assigned but never read in this version
        adjust = 0
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)
        if not self.force and not newer(script, outfile):
            log.debug("not copying %s (up-to-date)", script)
            continue
        self.copy_file(script, outfile, self.dry_run)
    if os.name == 'posix':
        # make the copied scripts world-readable/executable
        # (Python 2 octal literals 07777 / 0555)
        for file in outfiles:
            if self.dry_run:
                log.info("changing mode of %s", file)
            else:
                oldmode = os.stat(file)[ST_MODE] & 07777
                newmode = (oldmode | 0555) & 07777
                if newmode != oldmode:
                    log.info("changing mode of %s from %o to %o",
                             file, oldmode, newmode)
                os.chmod(file, newmode)
def handle_dependencies(pyxfilename):
    """Force a rebuild of `pyxfilename` when any dependency listed in
    its sibling .pyxdep file is newer than the .pyx itself."""
    dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT
    # by default let distutils decide whether to rebuild on its own
    # (it has a better idea of what the output file will be)
    # but we know more about dependencies so force a rebuild if
    # some of the dependencies are newer than the pyxfile.
    if not os.path.exists(dependfile):
        return
    with open(dependfile) as handle:
        patterns = [entry.strip() for entry in handle]
    # gather dependencies in the "files" variable;
    # the dependency file is itself a dependency
    files = [dependfile]
    base_dir = os.path.dirname(dependfile)
    for pattern in patterns:
        files.extend(glob.glob(os.path.join(base_dir, pattern)))
    # only for unit testing to see we did the right thing
    _test_files[:] = []  #$pycheck_no
    # 'touch' the pyx file when a dependency is newer so that distutils
    # is tricked into rebuilding it.
    from distutils.dep_util import newer
    for dep in files:
        if newer(dep, pyxfilename):
            print("Rebuilding because of ", dep)
            dep_mtime = os.path.getmtime(dep)
            os.utime(pyxfilename, (dep_mtime, dep_mtime))
            _test_files.append(dep)
def run(self):
    """Compile openmolar's po catalogs to .mo, register them for
    installation under /usr/share/locale, then run install_data."""
    print("COMPILING PO FILES")
    i18nfiles = []
    if not os.path.isdir("src/openmolar/locale/"):
        print("WARNING - language files are missing!")
    for po_file in glob.glob("src/openmolar/locale/*.po"):
        src_dir, po_name = os.path.split(po_file)
        lang = po_name.replace(".po", "")
        mo_dir = os.path.join(src_dir, lang)
        try:
            os.mkdir(mo_dir)
        except OSError:
            pass  # directory already present
        mo_file = os.path.join(mo_dir, "openmolar.mo")
        if not os.path.exists(mo_file) or newer(po_file, mo_file):
            info('compiling %s -> %s' % (po_file, mo_file))
            if os.system('msgfmt -o %s %s' % (mo_file, po_file)) != 0:
                # best-effort: report and continue with other catalogs
                info('Error while running msgfmt on %s' % po_file)
        destdir = os.path.join("/usr", "share", "locale",
                               lang, "LC_MESSAGES")
        i18nfiles.append((destdir, [mo_file]))
    self.data_files.extend(i18nfiles)
    install_data.run(self)
def _byte_compile(files, optimize=-1, force=False, prefix=None,
                  base_dir=None, dry_run=False):
    """Byte-compile DistAlgo (.da) sources into cached .pyc files.

    Mirrors distutils' byte_compile but routes through the DistAlgo
    compiler; non-.da files are silently skipped.
    """
    from da.compiler import dafile_to_pycfile
    from da.importer import da_cache_from_source
    # XXX: do we need "indirect" mode??
    for file in files:
        if file[-3:] != ".da":
            continue
        if optimize >= 0:
            # optimization='' yields the unoptimized cache tag
            opt = '' if optimize == 0 else optimize
            cfile = da_cache_from_source(file, optimization=opt)
        else:
            cfile = da_cache_from_source(file)
        # dfile is the source name recorded inside the compiled file
        dfile = file
        if prefix:
            if file[:len(prefix)] != prefix:
                raise ValueError("invalid prefix: filename {} doesn't start with {}".format(file, prefix))
            dfile = dfile[len(prefix):]
        if base_dir:
            dfile = os.path.join(base_dir, dfile)
        if force or newer(file, cfile):
            log.info("byte-compiling {} to {}".format(file, cfile))
            if not dry_run:
                dafile_to_pycfile(file, outname=cfile,
                                  optimize=optimize, dfile=dfile)
        else:
            log.debug("skipping byte-compilation of {} to {}."
                      .format(file, cfile))
def update_sources_by_file(self, pool):
    """Install the pool's current package source list (and its keys)
    into SOURCELISTS_DIR when missing or out of date.

    Fix: the freshness test previously checked `not os.path.exists(src)`
    -- a missing *source* fell through to shutil.copy and crashed; the
    existence check now correctly targets the destination.
    """
    (src, keylist) = pool.current_pkgsourcelist
    if not src:
        return False
    self.addkeys(keylist)
    dest = "%s/%s" % (self.SOURCELISTS_DIR, os.path.basename(src))
    # copy when the installed list is absent or older than the source
    if not os.path.exists(dest) or newer(src, dest):
        shutil.copy(src, dest)
def copy_scripts(self):
    r"""Copy each script listed in 'self.scripts'; if it's marked as a
    Python script in the Unix way (first line matches 'first_line_re',
    ie. starts with "\#!" and contains "python"), then adjust the first
    line to refer to the current Python interpreter as we copy.

    Returns (outfiles, updated_files).

    Fix: a stray `adjust = 0` after the shebang-matching block
    unconditionally disabled the interpreter-line rewrite, so every
    script was copied verbatim; the line has been removed.
    """
    self.mkpath(self.build_dir)
    outfiles = []
    updated_files = []
    for script in self.scripts:
        adjust = False
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)
        if not self.force and not newer(script, outfile):
            log.debug("not copying %s (up-to-date)", script)
            continue
        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, "rb")
        except OSError:
            if not self.dry_run:
                raise
            f = None
        else:
            encoding, lines = tokenize.detect_encoding(f.readline)
            f.seek(0)
            first_line = f.readline()
            if not first_line:
                self.warn("%s is an empty file (skipping)" % script)
                continue
            match = first_line_re.match(first_line)
            if match:
                adjust = True
                post_interp = match.group(1) or b''
        if adjust:
            log.info("copying and adjusting %s -> %s", script,
                     self.build_dir)
            updated_files.append(outfile)
            if not self.dry_run:
                if not sysconfig.python_build:
                    executable = self.executable
                else:
                    executable = os.path.join(
                        sysconfig.get_config_var("BINDIR"),
                        "python%s%s" % (sysconfig.get_config_var("VERSION"),
                                        sysconfig.get_config_var("EXE")))
                executable = os.fsencode(executable)
                shebang = b"#!" + executable + post_interp + b"\n"
                # Python parser starts to read a script using UTF-8 until
                # it gets a #coding:xxx cookie. The shebang has to be the
                # first line of a file, the #coding:xxx cookie cannot be
                # written before. So the shebang has to be decodable from
                # UTF-8.
                try:
                    shebang.decode('utf-8')
                except UnicodeDecodeError:
                    raise ValueError("The shebang ({!r}) is not decodable "
                                     "from utf-8".format(shebang))
                # If the script is encoded to a custom encoding (use a
                # #coding:xxx cookie), the shebang has to be decodable from
                # the script encoding too.
                try:
                    shebang.decode(encoding)
                except UnicodeDecodeError:
                    raise ValueError(
                        "The shebang ({!r}) is not decodable "
                        "from the script encoding ({})".format(
                            shebang, encoding))
                with open(outfile, "wb") as outf:
                    outf.write(shebang)
                    outf.writelines(f.readlines())
            if f:
                f.close()
        else:
            if f:
                f.close()
            updated_files.append(outfile)
            self.copy_file(script, outfile)
    if os.name == 'posix':
        for file in outfiles:
            if self.dry_run:
                log.info("changing mode of %s", file)
            else:
                oldmode = os.stat(file)[ST_MODE] & 0o7777
                newmode = (oldmode | 0o555) & 0o7777
                if newmode != oldmode:
                    log.info("changing mode of %s from %o to %o",
                             file, oldmode, newmode)
                os.chmod(file, newmode)
    # XXX should we modify self.outfiles?
    return outfiles, updated_files
import sys, os, glob
from distutils.dep_util import newer
from distutils.spawn import spawn

# Compile every *.po in the current directory to *.mo with msgfmt,
# skipping wxstd.mo entirely and any .mo already up to date.
# (Python 2 print statements.)
poFiles = glob.glob("*.po")
for po in poFiles:
    mo = os.path.splitext(po)[0] + '.mo'
    if mo != 'wxstd.mo':
        if newer(po, mo):
            cmd = 'msgfmt --verbose -o %s %s' % (mo, po)
            print cmd
            spawn(cmd.split())
            print
        else:
            print "%s is up to date" % mo
from __future__ import print_function from distutils.dep_util import newer import os, os.path from setuptools import setup import subprocess # reflexible version VERSION = open('VERSION').read().strip() # Create the version.py file open('reflexible/version.py', 'w').write('__version__ = "%s"\n' % VERSION) # Build the FortFlex extension if necessary if (not os.path.exists("reflexible/conv2netcdf4/FortFlex.so") or newer("reflexible/conv2netcdf4/fortflex/FortFlex.f", "reflexible/conv2netcdf4/FortFlex.so")): try: print( subprocess.check_output( "cd reflexible/conv2netcdf4/fortflex; " "sh build_FortFlex.sh", shell=True)) except subprocess.CalledProcessError as e: print(e.output) print("Problems compiling the FortFlex module. " "Will continue using a slower fallback...") else: print( "FortFlex.so extension has been created in reflexible/conv2netcdf4/!" )
def main():
    """Generate the sparsetools *_impl.h headers from COMPILATION_UNITS.

    For each compilation unit, parse its routine declarations into
    thunk/method C snippets and write <unit>_impl.h; finally emit
    sparsetools_impl.h with the method prototypes and PyMethodDef table.
    Files are only rewritten when this script is newer than the target,
    unless --no-force is negated (force is the default).
    """
    p = optparse.OptionParser(usage=__doc__.strip())
    p.add_option("--no-force", action="store_false",
                 dest="force", default=True)
    options, args = p.parse_args()
    names = []
    i_types, it_types, getter_code = get_thunk_type_set()
    # Generate *_impl.h for each compilation unit
    for unit_name, routines in COMPILATION_UNITS:
        thunks = []
        methods = []
        # Generate thunks and methods for all routines
        for line in routines.splitlines():
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            try:
                name, args = line.split(None, 1)
            except ValueError:
                raise ValueError("Malformed line: %r" % (line, ))
            args = "".join(args.split())
            # 't'/'T' in the signature means the routine is templated
            # over data types as well as index types
            if 't' in args or 'T' in args:
                thunk, method = parse_routine(name, args, it_types)
            else:
                thunk, method = parse_routine(name, args, i_types)
            if name in names:
                raise ValueError("Duplicate routine %r" % (name, ))
            names.append(name)
            thunks.append(thunk)
            methods.append(method)
        # Produce output
        dst = os.path.join(os.path.dirname(__file__), 'sparsetools',
                           unit_name + '_impl.h')
        if newer(__file__, dst) or options.force:
            print("[generate_sparsetools] generating %r" % (dst, ))
            with open(dst, 'w') as f:
                write_autogen_blurb(f)
                f.write(getter_code)
                for thunk in thunks:
                    f.write(thunk)
                for method in methods:
                    f.write(method)
        else:
            print("[generate_sparsetools] %r already up-to-date" % (dst, ))
    # Generate code for method struct
    method_defs = ""
    for name in names:
        method_defs += "NPY_VISIBILITY_HIDDEN PyObject *%s_method(PyObject *, PyObject *);\n" % (
            name, )
    method_struct = """\nstatic struct PyMethodDef sparsetools_methods[] = {"""
    for name in names:
        method_struct += """
        {"%(name)s", (PyCFunction)%(name)s_method, METH_VARARGS, NULL},""" % dict(
            name=name)
    method_struct += """
        {NULL, NULL, 0, NULL}
    };"""
    # Produce sparsetools_impl.h
    dst = os.path.join(os.path.dirname(__file__), 'sparsetools',
                       'sparsetools_impl.h')
    if newer(__file__, dst) or options.force:
        print("[generate_sparsetools] generating %r" % (dst, ))
        with open(dst, 'w') as f:
            write_autogen_blurb(f)
            f.write(method_defs)
            f.write(method_struct)
    else:
        print("[generate_sparsetools] %r already up-to-date" % (dst, ))
def all_newer(src_files, dst_files):
    """Return True when every dst file exists and is newer than every
    src file (vacuously True for empty inputs)."""
    from distutils.dep_util import newer
    for dst in dst_files:
        for src in src_files:
            if not (os.path.exists(dst) and newer(dst, src)):
                return False
    return True
if not dry_run: mkpath(dst) outputs = [] for n in names: src_name = os.path.join(src, n) dst_name = os.path.join(dst, n) if (condition is not None) and (not condition(src_name)): continue if preserve_symlinks and os_path_islink(src_name): link_dest = os_readlink(src_name) log.info("linking %s -> %s", dst_name, link_dest) if not dry_run: if update and not newer(src, dst_name): pass else: if os_path_islink(dst_name): os.remove(dst_name) os.symlink(link_dest, dst_name) outputs.append(dst_name) elif os_path_isdir(src_name): outputs.extend( copy_tree(src_name, dst_name, preserve_mode, preserve_times, preserve_symlinks, update,
# Decide whether wx's configure step can be skipped: either the user
# asked for --no_config explicitly, or (on non-Windows) none of the key
# build-system inputs is newer than the generated Makefile and the
# recorded build options are unchanged.
if not sys.platform.startswith("win") and options.no_config:
    build_options.append("--no_config")
elif (
    not sys.platform.startswith("win")
    and not options.force_config
    and not options_changed
):
    dependencies = [
        os.path.join(WXWIN, 'Makefile.in'),
        os.path.join(WXWIN, 'configure'),
        os.path.join(WXWIN, 'setup.h.in'),
        os.path.join(WXWIN, 'version-script.in'),
        os.path.join(WXWIN, 'wx-config.in'),
    ]
    blddir = WXPY_BUILD_DIR
    for dep in dependencies:
        if newer(dep, os.path.join(blddir, "Makefile")):
            break
    else:
        # for/else: no input was newer -- configure output still valid
        build_options.append("--no_config")
if sys.platform.startswith("darwin") and options.osx_cocoa:
    build_options.append("--osx_cocoa")
    wxpy_build_options.append("WXPORT=osx_cocoa")
if not sys.platform.startswith("win") and options.install:
    build_options.append('--installdir=%s' % DESTDIR)
    build_options.append("--install")
if options.mac_framework and sys.platform.startswith("darwin"):
    build_options.append("--mac_framework")
    build_options.append("--mac_framework_prefix=%s" %
                         options.mac_framework_prefix)
def run_swig(files, dir, gendir, package, USE_SWIG, force, swig_args,
             swig_deps=[], add_under=False):
    """Run SWIG the way I want it done.

    For each .i file in `files` (relative to `dir`), generates the
    wrapper .cpp and .py files into `dir`/`gendir` and copies the .py
    into `package`.  Returns the list of generated .cpp source paths
    (always, even when SWIG itself is skipped).

    NOTE(review): `swig_deps=[]` is a mutable default argument; it is
    only read here, but callers should not rely on mutating it.
    Relies on module-level globals: `cleaning`, `BUILD_RENAMERS`,
    `swig_cmd`, `msg`, `spawn`, `copy_file`, `opj`, `CLEANUP`.
    """
    if USE_SWIG and not os.path.exists(os.path.join(dir, gendir)):
        os.mkdir(os.path.join(dir, gendir))

    sources = []

    # `add_under` prefixes generated artefacts with '_' and asks SWIG
    # for a matching module interface name.
    if add_under:
        pre = '_'
    else:
        pre = ''

    for file in files:
        basefile = os.path.splitext(file)[0]
        i_file = os.path.join(dir, file)
        py_file = os.path.join(dir, gendir, pre + basefile + '.py')
        cpp_file = os.path.join(dir, gendir, pre + basefile + '_wrap.cpp')

        if add_under:
            interface = ['-interface', '_' + basefile + '_']
        else:
            interface = []

        sources.append(cpp_file)

        if not cleaning and USE_SWIG:
            for dep in swig_deps:
                # this may fail for external builds, but it's not
                # a fatal error, so keep going.
                try:
                    # A newer dependency forces regeneration of this file.
                    if newer(dep, py_file) or newer(dep, cpp_file):
                        force = 1
                        break
                except:
                    pass

            if force or newer(i_file, py_file) or newer(i_file, cpp_file):
                ## we need forward slashes here, even on win32
                #cpp_file = opj(cpp_file) #'/'.join(cpp_file.split('\\'))
                #i_file = opj(i_file) #'/'.join(i_file.split('\\'))

                if BUILD_RENAMERS:
                    xmltemp = tempfile.mktemp('.xml')

                    # First run swig to produce the XML file, adding
                    # an extra -D that prevents the old rename
                    # directives from being used
                    cmd = [ swig_cmd ] + swig_args + \
                          [ '-DBUILDING_RENAMERS', '-xmlout', xmltemp ] + \
                          ['-I' + dir, '-o', cpp_file, i_file]
                    msg(' '.join(cmd))
                    spawn(cmd)

                    # Next run build_renamers to process the XML
                    myRenamer = BuildRenamers()
                    myRenamer.run(dir, pre + basefile, xmltemp)
                    os.remove(xmltemp)

                # Then run swig for real
                cmd = [ swig_cmd ] + swig_args + interface + \
                      ['-I' + dir, '-o', cpp_file, i_file]
                msg(' '.join(cmd))
                spawn(cmd)

        # copy the generated python file to the package directory
        copy_file(py_file, package, update=not force, verbose=0)
        CLEANUP.append(opj(package, os.path.basename(py_file)))

    return sources
def generate_numpyconfig_h(ext, build_dir):
    """Depends on config.h: generate_config_h has to be called before !

    Generates the public `_numpyconfig.h` header under `build_dir`,
    re-running the configure-style checks only when this setup script is
    newer than the existing header.  Returns the path of the header.
    Closes over `config` and `cocache` from the enclosing scope.
    """
    # put common include directory in build_dir on search path
    # allows using code generation in headers headers
    config.add_include_dirs(join(build_dir, "src", "common"))
    config.add_include_dirs(join(build_dir, "src", "npymath"))

    target = join(build_dir, header_dir, '_numpyconfig.h')
    d = os.path.dirname(target)
    if not os.path.exists(d):
        os.makedirs(d)
    if newer(__file__, target):
        config_cmd = config.get_config_cmd()
        log.info('Generating %s', target)

        # Check sizeof
        ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

        if is_npy_no_signal():
            moredefs.append(('NPY_NO_SIGNAL', 1))

        if is_npy_no_smp():
            moredefs.append(('NPY_NO_SMP', 1))
        else:
            moredefs.append(('NPY_NO_SMP', 0))

        mathlibs = check_mathlib(config_cmd)
        # Index [1] selects the public-header variant of these defines.
        moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
        moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

        if NPY_RELAXED_STRIDES_CHECKING:
            moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

        if NPY_RELAXED_STRIDES_DEBUG:
            moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

        # Check whether we can use inttypes (C99) formats
        if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
            moredefs.append(('NPY_USE_C99_FORMATS', 1))

        # visibility check
        hidden_visibility = visibility_define(config_cmd)
        moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

        # Add the C API/ABI versions
        moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
        moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

        # Add moredefs to header: bare strings become flag macros,
        # (name, value) pairs become valued macros.
        with open(target, 'w') as target_f:
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write(textwrap.dedent("""
                #ifndef __STDC_FORMAT_MACROS
                #define __STDC_FORMAT_MACROS 1
                #endif
            """))

        # Dump the numpyconfig.h header to stdout
        log.info('File: %s' % target)
        with open(target) as target_f:
            log.info(target_f.read())
        log.info('EOF')
    # Register the header for installation even when it was up to date.
    config.add_data_files((header_dir, target))
    return target
def generate_config_h(ext, build_dir):
    """Generate the private `config.h` header for the numpy core build.

    When this setup script is newer than the target, re-runs the full
    set of configure-style checks and rewrites the header; otherwise
    re-reads the MATHLIB line from the existing header so the math
    libraries can still be added to `ext.libraries`.  Returns the
    header path.  Closes over `config` and `cocache`.
    """
    target = join(build_dir, header_dir, 'config.h')
    d = os.path.dirname(target)
    if not os.path.exists(d):
        os.makedirs(d)

    if newer(__file__, target):
        config_cmd = config.get_config_cmd()
        log.info('Generating %s', target)

        # Check sizeof
        moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

        # Check math library and C99 math funcs availability
        mathlibs = check_mathlib(config_cmd)
        moredefs.append(('MATHLIB', ','.join(mathlibs)))

        check_math_capabilities(config_cmd, moredefs, mathlibs)
        # Index [0] selects the private-header variant of these defines.
        moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
        moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

        # Signal check
        if is_npy_no_signal():
            moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

        # Windows checks
        if sys.platform == 'win32' or os.name == 'nt':
            win32_checks(moredefs)

        # C99 restrict keyword
        moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

        # Inline check
        inline = config_cmd.check_inline()

        # Use relaxed stride checking
        if NPY_RELAXED_STRIDES_CHECKING:
            moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

        # Use bogus stride debug aid when relaxed strides are enabled
        if NPY_RELAXED_STRIDES_DEBUG:
            moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

        # Get long double representation
        rep = check_long_double_representation(config_cmd)
        moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))

        # Work around a compiler ICE on some platforms by disabling
        # optimization of the right-shift loops.
        if check_for_right_shift_internal_compiler_error(config_cmd):
            moredefs.append('NPY_DO_NOT_OPTIMIZE_LONG_right_shift')
            moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONG_right_shift')
            moredefs.append('NPY_DO_NOT_OPTIMIZE_LONGLONG_right_shift')
            moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONGLONG_right_shift')

        # Py3K check
        if sys.version_info[0] >= 3:
            moredefs.append(('NPY_PY3K', 1))

        # Generate the config.h file from moredefs
        with open(target, 'w') as target_f:
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write(textwrap.dedent("""
                #ifndef _NPY_NPY_CONFIG_H_
                #error config.h should never be included directly, include npy_config.h instead
                #endif
            """))

        log.info('File: %s' % target)
        with open(target) as target_f:
            log.info(target_f.read())
        log.info('EOF')
    else:
        # Header is up to date: recover MATHLIB from the existing file.
        mathlibs = []
        with open(target) as target_f:
            for line in target_f:
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))

    # Ugly: this can be called within a library and not an extension,
    # in which case there is no libraries attributes (and none is
    # needed).
    if hasattr(ext, 'libraries'):
        ext.libraries.extend(mathlibs)

    incl_dir = os.path.dirname(target)
    if incl_dir not in config.numpy_include_dirs:
        config.numpy_include_dirs.append(incl_dir)

    return target
def byte_compile(py_files, optimize=0, force=0, target_dir=None, verbose=1,
                 dry_run=0, direct=None):
    # Byte-compile a collection of modulegraph nodes (py2app variant).
    # `py_files` items are graph nodes with .filename, .identifier and
    # .packagepath attributes, not plain path strings.
    # Python 2 code (print statements); relies on module-level helpers
    # path_to_zip, get_zip_data, copy_file, newer, PY_SUFFIXES.
    if direct is None:
        # Direct compilation is only safe when the interpreter's
        # optimization state matches the requested level.
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        from tempfile import mktemp
        from distutils.util import execute, spawn
        script_name = mktemp(".py")
        if verbose:
            print "writing byte-compilation script '%s'" % script_name
        if not dry_run:
            script = open(script_name, "w")
            script.write("""
from py2app.util import byte_compile
from modulegraph.modulegraph import *
files = [
""")
            for f in py_files:
                script.write(repr(f) + ",\n")
            script.write("]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             target_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, target_dir, verbose))
            script.close()

        # Re-run ourselves under -O/-OO so the child interpreter's
        # optimization level matches the request.
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, verbose=verbose, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                verbose=verbose, dry_run=dry_run)
    else:
        from py_compile import compile
        from distutils.dir_util import mkpath
        for mod in py_files:
            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            if mod.filename == mod.identifier:
                cfile = os.path.basename(mod.filename)
                dfile = cfile + (__debug__ and 'c' or 'o')
            else:
                cfile = mod.identifier.replace('.', os.sep)
                if mod.packagepath:
                    dfile = cfile + os.sep + '__init__.py' + (__debug__ and 'c' or 'o')
                else:
                    dfile = cfile + '.py' + (__debug__ and 'c' or 'o')
            if target_dir:
                cfile = os.path.join(target_dir, dfile)

            if force or newer(mod.filename, cfile):
                if verbose:
                    print "byte-compiling %s to %s" % (mod.filename, dfile)
                if not dry_run:
                    mkpath(os.path.dirname(cfile))
                    suffix = os.path.splitext(mod.filename)[1]
                    if suffix in ('.py', '.pyw'):
                        # Sources inside a zip archive are extracted to a
                        # temporary file first, then compiled.
                        zfile, pth = path_to_zip(mod.filename)
                        if zfile is None:
                            compile(mod.filename, cfile, dfile)
                        else:
                            fn = dfile + '.py'
                            open(fn, 'wb').write(get_zip_data(zfile, pth))
                            compile(mod.filename, cfile, dfile)
                            os.unlink(fn)
                    elif suffix in PY_SUFFIXES:
                        # Minor problem: This will happily copy a file
                        # <mod>.pyo to <mod>.pyc or <mod>.pyc to
                        # <mod>.pyo, but it does seem to work.
                        copy_file(mod.filename, cfile)
                    else:
                        raise RuntimeError \
                              ("Don't know how to handle %r" % mod.filename)
            else:
                if verbose:
                    print "skipping byte-compilation of %s to %s" % \
                          (mod.filename, dfile)
def byte_compile (py_files,
                  optimize=0, force=0,
                  prefix=None, base_dir=None,
                  verbose=1, dry_run=0,
                  direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory.  'py_files' is a list of files
    to compile; any files that don't end in ".py" are silently skipped.
    'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            script.write("""\
from distutils.util import byte_compile
files = [
""")

            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it).  But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right".  This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.

            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            #    prefix = os.path.abspath(prefix)

            script.write(string.join(map(repr, py_files), ",\n") + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))

            script.close()

        # Re-exec the interpreter with the flags matching the requested
        # optimization level, so the generated bytecode is correct.
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError, \
                          ("invalid prefix: filename %r doesn't start with %r"
                           % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
def generate_config_h(ext, build_dir):
    # Older numpy variant (Python 2) of the config.h generator: fewer
    # checks than the later version, prints the header to stdout with
    # `print` statements instead of the log module.  Returns the header
    # path; closes over `config` and `cocache`.
    target = join(build_dir, header_dir, 'config.h')
    d = os.path.dirname(target)
    if not os.path.exists(d):
        os.makedirs(d)
    if newer(__file__, target):
        config_cmd = config.get_config_cmd()
        log.info('Generating %s', target)

        # Check sizeof
        moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

        # Check math library and C99 math funcs availability
        mathlibs = check_mathlib(config_cmd)
        moredefs.append(('MATHLIB', ','.join(mathlibs)))

        check_math_capabilities(config_cmd, moredefs, mathlibs)
        moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])

        # Signal check
        if is_npy_no_signal():
            moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

        # Windows checks
        if sys.platform == 'win32' or os.name == 'nt':
            win32_checks(moredefs)

        # Inline check
        inline = config_cmd.check_inline()

        # Generate the config.h file from moredefs
        # NOTE(review): opened in append mode ('a'), unlike the sibling
        # generator that uses 'w' -- presumably config_cmd already
        # created/truncated the file earlier; confirm before changing.
        target_f = open(target, 'a')
        for d in moredefs:
            if isinstance(d, str):
                target_f.write('#define %s\n' % (d))
            else:
                target_f.write('#define %s %s\n' % (d[0], d[1]))

        # define inline to our keyword, or nothing
        target_f.write('#ifndef __cplusplus\n')
        if inline == 'inline':
            target_f.write('/* #undef inline */\n')
        else:
            target_f.write('#define inline %s\n' % inline)
        target_f.write('#endif\n')
        target_f.close()

        # Echo the generated header to stdout.
        print 'File:', target
        target_f = open(target)
        print target_f.read()
        target_f.close()
        print 'EOF'
    else:
        # Header is up to date: recover MATHLIB from the existing file.
        mathlibs = []
        target_f = open(target)
        for line in target_f.readlines():
            s = '#define MATHLIB'
            if line.startswith(s):
                value = line[len(s):].strip()
                if value:
                    mathlibs.extend(value.split(','))
        target_f.close()

    # Ugly: this can be called within a library and not an extension,
    # in which case there is no libraries attributes (and none is
    # needed).
    if hasattr(ext, 'libraries'):
        ext.libraries.extend(mathlibs)

    incl_dir = os.path.dirname(target)
    if incl_dir not in config.numpy_include_dirs:
        config.numpy_include_dirs.append(incl_dir)

    return target
def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0,
              link=None, verbose=1, dry_run=0):
    """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is
    copied there with the same name; otherwise, it must be a filename.  (If
    the file exists, it will be ruthlessly clobbered.)  If 'preserve_mode'
    is true (the default), the file's mode (type and permission bits, or
    whatever is analogous on the current platform) is copied.  If
    'preserve_times' is true (the default), the last-modified and
    last-access times are copied as well.  If 'update' is true, 'src' will
    only be copied if 'dst' does not exist, or if 'dst' does exist but is
    older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied.  Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available.

    Under Mac OS, uses the native file copy function in macostools; on
    other systems, uses '_copy_file_contents()' to copy file contents.

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking.  Hmmm.  And I don't know what
    # macostools.copyfile() does.  Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    from distutils.dep_util import newer
    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE

    if not os.path.isfile(src):
        raise DistutilsFileError(
            "can't copy '%s': doesn't exist or not a regular file" % src)

    # Resolve the final destination path and remember the directory
    # for log messages.
    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    if update and not newer(src, dst):
        if verbose >= 1:
            log.debug("not copying %s (output up-to-date)", src)
        return dst, 0

    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError("invalid value '%s' for 'link' argument" % link)

    if verbose >= 1:
        if os.path.basename(dst) == os.path.basename(src):
            log.info("%s %s -> %s", action, src, dir)
        else:
            log.info("%s %s -> %s", action, src, dst)

    if dry_run:
        return (dst, 1)

    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    if link == 'hard':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.link(src, dst)
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)

    # Otherwise (non-Mac, not linking), copy the file contents and
    # (optionally) copy the times and mode.
    else:
        _copy_file_contents(src, dst)
        if preserve_mode or preserve_times:
            st = os.stat(src)

            # According to David Ascher <*****@*****.**>, utime() should be done
            # before chmod() (at least under NT).
            if preserve_times:
                os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
            if preserve_mode:
                os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, 1)
def run(self):
    """Run this command, i.e. do the actual document generation.

    Builds the PDF tutorial from DocBook XML sources with xsltproc plus
    one of the XSL-FO processors xep, fop, or xmlroff, then copies the
    result into share/doc/qmtest/print.  Warns and returns early when a
    required tool is missing.  Temporarily chdirs into the build temp
    directory and restores the cwd at the end.
    """
    tempdir = os.path.abspath(os.path.join(self.build_temp, 'doc'))
    srcdir = os.getcwd()
    source_files = [os.path.abspath(p) for p in
                    ['doc/tutorial.xml',
                     'doc/concepts.xml',
                     'doc/cli_reference.xml',
                     'doc/customizing.xml',
                     'doc/extending.xml']]

    # Look for programs and supporting libraries required to build
    # DocBook documentation.
    xsltproc = find_executable('xsltproc')
    foproc = None
    xep = False
    fop = False
    if not xsltproc:
        self.warn("could not find xsltproc in PATH")
        self.warn("cannot build tutorial")
        return
    # Prefer xep; fall back to fop, then xmlroff.
    foproc = find_executable('xep')
    if foproc:
        xsltproc += ' --stringparam xep.extensions 1'
        xep = True
    if not foproc:
        foproc = find_executable('fop')
        fop = True
    if not foproc:
        foproc = find_executable('xmlroff')
        if foproc:
            foproc += ' --compat'
    if not foproc:
        self.warn("could not find either of xep, fop, or xmlroff in PATH")
        self.warn("cannot build tutorial.pdf")
        return

    self.mkpath(tempdir)
    os.chdir(tempdir)

    # Rebuild only when any XML source is newer than the output PDF.
    if newer_group(source_files, 'print/tutorial.pdf'):
        self.announce("building pdf tutorial")
        self.mkpath('print')
        # XML -> XSL-FO
        cmd = xsltproc.split() + ['--novalid', '--xinclude',
                                  '-o', 'print/tutorial.fo',
                                  srcdir + '/doc/fo.xsl',
                                  srcdir + '/doc/tutorial.xml']
        self.announce(' '.join(cmd))
        spawn(cmd)
        # XSL-FO -> PDF; each processor has a different CLI shape.
        if xep:
            cmd = foproc.split() + ['print/tutorial.fo']
        elif fop:
            cmd = foproc.split() + ['print/tutorial.fo',
                                    'print/tutorial.pdf']
        else:
            cmd = foproc.split() + ['-o', 'print/tutorial.pdf',
                                    'print/tutorial.fo']
        self.announce(' '.join(cmd))
        spawn(cmd)

    # Install the PDF into the source tree's doc area.
    self.mkpath(srcdir + '/share/doc/qmtest/print')
    dest = srcdir + '/share/doc/qmtest/print/tutorial.pdf'
    if newer('print/tutorial.pdf', dest):
        copy_file('print/tutorial.pdf', dest)

    os.chdir(srcdir)
def copyIfNewer(src, dest, verbose=False):
    """Copy `src` to `dest` only when `src` is newer than the target.

    When `dest` is an existing directory the file is copied into it
    under its own basename; otherwise `dest` is the target path itself.
    """
    target = dest
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    if not newer(src, target):
        return
    copyFile(src, target, verbose)
def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, link=None, verbose=0, dry_run=0): """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is copied there with the same name; otherwise, it must be a filename. (If the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' is true (the default), the file's mode (type and permission bits, or whatever is analogous on the current platform) is copied. If 'preserve_times' is true (the default), the last-modified and last-access times are copied as well. If 'update' is true, 'src' will only be copied if 'dst' does not exist, or if 'dst' does exist but is older than 'src'. 'link' allows you to make hard links (os.link) or symbolic links (os.symlink) instead of copying: set it to "hard" or "sym"; if it is None (the default), files are copied. Don't set 'link' on systems that don't support it: 'copy_file()' doesn't check if hard or symbolic linking is available. Under Mac OS, uses the native file copy function in macostools; on other systems, uses '_copy_file_contents()' to copy file contents. Return a tuple (dest_name, copied): 'dest_name' is the actual name of the output file, and 'copied' is true if the file was copied (or would have been copied, if 'dry_run' true). """ # XXX if the destination file already exists, we clobber it if # copying, but blow up if linking. Hmmm. And I don't know what # macostools.copyfile() does. Should definitely be consistent, and # should probably blow up if destination exists and we would be # changing it (ie. it's not already a hard/soft link to src OR # (not update) and (src newer than dst). 
from distutils.dep_util import newer from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE if not os.path.isfile(src): raise DistutilsFileError, \ "can't copy '%s': doesn't exist or not a regular file" % src if os.path.isdir(dst): dir = dst dst = os.path.join(dst, os.path.basename(src)) else: dir = os.path.dirname(dst) if update and not newer(src, dst): log.debug("not copying %s (output up-to-date)", src) return dst, 0 try: action = _copy_action[link] except KeyError: raise ValueError, \ "invalid value '%s' for 'link' argument" % link if os.path.basename(dst) == os.path.basename(src): log.info("%s %s -> %s", action, src, dir) else: log.info("%s %s -> %s", action, src, dst) if dry_run: return (dst, 1) # On Mac OS, use the native file copy routine if os.name == 'mac': import macostools try: macostools.copy(src, dst, 0, preserve_times) except os.error, exc: raise DistutilsFileError, \ "could not copy '%s' to '%s': %s" % (src, dst, exc[-1])
def get_file_list (self):
    """Figure out the list of files to include in the source
    distribution, and put it in 'self.filelist'.  This might involve
    reading the manifest template (and writing the manifest), or just
    reading the manifest, or just using the default file set -- it all
    depends on the user's options and the state of the filesystem.
    """
    # If we have a manifest template, see if it's newer than the
    # manifest; if so, we'll regenerate the manifest.
    template_exists = os.path.isfile(self.template)
    if template_exists:
        template_newer = dep_util.newer(self.template, self.manifest)

    # The contents of the manifest file almost certainly depend on the
    # setup script as well as the manifest template -- so if the setup
    # script is newer than the manifest, we'll regenerate the manifest
    # from the template.  (Well, not quite: if we already have a
    # manifest, but there's no template -- which will happen if the
    # developer elects to generate a manifest some other way -- then we
    # can't regenerate the manifest, so we don't.)
    self.debug_print("checking if %s newer than %s" %
                     (self.distribution.script_name, self.manifest))
    setup_newer = dep_util.newer(self.distribution.script_name,
                                 self.manifest)

    # cases:
    #   1) no manifest, template exists: generate manifest
    #      (covered by 2a: no manifest == template newer)
    #   2) manifest & template exist:
    #      2a) template or setup script newer than manifest:
    #          regenerate manifest
    #      2b) manifest newer than both:
    #          do nothing (unless --force or --manifest-only)
    #   3) manifest exists, no template:
    #      do nothing (unless --force or --manifest-only)
    #   4) no manifest, no template: generate w/ warning ("defaults only")

    # NOTE(review): `template_newer` is only bound when the template
    # exists; the `and` below short-circuits on `template_exists`
    # first, so it is never read unbound.
    manifest_outofdate = (template_exists and
                          (template_newer or setup_newer))
    force_regen = self.force_manifest or self.manifest_only
    manifest_exists = os.path.isfile(self.manifest)
    neither_exists = (not template_exists and not manifest_exists)

    # Regenerate the manifest if necessary (or if explicitly told to)
    if manifest_outofdate or neither_exists or force_regen:
        if not template_exists:
            self.warn(("manifest template '%s' does not exist " +
                       "(using default file list)") %
                      self.template)
        self.filelist.findall()

        if self.use_defaults:
            self.add_defaults()
        if template_exists:
            self.read_template()
        if self.prune:
            self.prune_file_list()

        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    # Don't regenerate the manifest, just read it in.
    else:
        self.read_manifest()
def generate_config_h(ext, build_dir):
    # Oldest numpy variant (Python 2): generates config.h in build_dir,
    # probing threading support, math libraries and math functions by
    # compiling and running small test programs.  Returns the header
    # path; closes over `config` and the module-level helpers.
    target = join(build_dir, 'config.h')
    if newer(__file__, target):
        config_cmd = config.get_config_cmd()
        log.info('Generating %s', target)
        tc = generate_testcode(target)

        from distutils import sysconfig
        python_include = sysconfig.get_python_inc()
        python_h = join(python_include, 'Python.h')
        if not os.path.isfile(python_h):
            raise SystemError,\
                  "Non-existing %s. Perhaps you need to install"\
                  " python-dev|python-devel." % (python_h)

        result = config_cmd.try_run(tc, include_dirs=[python_include],
                                    library_dirs=default_lib_dirs)
        if not result:
            raise SystemError,"Failed to test configuration. "\
                  "See previous error messages for more information."

        # Python 2.3 causes a segfault when
        #  trying to re-acquire the thread-state
        #  which is done in error-handling
        #  ufunc code.  NPY_ALLOW_C_API and friends
        #  cause the segfault. So, we disable threading
        #  for now.
        if sys.version[:5] < '2.4.2':
            nosmp = 1
        else:
            # Perhaps a fancier check is in order here.
            #  so that threads are only enabled if there
            #  are actually multiple CPUS? -- but
            #  threaded code can be nice even on a single
            #  CPU so that long-calculating code doesn't
            #  block.
            try:
                # Any value of NPY_NOSMP in the environment disables
                # threading (the string is discarded, only presence counts).
                nosmp = os.environ['NPY_NOSMP']
                nosmp = 1
            except KeyError:
                nosmp = 0
        if nosmp:
            moredefs = [('NPY_ALLOW_THREADS', '0')]
        else:
            moredefs = []

        #
        # Probe for the math library by trying candidate library sets
        # until one links and runs.
        mathlibs = []
        tc = testcode_mathlib()
        mathlibs_choices = [[], ['m'], ['cpml']]
        mathlib = os.environ.get('MATHLIB')
        if mathlib:
            mathlibs_choices.insert(0, mathlib.split(','))
        for libs in mathlibs_choices:
            if config_cmd.try_run(tc, libraries=libs):
                mathlibs = libs
                break
        else:
            raise EnvironmentError("math library missing; rerun "
                                   "setup.py after setting the "
                                   "MATHLIB env variable")
        ext.libraries.extend(mathlibs)
        moredefs.append(('MATHLIB', ','.join(mathlibs)))

        def check_func(func_name):
            return config_cmd.check_func(func_name,
                                         libraries=mathlibs, decl=False,
                                         headers=['math.h'])

        for func_name, defsymbol in FUNCTIONS_TO_CHECK:
            if check_func(func_name):
                moredefs.append(defsymbol)

        if sys.platform == 'win32':
            moredefs.append('NPY_NO_SIGNAL')

        if sys.version[:3] > '2.4' and (sys.platform == 'win32'
                                        or os.name == 'nt'):
            from distutils.msvccompiler import get_build_architecture
            a = get_build_architecture()
            print 'BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' % (
                a, os.name, sys.platform)
            if a == 'AMD64':
                moredefs.append('DISTUTILS_USE_SDK')

        if sys.version[:3] < '2.4':
            # Pre-2.4 Pythons lack PyOS_ascii_strtod; alias it to strtod.
            if config_cmd.check_func('strtod', decl=False,
                                     headers=['stdlib.h']):
                moredefs.append(('PyOS_ascii_strtod', 'strtod'))

        # NOTE(review): append mode ('a') -- presumably the test-code run
        # above already wrote the first part of the header; confirm
        # before changing to 'w'.
        target_f = open(target, 'a')
        for d in moredefs:
            if isinstance(d, str):
                target_f.write('#define %s\n' % (d))
            else:
                target_f.write('#define %s %s\n' % (d[0], d[1]))
        if not nosmp:  # default is to use WITH_THREAD
            target_f.write(
                '#ifdef WITH_THREAD\n#define NPY_ALLOW_THREADS 1\n#else\n#define NPY_ALLOW_THREADS 0\n#endif\n'
            )
        target_f.close()

        # Echo the generated header to stdout.
        print 'File:', target
        target_f = open(target)
        print target_f.read()
        target_f.close()
        print 'EOF'
    else:
        # Header is up to date: recover MATHLIB from the existing file.
        mathlibs = []
        target_f = open(target)
        for line in target_f.readlines():
            s = '#define MATHLIB'
            if line.startswith(s):
                value = line[len(s):].strip()
                if value:
                    mathlibs.extend(value.split(','))
        target_f.close()
        ext.libraries.extend(mathlibs)

    incl_dir = os.path.dirname(target)
    if incl_dir not in config.numpy_include_dirs:
        config.numpy_include_dirs.append(incl_dir)

    config.add_data_files((header_dir, target))
    return target
def copy_scripts(self):
    """Copy each script listed in 'self.scripts'; if it's marked as a
    Python script in the Unix way (first line matches 'first_line_re',
    ie. starts with "\#!" and contains "python"), then adjust the first
    line to refer to the current Python interpreter as we copy.
    """
    # Python 2 variant (octal literals 07777/0555, IOError).
    self.mkpath(self.build_dir)
    outfiles = []
    for script in self.scripts:
        adjust = 0
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)

        if not self.force and not newer(script, outfile):
            log.debug("not copying %s (up-to-date)", script)
            continue

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, "r")
        except IOError:
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:
                self.warn("%s is an empty file (skipping)" % script)
                continue

            match = first_line_re.match(first_line)
            if match:
                adjust = 1
                post_interp = match.group(1) or ''

        if adjust:
            # Rewrite the shebang to point at the running interpreter
            # (or the build-tree python when building Python itself).
            log.info("copying and adjusting %s -> %s", script,
                     self.build_dir)
            if not self.dry_run:
                outf = open(outfile, "w")
                if not sysconfig.python_build:
                    outf.write("#!%s%s\n" %
                               (self.executable,
                                post_interp))
                else:
                    outf.write("#!%s%s\n" %
                               (os.path.join(
                                   sysconfig.get_config_var("BINDIR"),
                                   "python" + sysconfig.get_config_var("EXE")),
                                post_interp))
                outf.writelines(f.readlines())
                outf.close()
            if f:
                f.close()
        else:
            if f:
                f.close()
            self.copy_file(script, outfile)

    if os.name == 'posix':
        # Ensure installed scripts are world-readable/executable.
        for file in outfiles:
            if self.dry_run:
                log.info("changing mode of %s", file)
            else:
                oldmode = os.stat(file)[ST_MODE] & 07777
                newmode = (oldmode | 0555) & 07777
                if newmode != oldmode:
                    log.info("changing mode of %s from %o to %o",
                             file, oldmode, newmode)
                    os.chmod(file, newmode)
def copy_scripts(self):
    r"""Copy each script listed in 'self.scripts'; if it's marked as a
    Python script in the Unix way (first line matches 'first_line_re',
    ie. starts with "\#!" and contains "python"), then adjust the first
    line to refer to the current Python interpreter as we copy.
    """
    # Variant that uses the top-level `sysconfig` module (imported
    # dynamically to avoid shadowing distutils.sysconfig) and decimal
    # permission masks (4095 == 0o7777, 365 == 0o555).
    _sysconfig = __import__('sysconfig')
    self.mkpath(self.build_dir)
    outfiles = []
    for script in self.scripts:
        adjust = 0
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)

        if not self.force and not newer(script, outfile):
            log.debug('not copying %s (up-to-date)', script)
            continue

        # Open the script; in dry-run mode a read failure is ignored so
        # feedback stays accurate without touching the filesystem.
        try:
            f = open(script, 'r')
        except IOError:
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:
                self.warn('%s is an empty file (skipping)' % script)
                continue

            match = first_line_re.match(first_line)
            if match:
                adjust = 1
                post_interp = match.group(1) or ''

        if adjust:
            # Rewrite the shebang to point at the running interpreter
            # (or the build-tree python when building Python itself).
            log.info('copying and adjusting %s -> %s', script,
                     self.build_dir)
            if not self.dry_run:
                outf = open(outfile, 'w')
                if not _sysconfig.is_python_build():
                    outf.write('#!%s%s\n' %
                               (self.executable,
                                post_interp))
                else:
                    outf.write('#!%s%s\n' %
                               (os.path.join(
                                   _sysconfig.get_config_var('BINDIR'),
                                   'python%s%s' % (_sysconfig.get_config_var('VERSION'),
                                                   _sysconfig.get_config_var('EXE'))),
                                post_interp))
                outf.writelines(f.readlines())
                outf.close()
            if f:
                f.close()
        else:
            if f:
                f.close()
            self.copy_file(script, outfile)

    if os.name == 'posix':
        # Ensure installed scripts are world-readable/executable.
        for file in outfiles:
            if self.dry_run:
                log.info('changing mode of %s', file)
            else:
                oldmode = os.stat(file)[ST_MODE] & 4095
                newmode = (oldmode | 365) & 4095
                if newmode != oldmode:
                    log.info('changing mode of %s from %o to %o',
                             file, oldmode, newmode)
                    os.chmod(file, newmode)
    return
def cython_sources(self, sources, extension):
    """
    Walk the list of source files in 'sources', looking for Cython
    source files (.pyx and .py). Run Cython on all that are found, and
    return a modified 'sources' list with Cython source files replaced
    by the generated C (or C++) files.
    """
    # Import Cython lazily so the command module itself can load (and
    # report a clean error) when Cython is not installed.
    try:
        from Cython.Compiler.Main \
            import CompilationOptions, \
            default_options as cython_default_options, \
            compile as cython_compile
        from Cython.Compiler.Errors import PyrexError
    except ImportError:
        e = sys.exc_info()[1]
        print("failed to import Cython: %s" % e)
        raise DistutilsPlatformError(
            "Cython does not appear to be installed")

    new_sources = []
    cython_sources = []
    # Maps each .pyx source -> the C/C++ file it generates.
    cython_targets = {}

    # Setup create_list and cplus from the extension options if
    # Cython.Distutils.extension.Extension is used, otherwise just
    # use what was parsed from the command-line or the configuration file.
    # cplus will also be set to true is extension.language is equal to
    # 'C++' or 'c++'.
    #try:
    #    create_listing = self.cython_create_listing or \
    #        extension.cython_create_listing
    #    cplus = self.cython_cplus or \
    #        extension.cython_cplus or \
    #        (extension.language != None and \
    #         extension.language.lower() == 'c++')
    #except AttributeError:
    #    create_listing = self.cython_create_listing
    #    cplus = self.cython_cplus or \
    #        (extension.language != None and \
    #         extension.language.lower() == 'c++')

    # Command-level options win; otherwise fall back to per-extension
    # attributes (present when Cython.Distutils.extension is used).
    create_listing = self.cython_create_listing or \
        getattr(extension, 'cython_create_listing', 0)
    line_directives = self.cython_line_directives or \
        getattr(extension, 'cython_line_directives', 0)
    no_c_in_traceback = self.no_c_in_traceback or \
        getattr(extension, 'no_c_in_traceback', 0)
    cplus = self.cython_cplus or getattr(extension, 'cython_cplus', 0) or \
        (extension.language and extension.language.lower() == 'c++')
    cython_gen_pxi = self.cython_gen_pxi or getattr(
        extension, 'cython_gen_pxi', 0)
    cython_gdb = self.cython_gdb or getattr(extension, 'cython_gdb', False)
    cython_compile_time_env = self.cython_compile_time_env or \
        getattr(extension, 'cython_compile_time_env', None)

    # Set up the include_path for the Cython compiler:
    #    1. Start with the command line option.
    #    2. Add in any (unique) paths from the extension
    #       cython_include_dirs (if Cython.Distutils.extension is used).
    #    3. Add in any (unique) paths from the extension include_dirs
    includes = self.cython_include_dirs
    try:
        for i in extension.cython_include_dirs:
            if not i in includes:
                includes.append(i)
    except AttributeError:
        pass
    for i in extension.include_dirs:
        if not i in includes:
            includes.append(i)

    # Set up Cython compiler directives:
    #    1. Start with the command line option.
    #    2. Add in any (unique) entries from the extension
    #       cython_directives (if Cython.Distutils.extension is used).
    directives = self.cython_directives
    if hasattr(extension, "cython_directives"):
        directives.update(extension.cython_directives)

    # Set the target_ext to '.c'.  Cython will change this to '.cpp' if
    # needed.
    if cplus:
        target_ext = '.cpp'
    else:
        target_ext = '.c'

    # Decide whether to drop the generated C files into the temp dir
    # or the source tree.
    if not self.inplace and (self.cython_c_in_temp
                             or getattr(extension, 'cython_c_in_temp', 0)):
        target_dir = os.path.join(self.build_temp, "pyrex")
        for package_name in extension.name.split('.')[:-1]:
            target_dir = os.path.join(target_dir, package_name)
    else:
        target_dir = None

    newest_dependency = None
    for source in sources:
        (base, ext) = os.path.splitext(os.path.basename(source))
        if ext == ".py":
            # FIXME: we might want to special case this some more
            ext = '.pyx'
        if ext == ".pyx":              # Cython source file
            output_dir = target_dir or os.path.dirname(source)
            new_sources.append(os.path.join(output_dir, base + target_ext))
            cython_sources.append(source)
            cython_targets[source] = new_sources[-1]
        elif ext == '.pxi' or ext == '.pxd':
            # Not compiled directly; track the newest one so every
            # target is rebuilt when any .pxi/.pxd changes.
            if newest_dependency is None \
                    or newer(source, newest_dependency):
                newest_dependency = source
        else:
            # Non-Cython source passes through unchanged.
            new_sources.append(source)

    if not cython_sources:
        return new_sources

    module_name = extension.name

    for source in cython_sources:
        target = cython_targets[source]
        depends = [source] + list(extension.depends or ())
        # A sibling .pxd is an implicit dependency of its .pyx file.
        if (source[-4:].lower() == ".pyx"
                and os.path.isfile(source[:-3] + "pxd")):
            depends += [source[:-3] + "pxd"]
        rebuild = self.force or newer_group(depends, target, 'newer')
        if not rebuild and newest_dependency is not None:
            rebuild = newer(newest_dependency, target)

        if rebuild:
            log.info("cythoning %s to %s", source, target)
            self.mkpath(os.path.dirname(target))
            if self.inplace:
                output_dir = os.curdir
            else:
                output_dir = self.build_lib
            options = CompilationOptions(
                cython_default_options,
                use_listing_file=create_listing,
                include_path=includes,
                compiler_directives=directives,
                output_file=target,
                cplus=cplus,
                emit_linenums=line_directives,
                c_line_in_traceback=not no_c_in_traceback,
                generate_pxi=cython_gen_pxi,
                output_dir=output_dir,
                gdb_debug=cython_gdb,
                compile_time_env=cython_compile_time_env)
            result = cython_compile(source, options=options,
                                    full_module_name=module_name)
        else:
            log.info("skipping '%s' Cython extension (up-to-date)", target)

    return new_sources
def byte_compile(py_files, optimize=0, force=0, prefix=None, base_dir=None,
                 verbose=1, dry_run=0, direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory.  'py_files' is a list of files
    to compile; any files that don't end in ".py" are silently skipped.
    'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    # Honor python -B / PYTHONDONTWRITEBYTECODE.
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')
    # Direct compilation is only valid when this interpreter's own
    # optimization state already matches the requested 'optimize'.
    if direct is None:
        direct = __debug__ and optimize == 0

    # "Indirect" mode: write a throwaway script that calls back into
    # byte_compile(direct=1) and run it under the right -O flags.
    if not direct:
        try:
            from tempfile import mkstemp
            script_fd, script_name = mkstemp('.py')
        except ImportError:
            # Very old Pythons without mkstemp: fall back to the
            # race-prone mktemp.
            from tempfile import mktemp
            script_fd, script_name = None, mktemp('.py')
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, 'w')
            else:
                script = open(script_name, 'w')
            script.write(
                'from distutils.util import byte_compile\nfiles = [\n')
            script.write(string.join(map(repr, py_files), ',\n') + ']\n')
            script.write(
                '\nbyte_compile(files, optimize=%r, force=%r,\n prefix=%r, base_dir=%r,\n verbose=%r, dry_run=0,\n direct=1)\n'
                % (optimize, force, prefix, base_dir, verbose))
            script.close()
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, '-O')
        elif optimize == 2:
            cmd.insert(1, '-OO')
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name, ), 'removing %s' % script_name,
                dry_run=dry_run)
    # "Direct" mode: compile in-process with py_compile.
    else:
        from py_compile import compile
        for file in py_files:
            if file[-3:] != '.py':
                continue
            # cfile: the bytecode file -- .pyc under __debug__, else .pyo
            # (Python 2 layout); dfile: source name recorded in it.
            cfile = file + (__debug__ and 'c' or 'o')
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError, "invalid prefix: filename %r doesn't start with %r" % (
                        file, prefix)
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)
            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info('byte-compiling %s to %s', file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug('skipping byte-compilation of %s to %s',
                              file, cfile_base)
    return
def byte_compile(py_files, optimize=0, force=0, target_dir=None, verbose=1,
                 dry_run=0, direct=None):
    """Byte-compile a collection of modulegraph module nodes.

    Unlike the distutils version, 'py_files' holds graph nodes (each
    with .filename, .identifier and .packagepath attributes), and
    'target_dir' is where the bytecode tree is laid out.
    """
    if direct is None:
        direct = __debug__ and optimize == 0

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        from distutils.util import execute, spawn
        from tempfile import mktemp

        script_name = mktemp(".py")
        if verbose:
            print("writing byte-compilation script '%s'" % script_name)
        if not dry_run:
            with open(script_name, "w") as script:
                script.write("""
from py2app.util import byte_compile
from modulegraph.modulegraph import *
files = [
""")
                for f in py_files:
                    script.write(repr(f) + ",\n")
                script.write("]\n")
                script.write("""
byte_compile(files, optimize=%r, force=%r,
             target_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, target_dir, verbose))

        # Ensure that py2app is on PYTHONPATH, this ensures that
        # py2app.util can be found even when we're running from
        # an .egg that was downloaded by setuptools
        import py2app

        pp = os.path.dirname(os.path.dirname(py2app.__file__))
        if "PYTHONPATH" in os.environ:
            pp = "%s:%s" % (pp, os.environ["PYTHONPATH"])

        cmd = [
            "/usr/bin/env", "PYTHONPATH=%s" % (pp, ), sys.executable,
            script_name
        ]
        # -O flags go after the env wrapper but before script_name.
        if optimize == 1:
            cmd.insert(3, "-O")
        elif optimize == 2:
            cmd.insert(3, "-OO")
        spawn(cmd, verbose=verbose, dry_run=dry_run)
        execute(
            os.remove, (script_name, ),
            "removing %s" % script_name,
            verbose=verbose,
            dry_run=dry_run,
        )
    # "Direct" mode: compile in-process with py_compile.
    else:
        from distutils.dir_util import mkpath
        from py_compile import compile

        for mod in py_files:
            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            if mod.filename == mod.identifier:
                cfile = os.path.basename(mod.filename)
                dfile = cfile + (__debug__ and "c" or "o")
            else:
                # Dotted module name -> relative path.
                cfile = mod.identifier.replace(".", os.sep)

                if sys.version_info[:2] <= (3, 4):
                    if mod.packagepath:
                        dfile = (cfile + os.sep + "__init__.py" +
                                 (__debug__ and "c" or "o"))
                    else:
                        dfile = cfile + ".py" + (__debug__ and "c" or "o")
                else:
                    # 3.5+: .pyo no longer exists; always .pyc.
                    if mod.packagepath:
                        dfile = cfile + os.sep + "__init__.pyc"
                    else:
                        dfile = cfile + ".pyc"
            if target_dir:
                cfile = os.path.join(target_dir, dfile)

            if force or newer(mod.filename, cfile):
                if verbose:
                    print("byte-compiling %s to %s" % (mod.filename, dfile))
                if not dry_run:
                    mkpath(os.path.dirname(cfile))
                    suffix = os.path.splitext(mod.filename)[1]

                    if suffix in (".py", ".pyw"):
                        # Extract (possibly zipped) source to a plain
                        # file so py_compile can read it, then remove it.
                        fn = cfile + ".py"

                        with zipio.open(mod.filename, "rb") as fp_in:
                            with open(fn, "wb") as fp_out:
                                fp_out.write(fp_in.read())

                        compile(fn, cfile, dfile)
                        os.unlink(fn)

                    elif suffix in PY_SUFFIXES:
                        # Minor problem: This will happily copy a file
                        # <mod>.pyo to <mod>.pyc or <mod>.pyc to
                        # <mod>.pyo, but it does seem to work.
                        copy_file(mod.filename, cfile, preserve_times=True)

                    else:
                        raise RuntimeError("Don't know how to handle %r" %
                                           mod.filename)
            else:
                if verbose:
                    print("skipping byte-compilation of %s to %s" %
                          (mod.filename, dfile))
def generate_numpyconfig_h(ext, build_dir):
    """Depends on config.h: generate_config_h has to be called before !"""
    # The generated header lives under the build tree; make sure its
    # directory exists before comparing timestamps or writing.
    target = join(build_dir, header_dir, '_numpyconfig.h')
    parent = os.path.dirname(target)
    if not os.path.exists(parent):
        os.makedirs(parent)

    # Regenerate only when this setup script itself changed.
    if newer(__file__, target):
        config_cmd = config.get_config_cmd()
        log.info('Generating %s', target)

        # Check sizeof
        ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

        if is_npy_no_signal():
            moredefs.append(('NPY_NO_SIGNAL', 1))

        moredefs.append(('NPY_NO_SMP', 1 if is_npy_no_smp() else 0))

        mathlibs = check_mathlib(config_cmd)
        moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
        moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

        if ENABLE_SEPARATE_COMPILATION:
            moredefs.append(('NPY_ENABLE_SEPARATE_COMPILATION', 1))

        # Check wether we can use inttypes (C99) formats
        if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
            moredefs.append(('NPY_USE_C99_FORMATS', 1))

        # visibility check
        moredefs.append(('NPY_VISIBILITY_HIDDEN',
                         visibility_define(config_cmd)))

        # Add the C API/ABI versions
        moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
        moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

        # Emit every collected definition, then the __STDC_FORMAT_MACROS
        # guard needed for the C99 inttypes formats.
        with open(target, 'w') as target_f:
            for entry in moredefs:
                if isinstance(entry, str):
                    target_f.write('#define %s\n' % (entry))
                else:
                    target_f.write('#define %s %s\n' % (entry[0], entry[1]))
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")

        # Dump the numpyconfig.h header to stdout
        print('File: %s' % target)
        with open(target) as target_f:
            print(target_f.read())
        print('EOF')
    config.add_data_files((header_dir, target))
    return target
def copy_tree(
        src,
        dst,
        preserve_mode=1,
        preserve_times=1,
        preserve_symlinks=0,
        update=0,
        verbose=0,
        dry_run=0,
        condition=None,
):
    """
    Copy an entire directory tree 'src' to a new location 'dst'.  Both
    'src' and 'dst' must be directory names.  If 'src' is not a
    directory, raise DistutilsFileError.  If 'dst' does not exist, it is
    created with 'mkpath()'.  The end result of the copy is that every
    file in 'src' is copied to 'dst', and directories under 'src' are
    recursively copied to 'dst'. Return the list of files that were
    copied or might have been copied, using their output name. The
    return value is unaffected by 'update' or 'dry_run': it is simply
    the list of all files under 'src', with the names changed to be
    under 'dst'.

    'preserve_mode' and 'preserve_times' are the same as for
    'copy_file'; note that they only apply to regular files, not to
    directories.  If 'preserve_symlinks' is true, symlinks will be
    copied as symlinks (on platforms that support them!); otherwise
    (the default), the destination of the symlink will be copied.
    'update' and 'verbose' are the same as for 'copy_file'.
    """
    assert isinstance(src, (str, unicode)), repr(src)
    assert isinstance(dst, (str, unicode)), repr(dst)

    from distutils import log
    from distutils.dep_util import newer
    from distutils.dir_util import mkpath
    from distutils.errors import DistutilsFileError

    src = fsencoding(src)
    dst = fsencoding(dst)

    # Default filter skips SCM metadata directories.
    if condition is None:
        condition = skipscm

    if not dry_run and not zipio.isdir(src):
        raise DistutilsFileError("cannot copy tree '%s': not a directory" %
                                 src)
    try:
        names = zipio.listdir(src)
    except os.error as exc:
        (errno, errstr) = exc.args
        if dry_run:
            names = []
        else:
            raise DistutilsFileError("error listing files in '%s': %s" %
                                     (src, errstr))

    if not dry_run:
        mkpath(dst)

    outputs = []

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)
        if (condition is not None) and (not condition(src_name)):
            continue

        # Note: using zipio's internal _locate function throws an IOError on
        # dead symlinks, so handle it here.
        # NOTE(review): os.readlink may return a relative target, which
        # os.path.exists resolves against the CWD rather than the link's
        # directory -- confirm this matches the intended behavior.
        if os.path.islink(src_name) and not os.path.exists(
                os.readlink(src_name)):
            continue

        if preserve_symlinks and zipio.islink(src_name):
            link_dest = zipio.readlink(src_name)
            log.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                # Bug fix: previously this compared the tree root 'src'
                # against dst_name, so the up-to-date check tracked the
                # parent directory's mtime instead of the symlink entry
                # being copied.  Compare the entry itself.
                if update and not newer(src_name, dst_name):
                    pass
                else:
                    make_symlink(link_dest, dst_name)
            outputs.append(dst_name)
        elif zipio.isdir(src_name) and not os.path.isfile(src_name):
            # ^^^ this odd tests ensures that resource files that
            # happen to be a zipfile won't get extracted.
            outputs.extend(
                copy_tree(
                    src_name,
                    dst_name,
                    preserve_mode,
                    preserve_times,
                    preserve_symlinks,
                    update,
                    dry_run=dry_run,
                    condition=condition,
                ))
        else:
            copy_file(
                src_name,
                dst_name,
                preserve_mode,
                preserve_times,
                update,
                dry_run=dry_run,
            )
            outputs.append(dst_name)

    return outputs
if not exists(extcfile) or newer(extpfile, extcfile): # This is the only place where Cython is needed, but every # developer should have it installed, so it should not be # a hard requisite from Cython.Build import cythonize cythonize(extpfile) extfiles[extname] = extcfile return extfiles cython_extfiles = get_cython_extfiles(cython_extnames) # Update the version.h file if this file is newer if newer('VERSION', 'src/version.h'): open('src/version.h', 'w').write( '#define PYTABLES_VERSION "%s"\n' % VERSION) # -------------------------------------------------------------------- # Package information for ``setuptools`` # PyTables contains data files for tests. setuptools_kwargs['zip_safe'] = False setuptools_kwargs['extras_require'] = {} setuptools_kwargs['install_requires'] = requirements # Detect packages automatically. setuptools_kwargs['packages'] = find_packages(exclude=['*.bench']) # Entry points for automatic creation of scripts. setuptools_kwargs['entry_points'] = {
"""distutils.command.build_scripts
def f2py_sources(self, sources, extension):
    """Process .pyf interface files (or raw Fortran sources) in
    'sources' with f2py and return the source list with the generated
    C wrapper files substituted in.
    """
    new_sources = []
    f2py_sources = []
    f_sources = []
    # Maps each .pyf source -> its generated <name>module.c target.
    f2py_targets = {}
    target_dirs = []
    ext_name = extension.name.split('.')[-1]
    # Set when the .pyf is absent and a pre-generated module.c must be
    # reused instead of running f2py.
    skip_f2py = 0

    for source in sources:
        (base, ext) = os.path.splitext(source)
        if ext == '.pyf':  # F2PY interface file
            if self.inplace:
                target_dir = os.path.dirname(base)
            else:
                target_dir = appendpath(self.build_src,
                                        os.path.dirname(base))
            if os.path.isfile(source):
                name = get_f2py_modulename(source)
                if name != ext_name:
                    raise DistutilsSetupError(
                        'mismatch of extension names: %s '
                        'provides %r but expected %r' % (
                            source, name, ext_name))
                target_file = os.path.join(target_dir, name + 'module.c')
            else:
                # .pyf missing: assume the wrapper was generated earlier
                # (e.g. "build_src --inplace" for an sdist) and look for
                # it, first in target_dir then next to the source.
                log.debug(' source %s does not exist: skipping f2py\'ing.' \
                          % (source))
                name = ext_name
                skip_f2py = 1
                target_file = os.path.join(target_dir, name + 'module.c')
                if not os.path.isfile(target_file):
                    log.warn(' target %s does not exist:\n '\
                             'Assuming %smodule.c was generated with '\
                             '"build_src --inplace" command.' \
                             % (target_file, name))
                    target_dir = os.path.dirname(base)
                    target_file = os.path.join(target_dir,
                                               name + 'module.c')
                    if not os.path.isfile(target_file):
                        raise DistutilsSetupError("%r missing" %
                                                  (target_file, ))
                    log.info(' Yes! Using %r as up-to-date target.' \
                             % (target_file))
            target_dirs.append(target_dir)
            f2py_sources.append(source)
            f2py_targets[source] = target_file
            new_sources.append(target_file)
        elif fortran_ext_match(ext):
            f_sources.append(source)
        else:
            new_sources.append(source)

    # Nothing for f2py to do: pass the list through unchanged.
    if not (f2py_sources or f_sources):
        return new_sources

    for d in target_dirs:
        self.mkpath(d)

    f2py_options = extension.f2py_options + self.f2py_opts

    # Libraries this extension links against may contribute their own
    # f2py options.
    if self.distribution.libraries:
        for name, build_info in self.distribution.libraries:
            if name in extension.libraries:
                f2py_options.extend(build_info.get('f2py_options', []))

    log.info("f2py options: %s" % (f2py_options))

    if f2py_sources:
        if len(f2py_sources) != 1:
            raise DistutilsSetupError(
                'only one .pyf file is allowed per extension module but got'\
                ' more: %r' % (f2py_sources,))
        source = f2py_sources[0]
        target_file = f2py_targets[source]
        target_dir = os.path.dirname(target_file) or '.'
        depends = [source] + extension.depends
        if (self.force or newer_group(depends, target_file, 'newer')) \
               and not skip_f2py:
            log.info("f2py: %s" % (source))
            import numpy.f2py
            numpy.f2py.run_main(f2py_options
                                + ['--build-dir', target_dir, source])
        else:
            log.debug(" skipping '%s' f2py interface (up-to-date)" %
                      (source))
    else:
        #XXX TODO: --inplace support for sdist command
        # No .pyf: wrap the raw Fortran sources into <ext_name>module.c.
        if is_sequence(extension):
            name = extension[0]
        else:
            name = extension.name
        target_dir = os.path.join(*([self.build_src]\
                                    + name.split('.')[:-1]))
        target_file = os.path.join(target_dir, ext_name + 'module.c')
        new_sources.append(target_file)
        depends = f_sources + extension.depends
        if (self.force or newer_group(depends, target_file, 'newer')) \
               and not skip_f2py:
            log.info("f2py:> %s" % (target_file))
            self.mkpath(target_dir)
            import numpy.f2py
            numpy.f2py.run_main(f2py_options + ['--lower',
                                                '--build-dir', target_dir]+\
                                ['-m', ext_name] + f_sources)
        else:
            log.debug(" skipping f2py fortran files for '%s' (up-to-date)"\
                      % (target_file))

    if not os.path.isfile(target_file):
        raise DistutilsError("f2py target file %r not generated" %
                             (target_file, ))

    # Every f2py extension additionally needs fortranobject.c/.h copied
    # from the installed numpy.f2py package into the build tree.
    target_c = os.path.join(self.build_src, 'fortranobject.c')
    target_h = os.path.join(self.build_src, 'fortranobject.h')
    log.info(" adding '%s' to sources." % (target_c))
    new_sources.append(target_c)
    if self.build_src not in extension.include_dirs:
        log.info(" adding '%s' to include_dirs." \
                 % (self.build_src))
        extension.include_dirs.append(self.build_src)

    if not skip_f2py:
        import numpy.f2py
        d = os.path.dirname(numpy.f2py.__file__)
        source_c = os.path.join(d, 'src', 'fortranobject.c')
        source_h = os.path.join(d, 'src', 'fortranobject.h')
        if newer(source_c, target_c) or newer(source_h, target_h):
            self.mkpath(os.path.dirname(target_c))
            self.copy_file(source_c, target_c)
            self.copy_file(source_h, target_h)
    else:
        # f2py was skipped, so the support files must already exist.
        if not os.path.isfile(target_c):
            raise DistutilsSetupError("f2py target_c file %r not found" %
                                      (target_c, ))
        if not os.path.isfile(target_h):
            raise DistutilsSetupError("f2py target_h file %r not found" %
                                      (target_h, ))

    # Pick up any Fortran wrapper files f2py may have emitted.
    for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']:
        filename = os.path.join(target_dir, ext_name + name_ext)
        if os.path.isfile(filename):
            log.info(" adding '%s' to sources." % (filename))
            f_sources.append(filename)

    return new_sources + f_sources
def generate_config_h(ext, build_dir):
    """Generate config.h under 'build_dir' from configure-style checks,
    extend 'ext' with the detected math libraries, and return the path
    of the generated header.
    """
    target = join(build_dir, header_dir, 'config.h')
    d = os.path.dirname(target)
    if not os.path.exists(d):
        os.makedirs(d)

    # Regenerate only when this setup script itself changed.
    if newer(__file__, target):
        config_cmd = config.get_config_cmd()
        log.info('Generating %s', target)

        # Check sizeof
        moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

        # Check math library and C99 math funcs availability
        mathlibs = check_mathlib(config_cmd)
        moredefs.append(('MATHLIB', ','.join(mathlibs)))

        check_math_capabilities(config_cmd, moredefs, mathlibs)
        moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
        moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

        # Signal check
        if is_npy_no_signal():
            moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

        # Windows checks
        if sys.platform == 'win32' or os.name == 'nt':
            win32_checks(moredefs)

        # Inline check
        inline = config_cmd.check_inline()

        # Check whether we need our own wide character support
        # NOTE(review): this local is never read afterwards -- looks
        # like leftover state from an older revision; confirm.
        if not config_cmd.check_decl('Py_UNICODE_WIDE',
                                     headers=['Python.h']):
            PYTHON_HAS_UNICODE_WIDE = True
        else:
            PYTHON_HAS_UNICODE_WIDE = False

        if ENABLE_SEPARATE_COMPILATION:
            moredefs.append(('ENABLE_SEPARATE_COMPILATION', 1))

        # Get long double representation
        if sys.platform != 'darwin':
            rep = check_long_double_representation(config_cmd)
            if rep in ['INTEL_EXTENDED_12_BYTES_LE',
                       'INTEL_EXTENDED_16_BYTES_LE', 'IEEE_QUAD_LE',
                       'IEEE_QUAD_BE', 'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                       'DOUBLE_DOUBLE_BE']:
                moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
            else:
                raise ValueError("Unrecognized long double format: %s" %
                                 rep)

        # Py3K check
        if sys.version_info[0] == 3:
            moredefs.append(('NPY_PY3K', 1))

        # Generate the config.h file from moredefs
        target_f = open(target, 'w')
        # NOTE: the loop variable shadows the directory name 'd' above;
        # harmless here since 'd' is not used again.
        for d in moredefs:
            if isinstance(d, str):
                target_f.write('#define %s\n' % (d))
            else:
                target_f.write('#define %s %s\n' % (d[0], d[1]))

        # define inline to our keyword, or nothing
        target_f.write('#ifndef __cplusplus\n')
        if inline == 'inline':
            target_f.write('/* #undef inline */\n')
        else:
            target_f.write('#define inline %s\n' % inline)
        target_f.write('#endif\n')

        # add the guard to make sure config.h is never included directly,
        # but always through npy_config.h
        target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

        target_f.close()
        # Dump the generated header to stdout for the build log.
        print('File:', target)
        target_f = open(target)
        print(target_f.read())
        target_f.close()
        print('EOF')
    else:
        # Header is up to date: recover the MATHLIB list by parsing the
        # existing file so ext.libraries can still be extended below.
        mathlibs = []
        target_f = open(target)
        for line in target_f.readlines():
            s = '#define MATHLIB'
            if line.startswith(s):
                value = line[len(s):].strip()
                if value:
                    mathlibs.extend(value.split(','))
        target_f.close()

    # Ugly: this can be called within a library and not an extension,
    # in which case there is no libraries attributes (and none is
    # needed).
    if hasattr(ext, 'libraries'):
        ext.libraries.extend(mathlibs)

    incl_dir = os.path.dirname(target)
    if incl_dir not in config.numpy_include_dirs:
        config.numpy_include_dirs.append(incl_dir)

    return target