def run(self):
    """Run the standard install step, then rewrite the installed
    Synopsis/config.py so it records the real install prefix/version,
    and byte-compile it so the stale bytecode is not reused."""
    # Do the standard installation.
    base.run(self)
    config_file = os.path.join(self.install_dir, 'Synopsis', 'config.py')
    self.announce("adjusting config parameters")
    i = self.distribution.get_command_obj('install')
    version = self.distribution.get_version()
    datadir=os.path.join(i.prefix, 'share', 'synopsis-%s'%version)
    # Patch prefix/version/revision/datadir constants inside config.py.
    reset_config_variables(config_file,
                           prefix=i.prefix,
                           version=version,
                           revision=self.distribution.revision,
                           datadir=datadir)
    # Make sure the new config file gets recompiled, or else python may
    # not notice it is in fact different from the original config file.
    files = [config_file]
    from distutils.util import byte_compile
    # --root is stripped from the filename recorded in the bytecode.
    install_root = self.get_finalized_command('install').root
    if self.compile:
        byte_compile(files, optimize=0, force=1,
                     prefix=install_root, dry_run=self.dry_run)
    if self.optimize > 0:
        byte_compile(files, optimize=self.optimize, force=1,
                     prefix=install_root, verbose=self.verbose,
                     dry_run=self.dry_run)
def write_stub(self, output_dir, ext, compile = False):
    """Write a .py stub that bootstraps the real extension module via
    pkg_resources + imp.load_dynamic; optionally byte-compile the stub
    and delete the .py source afterwards."""
    log.info('writing stub loader for %s to %s', ext._full_name, output_dir)
    stub_file = os.path.join(output_dir, *ext._full_name.split('.')) + '.py'
    if compile and os.path.exists(stub_file):
        raise DistutilsError(stub_file + ' already exists! Please delete.')
    if not self.dry_run:
        f = open(stub_file, 'w')
        # NOTE(review): the leading whitespace of these template lines looks
        # collapsed by the paste; confirm against upstream setuptools before
        # trusting the generated stub's indentation.
        f.write('\n'.join([
            'def __bootstrap__():',
            ' global __bootstrap__, __file__, __loader__',
            ' import sys, os, pkg_resources, imp' + if_dl(', dl'),
            ' __file__ = pkg_resources.resource_filename(__name__,%r)'
            % os.path.basename(ext._file_name),
            ' del __bootstrap__',
            " if '__loader__' in globals():",
            ' del __loader__',
            if_dl(' old_flags = sys.getdlopenflags()'),
            ' old_dir = os.getcwd()',
            ' try:',
            ' os.chdir(os.path.dirname(__file__))',
            if_dl(' sys.setdlopenflags(dl.RTLD_NOW)'),
            ' imp.load_dynamic(__name__,__file__)',
            ' finally:',
            if_dl(' sys.setdlopenflags(old_flags)'),
            ' os.chdir(old_dir)',
            '__bootstrap__()',
            '']))
        f.close()
    if compile:
        from distutils.util import byte_compile
        byte_compile([stub_file], optimize=0,
                     force=True, dry_run=self.dry_run)
        optimize = self.get_finalized_command('install_lib').optimize
        if optimize > 0:
            byte_compile([stub_file], optimize=optimize,
                         force=True, dry_run=self.dry_run)
        # Ship only the bytecode; remove the .py stub source.
        if os.path.exists(stub_file) and not self.dry_run:
            os.unlink(stub_file)
def run(self):
    """Run the standard install step, then rewrite qm/config.py with the
    actual install prefix and extension path, and byte-compile it."""
    # Do the standard installation.
    base.run(self)
    config_file = join(self.install_dir, 'qm', 'config.py')
    self.announce("adjusting config parameters")
    i = self.distribution.get_command_obj('install')
    prefix = i.prefix
    # Per-python-version site extension directory, e.g. site-extensions-3.9.
    extension_path = join('share', 'qmtest',
                          'site-extensions-%d.%d'%sys.version_info[:2])
    reset_config_variables(config_file,
                           version=self.distribution.get_version(),
                           prefix=prefix,
                           extension_path=extension_path)
    # Make sure the new config file gets recompiled, or else python may
    # not notice it is in fact different from the original config file.
    files = [config_file]
    from distutils.util import byte_compile
    install_root = self.get_finalized_command('install').root
    if self.compile:
        byte_compile(files, optimize=0, force=1,
                     prefix=install_root, dry_run=self.dry_run)
    if self.optimize > 0:
        byte_compile(files, optimize=self.optimize, force=1,
                     prefix=install_root, verbose=self.verbose,
                     dry_run=self.dry_run)
def byte_compile(self, files):
    """Byte-compile *files*, honoring ``-B``/PYTHONDONTWRITEBYTECODE and
    the command's compile/optimize options."""
    if sys.dont_write_bytecode:
        self.warn('byte-compiling is disabled, skipping.')
        return
    from distutils.util import byte_compile
    # Strip the "--root" directory given to the "install" command off the
    # filenames recorded in the bytecode, so e.g. RPM build roots still
    # produce bytecode with usable (final) paths.
    root = self.get_finalized_command('install').root
    shared = dict(force=self.force, prefix=root, dry_run=self.dry_run)
    if self.compile:
        byte_compile(files, optimize=0, **shared)
    if self.optimize > 0:
        byte_compile(files, optimize=self.optimize,
                     verbose=self.verbose, **shared)
def byte_compile(self, files):
    """Byte-compile *files*, recording names relative to build_lib."""
    if sys.dont_write_bytecode:
        self.warn('byte-compiling is disabled, skipping.')
        return
    from distutils.util import byte_compile
    # The prefix stripped from recorded filenames must end in a separator.
    lib_prefix = self.build_lib
    if lib_prefix[-1] != os.sep:
        lib_prefix += os.sep
    # XXX this code is essentially the same as the 'byte_compile()
    # method of the "install_lib" command, except for the determination
    # of the 'prefix' string. Hmmm.
    passes = ((0, self.compile), (self.optimize, self.optimize > 0))
    for level, wanted in passes:
        if wanted:
            byte_compile(files, optimize=level, force=self.force,
                         prefix=lib_prefix, dry_run=self.dry_run)
def write_pkgcore_lookup_configs(python_base, install_prefix, injected_bin_path=()):
    """Generate file of install path constants."""
    path = os.path.join(python_base, "pkgcore", "_const.py")
    # Python 2 compatible makedirs-if-missing.
    try:
        os.makedirs(os.path.dirname(path))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    log.info("writing lookup config to %r" % path)
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        # write more dynamic _const file for wheel installs
        if install_prefix != os.path.abspath(sys.prefix):
            import textwrap
            f.write(textwrap.dedent("""\
                import os.path as osp
                import sys
                from snakeoil import process
                INSTALL_PREFIX = osp.abspath(sys.prefix)
                DATA_PATH = osp.join(INSTALL_PREFIX, {!r})
                CONFIG_PATH = osp.join(INSTALL_PREFIX, {!r})
                LIBDIR_PATH = osp.join(INSTALL_PREFIX, {!r})
                EBD_PATH = osp.join(INSTALL_PREFIX, {!r})
                INJECTED_BIN_PATH = ()
                CP_BINARY = process.find_binary('cp')
            """.format(
                DATA_INSTALL_OFFSET, CONFIG_INSTALL_OFFSET,
                LIBDIR_INSTALL_OFFSET, EBD_INSTALL_OFFSET)))
        else:
            f.write("INSTALL_PREFIX=%r\n" % install_prefix)
            f.write("DATA_PATH=%r\n" %
                    os.path.join(install_prefix, DATA_INSTALL_OFFSET))
            f.write("CONFIG_PATH=%r\n" %
                    os.path.join(install_prefix, CONFIG_INSTALL_OFFSET))
            f.write("LIBDIR_PATH=%r\n" %
                    os.path.join(install_prefix, LIBDIR_INSTALL_OFFSET))
            f.write("EBD_PATH=%r\n" %
                    os.path.join(install_prefix, EBD_INSTALL_OFFSET))
            # This is added to suppress the default behaviour of looking
            # within the repo for a bin subdir.
            f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path),))
            # Static paths for various utilities.
            from snakeoil import process
            required_progs = ('cp',)
            try:
                for prog in required_progs:
                    prog_path = process.find_binary(prog)
                    f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))
            except process.CommandNotFound:
                raise DistutilsExecError(
                    "generating lookup config failed: required utility %r missing from PATH"
                    % (prog,))
            # NOTE(review): redundant inside the with block -- the context
            # manager closes the file anyway; confirm and drop.
            f.close()
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
def writepy(self, paths=[]):
    """Add all files from paths to the ZIP archive.

    paths is a list of paths each of which specifies a pathname.
    """
    # NOTE(review): mutable default argument -- safe only while no caller
    # mutates it; confirm. Python 2 code (print statements).
    from vyperlogix import misc
    # Accept either a single path or a list of paths.
    for top in paths if (misc.isList(paths)) else [paths]:
        try:
            for root, dirs, files in os.walk(top):
                # Skip directories matching the exclusion regex self.rx.
                if (self.rx.search(root) == None):
                    print '=' * 80
                    print 'files=%s' % files
                    py_files = [os.path.join(root, f) for f in files
                                if f.endswith('.py' if not self.isSourceless
                                              else '.pyo')]
                    print '-' * 80
                    print 'py_files=%s' % py_files
                    # Compile at -OO so .pyo files exist before archiving.
                    util.byte_compile(py_files, optimize=2, force=1)
                    for f in py_files:
                        print 'ZIP Adding (%s) to (%s)' % (f, self.filename)
                        # Store under a top-relative name, as .pyc.
                        f_base = f.replace('.pyo', '.pyc').replace(top, '')
                        _f_base = f_base.split(os.sep)[-1]
                        self.write(f, f_base)
                    print '=' * 80
        except Exception as details:
            print 'Error in ZIP processing. (%s)' % (str(details))
def write_stub(self, output_dir, ext, compile=False):
    """Write a .py stub that bootstraps the real extension module.

    Fix: the block contained unresolved merge-conflict markers
    (`<<<<<<< HEAD` / `=======` / `>>>>>>>`) making it a syntax error.
    Resolved in favor of the HEAD branch, which uses importlib.util
    (spec_from_file_location / module_from_spec) -- the modern,
    non-deprecated loading API.
    """
    log.info("writing stub loader for %s to %s", ext._full_name, output_dir)
    stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + '.py')
    if compile and os.path.exists(stub_file):
        raise DistutilsError(stub_file + " already exists! Please delete.")
    if not self.dry_run:
        f = open(stub_file, 'w')
        # NOTE(review): leading whitespace of the template lines appears
        # collapsed by the paste; confirm against upstream setuptools.
        f.write(
            '\n'.join([
                "def __bootstrap__():",
                " global __bootstrap__, __file__, __loader__",
                " import sys, os, pkg_resources, importlib.util" +
                if_dl(", dl"),
                " __file__ = pkg_resources.resource_filename"
                "(__name__,%r)" % os.path.basename(ext._file_name),
                " del __bootstrap__",
                " if '__loader__' in globals():",
                " del __loader__",
                if_dl(" old_flags = sys.getdlopenflags()"),
                " old_dir = os.getcwd()",
                " try:",
                " os.chdir(os.path.dirname(__file__))",
                if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
                " spec = importlib.util.spec_from_file_location(",
                " __name__, __file__)",
                " mod = importlib.util.module_from_spec(spec)",
                " spec.loader.exec_module(mod)",
                " finally:",
                if_dl(" sys.setdlopenflags(old_flags)"),
                " os.chdir(old_dir)",
                "__bootstrap__()",
                ""  # terminal \n
            ])
        )
        f.close()
    if compile:
        from distutils.util import byte_compile
        byte_compile([stub_file], optimize=0,
                     force=True, dry_run=self.dry_run)
        optimize = self.get_finalized_command('install_lib').optimize
        if optimize > 0:
            byte_compile([stub_file], optimize=optimize,
                         force=True, dry_run=self.dry_run)
        # Ship only bytecode; remove the .py stub source.
        if os.path.exists(stub_file) and not self.dry_run:
            os.unlink(stub_file)
def write_pkgcore_lookup_configs(python_base, install_prefix, injected_bin_path=()):
    """Generate file of install path constants."""
    path = os.path.join(python_base, "pkgcore", "_const.py")
    os.makedirs(os.path.dirname(path), exist_ok=True)
    log.info("writing lookup config to %r" % path)
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        # write more dynamic _const file for wheel installs
        if install_prefix != os.path.abspath(sys.prefix):
            import textwrap
            f.write(textwrap.dedent("""\
                import os.path as osp
                import sys
                from snakeoil import process
                INSTALL_PREFIX = osp.abspath(sys.prefix)
                DATA_PATH = osp.join(INSTALL_PREFIX, {!r})
                CONFIG_PATH = osp.join(INSTALL_PREFIX, {!r})
                LIBDIR_PATH = osp.join(INSTALL_PREFIX, {!r})
                EBD_PATH = osp.join(INSTALL_PREFIX, {!r})
                INJECTED_BIN_PATH = ()
                CP_BINARY = process.find_binary('cp')
            """.format(
                DATA_INSTALL_OFFSET, CONFIG_INSTALL_OFFSET,
                LIBDIR_INSTALL_OFFSET, EBD_INSTALL_OFFSET)))
        else:
            # Regular (non-wheel) install: bake in absolute paths.
            f.write("INSTALL_PREFIX=%r\n" % install_prefix)
            f.write("DATA_PATH=%r\n" %
                    os.path.join(install_prefix, DATA_INSTALL_OFFSET))
            f.write("CONFIG_PATH=%r\n" %
                    os.path.join(install_prefix, CONFIG_INSTALL_OFFSET))
            f.write("LIBDIR_PATH=%r\n" %
                    os.path.join(install_prefix, LIBDIR_INSTALL_OFFSET))
            f.write("EBD_PATH=%r\n" %
                    os.path.join(install_prefix, EBD_INSTALL_OFFSET))
            # This is added to suppress the default behaviour of looking
            # within the repo for a bin subdir.
            f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path),))
            # Static paths for various utilities.
            from snakeoil import process
            required_progs = ('cp',)
            try:
                for prog in required_progs:
                    prog_path = process.find_binary(prog)
                    f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))
            except process.CommandNotFound:
                raise DistutilsExecError(
                    "generating lookup config failed: required utility %r missing from PATH"
                    % (prog,))
            # NOTE(review): redundant inside the with block; confirm and drop.
            f.close()
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
def byte_compile(self, files):
    """Byte-compile *files* unless bytecode writing is disabled, stripping
    the install --root prefix from the recorded filenames."""
    if sys.dont_write_bytecode:
        self.warn('byte-compiling is disabled, skipping.')
        return
    from distutils.util import byte_compile
    strip_prefix = self.get_finalized_command('install').root
    if self.compile:
        byte_compile(files,
                     optimize=0,
                     force=self.force,
                     prefix=strip_prefix,
                     dry_run=self.dry_run)
    if self.optimize > 0:
        byte_compile(files,
                     optimize=self.optimize,
                     force=self.force,
                     prefix=strip_prefix,
                     verbose=self.verbose,
                     dry_run=self.dry_run)
def run(self, *args, **kwargs):
    """Byte-compile the teucrium entry point and install it into the
    sibling sbin/ directory as an executable."""
    # __ugly__
    tdir = os.path.join(os.path.split(self.install_dir.rstrip('/'))[0],
                        'sbin/')
    if not (os.path.exists(tdir)):
        os.makedirs(tdir)
    fn = os.path.join(tdir, 'teucrium')
    fn_src = 'src/teucrium/main.py'
    byte_compile((fn_src,))
    # NOTE(review): assumes the bytecode lands next to the source as
    # 'main.pyc' -- true on Python 2 only; Python 3 writes into
    # __pycache__. Confirm the target interpreter.
    self.copy_file(fn_src + 'c', fn)
    # %r quotes the path for the shell; group-executable, not world.
    os.system('chmod g+x,o-x %r' % (fn,))
def _compile_and_remove_stub(self, stub_file: str):
    """Byte-compile *stub_file* (plus an optimized variant when requested
    by install_lib), then delete the .py source so only bytecode ships."""
    from distutils.util import byte_compile

    byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run)
    opt_level = self.get_finalized_command('install_lib').optimize
    if opt_level > 0:
        byte_compile([stub_file], optimize=opt_level,
                     force=True, dry_run=self.dry_run)
    if not self.dry_run and os.path.exists(stub_file):
        os.unlink(stub_file)
def byte_compile(self, files):
    """Byte-compile *files*, recording filenames relative to build_lib."""
    if sys.dont_write_bytecode:
        self.warn('byte-compiling is disabled, skipping.')
        return
    from distutils.util import byte_compile
    # Ensure the stripped prefix ends with a path separator.
    strip = self.build_lib
    if strip[-1] != os.sep:
        strip += os.sep
    for level, wanted in ((0, self.compile),
                          (self.optimize, self.optimize > 0)):
        if wanted:
            byte_compile(files, optimize=level, force=self.force,
                         prefix=strip, dry_run=self.dry_run)
def compile(self, name):
    """Byte-compile *name* quietly, after printing a progress line."""
    native_path = shell.native(name)
    term.write("%(ERASE)s%(BOLD)s>>> Compiling %(name)s...%(NORMAL)s",
               name=name)
    from distutils import util
    # Older distutils lacked the log module; in that case fall back to
    # byte_compile's own verbosity flag to keep it quiet.
    try:
        from distutils import log
    except ImportError:
        util.byte_compile([native_path], verbose=0, force=True)
    else:
        log.set_verbosity(0)
        util.byte_compile([native_path], force=True)
def copy_file(self, infile, outfile, **kwargs):
    """sdist copy hook: Cheetah .tmpl files are compiled to .pyc instead
    of copied, and generated .py sources are byte-compiled then removed.
    Python 2 code (apply(), print statements)."""
    if outfile[-5:] == ".tmpl":
        # Compile the template; the .pyc replaces the .tmpl in the dist.
        output_dir = os.path.split(outfile)[0]
        output_dir = output_dir[:output_dir.find(os.path.dirname(infile))]
        template_compile(infile, output_dir)
        outfile = output_dir + "/" + infile.replace(".tmpl", ".pyc")
    else:
        apply(sdist.copy_file, (self, infile, outfile), kwargs)
    # Byte-compile generated .py files (but never Cheetah's own sources),
    # then drop the source so only bytecode is distributed.
    if infile[:7] != "Cheetah" and outfile[-3:] == ".py":
        util.byte_compile([outfile])
        print "delete", outfile, "after byte compiling"
        os.remove(outfile)
def main() -> None:
    """CLI entry point: regenerate the Python data model from a gaphor
    model file plus an overrides file, then byte-compile the result."""
    parser = argparse.ArgumentParser()
    parser.add_argument("modelfile", type=Path, help="gaphor model filename")
    parser.add_argument("outfile", type=Path, help="python data model filename")
    parser.add_argument("overrides", type=Path, help="override filename")
    opts = parser.parse_args()

    print(f"Generating {opts.outfile} from {opts.modelfile}...")
    print(" (warnings can be ignored)")
    autocoder.generate(opts.modelfile, opts.outfile, opts.overrides)
    # Compile up front so import errors in the generated file surface now.
    byte_compile([str(opts.outfile)])
def write_lookup_config(python_base, install_prefix):
    """Generate file of install path constants.

    Fixes: the non-wheel branch assigned ``data_path``/``config_path``
    with trailing commas, making each a 1-tuple, so the generated
    _const.py recorded tuple reprs instead of path strings. Also dropped
    the redundant ``f.close()`` inside the ``with`` block.
    """
    path = os.path.join(python_base, pkgdist.MODULE, "_const.py")
    # Python 2 compatible makedirs-if-missing.
    try:
        os.makedirs(os.path.dirname(path))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    log.info("writing lookup config to %r" % path)
    with pkgdist.syspath(pkgdist.PACKAGEDIR):
        from bite import const
        clients = tuple(sorted(const.CLIENTS.items()))
        services = tuple(sorted(const.SERVICES.items()))
        service_opts = tuple(sorted(const.SERVICE_OPTS.items()))
    import textwrap
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        # write more dynamic file for wheel installs
        if install_prefix != os.path.abspath(sys.prefix):
            f.write(
                textwrap.dedent(f"""\
                    import os.path as osp
                    import sys
                    INSTALL_PREFIX = osp.abspath(sys.prefix)
                    DATA_PATH = osp.join(INSTALL_PREFIX, {DATA_INSTALL_OFFSET!r})
                    CONFIG_PATH = osp.join(INSTALL_PREFIX, {CONFIG_INSTALL_OFFSET!r})
                    CLIENTS = {clients}
                    SERVICES = {services}
                    SERVICE_OPTS = {service_opts}
                """))
        else:
            # was: trailing commas here produced 1-tuples, not paths
            data_path = os.path.join(install_prefix, DATA_INSTALL_OFFSET)
            config_path = os.path.join(install_prefix, CONFIG_INSTALL_OFFSET)
            f.write(
                textwrap.dedent(f"""\
                    INSTALL_PREFIX = {install_prefix!r}
                    DATA_PATH = {data_path!r}
                    CONFIG_PATH = {config_path!r}
                    CLIENTS = {clients!r}
                    SERVICES = {services!r}
                    SERVICE_OPTS = {service_opts!r}
                """))
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
def write_pkgcore_lookup_configs(python_base, install_prefix, injected_bin_path=()):
    """Write pkgcore/_const.py with baked-in install path constants,
    then byte-compile it (plain and -OO)."""
    path = os.path.join(python_base, "pkgcore", "_const.py")
    log.info("Writing lookup configuration to %s" % path)
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        f.write("INSTALL_PREFIX=%r\n" % install_prefix)
        f.write("DATA_PATH=%r\n" %
                os.path.join(install_prefix, DATA_INSTALL_OFFSET))
        f.write("CONFIG_PATH=%r\n" %
                os.path.join(install_prefix, CONFIG_INSTALL_OFFSET))
        f.write("EBD_PATH=%r\n" %
                os.path.join(install_prefix, EBD_INSTALL_OFFSET))
        # This is added to suppress the default behaviour of looking
        # within the repo for a bin subdir.
        f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path),))
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
def byte_compile(self, files):
    """Byte-compile *files* with filenames recorded relative to build_lib."""
    from distutils.util import byte_compile
    lib_prefix = self.build_lib
    if lib_prefix[-1] != os.sep:
        lib_prefix += os.sep
    # XXX this code is essentially the same as the 'byte_compile()
    # method of the "install_lib" command, except for the determination
    # of the 'prefix' string. Hmmm.
    common = dict(force=self.force, prefix=lib_prefix, dry_run=self.dry_run)
    if self.compile:
        byte_compile(files, optimize=0, **common)
    if self.optimize > 0:
        byte_compile(files, optimize=self.optimize, **common)
def run(self):
    """install_data hook: install normally, then byte-compile any .py
    files that were shipped as data files. Python 2 code (print
    statements)."""
    # Do the normal install steps
    install_data.run(self)
    if self.root is None:
        self.root = ''
    # Byte compile any python files that were installed as data files
    for path, fnames in data_files:
        for fname in fnames:
            if fname.endswith(".py"):
                full = os.path.join(self.root + sys.prefix, path, fname)
                print "byte-compiling %s" % full
                # prefix/base_dir make the recorded filename relative to
                # the final (non --root) location.
                byte_compile([full], prefix=self.root, base_dir=sys.prefix)
def optimize_modules(_):
    """Byte-compile the blog's static modules at -OO and rename the
    resulting bytecode files to .pyc.

    Fixes: the final rename used a list comprehension purely for its
    side effects; replaced with a plain loop. byte_compile's positional
    ``2, True`` arguments are now explicit keywords.
    """
    from sys import version_info
    from distutils.util import byte_compile
    os.chdir(PATH_JOIN(os.getcwd(), 'static', 'modules'))
    py_f = ['blogfy.py', 'templates.py', 'functions.py',
            'strings_to_format.py']
    byte_compile(py_f, optimize=2, force=True)
    if version_info[0] == 3:
        # Python 3 puts bytecode in __pycache__; strip the trailing 'o'.
        os.chdir('__pycache__')
        pyo_f = os.listdir(os.getcwd())
        pyc_f = [x[:-1] + 'c' for x in pyo_f]
    else:
        # Python 2 writes .pyo next to the source.
        pyo_f = [x + 'o' for x in py_f]
        pyc_f = [x + 'c' for x in py_f]
    for src, dst in zip(pyo_f, pyc_f):
        os.rename(src, dst)
def write_pkgcore_lookup_configs(python_base, data_path, injected_bin_path=()):
    """Write pkgcore/_const.py with baked-in data/config paths, then
    byte-compile it (plain and -OO)."""
    path = os.path.join(python_base, "pkgcore", "_const.py")
    log.info("Writing lookup configuration to %s" % path)
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        f.write("DATA_PATH=%r\n" % data_path)
        f.write("EBD_PATH=%r\n" %
                os.path.join(data_path, EBD_DATA_INSTALL_OFFSET))
        f.write("CONFIG_PATH=%r\n" %
                os.path.join(data_path, CONFIG_DATA_INSTALL_OFFSET))
        # This is added to suppress the default behaviour of looking
        # within the repo for a bin subdir.
        f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path),))
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
def write_stub(self, output_dir, ext, compile=False):
    """Write a .py stub that bootstraps the real extension module via
    pkg_resources + importlib's ExtensionFileLoader; optionally
    byte-compile the stub and delete the .py source."""
    log.info("writing stub loader for %s to %s", ext._full_name, output_dir)
    stub_file = os.path.join(output_dir, * ext._full_name.split(".")) + ".py"
    if compile and os.path.exists(stub_file):
        raise DistutilsError(stub_file + " already exists! Please delete.")
    if not self.dry_run:
        f = open(stub_file, "w")
        # NOTE(review): leading whitespace of these template lines looks
        # collapsed by the paste; confirm against upstream setuptools.
        f.write("\n".join([
            "def __bootstrap__():",
            " global __bootstrap__, __file__, __loader__",
            " import sys, os, pkg_resources" + if_dl(", dl"),
            " from importlib.machinery import ExtensionFileLoader",
            " __file__ = pkg_resources.resource_filename"
            "(__name__,%r)" % os.path.basename(ext._file_name),
            " del __bootstrap__",
            " if '__loader__' in globals():",
            " del __loader__",
            if_dl(" old_flags = sys.getdlopenflags()"),
            " old_dir = os.getcwd()",
            " try:",
            " os.chdir(os.path.dirname(__file__))",
            if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
            " ExtensionFileLoader(__name__,",
            " __file__).load_module()",
            " finally:",
            if_dl(" sys.setdlopenflags(old_flags)"),
            " os.chdir(old_dir)",
            "__bootstrap__()",
            "",  # terminal \n
        ]))
        f.close()
    if compile:
        from distutils.util import byte_compile
        byte_compile([stub_file], optimize=0,
                     force=True, dry_run=self.dry_run)
        optimize = self.get_finalized_command("install_lib").optimize
        if optimize > 0:
            byte_compile([stub_file], optimize=optimize,
                         force=True, dry_run=self.dry_run)
        # Ship only bytecode; remove the .py stub source.
        if os.path.exists(stub_file) and not self.dry_run:
            os.unlink(stub_file)
def optimize_modules(_):
    """Byte-compile the blog's static modules at -OO and rename the
    resulting bytecode files to .pyc.

    Fixes: the final rename used a list comprehension purely for its
    side effects; replaced with a plain loop. byte_compile's positional
    ``2, True`` arguments are now explicit keywords.
    """
    from sys import version_info
    from distutils.util import byte_compile
    os.chdir(PATH_JOIN(os.getcwd(), 'static', 'modules'))
    py_f = [
        'blogfy.py',
        'templates.py',
        'functions.py',
        'strings_to_format.py'
    ]
    byte_compile(py_f, optimize=2, force=True)
    if version_info[0] == 3:
        # Python 3 puts bytecode in __pycache__; strip the trailing 'o'.
        os.chdir('__pycache__')
        pyo_f = os.listdir(os.getcwd())
        pyc_f = [x[:-1] + 'c' for x in pyo_f]
    else:
        # Python 2 writes .pyo next to the source.
        pyo_f = [x + 'o' for x in py_f]
        pyc_f = [x + 'c' for x in py_f]
    for src, dst in zip(pyo_f, pyc_f):
        os.rename(src, dst)
def byte_compile (self, files):
    """Byte-compile *files*, stripping the install --root prefix.

    Fix: added the ``sys.dont_write_bytecode`` guard that the sibling
    variants of this method already have, so ``python -B`` /
    PYTHONDONTWRITEBYTECODE is honored instead of attempting (and
    possibly failing) to write bytecode.
    """
    if sys.dont_write_bytecode:
        self.warn('byte-compiling is disabled, skipping.')
        return
    from distutils.util import byte_compile
    # Get the "--root" directory supplied to the "install" command,
    # and use it as a prefix to strip off the purported filename
    # encoded in bytecode files. This is far from complete, but it
    # should at least generate usable bytecode in RPM distributions.
    install_root = self.get_finalized_command('install').root
    if self.compile:
        byte_compile(files, optimize=0,
                     force=self.force, prefix=install_root,
                     dry_run=self.dry_run)
    if self.optimize > 0:
        byte_compile(files, optimize=self.optimize,
                     force=self.force, prefix=install_root,
                     verbose=self.verbose, dry_run=self.dry_run)
def write_stub(self, output_dir, ext, compile=False):
    """Write a .py stub that bootstraps the real extension module.

    Fix: the template embedded ``self.get_ext_fullpath(ext.name)`` -- a
    build-tree path -- as the argument to
    ``pkg_resources.resource_filename``, which expects a name relative
    to the package. Use ``os.path.basename(ext._file_name)`` as the
    other write_stub variants (and upstream setuptools) do.
    """
    log.info("writing stub loader for %s to %s", ext._full_name, output_dir)
    stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + '.py')
    if compile and os.path.exists(stub_file):
        raise DistutilsError(stub_file + " already exists! Please delete.")
    if not self.dry_run:
        f = open(stub_file, 'w')
        # NOTE(review): leading whitespace of these template lines looks
        # collapsed by the paste; confirm against upstream setuptools.
        f.write('\n'.join([
            "def __bootstrap__():",
            " global __bootstrap__, __file__, __loader__",
            " import sys, os, pkg_resources, imp" + if_dl(", dl"),
            " __file__ = pkg_resources.resource_filename"
            "(__name__,%r)" % os.path.basename(ext._file_name),
            " del __bootstrap__",
            " if '__loader__' in globals():",
            " del __loader__",
            if_dl(" old_flags = sys.getdlopenflags()"),
            " old_dir = os.getcwd()",
            " try:",
            " os.chdir(os.path.dirname(__file__))",
            if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
            " imp.load_dynamic(__name__,__file__)",
            " finally:",
            if_dl(" sys.setdlopenflags(old_flags)"),
            " os.chdir(old_dir)",
            "__bootstrap__()",
            ""  # terminal \n
        ]))
        f.close()
    if compile:
        from distutils.util import byte_compile
        byte_compile([stub_file], optimize=0,
                     force=True, dry_run=self.dry_run)
        optimize = self.get_finalized_command('install_lib').optimize
        if optimize > 0:
            byte_compile([stub_file], optimize=optimize,
                         force=True, dry_run=self.dry_run)
        # Ship only bytecode; remove the .py stub source.
        if os.path.exists(stub_file) and not self.dry_run:
            os.unlink(stub_file)
def write_stub(self, output_dir, ext, compile=False):
    """Write a .py stub that bootstraps the real extension module via
    pkg_resources + imp.load_dynamic; optionally byte-compile the stub
    and delete the .py source afterwards."""
    log.info("writing stub loader for %s to %s", ext._full_name, output_dir)
    stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + '.py')
    if compile and os.path.exists(stub_file):
        raise DistutilsError(stub_file + " already exists! Please delete.")
    if not self.dry_run:
        f = open(stub_file, 'w')
        # NOTE(review): leading whitespace of these template lines looks
        # collapsed by the paste; confirm against upstream setuptools.
        f.write(
            '\n'.join([
                "def __bootstrap__():",
                " global __bootstrap__, __file__, __loader__",
                " import sys, os, pkg_resources, imp" + if_dl(", dl"),
                " __file__ = pkg_resources.resource_filename"
                "(__name__,%r)" % os.path.basename(ext._file_name),
                " del __bootstrap__",
                " if '__loader__' in globals():",
                " del __loader__",
                if_dl(" old_flags = sys.getdlopenflags()"),
                " old_dir = os.getcwd()",
                " try:",
                " os.chdir(os.path.dirname(__file__))",
                if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
                " imp.load_dynamic(__name__,__file__)",
                " finally:",
                if_dl(" sys.setdlopenflags(old_flags)"),
                " os.chdir(old_dir)",
                "__bootstrap__()",
                ""  # terminal \n
            ])
        )
        f.close()
    if compile:
        from distutils.util import byte_compile
        byte_compile([stub_file], optimize=0,
                     force=True, dry_run=self.dry_run)
        optimize = self.get_finalized_command('install_lib').optimize
        if optimize > 0:
            byte_compile([stub_file], optimize=optimize,
                         force=True, dry_run=self.dry_run)
        # Ship only bytecode; remove the .py stub source.
        if os.path.exists(stub_file) and not self.dry_run:
            os.unlink(stub_file)
def write_pkgcore_lookup_configs(python_base, install_prefix, injected_bin_path=()):
    """Generate file of install path constants."""
    path = os.path.join(python_base, "pkgcore", "_const.py")
    log.info("writing lookup config to %r" % path)
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        f.write("INSTALL_PREFIX=%r\n" % install_prefix)
        f.write("DATA_PATH=%r\n" %
                os.path.join(install_prefix, DATA_INSTALL_OFFSET))
        f.write("CONFIG_PATH=%r\n" %
                os.path.join(install_prefix, CONFIG_INSTALL_OFFSET))
        f.write("LIBDIR_PATH=%r\n" %
                os.path.join(install_prefix, LIBDIR_INSTALL_OFFSET))
        f.write("EBD_PATH=%r\n" %
                os.path.join(install_prefix, EBD_INSTALL_OFFSET))
        # This is added to suppress the default behaviour of looking
        # within the repo for a bin subdir.
        f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path), ))
        # Static paths for various utilities.
        from snakeoil import process
        # bash and cp are hard requirements; abort the build if absent.
        required_progs = ('bash', 'cp')
        try:
            for prog in required_progs:
                prog_path = process.find_binary(prog)
                f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))
        except process.CommandNotFound:
            raise DistutilsExecError(
                "generating lookup config failed: required utility %r missing from PATH"
                % (prog, ))
        # sandbox is optional; record an empty path when missing.
        extra_progs = ('sandbox', )
        for prog in extra_progs:
            try:
                prog_path = process.find_binary(prog)
            except process.CommandNotFound:
                prog_path = ''
            f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
def write_lookup_config(python_base, install_prefix):
    """Generate file of install path constants."""
    path = os.path.join(python_base, pkgdist.MODULE, "_const.py")
    os.makedirs(os.path.dirname(path), exist_ok=True)
    log.info("writing lookup config to %r" % path)
    # Import the package being built to snapshot its registries.
    with pkgdist.syspath(pkgdist.PACKAGEDIR):
        from bite import const
        clients = tuple(sorted(const.CLIENTS.items()))
        services = tuple(sorted(const.SERVICES.items()))
        service_opts = tuple(sorted(const.SERVICE_OPTS.items()))
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        # write more dynamic file for wheel installs
        if install_prefix != os.path.abspath(sys.prefix):
            f.write(dedent(f"""\
                import os.path as osp
                import sys
                INSTALL_PREFIX = osp.abspath(sys.prefix)
                DATA_PATH = osp.join(INSTALL_PREFIX, {DATA_INSTALL_OFFSET!r})
                CONFIG_PATH = osp.join(INSTALL_PREFIX, {CONFIG_INSTALL_OFFSET!r})
                CLIENTS = {clients}
                SERVICES = {services}
                SERVICE_OPTS = {service_opts}
            """))
        else:
            data_path = os.path.join(install_prefix, DATA_INSTALL_OFFSET)
            config_path = os.path.join(install_prefix, CONFIG_INSTALL_OFFSET)
            f.write(dedent(f"""\
                INSTALL_PREFIX = {install_prefix!r}
                DATA_PATH = {data_path!r}
                CONFIG_PATH = {config_path!r}
                CLIENTS = {clients!r}
                SERVICES = {services!r}
                SERVICE_OPTS = {service_opts!r}
            """))
        # NOTE(review): redundant inside the with block -- the context
        # manager closes the file anyway; confirm and drop.
        f.close()
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
def generate_uml2(force=False):
    """
    Generate gaphor/UML/uml2.py in the source directory.
    """
    uml_dir = os.path.join("gaphor", "UML")
    gen = os.path.join("utils", "model", "gen_uml.py")
    overrides = os.path.join(uml_dir, "uml2.override")
    model = os.path.join(uml_dir, "uml2.gaphor")
    py_model = os.path.join(uml_dir, "uml2.py")
    outfile = py_model
    mkpath(os.path.dirname(outfile))
    # Regenerate when forced or when any input is newer than the output.
    stale = force or any(newer(dep, outfile)
                         for dep in (model, overrides, gen))
    if stale:
        print(f"generating {py_model} from {model}...")
        print(" (warnings can be ignored)")
        gen_uml.generate(model, outfile, overrides)
    else:
        print(f"not generating {py_model} (up-to-date)")
    byte_compile([outfile])
def generate_uml2(self): """ Generate gaphor/UML/uml2.py in the build directory. """ gen = os.path.join('utils', 'command', 'gen_uml.py') overrides = os.path.join('gaphor', 'UML', 'uml2.override') model = os.path.join('gaphor', 'UML', 'uml2.gaphor') py_model = os.path.join('gaphor', 'UML', 'uml2.py') outfile = py_model #os.path.join(self.build_lib, py_model) self.mkpath(os.path.dirname(outfile)) if self.force or newer(model, outfile) \ or newer(overrides, outfile) \ or newer(gen, outfile): print('generating %s from %s...' % (py_model, model)) print(' (warnings can be ignored)') from . import gen_uml gen_uml.generate(model, outfile, overrides) else: print('not generating %s (up-to-date)' % py_model) byte_compile([outfile])
def generate_uml2(self): """ Generate gaphor/UML/uml2.py in the build directory. """ gen = os.path.join('utils', 'command', 'gen_uml.py') overrides = os.path.join('gaphor', 'UML', 'uml2.override') model = os.path.join('gaphor', 'UML', 'uml2.gaphor') py_model = os.path.join('gaphor', 'UML', 'uml2.py') outfile = py_model #os.path.join(self.build_lib, py_model) self.mkpath(os.path.dirname(outfile)) if self.force or newer(model, outfile) \ or newer(overrides, outfile) \ or newer(gen, outfile): print 'generating %s from %s...' % (py_model, model) print ' (warnings can be ignored)' import gen_uml gen_uml.generate(model, outfile, overrides) else: print 'not generating %s (up-to-date)' % py_model byte_compile([outfile])
def run(self):
    """install_data hook: install normally, then byte-compile .py data
    files unless bytecode writing is disabled. Python 2 code (print
    statements, comma except syntax)."""
    # Do the normal install steps
    install_data.run(self)
    if self.root is None:
        self.root = ''
    # hasattr guard: sys.dont_write_bytecode appeared in Python 2.6.
    if hasattr(sys, "dont_write_bytecode") and sys.dont_write_bytecode:
        print "byte-compiling disabled"
    else:
        # Byte compile any python files that were installed as data files
        for path, fnames in data_files:
            for fname in fnames:
                if fname.endswith(".py"):
                    full = os.path.join(self.root + sys.prefix, path, fname)
                    print "byte-compiling %s" % full
                    # Best-effort: a failed compile must not abort install.
                    try:
                        byte_compile([full], prefix=self.root,
                                     base_dir=sys.prefix)
                    except Exception, e:
                        print "Byte-compile failed: " + str(e)
def byte_compile_scheme(self, scheme):
    """Byte-compile every file belonging to *scheme*, stripping the
    scheme's install directory from the recorded filenames."""
    files, common, prefix = self.get_scheme_root(scheme)
    scheme_root = self.get_scheme_dir(scheme)
    shared = dict(force=self.force, prefix=scheme_root,
                  dry_run=self.dry_run)
    # Plain bytecode always; optimized bytecode only when requested.
    byte_compile(files, optimize=0, **shared)
    if self.optimize > 0:
        byte_compile(files, optimize=self.optimize,
                     verbose=self.verbose, **shared)
def byte_compile (self, files):
    """Byte-compile *files*, recording filenames relative to build_lib."""
    from distutils.util import byte_compile
    lib_prefix = self.build_lib
    if lib_prefix[-1] != os.sep:
        lib_prefix += os.sep
    # XXX this code is essentially the same as the 'byte_compile()
    # method of the "install_lib" command, except for the determination
    # of the 'prefix' string. Hmmm.
    for level, wanted in ((0, self.compile),
                          (self.optimize, self.optimize > 0)):
        if wanted:
            byte_compile(files, optimize=level, force=self.force,
                         prefix=lib_prefix, verbose=self.verbose,
                         dry_run=self.dry_run)
def run(self):
    """Byte-compile .py data files, register the generated .pyc files,
    and install everything into the python library directory.

    Fixes: the suffix test was ``j[-2:] == "py"``, which also matched
    names like ``"happy"``; now uses ``endswith(".py")``. The inner
    loop also appended to the very list it was iterating; it now
    iterates over a snapshot.
    """
    # Byte-compile Python files.
    scripts = []
    for entry in self.data_files:
        # iterate a snapshot: we append generated .pyc names below
        for fname in list(entry[1]):
            if fname.endswith(".py"):
                scripts.append(fname)
                entry[1].append(fname + 'c')
    byte_compile(scripts)
    # Change install dir to library dir.
    install_cmd = self.get_finalized_command('install')
    self.install_dir = getattr(install_cmd, 'install_lib')
    return install_data.run(self)
def generate_uml2(self): """ Generate gaphor/UML/uml2.py in the build directory. """ gen = os.path.join("utils", "command", "gen_uml.py") overrides = os.path.join("gaphor", "UML", "uml2.override") model = os.path.join("gaphor", "UML", "uml2.gaphor") py_model = os.path.join("gaphor", "UML", "uml2.py") outfile = py_model # os.path.join(self.build_lib, py_model) self.mkpath(os.path.dirname(outfile)) if (self.force or newer(model, outfile) or newer(overrides, outfile) or newer(gen, outfile)): print("generating %s from %s..." % (py_model, model)) print(" (warnings can be ignored)") from . import gen_uml gen_uml.generate(model, outfile, overrides) else: print("not generating %s (up-to-date)" % py_model) byte_compile([outfile])
def write_pkgcore_lookup_configs(python_base, install_prefix, injected_bin_path=()):
    """Generate file of install path constants."""
    path = os.path.join(python_base, "pkgcore", "_const.py")
    log.info("writing lookup config to %r" % path)
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        f.write("INSTALL_PREFIX=%r\n" % install_prefix)
        f.write("DATA_PATH=%r\n" %
                os.path.join(install_prefix, DATA_INSTALL_OFFSET))
        f.write("CONFIG_PATH=%r\n" %
                os.path.join(install_prefix, CONFIG_INSTALL_OFFSET))
        f.write("LIBDIR_PATH=%r\n" %
                os.path.join(install_prefix, LIBDIR_INSTALL_OFFSET))
        f.write("EBD_PATH=%r\n" %
                os.path.join(install_prefix, EBD_INSTALL_OFFSET))
        # This is added to suppress the default behaviour of looking
        # within the repo for a bin subdir.
        f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path),))
        # Static paths for various utilities.
        from snakeoil import process
        # bash and cp are hard requirements; abort the build if absent.
        required_progs = ('bash', 'cp')
        try:
            for prog in required_progs:
                prog_path = process.find_binary(prog)
                f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))
        except process.CommandNotFound:
            raise DistutilsExecError(
                "generating lookup config failed: required utility %r missing from PATH"
                % (prog,))
        # sandbox is optional; record an empty path when missing.
        extra_progs = ('sandbox',)
        for prog in extra_progs:
            try:
                prog_path = process.find_binary(prog)
            except process.CommandNotFound:
                prog_path = ''
            f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
def run(self):
    """Copy the configured appdata items and byte-compile the results."""
    self.mkpath(self.install_dir)  # used for byte compiling
    outputs = []
    for entry in self.appdata:
        if not isinstance(entry, basestring):
            # Tuple-like entry: (target-dir, [files-or-dirs]); target
            # directory is relative to install_dir (or to root when set).
            dest = entry[0]
            if self.root:
                dest = util.change_root(self.root, dest)
            else:
                dest = os.path.join(self.install_dir, dest)
            for source in entry[1]:
                if os.path.isdir(source):
                    outputs.extend(self.copy_tree(source, dest))
                elif os.path.isfile(source):
                    copied_name, _ = self.copy_file(source, dest)
                    outputs.append(copied_name)
                else:
                    self.warn('Unable to find %s...' % source)
            continue
        # Plain string entry goes straight into the installation directory.
        if os.path.isfile(entry):
            copied_name, _ = self.copy_file(entry, self.install_dir)
            outputs.append(copied_name)
        elif os.path.isdir(entry):
            outputs.extend(self.copy_tree(entry, os.path.join(self.install_dir, entry)))
        else:
            self.warn('Unable to find %s...' % entry)
    # byte compilation
    util.byte_compile(outputs, optimize=0, force=True, dry_run=self.dry_run)
def write_obj_lists(python_base, install_prefix):
    """Generate config file of keyword, check, and other object lists.

    Writes <python_base>/<MODULE_NAME>/_const.py containing import lines
    plus KEYWORDS/CHECKS/REPORTERS tuples, then byte-compiles it when
    installing into the running interpreter's prefix.
    """
    path = os.path.join(python_base, pkgdist.MODULE_NAME, "_const.py")
    os.makedirs(os.path.dirname(path), exist_ok=True)
    log.info(f'writing config to {path!r}')

    # hack to drop quotes on modules in generated files
    class _kls(object):
        def __init__(self, module):
            self.module = module
        def __repr__(self):
            # repr() without quotes so the generated file gets a bare name
            return self.module

    # syspath is presumably a temporary sys.path addition so the in-tree
    # package can be imported during the build — TODO confirm.
    with pkgdist.syspath(pkgdist.PACKAGEDIR):
        from pkgcheck import const

    modules = defaultdict(set)   # parent package -> set of module names
    objs = defaultdict(list)     # registry name -> [(name, _kls), ...]
    for obj in ('KEYWORDS', 'CHECKS', 'REPORTERS'):
        for name, cls in getattr(const, obj).items():
            parent, module = cls.__module__.rsplit('.', 1)
            modules[parent].add(module)
            objs[obj].append((name, _kls(f'{module}.{name}')))

    keywords = tuple(objs['KEYWORDS'])
    checks = tuple(objs['CHECKS'])
    reporters = tuple(objs['REPORTERS'])

    with open(path, 'w') as f:
        os.chmod(path, 0o644)
        # emit deterministic import lines for every referenced module
        for k, v in sorted(modules.items()):
            f.write(f"from {k} import {', '.join(sorted(v))}\n")
        f.write(
            dedent(f"""\
                KEYWORDS = {keywords}
                CHECKS = {checks}
                REPORTERS = {reporters}
            """))
        # write install path constants to config
        if install_prefix != os.path.abspath(sys.prefix):
            # write more dynamic _const file for wheel installs
            f.write(
                dedent("""\
                    import os.path as osp
                    import sys
                    INSTALL_PREFIX = osp.abspath(sys.prefix)
                    DATA_PATH = osp.join(INSTALL_PREFIX, {!r})
                """.format(DATA_INSTALL_OFFSET)))
        else:
            f.write("INSTALL_PREFIX=%r\n" % install_prefix)
            f.write("DATA_PATH=%r\n" % os.path.join(install_prefix, DATA_INSTALL_OFFSET))

    # only optimize during install, skip during wheel builds
    if install_prefix == os.path.abspath(sys.prefix):
        byte_compile([path], prefix=python_base)
        byte_compile([path], optimize=1, prefix=python_base)
        byte_compile([path], optimize=2, prefix=python_base)
def __init__(self):
    # Rebuilds modules.zip from modules-orig.zip: extracts the sources,
    # byte-compiles them with optimize=2, packs the .pyo files into the
    # archive, removes the intermediates, then moves the archive into the
    # parent directory.  NOTE(review): heavy work for an __init__; assumes
    # argv[1] is an integer flag — TODO confirm caller contract.
    ms = 'modules.zip'                 # archive to produce
    cwd = os.getcwd()
    mso = 'modules-orig.zip'           # pristine source archive
    ver = version_info[0]              # major Python version (bytecode layout differs)
    pat = os.path
    # expected archive members; the last entry stays a plain .py on purpose
    pyo_f = ('blogfy.pyo', 'post_funcs.pyo', 'templates.pyo',
             'strings_to_format.pyo', 'templates_not_included.py')
    pcache = '__pycache__'
    if int(argv[1]) != 1:
        # not a rebuild run: ship the pristine archive unchanged
        copy(mso, ms)
    else:
        with ZipFile(mso, 'r') as ziP:
            ziP.extractall()
        # derive source names foo.py from the foo.pyo targets
        py_f = [x[:-1] for x in pyo_f[:-1]]
        byte_compile(py_f, 2, True)  # positional args: optimize=2, force=True
        if ver == 3:
            # Python 3 writes bytecode under __pycache__ with long names
            # (e.g. foo.cpython-XX.opt-2.pyc); rename each back to foo.pyo.
            long_names = os.listdir(pcache)
            a = [pat.splitext(x)[0].split('.')[0] for x in long_names]
            b = ('.pyo '.join(a) + '.pyo').split()
            for x in long_names:
                move(pat.join(cwd, pcache, x), pat.join(cwd, b[0]))
                # consume the matching short name so b[0] stays paired with x
                del b[0]
        with ZipFile(ms, 'a') as z:
            # append the four compiled modules (side-effect comprehension)
            [z.write(pyo_f[x]) for x in range(4)]
        cwd_files = os.listdir(cwd)
        # clean up compiled and source intermediates
        [os.remove(x) for x in pyo_f if x in cwd_files]
        [os.remove(x) for x in py_f]
        if pat.exists(pcache):
            rmtree(pcache)
    # publish the archive one directory level up
    move(ms, pat.join(pat.split(cwd)[0], ms))
def __init__(self):
    """Rebuild modules.zip: byte-compile sources with -OO and repack."""
    archive = 'modules.zip'
    workdir = os.getcwd()
    pristine = 'modules-orig.zip'
    major = version_info[0]
    pat = os.path
    # expected archive members; the last one deliberately stays plain .py
    member_names = ('blogfy.pyo', 'post_funcs.pyo', 'templates.pyo',
                    'strings_to_format.pyo', 'templates_not_included.py')
    cache_dir = '__pycache__'
    if int(argv[1]) != 1:
        # Not a rebuild run: ship the pristine archive unchanged.
        copy(pristine, archive)
    else:
        with ZipFile(pristine, 'r') as source_zip:
            source_zip.extractall()
        # Derive the source names foo.py from the foo.pyo targets.
        sources = [name[:-1] for name in member_names[:-1]]
        byte_compile(sources, 2, True)  # optimize=2, force
        if major == 3:
            # Python 3 drops bytecode in __pycache__ under long names
            # (foo.cpython-XX.opt-2.pyc); rename each back to foo.pyo.
            cached = os.listdir(cache_dir)
            stems = [pat.splitext(name)[0].split('.')[0] for name in cached]
            for cached_name, stem in zip(cached, stems):
                move(pat.join(workdir, cache_dir, cached_name),
                     pat.join(workdir, '%s.pyo' % stem))
        with ZipFile(archive, 'a') as out_zip:
            for name in member_names[:4]:
                out_zip.write(name)
        # Remove compiled and source intermediates from the work dir.
        present = os.listdir(workdir)
        for name in member_names:
            if name in present:
                os.remove(name)
        for name in sources:
            os.remove(name)
        if pat.exists(cache_dir):
            rmtree(cache_dir)
    # Publish the archive one directory level up.
    move(archive, pat.join(pat.split(workdir)[0], archive))
def run(self):
    """Install data files, then byte-compile the installed .py files."""
    from distutils.util import byte_compile
    install_data.run(self)
    # NOTE(review): looked up but not used below — kept for parity.
    py2exe = self.distribution.get_command_obj('py2exe', False)
    # GZ 2010-04-19: Setup has py2exe.optimize as 2, but give plugins
    # time before living with docstring stripping
    optimize = 1
    to_compile = [name for name in self.outfiles if name.endswith('.py')]
    # Round mtime to nearest even second so that installing on a FAT
    # filesystem bytecode internal and script timestamps will match
    for name in to_compile:
        stamp = os.stat(name).st_mtime
        leftover = stamp % 2
        if leftover:
            stamp -= leftover
            os.utime(name, (stamp, stamp))
    byte_compile(to_compile, optimize=optimize, force=self.force,
                 prefix=self.install_dir, dry_run=self.dry_run)
    # Record the .pyo files produced at optimize level 1.
    self.outfiles.extend([name + 'o' for name in to_compile])
def generate_uml2(self):
    """Generate gaphor/UML/uml2.py in the build directory."""
    generator = os.path.join("utils", "command", "gen_uml.py")
    overrides_path = os.path.join("gaphor", "UML", "uml2.override")
    model_path = os.path.join("gaphor", "UML", "uml2.gaphor")
    py_model = os.path.join("gaphor", "UML", "uml2.py")
    # Write next to the model sources rather than into build_lib.
    outfile = py_model
    self.mkpath(os.path.dirname(outfile))
    needs_rebuild = (
        self.force
        or newer(model_path, outfile)
        or newer(overrides_path, outfile)
        or newer(generator, outfile)
    )
    if not needs_rebuild:
        print("not generating %s (up-to-date)" % py_model)
    else:
        print("generating %s from %s..." % (py_model, model_path))
        print(" (warnings can be ignored)")
        from . import gen_uml
        gen_uml.generate(model_path, outfile, overrides_path)
    byte_compile([outfile])
def run(self):
    """Install, then rewrite and recompile qm/config.py in place."""
    # Do the standard installation.
    base.run(self)
    config_file = join(self.install_dir, 'qm', 'config.py')
    self.announce("adjusting config parameters")
    install_cmd = self.distribution.get_command_obj('install')
    # Extension directory is versioned by the running interpreter.
    site_extensions = join('share', 'qmtest',
                           'site-extensions-%d.%d' % sys.version_info[:2])
    reset_config_variables(config_file,
                           version=self.distribution.get_version(),
                           prefix=install_cmd.prefix,
                           extension_path=site_extensions)
    # Make sure the new config file gets recompiled, or else python may
    # not notice it is in fact different from the original config file.
    from distutils.util import byte_compile
    to_compile = [config_file]
    install_root = self.get_finalized_command('install').root
    if self.compile:
        byte_compile(to_compile, optimize=0, force=1,
                     prefix=install_root, dry_run=self.dry_run)
    if self.optimize > 0:
        byte_compile(to_compile, optimize=self.optimize, force=1,
                     prefix=install_root, verbose=self.verbose,
                     dry_run=self.dry_run)
"""distutils.command.build_py
# created 1999/03/13, Greg Ward
if (rx.search(root) == None) and (root.find(nbproject_token) == -1): d = {} _files = [os.sep.join([root, f]) for f in files] for f in _files: d[os.path.basename(f)] = f for f in ignore: try: if d.has_key(f): del d[f] except Exception, details: print "WARNING in ignoring files due to (%s)" % (str(details)) _files = d.values() py_files = [f for f in _files if f.endswith(".py")] if root != top: print "Compiling (%s) %s" % (root, py_files) util.byte_compile(py_files, optimize=2, force=1) if root.find(bridge_token) == -1: for f in _files: if rxZip.search(f) == None: print "ZIP Adding (%s) to (%s)" % (f, zipName) f_base = f.replace(".pyo", ".pyc").replace(top, "") _f_base = f_base.split(os.sep)[-1] if _f_base in edit_files: if f_base.startswith(os.sep): f_base = f_base[1:] editFile(zip, f, f_base) else: zip.write(f, f_base) elif root.find(bridge_token) > -1: py_files = [f for f in _files if f.endswith(".pyo")] if len(py_files) == 0:
" os.chdir(os.path.dirname(__file__))", if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), " imp.load_dynamic(__name__,__file__)", " finally:", if_dl(" sys.setdlopenflags(old_flags)"), " os.chdir(old_dir)", "__bootstrap__()", "" # terminal \n ]) ) f.close() if compile: from distutils.util import byte_compile >>>>>>> 54eef0be98b1b67c8507db91f4cfa90b64991027 byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run) optimize = self.get_finalized_command('install_lib').optimize if optimize > 0: byte_compile([stub_file], optimize=optimize, force=True, dry_run=self.dry_run) if os.path.exists(stub_file) and not self.dry_run: os.unlink(stub_file) <<<<<<< HEAD if use_stubs or os.name=='nt': # Build shared libraries # def link_shared_object(self, objects, output_libname, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None,
for dirpath, dirname, filenames in walkedFiles: for filename in filenames: if not filename.endswith(".py"): continue src = os.path.join(dirpath, filename) ziptarget = os.path.join(dirpath[4:], filename) filesToZip.append((src, ziptarget)) filesToZip.append(("sql_mar.py", "sql_mar.py")) filesToCompile = [f[0] for f in filesToZip] print "Compiling scripts..." byte_compile(filesToCompile, optimize=0, force=1) byte_compile(filesToCompile, optimize=2, force=1) zfilefull = zipfile.ZipFile("gadfly.zip", "w", zipfile.ZIP_DEFLATED) zfilesmall = zipfile.ZipFile("gadfly_small.zip", "w", zipfile.ZIP_DEFLATED) print "Compressing files..." for src, ziptarget in filesToZip: zfilefull.write(src, ziptarget) if src.endswith(".py"): zfilefull.write(src+"c", ziptarget+"c") zfilefull.write(src+"o", ziptarget+"o") zfilesmall.write(src+"o", ziptarget+"o")
def run(self):
    """Run the distutils command.

    Builds a commercial distribution: installs the library into a
    scratch dir, strips GPL headers, rewrites the license constant,
    optionally byte-compiles and drops the sources, then assembles the
    final distribution tree with docs and a generated setup.py.
    """
    log.info("installing library code to %s" % self.bdist_dir)
    dist_name = self.distribution.get_fullname()
    self.dist_target = os.path.join(self.dist_dir, dist_name)
    log.info("distribution will be available as '%s'" % self.dist_target)
    # build command: just to get the build_base
    cmdbuild = self.get_finalized_command("build")
    self.build_base = cmdbuild.build_base
    # install command
    install = self.reinitialize_command('install_lib', reinit_subcommands=1)
    install.compile = False     # compilation is handled explicitly below
    install.warn_dir = 0
    install.install_dir = self.bdist_dir
    log.info("installing to %s" % self.bdist_dir)
    self.run_command('install_lib')
    # install extra files
    # NOTE(review): mapping is empty, so this loop is currently a no-op.
    extra_files = { }
    for src, dest in extra_files.items():
        self.copy_file(src, dest)
    # install_egg_info command
    cmd_egginfo = self.get_finalized_command('install_egg_info')
    cmd_egginfo.install_dir = self.bdist_dir
    self.run_command('install_egg_info')
    installed_files = install.get_outputs()
    # remove the GPL license
    # files whose headers must stay untouched
    ignore = [
        os.path.join(self.bdist_dir, os.path.normcase('mysql/__init__.py')),
        os.path.join(self.bdist_dir, 'mysql', 'connector', 'locales', 'eng',
                     '__init__.py'),
        cmd_egginfo.target,
    ]
    django_backend = os.path.join('connector', 'django')
    for pyfile in installed_files:
        # NOTE(review): substring test — also matches .pyc and any path
        # containing '.py'; presumably intentional, confirm.
        if '.py' not in pyfile:
            continue
        if pyfile not in ignore and django_backend not in pyfile:
            commercial.remove_gpl(pyfile, dry_run=self.dry_run)
    log.info("setting license information in version.py")
    loc_version_py = os.path.join(
        self.bdist_dir, os.path.normcase('mysql/connector/version.py')
    )
    # rewrite the LICENSE constant in place
    version_py = open(loc_version_py, 'r').readlines()
    for (nr, line) in enumerate(version_py):
        if line.startswith('LICENSE'):
            version_py[nr] = 'LICENSE = "Commercial"\n'
    fp = open(loc_version_py, 'w')
    fp.write(''.join(version_py))
    fp.close()
    # compile and remove sources
    if not self.include_sources:
        util.byte_compile(installed_files, optimize=0, force=True,
                          prefix=install.install_dir)
        self._remove_sources()
        if get_python_version().startswith('3'):
            # Python 3 places bytecode in __pycache__; hoist it back up
            log.info("copying byte code from __pycache__")
            self._copy_from_pycache(os.path.join(self.bdist_dir, 'mysql'))
    # create distribution
    info_files = [
        ('cpyint/data/commercial/README_COM.txt', 'README.txt'),
        ('cpyint/data/commercial/LICENSE_COM.txt', 'LICENSE.txt'),
    ]
    copy_tree(self.bdist_dir, self.dist_target)
    mkpath(os.path.join(self.dist_target))
    # substitute the real X.Y version into the info files
    xy_needle = 'Connector/Python X.Y'
    xy_sub = 'Connector/Python {0}.{1}'
    for src, dst in info_files:
        if dst is None:
            dest_name, _ = copy_file(src, self.dist_target)
        else:
            dest_name, _ = copy_file(src, os.path.join(self.dist_target, dst))
        with open(dest_name, 'r+') as fp:
            content = fp.readlines()
            for i, line in enumerate(content):
                if xy_needle in line:
                    content[i] = line.replace(xy_needle,
                                              xy_sub.format(*VERSION[0:2]))
            fp.seek(0)
            fp.write(''.join(content))
    add_docs(os.path.join(self.dist_target, 'docs'))
    self._write_setuppy()
    if not self.keep_temp:
        remove_tree(self.build_base, dry_run=self.dry_run)
def run(self):
    """Run the distutils command.

    Builds a commercial egg: installs the package into a scratch
    directory, strips GPL headers from the sources, byte-compiles
    (optionally removing sources), and packs the result into an egg.
    """
    log.info("installing library code to %s" % self.bdist_dir)
    egg_name = _get_dist_name(self.distribution,
                              source_only_dist=self.include_sources)
    self.egg = egg.Egg(name=egg_name, destination=self.dist_dir,
                       builtdir=self.bdist_dir, info_file=None)
    # NOTE(review): message typo ("will created") — runtime string, left as-is
    log.info("egg will created as '%s'" % self.egg.get_archive_name())
    # build command: just to get the build_base
    cmdbuild = self.get_finalized_command("build")
    self.build_base = cmdbuild.build_base
    # install command
    install = self.reinitialize_command("install", reinit_subcommands=1)
    install.compile = False     # compilation is handled explicitly below
    install.warn_dir = 0
    install.prefix = self.bdist_dir
    install.install_purelib = self.bdist_dir
    log.info("installing to %s" % self.bdist_dir)
    self.run_command("install")
    # install_data command
    install_data = self.reinitialize_command("install_data",
                                             reinit_subcommands=1)
    install_data.install_dir = self.bdist_dir
    log.info("installing data files to %s" % self.bdist_dir)
    self.run_command("install_data")
    # install_egg_info command
    cmd_egginfo = self.get_finalized_command("install_egg_info")
    self.egg._info_file = cmd_egginfo.target
    # remove the GPL license
    to_compile = install.get_outputs()
    # files whose license headers must stay untouched
    ignore = [
        os.path.join(self.bdist_dir, os.path.normcase("mysql/__init__.py")),
        cmd_egginfo.target,
        os.path.join(self.bdist_dir,
                     os.path.normcase("mysql/connector/version.py")),
    ]
    for pyfile in install.get_outputs():
        if pyfile not in ignore:
            _remove_gpl(pyfile, dry_run=self.dry_run)
    # compile and remove sources
    if not self.include_sources:
        byte_compile(to_compile, optimize=0, force=True, prefix=install.root)
        self._remove_sources()
    # create the egg
    info_files = [("README_com.txt", "README.txt"),
                  ("LICENSE_com.txt", "LICENSE.txt")]
    self.egg.create(
        out=self.egg.get_archive_name(),
        extra_info_files=info_files,
        zip_safe=getattr(self.distribution, "zip_safe", True),
        dry_run=self.dry_run,
    )
    # create the pth-file
    if not self.without_pth:
        self._create_path_file()
    if not self.keep_temp:
        remove_tree(self.build_base, dry_run=self.dry_run)