def test_freshPyReplacesStalePyc(self):
    """
    Verify that if a stale .pyc file on the PYTHONPATH is replaced by a fresh .py file, the plugins in the new .py are picked up rather than the stale .pyc, even if the .pyc is still around.
    """
    mypath = self.appPackage.child("stale.py")
    mypath.setContent(pluginFileContents('one'))
    # Make it super stale
    x = time.time() - 1000
    os.utime(mypath.path, (x, x))
    pyc = mypath.sibling('stale.pyc')
    # compile it
    if _PY3:
        # On python 3, don't use the __pycache__ directory; the intention
        # of scanning for .pyc files is for configurations where you want
        # to intentionally include them, which means we _don't_ scan for
        # them inside cache directories.
        extra = dict(legacy=True)
    else:
        # On python 2 this option doesn't exist.
        extra = dict()
    compileall.compile_dir(self.appPackage.path, quiet=1, **extra)
    # Backdate the generated .pyc as well so both artifacts look equally old.
    os.utime(pyc.path, (x, x))
    # Eliminate the other option.
    mypath.remove()
    # Make sure it's the .pyc path getting cached.
    self.resetEnvironment()
    # Sanity check.
    self.assertIn('one', self.getAllPlugins())
    self.failIfIn('two', self.getAllPlugins())
    self.resetEnvironment()
    # A fresh .py with different plugin content must win over the stale .pyc.
    mypath.setContent(pluginFileContents('two'))
    self.failIfIn('one', self.getAllPlugins())
    self.assertIn('two', self.getAllPlugins())
def test_compileall(self):
    """Verify compile_dir() tolerates a pre-existing .pyc file and that the
    resulting compiled files are runnable without the .py sources.

    NOTE(review): this is Python 2 / Jython-style code ($py.class output,
    print statement in the embedded source, str comparison of
    check_output) -- confirm the target interpreter before porting.
    """
    with temp_cwd():
        PACKAGE = os.path.realpath("./greetings")
        PYC_GREETER = os.path.join(PACKAGE, "greeter.pyc")
        PYCLASS_GREETER = os.path.join(PACKAGE, "greeter$py.class")
        PYCLASS_TEST = os.path.join(PACKAGE, "test$py.class")
        os.mkdir(PACKAGE)
        self.write_code(
            PACKAGE, "greeter.py",
            """
            def greet():
                print 'Hello world!'
            """)
        self.write_code(
            PACKAGE, "test.py",
            """
            from greeter import greet
            greet()
            """)
        # pretend we have a Python bytecode compiler by touching this file
        open(PYC_GREETER, "a").close()
        compileall.compile_dir(PACKAGE, quiet=True)
        self.assertTrue(os.path.exists(PYC_GREETER))  # still exists
        # along with these new compiled files
        self.assertTrue(os.path.exists(PYCLASS_TEST))
        self.assertTrue(os.path.exists(PYCLASS_GREETER))
        # verify we can work with just compiled files
        os.unlink(os.path.join(PACKAGE, "greeter.py"))
        self.assertEqual(
            subprocess.check_output(
                [sys.executable, os.path.join(PACKAGE, "test.py")]).rstrip(),
            "Hello world!")
def make_build(self):
    """Byte-compile the server and tools trees and copy the resulting
    .pyc files (plus subdirectories) into dist/build.

    Does nothing unless the "pycompiled" config flag is truthy.
    """
    if not self.config.get("pycompiled"):
        return
    # Destination directories under dist/build; wipe any previous build.
    dist_root = os.path.join(self.git_root, "dist", "build")
    dest_server = os.path.join(dist_root, "server", "sputnik")
    dest_tools = os.path.join(dist_root, "tools")
    for stale in (dest_server, dest_tools):
        shutil.rmtree(stale, True)
    # Source trees, byte-compiled in place before copying.
    src_server = os.path.join(self.git_root, "server", "sputnik")
    src_tools = os.path.join(self.git_root, "tools")
    for src in (src_server, src_tools):
        compileall.compile_dir(src)
    def ignore(path, names):
        # copytree ignore-callable: drop everything that is neither a
        # *.pyc file nor a directory.
        return [
            name for name in names
            if not fnmatch.fnmatch(name, "*.pyc")
            and not os.path.isdir(os.path.join(path, name))
        ]
    shutil.copytree(src_server, dest_server, ignore=ignore)
    shutil.copytree(src_tools, dest_tools, ignore=ignore)
def __call__(self, parser, namespace, values, option_string=None):
    """argparse Action hook: byte-compile the current directory tree
    (up to 20 levels deep, skipping .svn folders) and exit the process.

    NOTE(review): the message claims pyc files were *deleted*, but the
    code actually compiles them -- confirm the intended wording.
    """
    # print '%r %r %r' % (namespace, values, option_string)
    import re
    import compileall
    compileall.compile_dir('.', maxlevels=20, rx=re.compile(r'/\.svn'))
    sys.stdout.write('all pyc has been deleted !\n')
    sys.exit()
def update_dependencies( upgrade ):
    """Install (or upgrade) every package in the module-level
    ``dependencies`` list via pip, uninstall the ``uninstall_dependencies``
    entries, then delete stale .pyc files and recompile the tree.

    :param upgrade: when truthy, pass --upgrade to each pip install call.
    """
    print( 'Updating Dependencies...' )
    # Prefer a known Windows pip location; fall back to pip on PATH.
    pip = 'C:/Python27/Scripts/pip.exe'
    if os.path.isfile(pip):
        print( 'Found "pip" at "{}".'.format(pip) )
    else:
        pip = 'pip'
    for d in dependencies:
        args = [pip, 'install', d]
        if upgrade:
            args.append('--upgrade')
        print( ' '.join(args) )
        subprocess.call( args )
    for d in uninstall_dependencies:
        args = [pip, 'uninstall', d]
        print( ' '.join(args) )
        subprocess.call( args )
    print( 'Removing old compiled files...' )
    # Remove every .pyc under the current directory before recompiling.
    for root, dirs, files in os.walk( '.' ):
        for f in files:
            fname = os.path.join( root, f )
            if os.path.splitext(fname)[1] == '.pyc':
                os.remove( fname )
    print( 'Pre-compiling source code...' )
    compileall.compile_dir( '.', quiet=True )
def build_cocoa(dev):
    """Assemble the moneyGuru OS X .app: build UIs and localizations,
    collect the embedded Python layer, compile PSMTabBarControl and the
    cocoa layer with waf, then copy resources and frameworks.

    :param dev: development build -- hardlink/symlink sources instead of
        copying and keep .py files in place.
    """
    print("Creating OS X app structure")
    app = cocoa_app()
    # We import this here because we don't want opened module to prevent us replacing .pyd files.
    from core.app import Application as MoneyGuruApp
    app_version = MoneyGuruApp.VERSION
    filereplace('cocoa/InfoTemplate.plist', 'build/Info.plist', version=app_version)
    app.create('build/Info.plist')
    print("Building localizations")
    build_localizations('cocoa')
    print("Building xibless UIs")
    build_cocoalib_xibless()
    build_xibless()
    print("Building Python extensions")
    build_cocoa_proxy_module()
    build_cocoa_bridging_interfaces()
    print("Building the cocoa layer")
    copy_embeddable_python_dylib('build')
    pydep_folder = op.join(app.resources, 'py')
    ensure_folder(pydep_folder)
    if dev:
        hardlink('cocoa/mg_cocoa.py', 'build/mg_cocoa.py')
    else:
        copy('cocoa/mg_cocoa.py', 'build/mg_cocoa.py')
    tocopy = ['core', 'hscommon', 'cocoalib/cocoa', 'objp', 'sgmllib']
    copy_packages(tocopy, pydep_folder, create_links=dev)
    # Temporarily add 'build' to sys.path so the dependency collector can
    # import mg_cocoa.py.
    sys.path.insert(0, 'build')
    collect_stdlib_dependencies('build/mg_cocoa.py', pydep_folder)
    del sys.path[0]
    copy_sysconfig_files_for_embed(pydep_folder)
    if not dev:
        # Important: Don't ever run delete_files_with_pattern('*.py') on dev builds because you'll
        # be deleting all py files in symlinked folders.
        compileall.compile_dir(pydep_folder, force=True, legacy=True)
        delete_files_with_pattern(pydep_folder, '*.py')
        delete_files_with_pattern(pydep_folder, '__pycache__')
    print("Compiling PSMTabBarControl framework")
    os.chdir('psmtabbarcontrol')
    print_and_do('{0} waf configure && {0} waf && {0} waf build_framework'.format(sys.executable))
    os.chdir('..')
    print("Compiling with WAF")
    os.chdir('cocoa')
    print_and_do(cocoa_compile_command())
    os.chdir('..')
    app.copy_executable('cocoa/build/moneyGuru')
    build_help()
    print("Copying resources and frameworks")
    resources = [
        'cocoa/dsa_pub.pem', 'build/mg_cocoa.py', 'build/help',
        'data/example.moneyguru', 'plugin_examples'
    ] + glob.glob('images/*')
    app.copy_resources(*resources, use_symlinks=dev)
    app.copy_frameworks(
        'build/Python', 'cocoalib/Sparkle.framework',
        'psmtabbarcontrol/PSMTabBarControl.framework'
    )
    print("Creating the run.py file")
    tmpl = open('run_template_cocoa.py', 'rt').read()
    run_contents = tmpl.replace('{{app_path}}', app.dest)
    open('run.py', 'wt').write(run_contents)
def installPythonModules(path=None):
    """Byte-compile all Python files under *path* (default: the current
    directory) and copy everything into the MMTK package directory.

    Relies on the module-level ``mmtk_package_path``.
    NOTE(review): the shell copy below breaks on paths containing spaces
    or shell metacharacters -- consider shutil instead.
    """
    import os, compileall
    if path is None:
        path = os.getcwd()
    compileall.compile_dir(path)
    os.chdir(path)
    os.system("cp -r * " + mmtk_package_path)
def bundle(dir):
    """Export the bzr branch at *dir*, byte-compile it, strip the .py
    sources, add HISTORY and VERSION files, then zip the result as
    <dir>_<revno>.zip.

    NOTE(review): ``z`` (z.toZip) is a helper imported elsewhere in the
    module -- not visible here.
    """
    branch = Branch.open(dir)
    output_zip = '%s_%d.zip'%(dir, branch.revno())
    temp_dir = '/tmp/output_%d'%(branch.revno())
    #Empty the temp_dir
    shutil.rmtree(temp_dir, True)
    #export the bzr repository to temp_dir
    export(branch.basis_tree(), temp_dir)
    #Compile the source code in templocation
    compileall.compile_dir(temp_dir)
    #Remove the .py files from the exported directory.
    clean_path(temp_dir, [".py"])
    #create a HISTORY file in the temp_dir
    show_log(branch, ShortLogFormatter(open(temp_dir+os.sep+'HISTORY', 'w')))
    #create a VERSION file in temp_dir
    f = open(temp_dir+os.sep+'VERSION', 'w')
    f.write(str(branch.revno()))
    f.close()
    #write to zip
    z.toZip(temp_dir, output_zip)
def main():
    """Force-recompile every module directory listed in the module-level
    DIRS (path components joined under BUILD_ROOT).

    Python 2 script (print statements). Returns 0 on completion.
    """
    for pythonPath in (DIRS):
        modulePath = os.path.join(BUILD_ROOT,*pythonPath)
        print "Compiling: " + modulePath
        compileall.compile_dir(modulePath,force=1)
    print "Done"
    return 0
def __init__(self, *args, **kwargs):
    """Set up a fresh Parser and byte-compile the 'snippets' directory
    (located next to the invoking script) before the tests run."""
    unittest.TestCase.__init__(self, *args, **kwargs)
    self.parser = Parser()
    # compile snippets
    # NOTE(review): sys.argv[0] is the test runner's path, not this
    # file's -- the snippets dir is resolved relative to the runner.
    self.snippets = os.path.join(os.path.dirname(sys.argv[0]), 'snippets')
    compileall.compile_dir(self.snippets)
def test_freshPyReplacesStalePyc(self):
    """
    Verify that if a stale .pyc file on the PYTHONPATH is replaced by a fresh .py file, the plugins in the new .py are picked up rather than the stale .pyc, even if the .pyc is still around.
    """
    mypath = self.appPackage.child("stale.py")
    mypath.setContent(pluginFileContents('one'))
    # Make it super stale
    x = time.time() - 1000
    os.utime(mypath.path, (x, x))
    pyc = mypath.sibling('stale.pyc')
    # compile it
    compileall.compile_dir(self.appPackage.path, quiet=1)
    # Backdate the generated .pyc too, so both artifacts look equally old.
    os.utime(pyc.path, (x, x))
    # Eliminate the other option.
    mypath.remove()
    # Make sure it's the .pyc path getting cached.
    self.resetEnvironment()
    # Sanity check.
    self.assertIn('one', self.getAllPlugins())
    self.failIfIn('two', self.getAllPlugins())
    self.resetEnvironment()
    # A fresh .py with different plugin content must win over the stale .pyc.
    mypath.setContent(pluginFileContents('two'))
    self.failIfIn('one', self.getAllPlugins())
    self.assertIn('two', self.getAllPlugins())
def test_alwaysPreferPy(self):
    """
    Verify that .py files will always be preferred to .pyc files, regardless of directory listing order.
    """
    mypath = FilePath(self.mktemp())
    mypath.createDirectory()
    pp = modules.PythonPath(sysPath=[mypath.path])
    originalSmartPath = pp._smartPath
    def _evilSmartPath(pathName):
        # Wrap the real smart path so its children() always lists the
        # .pyc entry first (worst case for the preference logic).
        o = originalSmartPath(pathName)
        originalChildren = o.children
        def evilChildren():
            # normally this order is random; let's make sure it always
            # comes up .pyc-first.
            x = originalChildren()
            x.sort()
            x.reverse()
            return x
        o.children = evilChildren
        return o
    mypath.child("abcd.py").setContent("\n")
    compileall.compile_dir(mypath.path, quiet=True)
    # sanity check: both abcd.py and its compiled form exist
    self.assertEquals(len(mypath.children()), 2)
    pp._smartPath = _evilSmartPath
    self.assertEquals(pp["abcd"].filePath, mypath.child("abcd.py"))
def test_compile_dir_pathlike(self):
    """compile_dir must accept a path-like object and print it as a
    plain path, not a WindowsPath/PosixPath repr."""
    self.assertFalse(os.path.isfile(self.bc_path))
    with support.captured_stdout() as stdout:
        compileall.compile_dir(pathlib.Path(self.directory))
    line = stdout.getvalue().splitlines()[0]
    # NOTE(review): '[^WindowsPath|PosixPath]' is a character class, not
    # an alternation -- it only asserts the first character isn't one of
    # those letters or '|'. Confirm whether a stricter pattern was meant.
    self.assertRegex(line, r'Listing ([^WindowsPath|PosixPath].*)')
    self.assertTrue(os.path.isfile(self.bc_path))
def compile_program(self):
    """Byte-compile every configured input location, then (re)build the
    program while skipping the full rebuild step."""
    for src_dir in self.input_location:
        compileall.compile_dir(src_dir, quiet=True)
    # Rebuild on top of the freshly compiled bytecode.
    self.create_program(skip_rebuild=True)
def ToCompileAllFiles(self, pDirProject):
    """Byte-compile every Python file under the given project folder.

    Skips .svn directories, forces recompilation, and compiles with
    optimization level 2, then reports where the bytecode was written.

    :param pDirProject: path of the folder to compile; must be non-empty
        and must not contain a comma (simple CLI-argument sanity check).
    """
    import re
    import compileall
    import os.path
    if pDirProject != "":
        if "," not in pDirProject:
            try:
                # Skip version-control metadata directories.
                rexp = re.compile(r'[/\\][.]svn')
                normalpath = os.path.normpath(pDirProject)
                compileall.compile_dir(normalpath, rx=rexp, force=True, optimize=2)
                # Fix: Python 3 writes bytecode into "__pycache__"; the
                # previous message pointed at a non-existent "_pycache_".
                normalpath = os.path.join(normalpath, "__pycache__")
                print("\nCompilation complete successfully.")
                print("See compiled files in %s" %normalpath)
            except Exception as e:
                print("Error:", e)
        else:
            print('Error: invalid folder/directry. Try:\n\n',
                  ' Use diretories/folders between double quotes (\"\"). \n',
                  ' Example for (Windows/Linux/Unix(Mac OS)):\n\n',
                  ' # python SPCompiler.py \"c:\\:users\edson lourenco\"'
                  '\n OR\n',
                  ' # python SPCompiler.py \"/home/edson lourenco\"',
                  '\n OR\n',
                  ' # python SPCompiler.py \"/users/edson lourenco\"')
    else:
        print("No directory/folder... Try again. ")
def install_plugins(cls, plugin_names):
    """ Installs a list of plugins into the current environment

    :param plugin_names: iterable of plugin names, installed one by one.
    """
    for plugin_name in plugin_names:
        cls.install_plugin(plugin_name)
    # Recompile the whole cafe package so the freshly installed plugin
    # modules get up-to-date bytecode (force=1) without output (quiet=1).
    compileall.compile_dir(
        cafe.__path__[0], maxlevels=1000, force=1, quiet=1)
def compile_BC(self):
    """Best-effort byte-compilation of the bundled blendervr modules.

    Failures are logged (not raised) so startup can continue.
    NOTE(review): the bare except also swallows KeyboardInterrupt and
    SystemExit -- consider narrowing to Exception.
    """
    try:
        import compileall
        compileall.compile_dir(os.path.join(BlenderVR_root, 'modules', 'blendervr'), quiet=True)
    except:
        self.logger.log_traceback(False)
def compile_dir(dir,force=0, **keywords):
    """Wrapper around compileall.compile_dir that also runs the local
    compile_dir_and_recurse pass, with per-process 'debug', 'verbosity'
    and 'errors_are_fatal' settings taken from keyword arguments.

    Python 2 code (has_key, raise-with-comma). The module-level dicts are
    keyed by os.getpid() and always cleaned up in the finally block.

    :param dir: directory tree to compile.
    :param force: forwarded to compileall.compile_dir.
    :raises TypeError: on any keyword other than the three supported ones.
    """
    global debug
    global verbosity
    global errors_are_fatal
    key = os.getpid()
    # Defaults for this process; overridden below by matching keywords.
    debug[key] = 0
    verbosity[key] = 1
    errors_are_fatal[key] = 0
    map_globals = {'debug':debug, 'verbosity':verbosity, 'errors_are_fatal':errors_are_fatal}
    try:
        keys = keywords.keys()
        for keyword in ('debug','verbosity','errors_are_fatal'):
            if keywords.has_key(keyword):
                keys.remove(keyword)
                map_globals[keyword][key] = keywords[keyword]
        # Anything left over was not a recognised keyword.
        if keys:
            raise TypeError,\
                'compile_dir() got an unexpected keyword argument %s' % \
                keys[0]
        compile_dir_and_recurse(dir,0,force)
        compileall.compile_dir(dir,ddir=os.path.basename(dir),force=force)
    finally:
        # Always drop this process's settings, even on error.
        del debug[key]
        del verbosity[key]
        del errors_are_fatal[key]
def build_cocoa(dev):
    """Assemble the dupeGuru OS X .app bundle: build Sparkle if needed,
    generate UIs and localizations, collect the embedded Python layer,
    compile the cocoa layer with waf, and copy resources/frameworks.

    :param dev: development build -- symlink resources instead of copying
        and keep .py sources in place.
    """
    sparkle_framework_path = op.join('cocoa', 'Sparkle', 'build', 'Release', 'Sparkle.framework')
    if not op.exists(sparkle_framework_path):
        print("Building Sparkle")
        os.chdir(op.join('cocoa', 'Sparkle'))
        print_and_do('make build')
        os.chdir(op.join('..', '..'))
    print("Creating OS X app structure")
    app = cocoa_app()
    app_version = get_module_version('core')
    cocoa_project_path = 'cocoa'
    filereplace(op.join(cocoa_project_path, 'InfoTemplate.plist'), op.join('build', 'Info.plist'), version=app_version)
    app.create(op.join('build', 'Info.plist'))
    print("Building localizations")
    build_localizations('cocoa')
    print("Building xibless UIs")
    build_cocoalib_xibless()
    build_xibless()
    print("Building Python extensions")
    build_cocoa_proxy_module()
    build_cocoa_bridging_interfaces()
    print("Building the cocoa layer")
    copy_embeddable_python_dylib('build')
    pydep_folder = op.join(app.resources, 'py')
    if not op.exists(pydep_folder):
        os.mkdir(pydep_folder)
    shutil.copy(op.join(cocoa_project_path, 'dg_cocoa.py'), 'build')
    tocopy = [
        'core', 'hscommon', 'cocoa/inter', 'cocoalib/cocoa', 'objp',
        'send2trash', 'hsaudiotag',
    ]
    copy_packages(tocopy, pydep_folder, create_links=dev)
    # Temporarily add 'build' to sys.path so the collector can import
    # dg_cocoa.py.
    sys.path.insert(0, 'build')
    # ModuleFinder can't seem to correctly detect the multiprocessing dependency, so we have
    # to manually specify it.
    extra_deps = ['multiprocessing']
    collect_stdlib_dependencies('build/dg_cocoa.py', pydep_folder, extra_deps=extra_deps)
    del sys.path[0]
    # Views are not referenced by python code, so they're not found by the collector.
    copy_all('build/inter/*.so', op.join(pydep_folder, 'inter'))
    if not dev:
        # Important: Don't ever run delete_files_with_pattern('*.py') on dev builds because you'll
        # be deleting all py files in symlinked folders.
        compileall.compile_dir(pydep_folder, force=True, legacy=True)
        delete_files_with_pattern(pydep_folder, '*.py')
        delete_files_with_pattern(pydep_folder, '__pycache__')
    print("Compiling with WAF")
    os.chdir('cocoa')
    print_and_do('{0} waf configure && {0} waf'.format(sys.executable))
    os.chdir('..')
    app.copy_executable('cocoa/build/dupeGuru')
    build_help()
    print("Copying resources and frameworks")
    image_path = 'cocoa/dupeguru.icns'
    resources = [image_path, 'cocoa/dsa_pub.pem', 'build/dg_cocoa.py', 'build/help']
    app.copy_resources(*resources, use_symlinks=dev)
    app.copy_frameworks('build/Python', sparkle_framework_path)
    print("Creating the run.py file")
    tmpl = open('cocoa/run_template.py', 'rt').read()
    run_contents = tmpl.replace('{{app_path}}', app.dest)
    open('run.py', 'wt').write(run_contents)
def gen_package_pickled_dic(path, module_name):
    """Build a {relative-path: file-bytes} dict for a package directory,
    preferring compiled .pyc/.pyo files over their .py sources.

    Python 2 code ('except Exception, e').
    NOTE(review): the visible code fills modules_dic but never returns
    it -- confirm against the full source (possible truncation).

    :param path: path of the package entry (file or directory).
    :param module_name: dotted module name used to locate the package dir.
    """
    modules_dic = {}
    start_path = module_name.replace(".", "/")
    search_path = os.path.dirname(path)
    module_dir = os.path.join(search_path, start_path)
    if os.path.isdir(path):
        try:
            # Best effort: a failed compile just means we embed sources.
            compileall.compile_dir(os.path.relpath(module_dir), force=False, quiet=True)
        except Exception, e:
            logging.debug('Compilation failed: {}: {}'.format(e, module_dir))
    for root, dirs, files in os.walk(module_dir):
        to_embedd = set()
        for f in files:
            base, ext = os.path.splitext(f)
            # Skip any file whose compiled twin (.pyc/.pyo) also exists.
            if base+'.pyc' in files and not ext in ('.pyc', '.pyo'):
                continue
            elif base+'.pyo' in files and not ext == '.pyo':
                continue
            else:
                to_embedd.add(f)
        for f in to_embedd:
            module_code=""
            with open(os.path.join(root,f),'rb') as fd:
                module_code=fd.read()
            # Key: path relative to search_path, normalised to forward slashes.
            modprefix = root[len(search_path.rstrip(os.sep))+1:]
            modpath = os.path.join(modprefix,f).replace("\\","/")
            modules_dic[modpath]=module_code
def update_installer_dir(code_dir, installer_dir, keep_source=True):
    """Refresh the installer tree from the code tree: replace the
    procodile, scripts and samples directories, byte-compile them, and
    optionally strip the .py sources. Python 2 script (print statements).

    :param code_dir: checkout root containing src\\procodile etc.
    :param installer_dir: target installer layout to update in place.
    :param keep_source: when False, remove .py files after compiling.
    """
    print 'Updating code ...'
    procodile_src = os.path.join(code_dir, r'src\procodile')
    procodile_dst = os.path.join(installer_dir, r'python\Lib\site-packages\procodile')
    scripts_src = os.path.join(code_dir, r'src\scripts')
    scripts_dst = os.path.join(installer_dir, 'scripts')
    samples_src = os.path.join(code_dir, r'misc\karthik\samples')
    samples_dst = os.path.join(installer_dir, 'samples')
    # remove old data in destination dir locations
    for path in (procodile_dst, scripts_dst, samples_dst):
        if os.path.exists(path):
            print ' Removing old code ...'
            shutil.rmtree(path, onerror=handle_rm_error)
    # copy from code dir to installer dir
    print 'Copying new code ...'
    igfn = _ignore_files
    shutil.copytree(procodile_src, procodile_dst, ignore=igfn)
    shutil.copytree(scripts_src, scripts_dst, ignore=igfn)
    shutil.copytree(samples_src, samples_dst, ignore=igfn)
    # regenerate .pyc files
    print 'Compiling source ...'
    for path in (procodile_dst, scripts_dst, samples_dst):
        compileall.compile_dir(path, quiet=True)
    if not keep_source:
        print 'Removing raw source ...'
        # NOTE(review): samples_dst keeps its sources even here -- confirm
        # that is intentional.
        remove_source(procodile_dst)
        remove_source(scripts_dst)
def test_error(self): try: orig_stdout = sys.stdout sys.stdout = io.TextIOWrapper(io.BytesIO(),encoding='ascii') compileall.compile_dir(self.directory) finally: sys.stdout = orig_stdout
def main():
    """ Runs the program, stand alone, from the command line. """
    # Force-recompile everything under the current directory.
    # Python 2 script (print statements).
    print "\n Building current Project Python Baseline ...\n\n"
    compileall.compile_dir("./", force=True)
    print "\n\n The Project Python Baseline is built. \n\n"
def precompile_python_files(self, quiet=False):
    """Byte-compile every Python file under the interpreter's Lib
    directory (WSPPDE\\Python\\Lib).

    Existing up-to-date .pyc files are left untouched (force=False).

    :param quiet: forwarded to compileall.compile_dir to suppress output.
    """
    lib_dir = os.path.join(os.path.dirname(sys.executable), "Lib")
    compileall.compile_dir(lib_dir, force=False, quiet=quiet)
def testModules(self):
    """Testing all modules by compiling them"""
    import compileall
    import re
    # .svn directories are always excluded; under Python 3 the extra
    # '.*26.*' pattern also skips Python-2.6-only compatibility modules.
    if sys.version_info < (3,0):
        self.failUnless(compileall.compile_dir('.' + os.sep + 'sleekxmpp', rx=re.compile('/[.]svn'), quiet=True))
    else:
        self.failUnless(compileall.compile_dir('.' + os.sep + 'sleekxmpp', rx=re.compile('/[.]svn|.*26.*'), quiet=True))
def compileModules(self, force=True):
    """Byte-compile the modules of every installed component.

    Python 2 code (bare print statements).

    :param force: recompile even when the bytecode is up to date.
    """
    from compileall import compile_dir
    print
    print 'Byte compiling all modules...'
    for comp in self._comps:
        dir = os.path.abspath(comp['dirname'])
        compile_dir(dir, force=force, quiet=True)
def run():
    """Byte-compile each directory given on the command line (default:
    the current directory), silencing all output by redirecting
    stdout/stderr into in-memory buffers. Python 2 code (cStringIO).

    NOTE(review): stdout/stderr are never restored afterwards -- confirm
    that is acceptable for the caller.
    """
    dirs = sys.argv[1:]
    if (len(dirs) == 0):
        dirs = [os.getcwd()]
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    for dir in dirs:
        # 100 is the maxlevels (recursion depth) argument.
        compileall.compile_dir(dir, 100)
def _compile(self):
    """Recompile this module's sources to bytecode, backing up existing
    .pyc files first and restoring them if compilation fails, then
    re-initialise the module's models. All failures are logged.
    """
    def filter_pyc(parents, name, contents):
        # Resource filter: keep only compiled bytecode files.
        return name.endswith(u'.pyc')
    try:
        # STEP 1: Backup of all `pyc` files within the module information
        self.log(1, 'backing_up', self.name)
        files = self._get_resource_paths(filter_pyc)
        for pyc_file in files:
            self._backup(pyc_file)
        self.log(1, 'backing_done', self.name)
        # STEP 2: Compile the module
        try:
            self.log(1, 'compiling', self.name)
            module_path = get_module_path(self.name)
            compileall.compile_dir(module_path, True)
            for pyc_file in files:
                _logger.warning('Reload %s' % pyc_file)
                # NOTE(review): reload() expects a module object, but
                # pyc_file is a path string here -- confirm intent.
                reload(pyc_file)
            self.log(1, 'compilation_done', self.name)
        except Exception as ex:
            # STEP 3: Restore old files if compilation fails
            self.log(1, 'compilation_fail', self.name)
            for pyc_file in files:
                self._backup(pyc_file, restore=True)
            self.log(1, 'restoring_done', self.name)
            raise ex
        # STEP 4: Update modules
        m_data_domain = [
            ('module', '=', self.name),
            ('model', '=', 'ir.model.data')
        ]
        m_data_obj = self.env['ir.model.data']
        m_data_set = m_data_obj.search(m_data_domain)
        ids = [record.res_id for record in m_data_set]
        model_obj = self.env['ir.model']
        model_set = model_obj.browse(ids)
        for model in model_set:
            # NOTE(review): the loop variable is immediately overwritten,
            # so every iteration re-inits 'dev.code.tester' -- confirm
            # this is intentional.
            model = self.env['dev.code.tester']
            init_module_models(self.env.cr, 'development_tools', model)
    except Exception as ex:
        _logger.error('compile %s' % ex)
def exportMysite(original):
    """Byte-compile the current working directory and pack it into
    <tmp>/mysite.zip -- excluding VCS dirs, sources and assorted
    artefacts -- then unpack the archive under <tmp>/mysite/.

    NOTE(review): the 'original' parameter is unused in the visible
    code -- confirm the caller contract.
    """
    import compileall
    old=os.getcwd()
    compileall.compile_dir(old)
    zipDir(old, tmpDir()+'/mysite.zip', ['*'], ['.hg','_svn','.svn','setup','photologue','lzo','upload'], ['.*','icdat.db','*.swp','*.py','*.orig','*.zip','options.txt','l.txt','*.sql', '*.7z', '*.doc', 'tftpgui.cfg', 'oracle9', 'author.pyc','authorization.pyc'],[])#'*.log',*.txt'runpool.pyc', 'datacommcenter.pyc', '__init__.pyc'
    try:
        os.removedirs(tmpDir()+"/mysite")
    except:
        pass
    unzipFile(tmpDir()+'/mysite.zip', tmpDir()+"/mysite/")
def _compile():
    """Byte-compile the sources at VERSIONDIR (neubot/<NUMERIC_VERSION>).

    Kept as a separate step because the sources must be compiled only
    after the update tarball for automatic updates has been created.
    """
    version_dir = "neubot/{0}".format(NUMERIC_VERSION)
    compileall.compile_dir(version_dir)
print(u'Compiling Python modules (.py) to bytecode (.pyc) ...\n') print(u'Compiling root directory: {}'.format(PATH_app)) for F in os.listdir(PATH_app): if os.path.isfile(F) and F.endswith(u'.py') and F != u'init.py': F = ConcatPaths((PATH_app, F)) compileall.compile_file(F) print for D in os.listdir(PATH_app): D = ConcatPaths((PATH_app, D)) if os.path.isdir(D) and os.path.basename(D) in compile_dirs: print(u'Compiling directory: {}'.format(D)) compileall.compile_dir(D) print sys.exit(0) if u'clean' in parsed_commands: import errno if not os.access(PATH_app, os.W_OK): print(u'ERROR: No write privileges for {}'.format(PATH_app)) sys.exit(errno.EACCES) print(u'Cleaning Python bytecode (.pyc) ...\n') for ROOT, DIRS, FILES in os.walk(PATH_app): for F in FILES:
quit(0) if python_version[0] != int(CompileVersion[0]) or python_version[1] != int( CompileVersion[1]): # or python_version[2] != int(CompileVersion[2]): raise Exception("Targeting Python v%s but Python version is %s" % (CompileVersion, str(python_version))) # Compiled files path_compile_to_version = path_compile_to + '/v' + CompileVersion + "/" # Duplicate folder copy_and_overwrite(path_compile_from, path_compile_to_version) import compileall #compiles all .py files in the project (creates PYC files) compileall.compile_dir(path_compile_to_version) # Remove PY files from compiled directory for root, dirnames, filenames in os.walk(path_compile_to_version): for filename in fnmatch.filter(filenames, '*.py'): if any(s in filename for s in SKIP_COMPILE): print("Skipping compilation of: " + filename) continue fdelete = os.path.join(root, filename) print("Deleting file: " + fdelete) os.remove(fdelete) # Move all PYC files from pycache and rename them as modules (except RoboDK.py) for root, dirnames, filenames in os.walk(path_compile_to_version): for filename in fnmatch.filter(filenames, '*.pyc'):
'No keyring found, please update repos.') if not keyring.is_valid(tardata, sig_data): raise ModuleInstallError('Invalid signature for %s.' % module.name) # Extract module from tarball. if os.path.isdir(module_dir): shutil.rmtree(module_dir) progress.progress(0.7, 'Setting up module...') with closing(tarfile.open('', 'r:gz', StringIO(tardata))) as tar: tar.extractall(self.modules_dir) if not os.path.isdir(module_dir): raise ModuleInstallError('The archive for %s looks invalid.' % module.name) # Precompile compile_dir(module_dir, quiet=True) self.versions.set(module.name, module.version) progress.progress(0.9, 'Downloading icon...') self.retrieve_icon(module) progress.progress(1.0, 'Module %s has been installed!' % module.name) @staticmethod def url2filename(url): """ Get a safe file name for an URL. All non-alphanumeric characters are replaced by _. """
def main():
    """Main installer function.

    Parses command-line options (-h help, -p prefix, -d docdir, -i icondir,
    -b buildroot, -s skip translations, -x skip dependency check), verifies
    Python/Qt/PyQt versions, then copies sources, translations, docs, data,
    icons and desktop files into the build tree, patches path constants in
    the main script, byte-compiles the installed sources and cleans up.
    """
    optLetters = 'hp:d:i:b:sx'
    try:
        opts, args = getopt.getopt(sys.argv[1:], optLetters)
    except getopt.GetoptError:
        usage(2)
    global prefixDir
    global docDir
    global iconDir
    global buildRoot
    global progName
    depCheck = True
    translated = True
    for opt, val in opts:
        if opt == '-h':
            usage(0)
        elif opt == '-p':
            prefixDir = os.path.abspath(val)
        elif opt == '-d':
            docDir = val
        elif opt == '-i':
            iconDir = val
        elif opt == '-b':
            buildRoot = val
        elif opt == '-s':
            translated = False
        elif opt == '-x':
            depCheck = False
    # The installer must be run from the directory containing install.py.
    if not os.path.isfile('install.py'):
        print('Error - {0} files not found'.format(progName))
        print('The directory containing "install.py" must be current')
        sys.exit(4)
    if (os.path.isdir('source') and not os.path.isfile('source/{0}.py'.format(progName))):
        print('Error - source files not found')
        print('Retry the extraction from the tar archive')
        sys.exit(4)
    if depCheck:
        print('Checking dependencies...')
        pyVersion = sys.version_info[:3]
        pyVersion = '.'.join([str(num) for num in pyVersion])
        if cmpVersions(pyVersion, (3, 4)):
            print(' Python Version {0} -> OK'.format(pyVersion))
        else:
            print(' Python Version {0} -> Sorry, 3.4 or higher is required'.format(pyVersion))
            sys.exit(3)
        try:
            from PyQt5 import QtCore, QtWidgets
        except:
            print(' PyQt not found -> Sorry, PyQt 5.4 or higher is required' ' and must be built for Python 3')
            sys.exit(3)
        qtVersion = QtCore.qVersion()
        if cmpVersions(qtVersion, (5, 4)):
            print(' Qt Version {0} -> OK'.format(qtVersion))
        else:
            print(' Qt Version {0} -> Sorry, 5.4 or higher is required'.format(qtVersion))
            sys.exit(3)
        pyqtVersion = QtCore.PYQT_VERSION_STR
        if cmpVersions(pyqtVersion, (5, 4)):
            print(' PyQt Version {0} -> OK'.format(pyqtVersion))
        else:
            print(' PyQt Version {0} -> Sorry, 5.4 or higher is required'.format(pyqtVersion))
            sys.exit(3)
    # <buildRoot>/<prefix>/share/<prog>: staged install location.
    pythonPrefixDir = os.path.join(prefixDir, 'share', progName)
    pythonBuildDir = os.path.join(buildRoot, pythonPrefixDir[1:])
    if os.path.isdir('source'):
        print('Installing files...')
        print(' Copying python files to {0}'.format(pythonBuildDir))
        removeDir(pythonBuildDir)  # remove old?
        copyDir('source', pythonBuildDir)
    if os.path.isdir('translations') and translated:
        translationDir = os.path.join(pythonBuildDir, 'translations')
        print(' Copying translation files to {0}'.format(translationDir))
        copyDir('translations', translationDir)
    if os.path.isdir('doc'):
        docPrefixDir = docDir.replace('<prefix>/', '')
        if not os.path.isabs(docPrefixDir):
            docPrefixDir = os.path.join(prefixDir, docPrefixDir)
        docBuildDir = os.path.join(buildRoot, docPrefixDir[1:])
        print(' Copying documentation files to {0}'.format(docBuildDir))
        copyDir('doc', docBuildDir)
        if not translated:
            # Drop localized HTML docs when translations were skipped.
            for name in glob.glob(os.path.join(docBuildDir, '*_[a-z][a-z].html')):
                os.remove(name)
        # update help file location in main python script
        replaceLine(os.path.join(pythonBuildDir, '{0}.py'.format(progName)), 'helpFilePath = None', 'helpFilePath = \'{0}\' # modified by install script\n'.format(docPrefixDir))
    if os.path.isdir('data'):
        dataPrefixDir = os.path.join(prefixDir, 'share', progName, 'data')
        dataBuildDir = os.path.join(buildRoot, dataPrefixDir[1:])
        print(' Copying data files to {0}'.format(dataBuildDir))
        removeDir(dataBuildDir)  # remove old?
        copyDir('data', dataBuildDir)
        if not translated:
            # Drop localized data files when translations were skipped.
            for name in glob.glob(os.path.join(dataBuildDir, '*_[a-z][a-z].dat')):
                os.remove(name)
        # update data file location in main python script
        replaceLine(os.path.join(pythonBuildDir, '{0}.py'.format(progName)), 'dataFilePath = None', 'dataFilePath = \'{0}\' # modified by install script\n'.format(dataPrefixDir))
    if os.path.isdir('icons'):
        iconPrefixDir = iconDir.replace('<prefix>/', '')
        if not os.path.isabs(iconPrefixDir):
            iconPrefixDir = os.path.join(prefixDir, iconPrefixDir)
        iconBuildDir = os.path.join(buildRoot, iconPrefixDir[1:])
        print(' Copying icon files to {0}'.format(iconBuildDir))
        copyDir('icons', iconBuildDir)
        # update icon location in main python script
        replaceLine(os.path.join(pythonBuildDir, '{0}.py'.format(progName)), 'iconPath = None', 'iconPath = \'{0}\' # modified by install script\n'.format(iconPrefixDir))
        if os.path.isfile(os.path.join('icons', progName + '-icon.svg')):
            svgIconPrefixDir = os.path.join(prefixDir, 'share', 'icons', 'hicolor', 'scalable', 'apps')
            svgIconBuildDir = os.path.join(buildRoot, svgIconPrefixDir[1:])
            print(' Copying app icon files to {0}'.format(svgIconBuildDir))
            if not os.path.isdir(svgIconBuildDir):
                os.makedirs(svgIconBuildDir)
            shutil.copy2(os.path.join('icons', progName + '-icon.svg'), svgIconBuildDir)
    if os.path.isfile(progName + '.desktop'):
        desktopPrefixDir = os.path.join(prefixDir, 'share', 'applications')
        desktopBuildDir = os.path.join(buildRoot, desktopPrefixDir[1:])
        print(' Copying desktop file to {0}'.format(desktopBuildDir))
        if not os.path.isdir(desktopBuildDir):
            os.makedirs(desktopBuildDir)
        shutil.copy2(progName + '.desktop', desktopBuildDir)
    if os.path.isdir('source'):
        # Wrapper script plus byte-compiled sources; ddir makes tracebacks
        # show the final install prefix rather than the build path.
        createWrapper(pythonPrefixDir, progName)
        binBuildDir = os.path.join(buildRoot, prefixDir[1:], 'bin')
        print(' Copying executable file "{0}" to {1}'.format(progName, binBuildDir))
        if not os.path.isdir(binBuildDir):
            os.makedirs(binBuildDir)
        shutil.copy2(progName, binBuildDir)
        compileall.compile_dir(pythonBuildDir, ddir=prefixDir)
        cleanSource()
        print('Install complete.')
#!/usr/bin/env python
"""Byte-compile every Python module under the current directory."""
import compileall

compileall.compile_dir(".")
# End of file
def c(base_path: str, **kw: object) -> None:
    """Byte-compile *base_path*, requesting hash-based (unchecked) .pyc
    invalidation when the interpreter supports it (Python >= 3.7);
    older interpreters fall back to timestamp invalidation.

    Extra keyword arguments are forwarded to compileall.compile_dir.
    """
    if hasattr(py_compile, 'PycInvalidationMode'):
        kw['invalidation_mode'] = py_compile.PycInvalidationMode.UNCHECKED_HASH
    compileall.compile_dir(base_path, **kw)  # type: ignore
def CompileAll(directory): """Recursively compiles all Python files in directory.""" # directory could contain unicode chars and py_compile chokes on unicode # paths. Using relative paths from within directory works around the problem. with files.ChDir(directory): compileall.compile_dir('.', quiet=True)
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None,
                     strip_file_prefix=None):
    """Install a wheel that has been unpacked into *wheeldir*.

    Copies the wheel contents into the scheme directories, generates
    entry-point scripts, and rewrites RECORD to reflect installed paths.

    :param name: project name; selects purelib vs platlib
    :param req: requirement object; used to match the .dist-info directory
    :param wheeldir: directory holding the unpacked wheel
    :param pycompile: byte-compile sources before copying so .pyc files are
        installed (and recorded) alongside their modules
    :param scheme: optional pre-computed install-location mapping; when
        absent it is derived from user/home/root/isolated/prefix
    :param strip_file_prefix: prefix stripped from generated-file paths
        written to RECORD
    """
    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    # (compileall's chatter is captured and only emitted at debug level).
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        # Forward-slash relative path, the form RECORD entries use.
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        # Copy the tree at `source` over `dest`, recording every file; when
        # walking the wheel root (is_base) it also collects .data dirs and
        # locates the .dist-info directory.
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        # is self.req.project_name case preserving?
                        s.lower().startswith(
                            req.name.replace('-', '_').lower())):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)
                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)
                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))
                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (st.st_mode | stat.S_IXUSR | stat.S_IXGRP |
                                   stat.S_IXOTH)
                    os.chmod(destfile, permissions)
                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    # Install the .data subtrees (scripts get their #! line fixed).
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata
    # 2.0 is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3],
                                         easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )
    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer (written via a temp file, then moved into
    # place so a partial write never leaves a truncated INSTALLER).
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                # Rewrite archive-relative paths to installed paths; rehash
                # anything a fixer modified (e.g. rewritten #! lines).
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                final_path = normpath(f, lib_dir)
                if strip_file_prefix and final_path.startswith(
                        strip_file_prefix):
                    final_path = os.path.join(
                        os.sep, os.path.relpath(final_path, strip_file_prefix))
                writer.writerow((final_path, h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)
def create_package(self, package_name, package_destination, binary_package=False):
    """
    Creates a .sublime-package file from the running Packages directory

    :param package_name:
        The package to create a .sublime-package file for

    :param package_destination:
        The full filesystem path of the directory to save the new
        .sublime-package file in.

    :param binary_package:
        If the created package should follow the binary package include/
        exclude patterns from the settings. These normally include a setup
        to exclude .py files and include .pyc files, but that can be
        changed via settings.

    :return: bool if the package file was successfully created
    """
    package_dir = self.get_package_dir(package_name)

    if not os.path.exists(package_dir):
        show_error(
            u'The folder for the package name specified, %s, does not exist in %s' %
            (package_name, sublime.packages_path()))
        return False

    package_filename = package_name + '.sublime-package'
    package_path = os.path.join(package_destination, package_filename)

    if not os.path.exists(sublime.installed_packages_path()):
        os.mkdir(sublime.installed_packages_path())

    # Start from a fresh archive if one already exists at the destination.
    if os.path.exists(package_path):
        os.remove(package_path)

    try:
        package_file = zipfile.ZipFile(package_path, "w",
                                       compression=zipfile.ZIP_DEFLATED)
    except (OSError, IOError) as e:
        show_error(
            u'An error occurred creating the package file %s in %s.\n\n%s' %
            (package_filename, package_destination, unicode_from_os(e)))
        return False

    # Sublime Text 3 loads bytecode from packages, so pre-compile the
    # sources (legacy layout: .pyc next to .py, at -OO optimization).
    if int(sublime.version()) >= 3000:
        compileall.compile_dir(package_dir, quiet=True, legacy=True,
                               optimize=2)

    dirs_to_ignore = self.settings.get('dirs_to_ignore', [])
    if not binary_package:
        files_to_ignore = self.settings.get('files_to_ignore', [])
        files_to_include = self.settings.get('files_to_include', [])
    else:
        files_to_ignore = self.settings.get('files_to_ignore_binary', [])
        files_to_include = self.settings.get('files_to_include_binary', [])

    # Regex used to turn absolute paths into archive-relative paths.
    slash = '\\' if os.name == 'nt' else '/'
    trailing_package_dir = package_dir + slash if package_dir[-1] != slash else package_dir
    package_dir_regex = re.compile('^' + re.escape(trailing_package_dir))

    for root, dirs, files in os.walk(package_dir):
        # Prune ignored directories in place so os.walk skips them.
        [dirs.remove(dir_) for dir_ in dirs if dir_ in dirs_to_ignore]
        paths = dirs
        paths.extend(files)
        for path in paths:
            full_path = os.path.join(root, path)
            relative_path = re.sub(package_dir_regex, '', full_path)

            # Ignore patterns win unless an include pattern also matches.
            ignore_matches = [fnmatch(relative_path, p) for p in files_to_ignore]
            include_matches = [fnmatch(relative_path, p) for p in files_to_include]
            if any(ignore_matches) and not any(include_matches):
                continue

            # Only files are written; directories are implied by entries.
            if os.path.isdir(full_path):
                continue
            package_file.write(full_path, relative_path)

    package_file.close()

    return True
import compileall
import glob


def show(title):
    """Print *title* followed by an indented, recursive listing of the cwd."""
    print(title)
    for entry in glob.glob("./**", recursive=True):
        print(' {}'.format(entry))
    print()


# Demonstrate what compile_dir() adds by listing before and after.
show('Before')
compileall.compile_dir('.')
show('\nAfter')
def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False):
    """Byte-compile ``self.path`` at each optimization level in *optimize*.

    When *dedup* is true, identical pycs produced at different levels are
    hardlinked together (requires Python 3.9+). *force* recompiles even
    up-to-date files.
    """
    target = self.path
    compileall.compile_dir(
        target,
        quiet=True,
        optimize=optimize,
        hardlink_dupes=dedup,
        force=force,
    )
def test_compile_pool_called(self, pool_mock):
    # With an explicit worker count > 1, compile_dir() must hand the work
    # off to the (mocked) process pool executor.
    compileall.compile_dir(self.directory, quiet=True, workers=5)
    self.assertTrue(pool_mock.called)
import os
import time
import compileall

# Byte-compile only the scripts in the current directory
# (maxlevels=0 skips all subpackages).
compileall.compile_dir('./', maxlevels=0)
time.sleep(5)  # NOTE(review): compile_dir is synchronous; this pause looks unnecessary — confirm

# Post-process the bytecode cache: drop the Selenium helper and this
# compile script's pycs, and strip the interpreter tag from the rest.
for cache_dir, _subdirs, cache_files in os.walk("./__pycache__", topdown=False):
    for fname in cache_files:
        cached = os.path.join(cache_dir, fname)
        if 'NoxConSelenium' in fname or 'compile' in fname:
            os.remove(cached)
        else:
            os.rename(cached, cached.replace('cpython-36.', ''))
# Startup banner for the elastic-constants toolkit: recompiles the local
# modules, then prints usage information with colorama/termcolor styling.
from energy import *
from contcar_poscar import *
from poscar import *
from fit_energy_vs_vol import *
from strain import *
from create_phonon_directories import *
from evaluate_manually_Elastic_constants import *
from colorama import Fore, Back, Style, init
from termcolor import colored
from pylab import *
import multiprocessing as mp
import compileall

# Force-recompile every module in this directory before starting.
print("This may take a while!")
compileall.compile_dir(".", force=1)

Introduction()
init(autoreset=True)  # colorama: reset styling after every print

print(colored('@' * 80, 'red'), end='\n', flush=True)
print("Number of processors Detected: ", mp.cpu_count())
print(
    Back.MAGENTA +
    ' NB: POSCAR should be in VASP 5 format & without selective dynamics',
    end='\n', flush=True)
print(Style.RESET_ALL)
print(colored('-' * 80, 'red'), end='\n', flush=True)
# NOTE(review): 'sys.argv[0]' below is printed literally, not interpolated —
# presumably an f-string or format call was intended; confirm before changing.
print('>>> USAGE: execute by typing python3 sys.argv[0]')
print(colored('~' * 80, 'red'), end='\n', flush=True)
print("**** Following are the options: ")
"""Command-line helper: force-recompile everything under the current directory."""
import compileall

if __name__ == '__main__':
    compileall.compile_dir('.', force=True)
def zipup(out_path, in_path, top, exclude=None, prefix=''):
    # Package every regular file under in_path into a deflated zip at
    # out_path; archive names are made relative to `top` and prefixed.
    zip_file = zipfile.ZipFile(out_path, 'w', compression=zipfile.ZIP_DEFLATED)
    for path in find(in_path, exclude=exclude)[0]:
        if not os.path.isdir(path):
            arcname = prefix + path[len(top):].lstrip('/')
            print 'Adding %s to %s' % (arcname, out_path)
            zip_file.write(path, arcname)
    zip_file.close()

pwd = os.getcwd()

# Bundle the vendored libraries into the Python 2.6 output tree.
print 'Installing xmppy.'
xmpppy_path = os.path.join(pwd, 'python-libs', 'xmpppy', 'xmpp')
compileall.compile_dir(xmpppy_path)
shutil.copytree(xmpppy_path, 'output/usr/lib/python2.6/xmpp')

# BeautifulSoup ships as a single module; only its bytecode is installed.
print 'Installing BeautifulSoup.'
beautifulsoup_path = os.path.join(pwd, 'python-libs', 'BeautifulSoup')
compileall.compile_dir(beautifulsoup_path)
shutil.copy(os.path.join(beautifulsoup_path, 'BeautifulSoup.pyc'),
            'output/usr/lib/python2.6/BeautifulSoup.pyc')

# gdata needs a setup.py build first; its build output is then compiled.
print 'Installing gdata.'
gdata_path = os.path.join(pwd, 'python-libs', 'gdata')
run('python setup.py build', cwd=gdata_path)
gdata_build_path = os.path.join(gdata_path, 'build')
gdata_result_path = os.path.join(gdata_build_path,
                                 os.listdir(gdata_build_path)[0])
compileall.compile_dir(gdata_result_path)
def test_compile_one_worker(self, compile_file_mock, pool_mock):
    # Without an explicit ``workers`` argument, compilation stays in
    # process: compile_file() is called directly and no pool is created.
    compileall.compile_dir(self.directory, quiet=True)
    self.assertFalse(pool_mock.called)
    self.assertTrue(compile_file_mock.called)
import compileall
import os

# Project root is the parent of the directory containing this script.
root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))

# Write legacy-layout bytecode (module.pyc beside module.py) at -OO level.
compileall.compile_dir(os.path.join(root, 'src', 'python'),
                       legacy=True, optimize=2)

# Point the Qt resource file at the compiled modules instead of the sources.
# The replacement only grows the data, so rewriting in place without
# truncating is safe here.
qrc_path = os.path.join(root, 'src', 'resources.qrc')
with open(qrc_path, 'r+b') as handle:
    contents = handle.read().replace(b'.py<', b'.pyc<')
    handle.seek(0)
    handle.write(contents)
info_dir = [] data_dirs = [] source = wheeldir.rstrip(os.path.sep) + os.path.sep # Record details of the files moved # installed = files copied from the wheel to the destination # changed = files changed while installing (scripts #! line typically) # generated = files newly generated during the install (script wrappers) installed = {} changed = set() generated = [] # Compile all of the pyc files that we're going to be installing if pycompile: compileall.compile_dir(source, force=True, quiet=True) def normpath(src, p): return make_path_relative(src, p).replace(os.path.sep, '/') def record_installed(srcfile, destfile, modified=False): """Map archive RECORD paths to installation RECORD paths.""" oldpath = normpath(srcfile, wheeldir) newpath = normpath(destfile, lib_dir) installed[oldpath] = newpath if modified: changed.add(destfile) def clobber(source, dest, is_base, fixer=None, filter=None): if not os.path.exists(dest): # common for the 'include' path os.makedirs(dest)
def test_compile_workers_cpu_count(self, pool_mock):
    # workers=0 means "use all CPUs": compile_dir() forwards
    # max_workers=None so the executor picks its own default.
    compileall.compile_dir(self.directory, quiet=True, workers=0)
    self.assertEqual(pool_mock.call_args[1]['max_workers'], None)
import Pmw version = Pmw._version if (version == '1.2') : installPMW = 0 print "Pmw %s is installed. " % version except ImportError: pass if ((installPMW == 1) and (setup_arg == 'install')): pmw_dir = os.path.join(pkg_site, 'Pmw') print " Installing Pmw 1.2 to %s " % pmw_dir os.system('gunzip -c Pmw.1.2.tar.gz | tar xvf -') os.system("cp -pr Pmw %s " % pkg_site) os.system("cp Pmw.pth %s" % os.path.join(pkg_site,'Pmw.pth')) import compileall compileall.compile_dir(pmw_dir) # remove any old Ifeffit.py installations if (setup_arg == 'install'): old_files = ('ifeffitc.so', 'Ifeffit.py', 'Ifeffit.pyc', 'GIFeffit.py', 'GIFeffit.pyc', 'TkIfeffit.py', 'TkIfeffit.pyc') for i in old_files: os.system("rm -f %s/%s" % (pkg_site,i)) version = '1.0c' # # include and parse the data from site_install from site_install import * def key_parse(s,k):
def install_unpacked_wheel(
    name,  # type: str
    wheeldir,  # type: str
    wheel_zip,  # type: ZipFile
    scheme,  # type: Scheme
    req_description,  # type: str
    pycompile=True,  # type: bool
    warn_script_location=True,  # type: bool
    direct_url=None,  # type: Optional[DirectUrl]
):
    # type: (...) -> None
    """Install a wheel.

    :param name: Name of the project to install
    :param wheeldir: Base directory of the unpacked wheel
    :param wheel_zip: open ZipFile for wheel being installed
    :param scheme: Distutils scheme dictating the install directories
    :param req_description: String used in place of the requirement, for
        logging
    :param pycompile: Whether to byte-compile installed Python files
    :param warn_script_location: Whether to check that scripts are installed
        into a directory on PATH
    :raises UnsupportedWheel:
        * when the directory holds an unpacked wheel with incompatible
          Wheel-Version
        * when the .dist-info dir does not match the wheel
    """
    # TODO: Investigate and break this up.
    # TODO: Look into moving this into a dedicated class for representing an
    #       installation.

    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    info_dir, metadata = parse_wheel(wheel_zip, name)

    if wheel_root_is_purelib(metadata):
        lib_dir = scheme.purelib
    else:
        lib_dir = scheme.platlib

    subdirs = os.listdir(source)
    data_dirs = [s for s in subdirs if s.endswith('.data')]

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}  # type: Dict[str, str]
    changed = set()
    generated = []  # type: List[str]

    # Compile all of the pyc files that we're going to be installing
    # (compileall's stdout chatter is captured and logged at debug level).
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def record_installed(srcfile, destfile, modified=False):
        # type: (str, str, bool) -> None
        """Map archive RECORD paths to installation RECORD paths."""
        # Uses the module-level normpath() helper (not visible in this block).
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(
        source,  # type: str
        dest,  # type: str
        is_base,  # type: bool
        fixer=None,  # type: Optional[Callable[[str], Any]]
        filter=None  # type: Optional[Callable[[str], bool]]
    ):
        # type: (...) -> None
        # Copy the tree at `source` over `dest`, recording every file; when
        # walking the wheel root (is_base) the .data dirs are pruned so they
        # can be installed separately into their scheme locations.
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir == '':
                subdirs[:] = [s for s in subdirs if not s.endswith('.data')]
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)
                # copyfile (called below) truncates the destination if it
                # exists and then writes the new contents. This is fine in most
                # cases, but can cause a segfault if pip has loaded a shared
                # object (e.g. from pyopenssl through its vendored urllib3)
                # Since the shared object is mmap'd an attempt to call a
                # symbol in it will then cause a segfault. Unlinking the file
                # allows writing of new contents while allowing the process to
                # continue to use the old copy.
                if os.path.exists(destfile):
                    os.unlink(destfile)
                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)
                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))
                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (st.st_mode | stat.S_IXUSR | stat.S_IXGRP |
                                   stat.S_IXOTH)
                    os.chmod(destfile, permissions)
                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    dest_info_dir = os.path.join(lib_dir, info_dir)

    # Get the defined entry points
    ep_file = os.path.join(dest_info_dir, 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # type: (str) -> bool
        # EP, EP.exe and EP-script.py.mako are scripts generated for
        # entry point EP by setuptools
        # NOTE(review): '-script.py.mako' is 15 characters but only 10 are
        # stripped below — looks suspect; confirm against upstream.
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py.mako'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    # Install the .data subtrees (scripts get their #! line fixed).
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = getattr(scheme, subdir)
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = PipScriptMaker(None, scheme.scripts)

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    scripts_to_generate = []

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata
    # 2.0 is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append('pip = ' + pip_script)

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            scripts_to_generate.append('pip{} = {}'.format(
                sys.version_info[0], pip_script))

        scripts_to_generate.append('pip{} = {}'.format(
            get_major_minor_version(), pip_script))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append('easy_install = ' + easy_install_script)

        scripts_to_generate.append('easy_install-{} = {}'.format(
            get_major_minor_version(), easy_install_script))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))

    gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))

    generated_console_scripts = []  # type: List[str]

    try:
        generated_console_scripts = maker.make_multiple(scripts_to_generate)
        generated.extend(generated_console_scripts)

        generated.extend(
            maker.make_multiple(gui_scripts_to_generate, {'gui': True})
        )
    except MissingCallableSuffix as e:
        entry = e.args[0]
        raise InstallationError(
            "Invalid script entry point: {} for req: {} - A callable "
            "suffix is required. Cf https://packaging.python.org/"
            "specifications/entry-points/#use-for-scripts for more "
            "information.".format(entry, req_description)
        )

    if warn_script_location:
        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
        if msg is not None:
            logger.warning(msg)

    generated_file_mode = 0o666 - current_umask()

    @contextlib.contextmanager
    def _generate_file(path, **kwargs):
        # type: (str, **Any) -> Iterator[NamedTemporaryFileResult]
        # Write to an adjacent temp file, then chmod and atomically replace,
        # so a partial write never leaves a truncated metadata file.
        with adjacent_tmp_file(path, **kwargs) as f:
            yield f
        os.chmod(f.name, generated_file_mode)
        replace(f.name, path)

    # Record pip as the installer
    installer_path = os.path.join(dest_info_dir, 'INSTALLER')
    with _generate_file(installer_path) as installer_file:
        installer_file.write(b'pip\n')
    generated.append(installer_path)

    # Record the PEP 610 direct URL reference
    if direct_url is not None:
        direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
        with _generate_file(direct_url_path) as direct_url_file:
            direct_url_file.write(direct_url.to_json().encode("utf-8"))
        generated.append(direct_url_path)

    # Record details of all files installed
    record_path = os.path.join(dest_info_dir, 'RECORD')

    with open(record_path, **csv_io_kwargs('r')) as record_file:
        rows = get_csv_rows_for_installed(
            csv.reader(record_file),
            installed=installed,
            changed=changed,
            generated=generated,
            lib_dir=lib_dir)

    with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
        writer = csv.writer(record_file)
        writer.writerows(sorted_outrows(rows))  # sort to simplify testing
def main(argv):
    """
    The main function of the script.

    Parses the command line, optionally cleans up a previous installation,
    byte-compiles the debug-client sources, and installs them.

    @param argv the list of command line arguments.
    """
    import getopt

    # Parse the command line.
    global progName, modDir, doCleanup, doCompile, distDir
    global sourceDir

    # NOTE(review): the upper bound (3, 9, 9) also rejects Python 3.10+ —
    # confirm that is intended.
    if sys.version_info < (2, 7, 0) or sys.version_info > (3, 9, 9):
        print('Sorry, eric6 requires at least Python 2.7 or '
              'Python 3 for running.')
        exit(5)

    progName = os.path.basename(argv[0])

    # Run from the script's own directory so relative paths resolve.
    if os.path.dirname(argv[0]):
        os.chdir(os.path.dirname(argv[0]))

    initGlobals()

    try:
        if sys.platform.startswith("win"):
            # Windows has no install-prefix (-i) option.
            optlist, args = getopt.getopt(
                argv[1:], "chzd:", ["help"])
        elif sys.platform == "darwin":
            # NOTE(review): the darwin and default branches are identical.
            optlist, args = getopt.getopt(
                argv[1:], "chzd:i:", ["help"])
        else:
            optlist, args = getopt.getopt(
                argv[1:], "chzd:i:", ["help"])
    except getopt.GetoptError as err:
        print(err)
        usage()

    for opt, arg in optlist:
        if opt in ["-h", "--help"]:
            usage(0)
        elif opt == "-d":
            modDir = arg
        elif opt == "-i":
            distDir = os.path.normpath(arg)
        elif opt == "-c":
            doCleanup = False
        elif opt == "-z":
            doCompile = False

    # If sourceDir is missing we are running from an unpacked source tree.
    installFromSource = not os.path.isdir(sourceDir)
    if installFromSource:
        sourceDir = os.path.dirname(__file__) or "."

    # cleanup source if installing from source
    if installFromSource:
        print("Cleaning up source ...")
        cleanupSource(os.path.join(sourceDir, "DebugClients"))
        print()

    # cleanup old installation
    try:
        if doCleanup:
            print("Cleaning up old installation ...")
            if distDir:
                shutil.rmtree(distDir, True)
            else:
                cleanUp()
    except (IOError, OSError) as msg:
        sys.stderr.write(
            'Error: {0}\nTry install as root.\n'.format(msg))
        exit(7)

    if doCompile:
        print("\nCompiling source files ...")
        # Skip the debug client for the *other* Python major version.
        if sys.version_info[0] == 3:
            skipRe = re.compile(r"DebugClients[\\/]Python[\\/]")
        else:
            skipRe = re.compile(r"DebugClients[\\/]Python3[\\/]")
        # Hide compile errors (mainly because of Py2/Py3 differences)
        sys.stdout = io.StringIO()
        if distDir:
            compileall.compile_dir(
                os.path.join(sourceDir, "DebugClients"),
                ddir=os.path.join(distDir, modDir, installPackage),
                rx=skipRe,
                quiet=True)
        else:
            compileall.compile_dir(
                os.path.join(sourceDir, "DebugClients"),
                ddir=os.path.join(modDir, installPackage),
                rx=skipRe,
                quiet=True)
        sys.stdout = sys.__stdout__

    print("\nInstalling eric6 debug clients ...")
    res = installEricDebugClients()

    print("\nInstallation complete.")
    print()

    exit(res)
def test_compile_missing_multiprocessing(self, compile_file_mock):
    # When multiprocessing is unavailable, the workers argument is ignored
    # and files are still compiled serially via compile_file().
    compileall.compile_dir(self.directory, quiet=True, workers=5)
    self.assertTrue(compile_file_mock.called)
#!/usr/bin/env python3

"""Meson post-install hook: refresh desktop caches and byte-compile meld."""

import sys
from compileall import compile_dir
from os import environ, path
from subprocess import call

# Skip the system cache updates during staged (DESTDIR) installs.
if not environ.get('DESTDIR', ''):
    install_prefix = environ.get('MESON_INSTALL_PREFIX', '/usr/local')
    share_dir = path.join(install_prefix, 'share')

    print('Updating icon cache...')
    call(['gtk-update-icon-cache', '-qtf',
          path.join(share_dir, 'icons', 'hicolor')])

    print("Compiling new schemas")
    call(["glib-compile-schemas",
          path.join(share_dir, 'glib-2.0', 'schemas')])

    print("Updating desktop database")
    call(["update-desktop-database",
          path.join(share_dir, 'applications')])

# Byte-compilation is enabled by passing the site-packages path to this script
if len(sys.argv) > 1:
    print('Byte-compiling Python module...')
    site_packages_dir = sys.argv[1]
    compile_dir(path.join(site_packages_dir, 'meld'), optimize=1)
# Create a directory 'build' with the source code and data
import compileall, os, shutil, glob

# The installation root is two directory levels above this file.
script_dir = os.path.dirname(os.path.abspath(__file__))
path_installation = "/".join(script_dir.split(os.sep)[:-2])
path_source = os.path.join(path_installation, "src")
path_data = os.path.join(path_installation, "data")
path_build = os.path.join(path_installation, "build")

# Start from a clean build directory.
if os.path.exists(path_build):
    shutil.rmtree(path_build)
os.makedirs(path_build)

# Byte-compile the sources and move the resulting bytecode into the build.
# NOTE(review): on Python 3 the pycs land in __pycache__, so this glob
# presumably targets the Python 2 layout — confirm.
compileall.compile_dir(path_source, force=True)
for compiled in glob.iglob(os.path.join(path_source, "*.pyc")):
    shutil.move(compiled, path_build)

# Bundle the data directory and drop the server's bytecode from the build.
shutil.copytree(path_data, path_build + "/data")
os.remove(path_build + "/TranslationServer.pyc")
def generate_pyc(self):
    """Recursively byte-compile all Python sources under ``self.dir_name``."""
    target_dir = self.dir_name
    compileall.compile_dir(target_dir)
#!/usr/bin/env python
"""Force-recompile everything under lib/, skipping Subversion metadata."""
import compileall
import re

SVN_DIR = re.compile('/[.]svn')
compileall.compile_dir('lib', rx=SVN_DIR, force=True)
"""Force-recompile the bytecode of the persimmon package."""
import compileall

compileall.compile_dir("persimmon", force=True)
def test_compile_workers_non_positive(self):
    # Negative worker counts are rejected up front with ValueError.
    with self.assertRaisesRegex(ValueError,
                                "workers must be greater or equal to 0"):
        compileall.compile_dir(self.directory, workers=-1)