def test_make_archive_owner_group(self):
    # Test make_archive with owner and group, in various combinations.
    # This works even when there is no gid/uid support.
    if UID_GID_SUPPORT:
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
    else:
        group = owner = 'root'

    base_dir, root_dir, base_name = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    res = make_archive(base_name, 'zip', root_dir, base_dir,
                       owner=owner, group=group)
    self.assertTrue(os.path.exists(res))

    res = make_archive(base_name, 'zip', root_dir, base_dir)
    self.assertTrue(os.path.exists(res))

    res = make_archive(base_name, 'tar', root_dir, base_dir,
                       owner=owner, group=group)
    self.assertTrue(os.path.exists(res))

    res = make_archive(base_name, 'tar', root_dir, base_dir,
                       owner='kjhkjhkjg', group='oihohoh')
    self.assertTrue(os.path.exists(res))

def run(self):
    self.run_command('build_exe')
    build_dir = self.get_finalized_command("build_exe").build_exe
    base_name = os.path.join(self.dist_dir,
                             "pymontecarlo-%s" % platform.machine())
    make_archive(base_name, 'zip', build_dir)

def bdist(dirName, archiveName, archiveTypes=['zip'], excludeAllDirs=[],
          excludeDirs=[], excludeAllFiles=[], excludeFiles=[], binaries=[],
          excludeExtensions=[]):
    excludeDirs.append('build')
    excludeDirs.append('dist')

    import os
    import shutil
    import sys
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    from distutils.dir_util import mkpath
    from distutils.archive_util import (make_tarball, make_zipfile,
                                        ARCHIVE_FORMATS,
                                        check_archive_formats, make_archive)

    options = {
        'binaries': binaries,
        'excludeAllDirs': excludeAllDirs,
        'excludeDirs': excludeDirs,
        'excludeAllFiles': excludeAllFiles,
        'excludeFiles': excludeFiles,
        'excludeExtensions': excludeExtensions,
    }

    print "Building a new binary distribution..."
    if os.path.exists('build'):
        print "Removing old builds..."
        shutil.rmtree('build')

    print "Copying directories..."
    os.mkdir('build')
    copytree('./', 'build/' + dirName, options)

    zip_filename = '../dist/' + archiveName + '.zip'
    if not os.path.exists('dist'):
        print "Creating dist directory..."
        os.mkdir('dist')
    if os.path.exists(zip_filename):
        print "Removing old version..."
        os.remove(zip_filename)

    os.chdir('build')
    type2ext = {
        'zip': '.zip',
        'gztar': '.tar.gz',
        'bztar': '.tar.bz2',
        'ztar': '.tar.Z',
        'tar': '.tar',
    }

    print '\n'
    for atype in archiveTypes:
        try:
            make_archive(archiveName, atype, root_dir=None, base_dir=dirName,
                         verbose=0, dry_run=0)
        except:
            print "COMPRESSION FAILED", atype, "archive:", str(sys.exc_info()[1])
        else:
            if os.path.exists(archiveName + type2ext[atype]):
                if os.path.exists('../dist/' + archiveName + type2ext[atype]):
                    print "Removing old distribution..."
                    os.remove('../dist/' + archiveName + type2ext[atype])
                os.rename(archiveName + type2ext[atype],
                          '../dist/' + archiveName + type2ext[atype])
                print "%s %s distribution created successfully." % (dirName, atype)
            else:
                print "MOVE FAILED: %s %s." % (dirName, atype)

def test_make_archive_cwd(self):
    current_dir = os.getcwd()

    def _breaks(*args, **kw):
        raise RuntimeError()

    ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
    try:
        try:
            make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
        except:
            pass
        self.assertEqual(os.getcwd(), current_dir)
    finally:
        del ARCHIVE_FORMATS['xxx']

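# Side note, not part of the test above: ARCHIVE_FORMATS maps a format name to
# (archiver_function, [(option, default), ...], description), and make_archive
# calls archiver_function(base_name, base_dir, **options) after chdir()ing
# into root_dir. A minimal sketch of a working custom format, assuming
# distutils.archive_util; the 'mytar' name and _make_plain_tar helper are
# hypothetical.
import tarfile
from distutils.archive_util import ARCHIVE_FORMATS, make_archive

def _make_plain_tar(base_name, base_dir, **kwargs):
    # Produce <base_name>.tar from base_dir and return the archive's filename,
    # which is what make_archive expects an archiver to return.
    archive_name = base_name + '.tar'
    with tarfile.open(archive_name, 'w') as tar:
        tar.add(base_dir)
    return archive_name

ARCHIVE_FORMATS['mytar'] = (_make_plain_tar, [], "uncompressed tar file")
# make_archive('backup', 'mytar', root_dir='some_dir')
# creates some_dir/backup.tar containing the contents of some_dir.
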
def test_make_archive_owner_group(self):
    if UID_GID_SUPPORT:
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
    else:
        group = owner = 'root'

    base_dir, root_dir, base_name = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    res = make_archive(base_name, 'zip', root_dir, base_dir,
                       owner=owner, group=group)
    self.assertTrue(os.path.exists(res))

    res = make_archive(base_name, 'zip', root_dir, base_dir)
    self.assertTrue(os.path.exists(res))

    res = make_archive(base_name, 'tar', root_dir, base_dir,
                       owner=owner, group=group)
    self.assertTrue(os.path.exists(res))

    res = make_archive(base_name, 'tar', root_dir, base_dir,
                       owner='kjhkjhkjg', group='oihohoh')
    self.assertTrue(os.path.exists(res))

def test_make_archive_xztar(self):
    base_dir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    res = make_archive(base_name, 'xztar', base_dir, 'dist')
    self.assertTrue(os.path.exists(res))
    self.assertEqual(os.path.basename(res), 'archive.tar.xz')
    self.assertEqual(self._tarinfo(res), self._created_files)

def run(self):
    from distutils.archive_util import make_archive
    archived = make_archive(self.app_name, "zip", self.dist_dir)
    package = os.path.join("dist", self.app_name + ".zip")
    if os.path.exists(package):
        os.unlink(package)
    shutil.move(archived, package)

def testUserProfile(self):
    """Test starting WebDriver session with custom profile."""
    # Open a new session and save the user profile.
    profile_dir = tempfile.mkdtemp()
    capabilities = {'chrome.switches': ['--user-data-dir=' + profile_dir]}
    driver = self.GetNewDriver(capabilities)
    driver.get(self.GetTestDataUrl() + '/test_page.html')

    # Create a cookie.
    cookie_dict = {}
    cookie_dict['name'] = 'test_user_profile'
    cookie_dict['value'] = 'chrome profile'
    cookie_dict['expiry'] = time.time() + 120
    driver.add_cookie(cookie_dict)
    driver.quit()

    profile_zip = archive_util.make_archive(
        os.path.join(profile_dir, 'profile'), 'zip',
        root_dir=profile_dir, base_dir='Default')
    f = open(profile_zip, 'rb')
    base64_user_profile = binascii.b2a_base64(f.read()).strip()
    f.close()
    os.remove(profile_zip)

    # Start new session with the saved user profile.
    capabilities = {'chrome.profile': base64_user_profile}
    driver = self.GetNewDriver(capabilities)
    driver.get(self.GetTestDataUrl() + '/test_page.html')
    cookie_dict = driver.get_cookie('test_user_profile')
    self.assertNotEqual(cookie_dict, None)
    self.assertEqual(cookie_dict['value'], 'chrome profile')
    driver.quit()

def run(self):
    py2exe.build_exe.py2exe.run(self)
    if self.zip:
        archive_name = make_archive(
            os.path.join(self.dist_dir, self.base_name), 'zip',
            self.base_dir, self.base_name)
        shutil.rmtree(self.archive_dir)

def egg2wheel(egg_path, dest_dir):
    egg_info = egg_info_re.match(os.path.basename(egg_path)).groupdict()
    egg = zipfile.ZipFile(egg_path)
    dir = tempfile.mkdtemp(suffix="_e2w")
    egg.extractall(dir)

    dist_info = "%s-%s" % (egg_info['name'], egg_info['ver'])
    abi = 'none'
    pyver = egg_info['pyver'].replace('.', '')
    arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
    if arch != 'any':
        # assume all binary eggs are for CPython
        pyver = 'cp' + pyver[2:]
    wheel_name = '-'.join((dist_info, pyver, abi, arch))

    bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    bw.root_is_purelib = egg_info['arch'] is None
    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
    bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
    bw.write_record(dir, dist_info_dir)

    filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip',
                            root_dir=dir)
    os.rename(filename, filename[:-3] + 'whl')
    rmtree(dir)

def create_a_new_site_archive(root=site_root):
    print "\n...archiving entire website"
    archive_name = strftime(archive_prefix + "%Y%m%d_%H%M%S", gmtime(time()))
    archive_path = make_archive(archive_name, "gztar", site_root, ".")
    print "[COMPLETED] hjb website is archived at: ", archive_path
    return archive_path

def egg2wheel(egg_path, dest_dir):
    filename = os.path.basename(egg_path)
    match = egg_info_re.match(filename)
    if not match:
        raise WheelError('Invalid egg file name: {}'.format(filename))

    egg_info = match.groupdict()
    dir = tempfile.mkdtemp(suffix="_e2w")
    if os.path.isfile(egg_path):
        # assume we have a bdist_egg otherwise
        egg = zipfile.ZipFile(egg_path)
        egg.extractall(dir)
    else:
        # support buildout-style installed eggs directories
        for pth in os.listdir(egg_path):
            src = os.path.join(egg_path, pth)
            if os.path.isfile(src):
                shutil.copy2(src, dir)
            else:
                shutil.copytree(src, os.path.join(dir, pth))

    pyver = egg_info['pyver']
    if pyver:
        pyver = pyver.replace('.', '')
    arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
    # assume all binary eggs are for CPython
    abi = 'cp' + pyver[2:] if arch != 'any' else 'none'

    root_is_purelib = egg_info['arch'] is None
    if root_is_purelib:
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    else:
        bw = _bdist_wheel_tag(distutils.dist.Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = egg_info['arch'] or 'any'
    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, '{name}-{ver}.dist-info'.format(**egg_info))
    bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
    bw.write_record(dir, dist_info_dir)

    wheel_name = '{name}-{ver}-{pyver}-{}-{}'.format(abi, arch, **egg_info)
    filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip',
                            root_dir=dir)
    os.rename(filename, filename[:-3] + 'whl')
    shutil.rmtree(dir)

def egg2wheel(egg_path, dest_dir):
    egg_info = egg_info_re.match(os.path.basename(egg_path)).groupdict()
    dir = tempfile.mkdtemp(suffix="_e2w")
    if os.path.isfile(egg_path):
        # assume we have a bdist_egg otherwise
        egg = zipfile.ZipFile(egg_path)
        egg.extractall(dir)
    else:
        # support buildout-style installed eggs directories
        for pth in os.listdir(egg_path):
            src = os.path.join(egg_path, pth)
            if os.path.isfile(src):
                shutil.copy2(src, dir)
            else:
                shutil.copytree(src, os.path.join(dir, pth))

    dist_info = "%s-%s" % (egg_info['name'], egg_info['ver'])
    abi = 'none'
    pyver = egg_info['pyver'].replace('.', '')
    arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
    if arch != 'any':
        # assume all binary eggs are for CPython
        pyver = 'cp' + pyver[2:]
    wheel_name = '-'.join((dist_info, pyver, abi, arch))

    root_is_purelib = egg_info['arch'] is None
    if root_is_purelib:
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    else:
        bw = _bdist_wheel_tag(distutils.dist.Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = egg_info['arch'] or 'any'
    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
    bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
    bw.write_record(dir, dist_info_dir)

    filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip',
                            root_dir=dir)
    os.rename(filename, filename[:-3] + 'whl')
    shutil.rmtree(dir)

def make_archive(self, base_name, format, root_dir=None, base_dir=None):
    return archive_util.make_archive(base_name, format, root_dir, base_dir,
                                     dry_run=self.dry_run)

"""distutils.cmd
def zip(zname, bname, form):
    cpath = getcwd()
    chdir(path.dirname(bname))
    zipfile = make_archive(zname, form, base_dir=path.basename(bname))
    chdir(cpath)
    return zipfile

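# Hedged alternative sketch (not taken from the snippet above): distutils'
# make_archive already does the chdir()/restore dance itself when root_dir is
# given, so the manual getcwd()/chdir() bookkeeping can usually be dropped.
# The pack_dir name is hypothetical; this assumes bname has a directory
# component.
from os import path
from distutils.archive_util import make_archive

def pack_dir(zname, bname, form):
    # Archive only the leaf directory of bname; make_archive switches into
    # root_dir before archiving and restores the previous cwd afterwards.
    return make_archive(zname, form, root_dir=path.dirname(bname),
                        base_dir=path.basename(bname))
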
def make_archive(self, base_name, format, root_dir=None, base_dir=None,
                 owner=None, group=None):
    return archive_util.make_archive(
        base_name, format, root_dir, base_dir,
        dry_run=self.dry_run, owner=owner, group=group
    )

def build_binary_dist_helper(self, requirement):
    """
    Convert a single, unpacked source distribution to a binary distribution.
    Raises an exception if it fails to create the binary distribution
    (probably because of missing binary dependencies like system libraries).

    :param requirement: A :py:class:`.Requirement` object.
    :returns: The pathname of the resulting binary distribution (a string).
    :raises: :py:exc:`.BuildFailed` when the build reports an error.
    :raises: :py:exc:`.NoBuildOutput` when the build does not produce the
             expected binary distribution archive.
    """
    build_timer = Timer()
    # Make sure the source distribution contains a setup script.
    setup_script = os.path.join(requirement.source_directory, 'setup.py')
    if not os.path.isfile(setup_script):
        msg = "Directory %s (%s %s) doesn't contain a source distribution!"
        raise InvalidSourceDistribution(msg % (requirement.source_directory,
                                               requirement.name,
                                               requirement.version))
    # Let the user know what's going on.
    build_text = "Building %s (%s) binary distribution" % (requirement.name,
                                                           requirement.version)
    logger.info("%s ..", build_text)
    # Cleanup previously generated distributions.
    dist_directory = os.path.join(requirement.source_directory, 'dist')
    if os.path.isdir(dist_directory):
        logger.debug("Cleaning up previously generated distributions in %s ..",
                     dist_directory)
        shutil.rmtree(dist_directory)
    makedirs(dist_directory)
    # Create a temporary directory for pip installing into, and set up the
    # install_lib directory structure inside it. We do this so that we can
    # pip install into this as our target.
    temporary_dir = tempfile.mkdtemp()
    distutils_inst = install(Distribution())
    distutils_inst.prefix = ''  # This will be changed if we're in a virtualenv.
    distutils_inst.finalize_options()
    pip_target = os.path.normpath(temporary_dir + distutils_inst.install_lib)
    # Compose the command line needed to build the binary distribution.
    command_line = ' '.join(pipes.quote(t) for t in
                            ['pip', 'install', '.', '--target', pip_target])
    logger.debug("Executing external command: %s", command_line)
    # Redirect all output of the build to a temporary file.
    fd, temporary_file = tempfile.mkstemp()
    command_line = '%s > "%s" 2>&1' % (command_line, temporary_file)
    try:
        # Start the build.
        build = subprocess.Popen(['sh', '-c', command_line],
                                 cwd=requirement.source_directory)
        # Wait for the build to finish and provide feedback to the user in the
        # mean time.
        spinner = Spinner(label=build_text, timer=build_timer)
        while build.poll() is None:
            spinner.step()
            # Don't tax the CPU too much.
            time.sleep(0.2)
        spinner.clear()
        # At this point, we may have a number of dependencies in the directory
        # we want to tar up that should not be part of the package
        # distribution. For instance, s3 will also wrap up the concurrent,
        # futures, and requests packages. We fix this by reading
        # {name}-{version}-{py-version}.egg-info/installed-files.txt and
        # removing any files or directories that are not in it.
        # 1. Find the appropriate .egg-info/ directory.
        egg_info_dir = None
        egg_info_start = '-'.join([requirement.name, requirement.version])
        for egg_info_root, dirs, _ in os.walk(temporary_dir):
            for d in dirs:
                if d.startswith(egg_info_start) and d.endswith('.egg-info'):
                    egg_info_dir = d
                    break
            if egg_info_dir is not None:
                break
        # 2. If we have a .egg-info/, try to read the installed-files.txt
        #    contents.
        inst_files = set()
        if egg_info_dir is not None:
            egg_info_path = os.path.join(egg_info_root, egg_info_dir)
            inst_files_path = os.path.join(egg_info_path, 'installed-files.txt')
            try:
                with open(inst_files_path) as f:
                    for line in f:
                        abs_path = os.path.abspath(
                            os.path.join(egg_info_path, line.strip()))
                        inst_files.add(abs_path)
                        inst_files.add(os.path.dirname(abs_path))
            except IOError as ioe:
                logger.warn('Unable to open %s: %s' % (inst_files_path, ioe))
        # 3. If we were able to get a set of files and directories that belong
        #    in the distribution, then we can delete everything else before
        #    archiving it.
        if inst_files:
            dirs, files = next(os.walk(egg_info_root))[1:]
            for d in dirs:
                d = os.path.abspath(os.path.join(egg_info_root, d))
                if d not in inst_files:
                    logger.info('Removing %s (not part of the package)' % d)
                    shutil.rmtree(d)
            for f in files:
                f = os.path.abspath(os.path.join(egg_info_root, f))
                if f not in inst_files:
                    logger.info('Removing %s (not part of the package)' % f)
                    os.unlink(f)
        # Tar up the contents of temporary_dir into the correct file name and
        # put it in the dist dir.
        tarball_path = os.path.join(temporary_dir, requirement.name)
        path = archive_util.make_archive(tarball_path, 'gztar',
                                         root_dir=temporary_dir)
        shutil.copy(path, dist_directory)
        # Make sure the build succeeded and produced a binary distribution
        # archive.
        try:
            # If the build reported an error we'll try to provide the user
            # with some hints about what went wrong.
            if build.returncode != 0:
                raise BuildFailed(
                    "Failed to build {name} ({version}) binary distribution!",
                    name=requirement.name, version=requirement.version)
            # Check if the build created the `dist' directory (the os.listdir()
            # call below will raise an exception if we don't check for this).
            if not os.path.isdir(dist_directory):
                raise NoBuildOutput(
                    "Build of {name} ({version}) did not produce a binary distribution archive!",
                    name=requirement.name, version=requirement.version)
            # Check if we can find the binary distribution archive.
            filenames = os.listdir(dist_directory)
            if len(filenames) != 1:
                raise NoBuildOutput(
                    "Build of {name} ({version}) produced more than one distribution archive! (matches: {filenames})",
                    name=requirement.name, version=requirement.version,
                    filenames=concatenate(sorted(filenames)))
        except Exception as e:
            # Decorate the exception with the output of the failed build.
            with open(temporary_file) as handle:
                build_output = handle.read()
            enhanced_message = compact("""
                {message}

                Please check the build output because it will probably
                provide a hint about what went wrong.

                Build output:

                {output}
            """, message=e.args[0], output=build_output.strip())
            e.args = (enhanced_message,)
            raise
    finally:
        # Close the file descriptor and remove the temporary file that
        # captured the build output.
        os.close(fd)
        os.unlink(temporary_file)
    logger.info("Finished building %s (%s) in %s.", requirement.name,
                requirement.version, build_timer)
    return os.path.join(dist_directory, filenames[0])

        # but rather bundle our libs as data_files.
        if nm.endswith("py%d.%d.egg" % sys.version_info[:2]):
            if sys.platform == "win32":
                platform = "win32"
            elif sys.platform == "darwin":
                platform = "macosx-10.4-universal"
            else:
                continue
            newname = nm.rsplit(".", 1)[0] + "-" + platform + ".egg"
            newpath = os.path.join(dist_dir, newname)
            if os.path.exists(newpath):
                os.unlink(newpath)
            os.rename(os.path.join(dist_dir, nm), newpath)
        # Rename any mpkgs to give better platform info, and zip them up
        # for easy uploading to PyPI.
        elif nm.endswith(".mpkg"):
            if sys.platform != "darwin":
                continue
            platform = "macosx-10.4-universal"
            if platform in nm:
                continue
            newname = nm.rsplit("macosx", 1)[0] + platform + ".mpkg"
            newpath = os.path.join(dist_dir, newname)
            if os.path.exists(newpath):
                shutil.rmtree(newpath)
            os.rename(os.path.join(dist_dir, nm), newpath)
            if os.path.exists(newpath + ".zip"):
                os.unlink(newpath + ".zip")
            make_archive(newpath, "zip", dist_dir, newname)
            shutil.rmtree(newpath)

if nm.endswith("py%d.%d.egg" % sys.version_info[:2]): if sys.platform == "win32": platform = "win32" elif sys.platform == "darwin": platform = "macosx-10.4-universal" else: continue newname = nm.rsplit(".",1)[0] + "-" + platform + ".egg" newpath = os.path.join(dist_dir,newname) if os.path.exists(newpath): os.unlink(newpath) os.rename(os.path.join(dist_dir,nm),newpath) # Rename any mpkgs to give better platform info, and zip them up # for easy uploading to PyPI. elif nm.endswith(".mpkg"): if sys.platform != "darwin": continue platform = "macosx-10.4-universal" if platform in nm: continue newname = nm.rsplit("macosx",1)[0] + platform + ".mpkg" newpath = os.path.join(dist_dir,newname) if os.path.exists(newpath): shutil.rmtree(newpath) os.rename(os.path.join(dist_dir,nm),newpath) if os.path.exists(newpath+".zip"): os.unlink(newpath+".zip") make_archive(newpath,"zip",dist_dir,newname) shutil.rmtree(newpath)
def archive_site(root=site_root):
    make_archive(strftime("../hjb_web_site_%Y%m%d_%H%M%S", gmtime(time())),
                 "gztar", abspath(site_root), ".")

move_file(os.path.join(DIST_DOCTMP_DIR, "README.html"), DIST_DIRECTORY)

print("\nDeleting doctmp directory\n")
remove_tree(DIST_DOCTMP_DIR)

print("\nRunning setup.py sdist\n")
run_setup(SETUP_PY, ["sdist", "--format", "zip",
                     "--dist-dir", DIST_LIB_DIRECTORY])

print("\nRunning setup.py bdist_wheel\n")
run_setup(SETUP_PY, ["bdist_wheel", "--dist-dir", DIST_LIB_DIRECTORY,
                     "--universal"])

print("\nCopying sample into dist directory\n")
copy_tree(os.path.join(DIST_PY_FILE_LOCATION, "sample"), SAMPLE_RELEASE_DIR)

print("\nCopying dist to " + DIST_RELEASE_DIR + "\n")
copy_tree(DIST_DIRECTORY, DIST_RELEASE_DIR)

print("\nRemoving build directory\n")
remove_tree(os.path.join(DIST_PY_FILE_LOCATION, "build"))

print("\nRemoving dxlmaxmindclient.egg-info\n")
remove_tree(os.path.join(DIST_PY_FILE_LOCATION, "dxlmaxmindclient.egg-info"))

print("\nMaking dist zip\n")
make_archive(DIST_RELEASE_DIR, "zip", DIST_DIRECTORY, RELEASE_NAME)

print("\nRemoving " + DIST_RELEASE_DIR + "\n")
remove_tree(DIST_RELEASE_DIR)

                       verbose=1)

    # remove subversion directories (*//.svn)
    files = os.walk(os.path.join(targetdocdir))
    dirstodelete = []
    for dirpath, dn, fn in files:
        if '.svn' in dirpath[-4:] and dirpath not in dirstodelete:
            dirstodelete.append(dirpath)
    for dir in dirstodelete:
        dir_util.remove_tree(dir)

    # insert sourceforge pyscard logo in html pages
    files = os.walk(os.path.join(targetdocdir))
    htmlfiles = []
    for dirpath, dn, filenames in files:
        for filename in filenames:
            root, ext = os.path.splitext(filename)
            if '.html' == ext:
                htmlfiles.append(os.path.join(dirpath, filename))
    for file in htmlfiles:
        insertSourceForgeLogo(file)
else:
    print "Can't find " + smartcardpackagedir

# zip documentation
make_archive(
    os.path.join(targetgenerateddir, 'pyscard-doc'),
    'zip',
    root_dir=targetgenerateddir,
    base_dir='doc')

def make_archive(self, base_name, format, root_dir=None, base_dir=None,
                 owner=None, group=None):
    return archive_util.make_archive(base_name, format, root_dir, base_dir,
                                     dry_run=self.dry_run,
                                     owner=owner, group=group)

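# Hedged usage sketch for the wrapper above: outside of a distutils Command,
# archive_util.make_archive can be called directly with the same owner/group
# keywords (they only affect tar-based formats; 'zip' ignores them). The paths
# and names below are hypothetical.
from distutils import archive_util

# Creates /tmp/backup.tar.gz containing the 'data' directory found under
# /srv/project, with the tar entries recorded as owned by root.
archive_path = archive_util.make_archive('/tmp/backup', 'gztar',
                                         root_dir='/srv/project',
                                         base_dir='data',
                                         owner='root', group='root')
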
run_setup(SETUP_PY, ["bdist_wheel", "--dist-dir", DIST_LIB_DIRECTORY, "--python-tag", "py2.7"]) print("\nCopying config into dist directory\n") copy_tree(os.path.join(DIST_PY_FILE_LOCATION, "config"), DIST_CONFIG_DIRECTORY) print("\nCopying sample into dist directory\n") copy_tree(os.path.join(DIST_PY_FILE_LOCATION, "sample"), SAMPLE_RELEASE_DIR) print("\nCopying dist to " + DIST_RELEASE_DIR + "\n") copy_tree(DIST_DIRECTORY, DIST_RELEASE_DIR) print("\nRemoving build directory\n") remove_tree(os.path.join(DIST_PY_FILE_LOCATION, "build")) print("\nRemoving dxlvtapiservice.egg-info\n") remove_tree(os.path.join(DIST_PY_FILE_LOCATION, "dxlvtapiservice.egg-info")) print("\nMaking dist zip\n") make_archive(DIST_RELEASE_DIR, "zip", DIST_DIRECTORY, RELEASE_NAME) print("\nMaking dist config zip\n") make_archive(CONFIG_RELEASE_DIR, "zip", os.path.join(DIST_RELEASE_DIR, "config")) print("\nRemoving " + DIST_RELEASE_DIR + "\n") remove_tree(DIST_RELEASE_DIR)