def ExtractInstaller(installer, outdir):
  """Unpack an SDK installer into |outdir|.

  Any pre-existing |outdir| is removed first.  A Windows ``.exe`` installer
  is run silently and told to install into |outdir|; anything else is
  treated as a tarball and extracted with cygtar.

  Args:
    installer: full path of the SDK installer.
    outdir: output directory where to extract the installer.

  Raises:
    CalledProcessError - if the extract operation fails.
  """
  RemoveDir(outdir)
  _, extension = os.path.splitext(installer)
  if extension == '.exe':
    # NSIS-style Windows installers handle silent (/S) and relocated (/D)
    # installs.
    subprocess.check_call([installer, '/S', '/D=%s' % outdir])
    return
  os.mkdir(outdir)
  saved_cwd = os.getcwd()
  archive = None
  try:
    archive = cygtar.CygTar(installer, 'r', verbose=True)
    if outdir:
      os.chdir(outdir)
    archive.Extract()
  finally:
    if archive:
      archive.Close()
    os.chdir(saved_cwd)
def ExtractArchive(self, archive, extract_dir, rename_from_dir,
                   rename_to_dir):
  """Extract a cached archive, then rename the result into place.

  The archive (found in self.user_data_dir) is extracted into
  |extract_dir| under self.install_dir first, so a failed extraction does
  not destroy an already-installed bundle; only after a successful
  extraction is |rename_from_dir| renamed to |rename_to_dir|.  The staging
  directory is always removed and the archive file is deleted at the end.

  Args:
    archive: archive file name, relative to self.user_data_dir.
    extract_dir: staging directory name, relative to self.install_dir.
    rename_from_dir: directory to rename, relative to self.install_dir.
    rename_to_dir: rename target, relative to self.install_dir.

  Raises:
    Error: if the archive cannot be opened or the chdir fails.
  """
  archive_path = os.path.join(self.user_data_dir, archive)
  extract_path = os.path.join(self.install_dir, extract_dir)
  rename_from_path = os.path.join(self.install_dir, rename_from_dir)
  rename_to_path = os.path.join(self.install_dir, rename_to_dir)
  # Extract to extract_dir, usually "<bundle name>_update".
  # This way if the extraction fails, we haven't blown away the old bundle
  # (if it exists).
  sdk_update_common.RemoveDir(extract_path)
  sdk_update_common.MakeDirs(extract_path)
  curpath = os.getcwd()
  # Note: the original assigned tar_file = None twice; once is enough for
  # the finally-clause guard below.
  tar_file = None
  try:
    try:
      tar_file = cygtar.CygTar(archive_path, 'r', verbose=True)
    except Exception as e:
      raise Error('Can\'t open archive "%s".\n %s' % (archive_path, e))
    try:
      logging.info('Changing the directory to %s' % (extract_path, ))
      os.chdir(extract_path)
    except Exception as e:
      raise Error('Unable to chdir into "%s".\n %s' % (extract_path, e))
    logging.info('Extracting to %s' % (extract_path, ))
    tar_file.Extract()
    logging.info('Changing the directory to %s' % (curpath, ))
    os.chdir(curpath)
    logging.info('Renaming %s->%s' % (rename_from_path, rename_to_path))
    sdk_update_common.RenameDir(rename_from_path, rename_to_path)
  finally:
    # Change the directory back so we can remove the update directory.
    os.chdir(curpath)
    # Clean up the ..._update directory.
    try:
      sdk_update_common.RemoveDir(extract_path)
    except Exception as e:
      logging.error('Failed to remove directory \"%s\". %s' % (extract_path,
                                                               e))
    if tar_file:
      tar_file.Close()
  # Remove the archive.
  os.remove(archive_path)
def Untar(bz2_filename): if sys.platform == 'win32': tar_file = None try: webports.Log('Unpacking tarball...') tar_file = cygtar.CygTar(bz2_filename, 'r:bz2') tar_file.Extract() except Exception, err: raise webports.Error('Error unpacking %s' % str(err)) finally:
def Untar(bz2_filename): if sys.platform == 'win32': tar_file = None try: print 'Unpacking tarball...' tar_file = cygtar.CygTar(bz2_filename, 'r:bz2') tar_file.Extract() except Exception, err: ErrorOut('Error unpacking %s' % str(err)) finally:
def _DoFillEmptyTarsCmd(arguments):
  """Fill in empty tar archives for package entries that have no hash.

  For each requested package, every archive entry that already has a hash
  is carried over unchanged; entries without a hash get an empty tarball
  of the matching compression type written to disk, and the new hash is
  recorded in the saved package file.
  """
  package_target_packages = GetPackageTargetPackages(
      arguments.fillemptytars_package, arguments.package_target_packages)
  if not package_target_packages:
    raise NameError('Unknown package: %s. Did you forget to add '
                    '"$PACKAGE_TARGET/"?' % arguments.fillemptytars_package)

  for package_target, package_name in package_target_packages:
    package_path = package_locations.GetLocalPackageFile(
        arguments.tar_dir, package_target, package_name)
    package_desc = package_info.PackageInfo(package_path, skip_missing=True)
    filled_desc = package_info.PackageInfo()
    for archive in package_desc.GetArchiveList():
      archive_data = archive.GetArchiveData()
      if archive_data.hash:
        # Archive already exists; keep the entry as-is.
        filled_desc.AppendArchive(archive)
        continue
      logging.info('Filling missing archive: %s.', archive_data.name)
      # Map the archive name's suffix to a tar write mode, in the same
      # precedence order as before.
      for suffixes, tar_mode in ((('.tar.gz', '.tgz'), 'w:gz'),
                                 (('.bz2',), 'w:bz2'),
                                 (('.tar',), 'w:')):
        if archive_data.name.endswith(suffixes):
          mode = tar_mode
          break
      else:
        raise NameError('Unknown archive type: %s.' % archive_data.name)
      archive_file = package_locations.GetLocalPackageArchiveFile(
          arguments.tar_dir, package_target, package_name, archive_data.name)
      # Opening and immediately closing the tar writes an empty archive.
      cygtar.CygTar(archive_file, mode).Close()
      tar_hash = archive_info.GetArchiveHash(archive_file)
      filled_desc.AppendArchive(
          archive_info.ArchiveInfo(name=archive_data.name,
                                   archive_hash=tar_hash))
    filled_desc.SavePackageFile(package_path)
def untar(bz2_filename):
  """Unpack a .tar.bz2 archive into the current directory.

  On Windows the archive is opened with cygtar (which understands cygwin
  symlinks); elsewhere the system ``tar`` is used.

  Raises:
    webports.Error: if unpacking fails.
  """
  if sys.platform != 'win32':
    # Outside Windows, shell out to the native tar.
    if subprocess.call(['tar', 'jxf', bz2_filename]):
      raise webports.Error('Error unpacking')
    return

  archive = None
  try:
    webports.log('Unpacking tarball...')
    archive = cygtar.CygTar(bz2_filename, 'r:bz2')
    archive.extract()
  except Exception as err:
    raise webports.Error('Error unpacking %s' % str(err))
  finally:
    if archive:
      archive.Close()
def Main(args): parser = optparse.OptionParser() # Modes parser.add_option('-s', '--sdk', help='SDK directory.', dest='sdk') parser.add_option('-t', '--tool', help='Which toolchain.', dest='tool') parser.add_option('-o', '--os', help='Untar for which OS.', dest='os') parser.add_option('-T', '--tmp', help='Temp directory.', dest='tmp') parser.add_option('-f', '--fin', help='Final output directory.', dest='fin') parser.add_option('-v', '--verbose', dest='verbose', default=False, help='Enable verbosity', action='store_true') options, args = parser.parse_args(args[1:]) if not options.sdk: parser.error('Expecting SDK directory.') if not options.os: parser.error('Expecting OS to be specified.') if not options.tool: parser.error('Expecting which tool to untar.') if not options.fin: parser.error('Expecting final output directory.') if len(args) < 1: parser.error('Expecting path(s) to tarball(s).') untar_path = os.path.join(options.tmp, options.tool) if options.tool == 'x86_newlib': tool_path = os.path.join(untar_path, 'sdk', 'nacl-sdk') elif options.tool == 'x86_glibc': tool_path = os.path.join(untar_path, 'toolchain', options.os + '_x86') elif options.tool in ('pnacl', 'arm_newlib'): tool_path = untar_path else: parser.error('Unknown tool type: ' + options.tool) final_path = os.path.abspath(options.fin) stamp_path = os.path.join(final_path, 'stamp.untar') final_path = os.path.abspath(final_path) untar_path = os.path.abspath(untar_path) stamp_path = os.path.abspath(stamp_path) tool_path = os.path.abspath(tool_path) if options.verbose: print 'Delete: ' + untar_path oshelpers.Remove(['-fr', untar_path]) if options.verbose: print 'Mkdir: ' + untar_path oshelpers.Mkdir(['-p', untar_path]) if options.verbose: print 'Delete: ' + final_path oshelpers.Remove(['-fr', final_path]) if options.verbose: print 'Mkdir: ' + os.path.join(options.sdk, 'toolchain') oshelpers.Mkdir(['-p', os.path.join(options.sdk, 'toolchain')]) args = [os.path.abspath(a) for a in args] old_path = os.getcwd() 
os.chdir(untar_path) for arg in args: if options.verbose: print 'Open: ' + arg tar = cygtar.CygTar(arg, 'r', verbose=options.verbose) if options.verbose: print 'Extract' tar.Extract() os.chdir(old_path) if options.verbose: print 'Move: %s to %s' % (tool_path, final_path) oshelpers.Move([tool_path, final_path]) if options.verbose: print 'Stamp: ' + stamp_path fh = open(stamp_path, 'w') fh.write(args[0] + '\n') fh.close() if options.verbose: print 'Done.' return 0
new_hashes = [] for filepath in filepaths: new_hashes.append(pynacl.download_utils.HashFile(filepath)) untar_dir = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=toolchain_dir) try: if verbose: tar_file = os.path.basename(filepath) rel_dest = os.path.relpath(dst, toolchain_dir) print '%s: Extracting "%s" -> "%s"...' % (flavor_name, tar_file, rel_dest) for filepath in filepaths: tar = cygtar.CygTar(filepath, 'r:*', verbose=verbose) curdir = os.getcwd() os.chdir(untar_dir) try: tar.Extract() tar.Close() finally: os.chdir(curdir) if not keep: os.remove(filepath) # TODO(bradnelson_): get rid of this when toolchain tarballs flattened. if 'bionic' in flavor: src = os.path.join(untar_dir, flavor) elif isinstance(flavor, tuple) or 'arm' in flavor or 'pnacl' in flavor:
def ExtractArchives(self, archives, extract_dir, rename_from_dir,
                    rename_to_dir):
  """Extract cached archives into a staging dir, then rename into place.

  Each archive (found in self.archive_cache) is extracted into
  |extract_dir| under self.install_dir, so a failed extraction does not
  destroy an already-installed bundle.  On win32, cygtar is used; on other
  platforms the system ``tar`` is invoked.  Only after all archives
  extract successfully is |rename_from_dir| renamed to |rename_to_dir|.
  The staging directory is always removed afterwards (best effort).

  Args:
    archives: list of archive file names, relative to self.archive_cache.
    extract_dir: staging directory name, relative to self.install_dir.
    rename_from_dir: directory to rename, relative to self.install_dir.
    rename_to_dir: rename target, relative to self.install_dir.

  Raises:
    Error: if the chdir fails, an archive can't be opened (win32), or the
      system tar exits non-zero (non-win32).
  """
  tar_file = None
  extract_path = os.path.join(self.install_dir, extract_dir)
  rename_from_path = os.path.join(self.install_dir, rename_from_dir)
  rename_to_path = os.path.join(self.install_dir, rename_to_dir)
  # Extract to extract_dir, usually "<bundle name>_update".
  # This way if the extraction fails, we haven't blown away the old bundle
  # (if it exists).
  sdk_update_common.RemoveDir(extract_path)
  sdk_update_common.MakeDirs(extract_path)
  curpath = os.getcwd()
  tar_file = None
  try:
    try:
      logging.info('Changing the directory to %s' % (extract_path, ))
      os.chdir(extract_path)
    except Exception as e:
      raise Error('Unable to chdir into "%s".\n %s' % (extract_path, e))
    for i, archive in enumerate(archives):
      archive_path = os.path.join(self.archive_cache, archive)
      if len(archives) > 1:
        # Progress indicator for multi-archive bundles.
        print '(file %d/%d - "%s")' % (
            i + 1, len(archives), os.path.basename(archive_path))
      logging.info('Extracting to %s' % (extract_path, ))
      if sys.platform == 'win32':
        try:
          logging.info('Opening file %s (%d/%d).' % (archive_path, i + 1,
                                                     len(archives)))
          try:
            tar_file = cygtar.CygTar(archive_path, 'r', verbose=True)
          except Exception as e:
            raise Error("Can't open archive '%s'.\n %s" % (archive_path, e))
          tar_file.Extract()
        finally:
          # Close the tar even if extraction raised.
          if tar_file:
            tar_file.Close()
      else:
        try:
          subprocess.check_call(['tar', 'xf', archive_path])
        except subprocess.CalledProcessError:
          raise Error('Error extracting archive: %s' % archive_path)
    logging.info('Changing the directory to %s' % (curpath, ))
    os.chdir(curpath)
    logging.info('Renaming %s->%s' % (rename_from_path, rename_to_path))
    sdk_update_common.RenameDir(rename_from_path, rename_to_path)
  finally:
    # Change the directory back so we can remove the update directory.
    os.chdir(curpath)
    # Clean up the ..._update directory.
    try:
      sdk_update_common.RemoveDir(extract_path)
    except Exception as e:
      # Best effort only: log and continue rather than mask an earlier
      # exception from the try block.
      logging.error('Failed to remove directory \"%s\". %s' % (extract_path,
                                                               e))
def ExtractPackageTargets(package_target_packages, tar_dir, dest_dir,
                          downloader=None, skip_missing=False, quiet=False):
  """Extracts package targets from the tar directory to the destination.

  Each package archive within a package will be verified before being
  extracted. If a package archive does not exist or does not match the hash
  stored within the package file, it will be re-downloaded before being
  extracted.

  Args:
    package_target_packages: List of tuples of package target and package
      names.
    tar_dir: Source tar directory where package archives live.
    dest_dir: Root destination directory where packages will be extracted to.
    downloader: function which takes a url and a file path for downloading.
      Defaults to pynacl.gsd_storage.HttpDownload when None.
    skip_missing: when True, archives with no hash and no URL are skipped
      instead of raising IOError.
    quiet: when True, suppresses per-file tar extraction output.

  Raises:
    IOError: if an archive is invalid with no URL, or a downloaded archive
      does not match its expected hash.
  """
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload

  for package_target, package_name in package_target_packages:
    package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                         package_target,
                                                         package_name)
    package_desc = package_info.PackageInfo(package_file,
                                            skip_missing=skip_missing)
    dest_package_dir = package_locations.GetFullDestDir(dest_dir,
                                                        package_target,
                                                        package_name)
    dest_package_file = package_locations.GetDestPackageFile(dest_dir,
                                                             package_target,
                                                             package_name)

    # Only do the extraction if the extract packages do not match.
    if os.path.isfile(dest_package_file):
      try:
        dest_package_desc = package_info.PackageInfo(dest_package_file)
        if dest_package_desc == package_desc:
          logging.debug('Skipping extraction for package (%s)', package_name)
          continue
      except:
        # Destination package file cannot be trusted, if invalid re-extract.
        pass
      # Delete the old package file before we extract.
      os.unlink(dest_package_file)

    if os.path.isdir(dest_package_dir):
      logging.debug('Deleting old package directory: %s', dest_package_dir)
      pynacl.file_tools.RemoveDir(dest_package_dir)

    logging.info('Extracting package (%s) to directory: %s',
                 package_name, dest_package_dir)
    archive_list = package_desc.GetArchiveList()
    num_archives = len(archive_list)
    for index, archive_obj in enumerate(archive_list):
      archive_desc = archive_obj.GetArchiveData()
      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir, package_target, package_name, archive_desc.name)

      # Upon extraction, some files may not be downloaded (or have stale
      # files), we need to check the hash of each file and attempt to
      # download it if they do not match.
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash != archive_desc.hash:
        if archive_desc.url is None:
          if skip_missing:
            logging.info('Skipping extraction of missing archive: %s' %
                         archive_file)
            continue
          raise IOError('Invalid archive file and URL: %s' % archive_file)

        logging.warn('Expected archive missing, downloading: %s',
                     archive_desc.name)
        pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
        downloader(archive_desc.url, archive_file)
        # Re-verify after download; a mismatch here is fatal.
        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash != archive_desc.hash:
          raise IOError('Downloaded archive file does not match hash.'
                        ' [%s] Expected %s, received %s.'
                        % (archive_file, archive_desc.hash, archive_hash))

      destination_dir = os.path.join(dest_package_dir,
                                     archive_desc.extract_dir)
      logging.info('Extracting %s (%d/%d)' %
                   (archive_desc.name, index+1, num_archives))

      # Extract into a temporary subdirectory, then merge into the real
      # destination, so a partial extraction never pollutes it.
      temp_dir = os.path.join(destination_dir, '.tmp')
      pynacl.file_tools.RemoveDir(temp_dir)
      os.makedirs(temp_dir)
      tar_output = not quiet
      tar = cygtar.CygTar(archive_file, 'r:*', verbose=tar_output)
      curdir = os.getcwd()
      os.chdir(temp_dir)
      try:
        tar.Extract()
        tar.Close()
      finally:
        # Always restore the working directory.
        os.chdir(curdir)
      temp_src_dir = os.path.join(temp_dir, archive_desc.tar_src_dir)
      pynacl.file_tools.MoveAndMergeDirTree(temp_src_dir, destination_dir)
      pynacl.file_tools.RemoveDir(temp_dir)

    # Record the extracted package description so future runs can skip
    # re-extraction when nothing changed.
    pynacl.file_tools.MakeParentDirectoryIfAbsent(dest_package_file)
    package_desc.SavePackageFile(dest_package_file)