def DetermineSDKURL(flavor, base_url, version):
  """Determine the URL (and version) of an SDK toolchain tarball.

  Arguments:
    flavor: flavor of the sdk to download
    base_url: base url to download toolchain tarballs from
    version: version directory to select tarballs from
  Returns:
    A tuple of the URL and version number.
  """
  # On the buildbots use the well-known bot gsutil location; locally fall
  # back to the checked-in copy (with a .bat wrapper on Windows).
  on_bot = (os.environ.get('BUILDBOT_BUILDERNAME') and
            not os.environ.get('TEST_BUILDBOT'))
  if on_bot:
    gsutil = BOT_GSUTIL
    if not os.path.exists(gsutil):
      raise naclports.Error('gsutil not found at: %s' % gsutil)
  else:
    gsutil = LOCAL_GSUTIL
    if sys.platform in ['win32', 'cygwin']:
      gsutil += '.bat'

  tarball = flavor + '.tar.bz2'

  def GSList(gs_path):
    """Run gsutil 'ls' on a path and return just the basenames of the
    elements within.
    """
    cmd = [gsutil, 'ls', base_url + gs_path]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    output = proc.communicate()[0]
    if proc.returncode:
      raise naclports.Error('gsutil command failed: %s' % str(cmd))
    return [os.path.basename(os.path.normpath(elem))
            for elem in output.splitlines()]

  if version == 'latest':
    print('Looking for latest SDK upload...')
    # Walk backwards through the trunk revisions at the top level of the
    # nacl_sdk folder, most recent first.  Only the last HISTORY_SIZE
    # revisions are examined so this script doesn't take forever.
    trunk_dirs = sorted(
        (v for v in GSList('') if v.startswith('trunk')), reverse=True)
    for version_dir in trunk_dirs[:HISTORY_SIZE]:
      if tarball in GSList(version_dir):
        version = version_dir.rsplit('.', 1)[1]
        break
    else:
      raise naclports.Error(
          'No SDK build (%s) found in last %d trunk builds' %
          (tarball, HISTORY_SIZE))

  version = int(version)
  return ('%strunk.%d/%s' % (GSTORE, version, tarball), version)
def __init__(self, arch=None, toolchain=None, debug=None):
  """Initialize the build configuration.

  Arguments:
    arch: target architecture; falls back to $NACL_ARCH, then a default
        derived from the toolchain.
    toolchain: toolchain name; falls back to $TOOLCHAIN, then
        self.default_toolchain.
    debug: debug-configuration flag, forwarded to SetConfig().

  Raises:
    naclports.Error: if the resulting arch is not a known architecture.
  """
  self.SetConfig(debug)

  # Environment variables supply defaults when arguments are omitted.
  if arch is None:
    arch = os.environ.get('NACL_ARCH')
  if toolchain is None:
    toolchain = os.environ.get('TOOLCHAIN')

  # The pnacl architecture implies the pnacl toolchain and vice versa.
  if not toolchain:
    toolchain = 'pnacl' if arch == 'pnacl' else self.default_toolchain
  self.toolchain = toolchain

  if not arch:
    if self.toolchain == 'pnacl':
      arch = 'pnacl'
    elif self.toolchain == 'bionic':
      arch = 'arm'
    else:
      arch = self.default_arch
  self.arch = arch

  if self.arch not in naclports.arch_to_pkgarch:
    raise naclports.Error("Invalid arch: %s" % arch)

  self.SetLibc()
def FindCygwin():
  """Return the path of a cygwin install, checking the usual locations.

  Raises:
    naclports.Error: if no cygwin installation is found.
  """
  for candidate in (r'\cygwin', r'C:\cygwin'):
    if os.path.exists(candidate):
      return candidate
  raise naclports.Error(r'failed to find cygwin in \cygwin or c:\cygwin')
def main(args):
  """Scan packages built on GS for a revision and write the prebuilt index.

  Arguments:
    args: command-line arguments (without the program name).
  Returns:
    0 on success, 1 if the gsutil listing fails.
  """
  parser = argparse.ArgumentParser(description=__doc__)
  # Fixed doubled word in help text ('to to scan').
  parser.add_argument('revision', metavar='REVISION',
                      help='naclports revision to scan for.')
  parser.add_argument('-v', '--verbose', action='store_true',
                      help='Output extra information.')
  parser.add_argument('-l', '--cache-listing', action='store_true',
                      help='Cached output of gsutil -le (for testing).')
  parser.add_argument(
      '--skip-md5', action='store_true',
      help='Assume on-disk files are up-to-date (for testing).')
  args = parser.parse_args(args)
  if args.verbose:
    naclports.SetVerbose(True)

  sdk_version = naclports.util.GetSDKVersion()
  # Fixed typo in log message ('revsion').
  Log('Scanning packages built for pepper_%s at revision %s' %
      (sdk_version, args.revision))
  base_path = '%s/builds/pepper_%s/%s/packages' % (
      naclports.GS_BUCKET, sdk_version, args.revision)
  gs_url = 'gs://' + base_path
  # gsutil.py ships with depot_tools, which should be in PATH.
  gsutil = naclports.util.FindInPath('gsutil.py')

  listing_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib', 'listing.txt')
  if args.cache_listing and os.path.exists(listing_file):
    Log('Using pre-cached gs listing: %s' % listing_file)
    with open(listing_file) as f:
      listing = f.read()
  else:
    Log('Searching for packages at: %s' % gs_url)
    cmd = [sys.executable, gsutil, 'ls', '-le', gs_url]
    LogVerbose('Running: %s' % str(cmd))
    try:
      listing = subprocess.check_output(cmd)
    except subprocess.CalledProcessError as e:
      # NOTE(review): naclports.Error is raised as an exception elsewhere in
      # this file; constructing it here without raising appears to discard
      # the error.  Preserved as-is -- confirm intent.
      naclports.Error(e)
      return 1

  all_files = ParseGsUtilLs(listing)
  # Persist the listing for future cached runs.
  if args.cache_listing and not os.path.exists(listing_file):
    with open(listing_file, 'w') as f:
      f.write(listing)

  Log('Found %d packages [%s]' %
      (len(all_files), FormatSize(sum(f.size for f in all_files))))

  binaries = DownloadFiles(all_files, not args.skip_md5)
  index_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib', 'prebuilt.txt')
  Log('Generating %s' % index_file)
  naclports.package_index.WriteIndex(index_file, binaries)
  Log('Done')
  return 0
def main(argv):
  """Install the Emscripten SDK plus a matching node.js binary.

  Returns:
    0 on success, 1 on unsupported platforms.
  """
  if sys.platform in ['win32', 'cygwin']:
    # NOTE(review): naclports.Error is raised as an exception elsewhere in
    # this file; here it is constructed without being raised.  Preserved
    # verbatim -- confirm whether a log call was intended.
    naclports.Error(
        'Emscripten support is currently not available on Windows.')
    return 1

  DownloadAndExtract(EMSDK_URL, EMSDK_SHA1, 'emsdk_portable')
  DownloadAndExtract(NODEJS_URL, NODEJS_SHA1, 'node-v0.12.1-linux-x64')
  naclports.Log('Emscripten SDK Install complete')
  return 0
def Untar(bz2_filename):
  """Extract a .tar.bz2 archive into the current directory.

  On Windows the bundled cygtar module is used; elsewhere the system tar.

  Raises:
    naclports.Error: if extraction fails.
  """
  if sys.platform == 'win32':
    tar_file = None
    try:
      naclports.Log('Unpacking tarball...')
      tar_file = cygtar.CygTar(bz2_filename, 'r:bz2')
      tar_file.Extract()
    except Exception as err:
      raise naclports.Error('Error unpacking %s' % str(err))
    finally:
      # The source was truncated after 'finally:' (a syntax error);
      # restored the cleanup to match the complete copy of this same
      # function that appears elsewhere in this file.
      if tar_file:
        tar_file.Close()
  else:
    if subprocess.call(['tar', 'jxf', bz2_filename]):
      raise naclports.Error('Error unpacking')
def GSList(path):
  """Run gsutil 'ls' on a path and return just the basenames of the
  elements within.
  """
  cmd = gsutil + ['ls', base_url + path]
  proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
  output = proc.communicate()[0]
  # A non-zero exit code from gsutil is fatal.
  if proc.returncode:
    raise naclports.Error('gsutil command failed: %s' % str(cmd))
  return [os.path.basename(os.path.normpath(elem))
          for elem in output.splitlines()]
def Download(self, package_name, config):
  """Download the prebuilt binary for a package and return its filename.

  Downloads are cached under the 'prebuilt' directory; an existing file
  whose SHA1 still matches is reused without re-downloading.

  Raises:
    naclports.Error: if the downloaded file fails SHA1 verification.
  """
  download_root = os.path.join(package.PACKAGES_ROOT, 'prebuilt')
  if not os.path.exists(download_root):
    os.makedirs(download_root)

  info = self.packages[(package_name, config)]
  filename = os.path.join(download_root, os.path.basename(info['BIN_URL']))

  # Reuse a previously downloaded file if its hash still matches.
  if os.path.exists(filename) and VerifyHash(filename, info['BIN_SHA1']):
    return filename

  naclports.Log('Downloading prebuilt binary ...')
  naclports.DownloadFile(filename, info['BIN_URL'])
  if not VerifyHash(filename, info['BIN_SHA1']):
    raise naclports.Error('Unexepected SHA1: %s' % filename)
  return filename
def DetermineSDKURL(flavor, base_url, version):
  """Determine the URL of an SDK toolchain tarball.

  Arguments:
    flavor: flavor of the sdk to download
    base_url: base url to download toolchain tarballs from
    version: version directory to select tarballs from
  Returns:
    The URL of the SDK archive
  """
  # gsutil.py ships with depot_tools, which should be in PATH
  gsutil = [sys.executable, naclports.util.FindInPath('gsutil.py')]
  tarball = flavor + '.tar.bz2'

  def GSList(gs_path):
    """List a GS path with gsutil, returning the basenames of its entries."""
    cmd = gsutil + ['ls', base_url + gs_path]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    output = proc.communicate()[0]
    if proc.returncode:
      raise naclports.Error('gsutil command failed: %s' % str(cmd))
    return [os.path.basename(os.path.normpath(elem))
            for elem in output.splitlines()]

  if version == 'latest':
    naclports.Log('Looking for latest SDK build...')
    # Walk backwards through the trunk revisions at the top level of the
    # nacl_sdk folder, most recent first, limiting the search to
    # HISTORY_SIZE revisions so this doesn't take forever.
    trunk_dirs = sorted(
        (v for v in GSList('') if v.startswith('trunk')), reverse=True)
    for version_dir in trunk_dirs[:HISTORY_SIZE]:
      if tarball in GSList(version_dir):
        version = version_dir.rsplit('.', 1)[1]
        break
    else:
      raise naclports.Error(
          'No SDK build (%s) found in last %d trunk builds' %
          (tarball, HISTORY_SIZE))

  return '%strunk.%s/%s' % (GSTORE, version, tarball)
def main(args):
  """Scan packages built on GS for a revision and write the prebuilt index.

  Arguments:
    args: command-line arguments (without the program name).
  Returns:
    0 on success, 1 if the gsutil listing fails.
  """
  # '%prog' is the token optparse substitutes with the program name; the
  # original '%proc' was a typo and would have appeared literally in --help.
  usage = 'Usage: %prog [options] <revision>'
  parser = optparse.OptionParser(description=__doc__, usage=usage)
  parser.add_option('-v', '--verbose', action='store_true',
                    help='Output extra information.')
  parser.add_option('-l', '--cache-listing', action='store_true',
                    help='Cached output of gsutil -le (for testing).')
  parser.add_option('--skip-md5', action='store_true',
                    help='Assume on-disk files are up-to-date (for testing).')
  options, args = parser.parse_args(args)
  if options.verbose:
    naclports.verbose = True
  if len(args) != 1:
    parser.error('Expected exactly one argument. See --help.')
  ports_revision = args[0]

  sdk_version = naclports.GetSDKVersion()
  # Fixed typo in log message ('revsion').
  Log('Scanning packages built for pepper_%s at revision %s' %
      (sdk_version, ports_revision))
  base_path = '%s/builds/pepper_%s/%s/packages' % (naclports.GS_BUCKET,
                                                   sdk_version,
                                                   ports_revision)
  gs_url = 'gs://' + base_path

  listing_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib', 'listing.txt')
  if options.cache_listing and os.path.exists(listing_file):
    Log('Using pre-cached gs listing: %s' % listing_file)
    with open(listing_file) as f:
      listing = f.read()
  else:
    try:
      listing = subprocess.check_output(['gsutil', 'ls', '-le', gs_url])
    except subprocess.CalledProcessError as e:
      # NOTE(review): naclports.Error is raised as an exception elsewhere in
      # this file; constructing it here without raising appears to discard
      # the error.  Preserved as-is -- confirm intent.
      naclports.Error(e)
      return 1

  all_files = ParseGsUtilLs(listing)
  # Persist the listing for future cached runs.
  if options.cache_listing and not os.path.exists(listing_file):
    with open(listing_file, 'w') as f:
      f.write(listing)

  Log('Found %d packages [%s]' %
      (len(all_files), FormatSize(sum(f.size for f in all_files))))

  binaries = DownloadFiles(all_files, not options.skip_md5)
  index_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib', 'prebuilt.txt')
  Log('Generating %s' % index_file)
  naclports.package_index.WriteIndex(index_file, binaries)
  Log('Done')
  return 0
def __init__(self, filename):
  """Load and validate a binary package file.

  Arguments:
    filename: path to a .tar.bz2 binary package.

  Raises:
    naclports.Error: if the file is missing or has the wrong extension.
    naclports.PkgFormatError: if the tarball is malformed or lacks the
        pkg_info file.
  """
  self.filename = filename
  if not os.path.exists(self.filename):
    raise naclports.Error('package not found: %s' % self.filename)

  basename, extension = os.path.splitext(os.path.basename(filename))
  basename = os.path.splitext(basename)[0]
  if extension != '.bz2':
    raise naclports.Error('invalid file extension: %s' % extension)

  try:
    with tarfile.open(self.filename) as tar:
      if './pkg_info' not in tar.getnames():
        raise PkgFormatError('package does not contain pkg_info file')
      # Every key/value pair in pkg_info becomes an attribute on self.
      pkg_info_file = tar.extractfile('./pkg_info')
      attrs = naclports.ParsePkgInfo(pkg_info_file.read(), filename,
                                     VALID_KEYS, REQUIRED_KEYS)
      for key, value in attrs.iteritems():
        setattr(self, key, value)
  except tarfile.TarError as e:
    raise naclports.PkgFormatError(e)

  self.config = configuration.Configuration(self.BUILD_ARCH,
                                            self.BUILD_TOOLCHAIN,
                                            self.BUILD_CONFIG == 'debug')
def ParseIndex(self, index_data):
  """Parse package-index data into self.packages keyed by (name, config)."""
  if not index_data:
    return
  # Entries in the index are separated by blank lines.
  for pkg_info in index_data.split('\n\n'):
    info = naclports.ParsePkgInfo(pkg_info, self.filename, VALID_KEYS,
                                  EXTRA_KEYS)
    config = configuration.Configuration(info['BUILD_ARCH'],
                                         info['BUILD_TOOLCHAIN'],
                                         info['BUILD_CONFIG'] == 'debug')
    key = (info['NAME'], config)
    if key in self.packages:
      # NOTE(review): naclports.Error is raised as an exception elsewhere in
      # this file; constructing it without raising means duplicates are
      # effectively ignored here.  Preserved as-is -- confirm intent.
      naclports.Error('package index contains duplicate: %s' % str(key))
    self.packages[key] = info
def DownloadAndExtract(url, sha1, target_dir):
  """Download (via the cache) a tarball and extract it into OUT_DIR.

  Arguments:
    url: URL of the archive to fetch.
    sha1: expected SHA1 of the downloaded file.
    target_dir: directory name the archive extracts to; any existing copy
        is removed first.

  Raises:
    naclports.Error: if extraction fails.
  """
  tar_file = DownloadToCache(url, sha1)

  if not os.path.exists(OUT_DIR):
    os.makedirs(OUT_DIR)
  os.chdir(OUT_DIR)

  # Remove previously extracted archive
  if os.path.exists(target_dir):
    naclports.Log('Cleaning up existing %s...' % target_dir)
    subprocess.check_call(['rm', '-rf', target_dir])

  # Extract archive.  Fixed typo in log message ('Exctacting').
  naclports.Log('Extracting %s...' % os.path.basename(tar_file))
  if subprocess.call(['tar', 'xf', tar_file]):
    raise naclports.Error('Error unpacking Emscripten SDK')
def DownloadFiles(files, check_hashes=True):
  """Download one or more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False assume local files have the correct hash
        otherwise always check the hashes match the ones in the FileInfo
        objects.

  Returns:
    List of (filename, url) tuples.
  """
  to_fetch = []
  results = []
  download_dir = naclports.package_index.PREBUILT_ROOT
  if not os.path.exists(download_dir):
    os.makedirs(download_dir)

  for file_info in files:
    local_name = os.path.join(download_dir, os.path.basename(file_info.url))
    results.append((local_name, file_info.url))
    # Skip files that are already present (optionally verifying the hash).
    if os.path.exists(local_name):
      if not check_hashes or CheckHash(local_name, file_info.etag):
        Log('Up-to-date: %s' % file_info.name)
        continue
    to_fetch.append(FileInfo(local_name, file_info.size, file_info.url,
                             file_info.etag))

  if not to_fetch:
    Log('All files up-to-date')
  else:
    total_size = sum(f[1] for f in to_fetch)
    Log('Need to download %d/%d files [%s]' %
        (len(to_fetch), len(files), FormatSize(total_size)))
    for file_info in to_fetch:
      naclports.DownloadFile(file_info.name, file_info.url)
      if check_hashes and not CheckHash(file_info.name, file_info.etag):
        raise naclports.Error('Checksum failed: %s' % file_info.name)

  return results
def main(args):
  """Download every published package for a given naclports revision.

  Arguments:
    args: command-line arguments (without the program name).
  Returns:
    0 on success.
  Raises:
    naclports.Error: if a gsutil invocation fails.
  """
  parser = argparse.ArgumentParser(description=__doc__)
  # Fixed doubled word in help text ('to to scan').
  parser.add_argument('revision', metavar='REVISION',
                      help='naclports revision to scan for.')
  parser.add_argument('-v', '--verbose', action='store_true',
                      help='Output extra information.')
  parser.add_argument('-p', '--parallel', action='store_true',
                      help='Download packages in parallel.')
  parser.add_argument('-l', '--cache-listing', action='store_true',
                      help='Cached output of gsutil -L (for testing).')
  parser.add_argument(
      '--skip-md5', action='store_true',
      help='Assume on-disk files are up-to-date (for testing).')
  args = parser.parse_args(args)
  if args.verbose:
    naclports.SetVerbose(True)

  sdk_version = naclports.util.GetSDKVersion()
  # Fixed typo in log message ('revsion').
  Log('Scanning packages built for pepper_%s at revision %s' %
      (sdk_version, args.revision))
  base_path = '%s/builds/pepper_%s/%s/publish' % (naclports.GS_BUCKET,
                                                  sdk_version, args.revision)
  gs_base_url = 'gs://' + base_path
  cmd = FindGsutil() + ['ls', gs_base_url]
  LogVerbose('Running: %s' % str(cmd))
  try:
    all_published = subprocess.check_output(cmd)
  except subprocess.CalledProcessError as e:
    raise naclports.Error("Command '%s' failed: %s" % (cmd, e))

  # Each published package lives in a pkg_* directory.
  pkg_dir = re.findall(r'pkg_[\w-]+', all_published)
  for pkg in pkg_dir:
    listing_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib',
                                pkg + '_' + 'listing.txt')
    if args.cache_listing and os.path.exists(listing_file):
      Log('Using pre-cached gs listing: %s' % listing_file)
      with open(listing_file) as f:
        listing = f.read()
    else:
      gs_url = gs_base_url + '/' + pkg + '/*'
      Log('Searching for packages at: %s' % gs_url)
      cmd = FindGsutil() + ['stat', gs_url]
      LogVerbose('Running: %s' % str(cmd))
      try:
        listing = subprocess.check_output(cmd)
      except subprocess.CalledProcessError as e:
        raise naclports.Error("Command '%s' failed: %s" % (cmd, e))
      if args.cache_listing:
        with open(listing_file, 'w') as f:
          f.write(listing)

    all_files = ParseGsUtilOutput(listing)
    Log('Found %d packages [%s] for %s' %
        (len(all_files), FormatSize(sum(f.size for f in all_files)), pkg))
    # Placed inside the loop: it consumes the per-package loop variables.
    DownloadFiles(pkg, all_files, not args.skip_md5, args.parallel)

  Log('Done')
  return 0
def Check(file_info):
  """Verify a downloaded file's MD5, raising naclports.Error on mismatch.

  Verification is skipped entirely when hash checking is disabled.
  """
  if not check_hashes:
    return
  if CheckHash(file_info.name, file_info.md5):
    return
  raise naclports.Error(
      'Checksum failed: %s\nExpected=%s\nActual=%s' %
      (file_info.name, file_info.md5, GetHash(file_info.name)))
def Untar(bz2_filename):
  """Extract a .tar.bz2 archive into the current directory.

  Uses the bundled cygtar module on Windows and the system tar elsewhere.

  Raises:
    naclports.Error: if extraction fails.
  """
  if sys.platform == 'win32':
    tar_file = None
    try:
      naclports.Log('Unpacking tarball...')
      tar_file = cygtar.CygTar(bz2_filename, 'r:bz2')
      tar_file.Extract()
    except Exception as err:
      raise naclports.Error('Error unpacking %s' % str(err))
    finally:
      if tar_file:
        tar_file.Close()
  else:
    if subprocess.call(['tar', 'jxf', bz2_filename]):
      raise naclports.Error('Error unpacking')


def FindCygwin():
  """Return the path of a cygwin install, checking the usual locations.

  Raises:
    naclports.Error: if no cygwin installation is found.
  """
  for candidate in (r'\cygwin', r'C:\cygwin'):
    if os.path.exists(candidate):
      return candidate
  raise naclports.Error(r'failed to find cygwin in \cygwin or c:\cygwin')


def DownloadAndInstallSDK(url, target_dir):
  # NOTE(review): this definition appears truncated in the visible source;
  # only the directory-creation prologue is shown.  Reproduced as-is.
  bz2_dir = OUT_DIR
  if not os.path.exists(bz2_dir):
    os.makedirs(bz2_dir)