def main(argv):
    """Download and install the requested NaCl SDK, skipping when current.

    A stamp file recording the installed URL is kept in TARGET_DIR; if it
    matches the resolved URL the install is skipped entirely.

    Returns:
      0 on success.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-v', '--version', default='latest',
                        help='which version of the SDK to download')
    options = parser.parse_args(argv)

    flavor = 'naclsdk_' + PLATFORM_COLLAPSE[sys.platform]
    url = determine_sdk_url(flavor, base_url=GS_URL_BASE,
                            version=options.version)

    stamp_file = os.path.join(TARGET_DIR, 'stamp')
    installed_url = None
    if os.path.exists(stamp_file):
        with open(stamp_file) as f:
            installed_url = f.read().strip()

    if installed_url is not None:
        if installed_url == url:
            webports.log('SDK already installed: %s' % url)
            return 0
        webports.log('Ignoring currently installed SDK: %s' % installed_url)

    download_and_install_sdk(url, TARGET_DIR)
    # Record what we installed so the next run can short-circuit.
    with open(stamp_file, 'w') as f:
        f.write(url + '\n')
    return 0
def gs_upload(options, filename, url):
    """Upload a file to Google cloud storage using gsutil"""
    webports.log("Uploading to mirror: %s" % url)
    # Public-read so mirror downloads need no credentials.
    upload_cmd = options.gsutil + ['cp', '-a', 'public-read', filename, url]
    if options.dry_run:
        # Dry run: show the command instead of executing it.
        webports.log(upload_cmd)
    else:
        subprocess.check_call(upload_cmd)
def untar(bz2_filename):
    """Extract a .tar.bz2 archive into the current directory.

    On Windows the system 'tar' is unavailable, so cygtar is used instead;
    the archive handle is closed in the ``finally`` even when extraction
    fails.

    Raises:
      webports.Error: if extraction fails.
    """
    if sys.platform == 'win32':
        tar_file = None
        try:
            webports.log('Unpacking tarball...')
            tar_file = cygtar.CygTar(bz2_filename, 'r:bz2')
            tar_file.extract()
        # Bug fix: 'except Exception, err' is Python-2-only syntax and a
        # SyntaxError under Python 3.
        except Exception as err:
            raise webports.Error('Error unpacking %s' % str(err))
        finally:
            # The original 'finally:' body was truncated; restored from the
            # complete duplicate of this function elsewhere in the file.
            if tar_file:
                tar_file.Close()
    else:
        if subprocess.call(['tar', 'jxf', bz2_filename]):
            raise webports.Error('Error unpacking')
def check_packages(options, source_packages, mirror_listing):
    """Run check_mirror over each source package.

    Arguments:
      options: parsed command-line options (check / dry_run / gsutil).
      source_packages: iterable of source packages to verify.
      mirror_listing: listing of files currently on the mirror.

    Returns:
      0 on success (check_mirror exits the process on a failed check).
    """
    count = 0
    for package in source_packages:
        check_mirror(options, package, mirror_listing)
        count += 1
    if options.check:
        # Fixed typo in log message: 'Verfied' -> 'Verified'.
        webports.log("Verified mirroring for %d packages" % count)
    return 0
def main(argv):
    """Install the Emscripten SDK and a matching node binary.

    Returns:
      0 on success, 1 on unsupported platform.
    """
    if sys.platform in ['win32', 'cygwin']:
        # Bug fix: the original constructed a webports.Error instance without
        # raising or logging it, so the message was silently discarded and the
        # script exited with no explanation. Log it instead, keeping the
        # non-zero return code callers rely on.
        webports.log('Emscripten support is currently not available on Windows.')
        return 1
    download_and_extract(EMSDK_URL, EMSDK_SHA1, 'emsdk')
    download_and_extract(NODE_URL, NODE_SHA1, 'node-v0.12.1-linux-x64', 'node')
    build_optimizer()
    webports.log('Emscripten SDK Install complete')
    return 0
def determine_sdk_url(flavor, base_url, version):
    """Resolve the download URL of one Native Client SDK archive.

    Arguments:
      flavor: flavor of the sdk to download
      base_url: base url to download toolchain tarballs from
      version: version directory to select tarballs from
    Returns:
      The URL of the SDK archive
    """
    # gsutil.py ships with depot_tools, which should be in PATH
    gsutil = [sys.executable, webports.util.find_in_path('gsutil.py')]
    path = flavor + '.tar.bz2'

    def gs_list(subpath):
        """Return the basenames of the gs:// entries under subpath."""
        cmd = gsutil + ['ls', base_url + subpath]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        stdout = proc.communicate()[0]
        if proc.returncode:
            raise webports.Error('gsutil command failed: %s' % str(cmd))
        return [os.path.basename(os.path.normpath(line))
                for line in stdout.splitlines()]

    if version == 'latest':
        webports.log('Looking for latest SDK build...')
        # Only trunk revisions at the top level of the nacl_sdk folder count.
        trunk_dirs = [d for d in gs_list('') if d.startswith('trunk')]
        # Walk backwards through at most HISTORY_SIZE revisions so this
        # script doesn't take forever.
        for version_dir in sorted(trunk_dirs, reverse=True)[:HISTORY_SIZE]:
            if path in gs_list(version_dir):
                version = version_dir.rsplit('.', 1)[1]
                break
        else:
            raise webports.Error(
                'No SDK build (%s) found in last %d trunk builds' %
                (path, HISTORY_SIZE))
    return '%strunk.%s/%s' % (GSTORE, version, path)
def determine_sdk_url(flavor, base_url, version):
    """Work out which SDK tarball URL to fetch for the given flavor.

    Arguments:
      flavor: flavor of the sdk to download
      base_url: base url to download toolchain tarballs from
      version: version directory to select tarballs from
    Returns:
      The URL of the SDK archive
    """
    # gsutil.py ships with depot_tools, which should be in PATH
    gsutil = [sys.executable, webports.util.find_in_path('gsutil.py')]
    path = flavor + '.tar.bz2'

    def gs_list(path):
        """Run gsutil 'ls' on a path and return the basenames of the
        elements within.
        """
        cmd = gsutil + ['ls', base_url + path]
        child = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        out = child.communicate()[0]
        if child.returncode:
            raise webports.Error('gsutil command failed: %s' % str(cmd))
        names = []
        for element in out.splitlines():
            names.append(os.path.basename(os.path.normpath(element)))
        return names

    if version == 'latest':
        webports.log('Looking for latest SDK build...')
        # List the top level of the nacl_sdk folder, keeping trunk revisions
        # only, newest first.
        candidates = sorted(
            (v for v in gs_list('') if v.startswith('trunk')), reverse=True)
        # Only look back HISTORY_SIZE revisions so this script doesn't take
        # forever.
        found = None
        for version_dir in candidates[:HISTORY_SIZE]:
            if path in gs_list(version_dir):
                found = version_dir.rsplit('.', 1)[1]
                break
        if found is None:
            raise webports.Error('No SDK build (%s) found in last %d trunk builds' %
                                 (path, HISTORY_SIZE))
        version = found
    return '%strunk.%s/%s' % (GSTORE, version, path)
def main(args):
    """Download every source package and verify its checksum.

    Returns:
      0 on success.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Output extra information.')
    options = parser.parse_args(args)
    if options.verbose:
        webports.set_verbose(True)
    count = 0
    for package in webports.source_package.source_package_iterator():
        # download() verifies the archive hash as part of the fetch.
        package.download()
        count += 1
    # Fixed typo in log message: 'Verfied' -> 'Verified'.
    webports.log("Verified checksums for %d packages" % count)
    return 0
def untar(bz2_filename):
    """Unpack a bzip2 tarball into the current working directory.

    Uses cygtar on Windows (no system tar there); plain 'tar' elsewhere.

    Raises:
      webports.Error: if extraction fails.
    """
    if sys.platform != 'win32':
        if subprocess.call(['tar', 'jxf', bz2_filename]):
            raise webports.Error('Error unpacking')
        return
    archive = None
    try:
        webports.log('Unpacking tarball...')
        archive = cygtar.CygTar(bz2_filename, 'r:bz2')
        archive.extract()
    except Exception as err:
        raise webports.Error('Error unpacking %s' % str(err))
    finally:
        # Close the archive handle even when extraction blew up.
        if archive:
            archive.Close()
def download_to_cache(url, sha1):
    """Download url into the webports cache, reusing a verified cached copy.

    Arguments:
      url: URL to fetch.
      sha1: expected SHA-1 hex digest of the file.
    Returns:
      Absolute path of the verified file in the cache.
    Raises:
      webports.util.HashVerificationError: if a fresh download still does
        not match sha1.
    """
    filename = os.path.basename(url)
    download_dir = webports.paths.CACHE_ROOT
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)
    full_name = os.path.join(download_dir, filename)
    if os.path.exists(full_name):
        try:
            webports.util.verify_hash(full_name, sha1)
            webports.log("Verified cached file: %s" % filename)
            return full_name
        except webports.util.HashVerificationError:
            # Stale/corrupt cache entry: fall through and re-download.
            # Fixed typo in log message: 'mistmatch' -> 'mismatch'.
            webports.log("Hash mismatch on cached download: %s" % filename)
    webports.download_file(full_name, url)
    webports.util.verify_hash(full_name, sha1)
    return full_name
def main(args):
    """Check the declared dependencies of every source package.

    Returns:
      0 on success, 1 if any package has a bad dependency.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Output extra information.')
    options = parser.parse_args(args)
    if options.verbose:
        webports.set_verbose(True)
    count = 0
    # Collect all known package names first so each package's deps can be
    # validated against the full set.
    package_names = [os.path.basename(p.root)
                     for p in webports.source_package.source_package_iterator()]
    for package in webports.source_package.source_package_iterator():
        if not package.check_deps(package_names):
            return 1
        count += 1
    # Fixed typo in log message: 'Verfied' -> 'Verified'.
    webports.log("Verified dependencies for %d packages" % count)
    return 0
def check_mirror(options, package, mirror_listing):
    """Ensure a package's source archive is present on the GCS mirror.

    In --check mode a missing archive is fatal; otherwise the archive is
    downloaded from upstream and uploaded to the mirror.
    """
    webports.log_verbose('Checking %s' % package.NAME)
    basename = package.get_archive_filename()
    if not basename:
        # Package has no archive to mirror.
        return
    if basename in mirror_listing:
        # Already mirrored; nothing to do.
        return
    if options.check:
        webports.log('update_mirror: Archive missing from mirror: %s' % basename)
        sys.exit(1)
    # Fetch from the upstream URL (not the mirror), then push it up.
    package.download(force_mirror=False)
    mirror_url = '%s/%s' % (MIRROR_GS, basename)
    gs_upload(options, package.download_location(), mirror_url)
def main(args):
    """Check the declared dependencies of every source package.

    Returns:
      0 on success, 1 if any package has a bad dependency.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Output extra information.')
    options = parser.parse_args(args)
    if options.verbose:
        webports.set_verbose(True)
    count = 0
    # Gather the full set of package names up front for dependency checks.
    package_names = [
        os.path.basename(p.root)
        for p in webports.source_package.source_package_iterator()
    ]
    for package in webports.source_package.source_package_iterator():
        if not package.check_deps(package_names):
            return 1
        count += 1
    # Fixed typo in log message: 'Verfied' -> 'Verified'.
    webports.log("Verified dependencies for %d packages" % count)
    return 0
def download_and_extract(url, sha1, target_dir, link_name=None):
    """Download an archive into the cache and unpack it under OUT_DIR.

    Arguments:
      url: URL of the tarball to fetch.
      sha1: expected SHA-1 of the download.
      target_dir: directory (relative to OUT_DIR) that the tarball creates;
        removed first if it already exists.
      link_name: optional symlink (relative to OUT_DIR) pointed at target_dir.
    Raises:
      webports.Error: if the tarball fails to unpack.
    """
    tar_file = download_to_cache(url, sha1)
    if not os.path.exists(OUT_DIR):
        os.makedirs(OUT_DIR)
    os.chdir(OUT_DIR)
    # Remove previously extracted archive
    if os.path.exists(target_dir):
        webports.log('Cleaning up existing %s...' % target_dir)
        cmd = ['rm', '-rf']
        cmd.append(target_dir)
        subprocess.check_call(cmd)
    # Extract archive. Fixed typo in log message: 'Exctacting' -> 'Extracting'.
    webports.log('Extracting %s...' % os.path.basename(tar_file))
    if subprocess.call(['tar', 'xf', tar_file]):
        raise webports.Error('Error unpacking Emscripten SDK')
    if link_name:
        # Replace any stale symlink before creating the new one.
        if os.path.exists(link_name):
            os.remove(link_name)
        os.symlink(target_dir, link_name)
def download_and_install_sdk(url, target_dir):
    """Download an SDK tarball, unpack it, and move it into target_dir.

    Arguments:
      url: URL of the SDK .tar.bz2 archive.
      target_dir: final install location; any existing install is removed.
    """
    bz2_dir = OUT_DIR
    if not os.path.exists(bz2_dir):
        os.makedirs(bz2_dir)
    bz2_filename = os.path.join(bz2_dir, url.split('/')[-1])

    if sys.platform in ['win32', 'cygwin']:
        # cygbin already ends in 'bin' (e.g. C:/cygwin/bin).
        cygbin = os.path.join(find_cygwin(), 'bin')

    # Download it.
    webports.download_file(bz2_filename, url)

    # Extract toolchain.
    old_cwd = os.getcwd()
    os.chdir(bz2_dir)
    untar(bz2_filename)
    os.chdir(old_cwd)

    # Calculate pepper_dir by taking common prefix of tar
    # file contents
    tar = tarfile.open(bz2_filename)
    names = tar.getnames()
    tar.close()
    pepper_dir = os.path.commonprefix(names)

    actual_dir = os.path.join(bz2_dir, pepper_dir)

    # Drop old versions.
    if os.path.exists(target_dir):
        webports.log('Cleaning up old SDK...')
        if sys.platform in ['win32', 'cygwin']:
            # Bug fix: cygbin already points at the cygwin 'bin' directory;
            # the original joined 'bin' a second time, producing the
            # nonexistent path .../bin/bin/rm.exe.
            cmd = [os.path.join(cygbin, 'rm.exe'), '-rf']
        else:
            cmd = ['rm', '-rf']
        cmd.append(target_dir)
        returncode = subprocess.call(cmd)
        assert returncode == 0

    webports.log('Renaming toolchain "%s" -> "%s"' % (actual_dir, target_dir))
    os.rename(actual_dir, target_dir)

    if sys.platform in ['win32', 'cygwin']:
        time.sleep(2)  # Wait for windows.

    # Clean up: remove the sdk bz2.
    os.remove(bz2_filename)

    webports.log('Install complete.')