def Update(force=False):
  """Requests an update of the toolchain to the hashes pinned at this
  revision.

  The update writes a .json file describing the toolchain configuration,
  which is later read by |GetToolchainDir()| and passed to gyp.
  """
  if force not in (False, '--force'):
    sys.stderr.write('Unknown parameter "%s"\n' % force)
    return 1
  if force == '--force' or os.path.exists(json_data_file):
    force = True

  toolchain_enabled = bool(
      int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  host_is_windows = sys.platform in ('win32', 'cygwin')
  if toolchain_enabled and (host_is_windows or force):
    import find_depot_tools
    depot_tools = find_depot_tools.add_depot_tools_to_path()
    # get_toolchain_if_necessary.py reads GYP_MSVS_VERSION to decide where
    # to place the VS toolkit, so set it before invoking the script.
    os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
    helper = os.path.join(depot_tools, 'win_toolchain',
                          'get_toolchain_if_necessary.py')
    cmd = [sys.executable, helper, '--output-json', json_data_file]
    cmd.extend(_GetDesiredVsToolchainHashes())
    if force:
      cmd.append('--force')
    subprocess.check_call(cmd)
  return 0
def Update(force=False):
  """Requests an update of the toolchain to the hashes pinned at this
  revision.

  Writes a .json file with the toolchain configuration, later consumed by
  |GetToolchainDir()| and handed to gyp.
  """
  if force != False and force != '--force':
    sys.stderr.write('Unknown parameter "%s"\n' % force)
    return 1
  if force == '--force' or os.path.exists(json_data_file):
    force = True

  want_toolchain = bool(
      int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  on_windows_host = sys.platform in ('win32', 'cygwin')
  if want_toolchain and (on_windows_host or force):
    import find_depot_tools
    depot_tools = find_depot_tools.add_depot_tools_to_path()
    helper = os.path.join(depot_tools, 'win_toolchain',
                          'get_toolchain_if_necessary.py')
    cmd = [sys.executable, helper, '--output-json', json_data_file]
    cmd.extend(_GetDesiredVsToolchainHashes())
    if force:
      cmd.append('--force')
    subprocess.check_call(cmd)
  return 0
def Update(force=False):
  """Requests an update of the toolchain to the hashes pinned at this
  revision.

  The update emits a .json describing the toolchain configuration; it is
  consumed by |GetToolchainDir()| and passed on to gyp.
  """
  if force not in (False, "--force"):
    sys.stderr.write('Unknown parameter "%s"\n' % force)
    return 1
  if force == "--force" or os.path.exists(json_data_file):
    force = True

  enabled = bool(int(os.environ.get("DEPOT_TOOLS_WIN_TOOLCHAIN", "1")))
  windows_host = sys.platform in ("win32", "cygwin")
  if enabled and (windows_host or force):
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # get_toolchain_if_necessary.py places the VS toolkit according to
    # GYP_MSVS_VERSION, so it must be set first.
    os.environ["GYP_MSVS_VERSION"] = GetVisualStudioVersion()
    toolchain_script = os.path.join(depot_tools_path, "win_toolchain",
                                    "get_toolchain_if_necessary.py")
    command = [sys.executable, toolchain_script,
               "--output-json", json_data_file]
    command += _GetDesiredVsToolchainHashes()
    if force:
      command.append("--force")
    subprocess.check_call(command)
  return 0
def Update(force=False):
  """Requests an update of the toolchain to the specific hashes we have at
  this revision. The update outputs a .json of the various configuration
  information required to pass to gyp which we use in |GetToolchainDir()|.

  Returns:
    0 on success (including the no-op path), 1 on a bad |force| argument.
  """
  if force != False and force != '--force':
    print >>sys.stderr, 'Unknown parameter "%s"' % force
    return 1
  if force == '--force' or os.path.exists(json_data_file):
    force = True
  # NOTE(review): hardcoded to False, so the download branch below is never
  # taken and this function is effectively a no-op — presumably a deliberate
  # opt-out of the depot_tools toolchain in this fork; confirm before changing.
  depot_tools_win_toolchain = False
  if ((sys.platform in ('win32', 'cygwin') or force) and
      depot_tools_win_toolchain):
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # Necessary so that get_toolchain_if_necessary.py will put the VS toolkit
    # in the correct directory.
    os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
    get_toolchain_args = [
        sys.executable,
        os.path.join(depot_tools_path, 'win_toolchain',
                     'get_toolchain_if_necessary.py'),
        '--output-json', json_data_file,
    ] + _GetDesiredVsToolchainHashes()
    if force:
      get_toolchain_args.append('--force')
    subprocess.check_call(get_toolchain_args)
  return 0
def Update(force=False):
  """Requests an update of the toolchain to the specific hashes we have at
  this revision. The update outputs a .json of the various configuration
  information required to pass to gyp which we use in |GetToolchainDir()|.

  Returns:
    0 on success, 1 on a bad |force| argument or a missing ciopfs binary.
  """
  if force != False and force != '--force':
    print >> sys.stderr, 'Unknown parameter "%s"' % force
    return 1
  if force == '--force' or os.path.exists(json_data_file):
    force = True

  depot_tools_win_toolchain = \
      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  if ((sys.platform in ('win32', 'cygwin') or force) and
      depot_tools_win_toolchain):
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # On Linux, the file system is usually case-sensitive while the Windows
    # SDK only works on case-insensitive file systems. If it doesn't already
    # exist, set up a ciopfs fuse mount to put the SDK in a case-insensitive
    # part of the file system.
    toolchain_dir = os.path.join(depot_tools_path, 'win_toolchain', 'vs_files')
    if sys.platform.startswith(
        'linux') and not os.path.ismount(toolchain_dir):
      import distutils.spawn
      ciopfs = distutils.spawn.find_executable('ciopfs')
      if not ciopfs:
        # TODO(thakis): Offer to auto-install this? Or have a
        # build/install-build-deps-win.sh script and point to that? (Or run
        # that?)
        print >>sys.stderr, \
            "\n\tCouldn't set up case-insensitive mount for Windows SDK."
        print >>sys.stderr, \
            "\tPlease run `sudo apt-get install ciopfs` and try again.\n"
        return 1
      # Both the mount point and the backing directory must exist before
      # ciopfs can be started.
      if not os.path.isdir(toolchain_dir):
        os.mkdir(toolchain_dir)
      if not os.path.isdir(toolchain_dir + '.ciopfs'):
        os.mkdir(toolchain_dir + '.ciopfs')
      subprocess.check_call(
          [ciopfs, toolchain_dir + '.ciopfs', toolchain_dir])
    # Necessary so that get_toolchain_if_necessary.py will put the VS toolkit
    # in the correct directory.
    os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
    get_toolchain_args = [
        sys.executable,
        os.path.join(depot_tools_path, 'win_toolchain',
                     'get_toolchain_if_necessary.py'),
        '--output-json', json_data_file,
    ] + _GetDesiredVsToolchainHashes()
    if force:
      get_toolchain_args.append('--force')
    subprocess.check_call(get_toolchain_args)
  return 0
def get_gsutil():
  """Returns the path to the gsutil binary bundled with depot_tools."""
  paths = Paths()
  sys.path.insert(0, os.path.join(paths.src_root, "tools"))
  # pylint: disable=F0401
  import find_depot_tools
  depot_tools = find_depot_tools.add_depot_tools_to_path()
  return os.path.join(depot_tools, "third_party", "gsutil", "gsutil")
def Update(force=False):
  """Requests an update of the toolchain to the specific hashes we have at
  this revision. The update outputs a .json of the various configuration
  information required to pass to gyp which we use in |GetToolchainDir()|.

  Returns:
    0 on success, 1 on a bad |force| argument.
  """
  if force != False and force != '--force':
    print >>sys.stderr, 'Unknown parameter "%s"' % force
    return 1
  if force == '--force' or os.path.exists(json_data_file):
    force = True

  depot_tools_win_toolchain = \
      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  if ((sys.platform in ('win32', 'cygwin') or force) and
      depot_tools_win_toolchain):
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # On Linux, the file system is usually case-sensitive while the Windows
    # SDK only works on case-insensitive file systems. If it doesn't already
    # exist, set up a ciopfs fuse mount to put the SDK in a case-insensitive
    # part of the file system.
    toolchain_dir = os.path.join(depot_tools_path, 'win_toolchain', 'vs_files')
    # For testing this block, unmount existing mounts with
    # fusermount -u third_party/depot_tools/win_toolchain/vs_files
    if sys.platform.startswith('linux') and not os.path.ismount(toolchain_dir):
      import distutils.spawn
      ciopfs = distutils.spawn.find_executable('ciopfs')
      if not ciopfs:
        # ciopfs not found in PATH; try the one downloaded from the DEPS hook.
        # NOTE(review): script_dir is a module-level constant assumed to hold
        # this script's directory where the hook drops ciopfs — confirm.
        ciopfs = os.path.join(script_dir, 'ciopfs')
      # Both the mount point and the backing directory must exist before
      # ciopfs can be started.
      if not os.path.isdir(toolchain_dir):
        os.mkdir(toolchain_dir)
      if not os.path.isdir(toolchain_dir + '.ciopfs'):
        os.mkdir(toolchain_dir + '.ciopfs')
      # Without use_ino, clang's #pragma once and Wnonportable-include-path
      # both don't work right, see https://llvm.org/PR34931
      # use_ino doesn't slow down builds, so it seems there's no drawback to
      # just using it always.
      subprocess.check_call([
          ciopfs, '-o', 'use_ino', toolchain_dir + '.ciopfs', toolchain_dir])
    # Necessary so that get_toolchain_if_necessary.py will put the VS toolkit
    # in the correct directory.
    os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
    get_toolchain_args = [
        sys.executable,
        os.path.join(depot_tools_path, 'win_toolchain',
                     'get_toolchain_if_necessary.py'),
        '--output-json', json_data_file,
    ] + _GetDesiredVsToolchainHashes()
    if force:
      get_toolchain_args.append('--force')
    subprocess.check_call(get_toolchain_args)
  return 0
def Update(force=False, no_download=False):
  """Requests an update of the toolchain to the specific hashes we have at
  this revision. The update outputs a .json of the various configuration
  information required to pass to gyp which we use in |GetToolchainDir()|.

  If no_download is true then the toolchain will be configured if present but
  will not be downloaded.

  Returns:
    0 on success, 1 on a bad |force| argument.
  """
  if force != False and force != '--force':
    print('Unknown parameter "%s"' % force, file=sys.stderr)
    return 1
  if force == '--force' or os.path.exists(json_data_file):
    force = True

  depot_tools_win_toolchain = \
      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  if (_HostIsWindows() or force) and depot_tools_win_toolchain:
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # On Linux, the file system is usually case-sensitive while the Windows
    # SDK only works on case-insensitive file systems. If it doesn't already
    # exist, set up a ciopfs fuse mount to put the SDK in a case-insensitive
    # part of the file system.
    toolchain_dir = os.path.join(depot_tools_path, 'win_toolchain', 'vs_files')
    # For testing this block, unmount existing mounts with
    # fusermount -u third_party/depot_tools/win_toolchain/vs_files
    if sys.platform.startswith('linux') and not os.path.ismount(toolchain_dir):
      import distutils.spawn
      ciopfs = distutils.spawn.find_executable('ciopfs')
      if not ciopfs:
        # ciopfs not found in PATH; try the one downloaded from the DEPS hook.
        # NOTE(review): script_dir is a module-level constant assumed to hold
        # this script's directory where the hook drops ciopfs — confirm.
        ciopfs = os.path.join(script_dir, 'ciopfs')
      # Both the mount point and the backing directory must exist before
      # ciopfs can be started.
      if not os.path.isdir(toolchain_dir):
        os.mkdir(toolchain_dir)
      if not os.path.isdir(toolchain_dir + '.ciopfs'):
        os.mkdir(toolchain_dir + '.ciopfs')
      # Without use_ino, clang's #pragma once and Wnonportable-include-path
      # both don't work right, see https://llvm.org/PR34931
      # use_ino doesn't slow down builds, so it seems there's no drawback to
      # just using it always.
      subprocess.check_call([
          ciopfs, '-o', 'use_ino', toolchain_dir + '.ciopfs', toolchain_dir])

    get_toolchain_args = [
        sys.executable,
        os.path.join(depot_tools_path, 'win_toolchain',
                     'get_toolchain_if_necessary.py'),
        '--output-json', json_data_file,
    ] + _GetDesiredVsToolchainHashes()
    if force:
      get_toolchain_args.append('--force')
    if no_download:
      get_toolchain_args.append('--no-download')
    subprocess.check_call(get_toolchain_args)
  return 0
def AddDepotToolsToPath():
  """Locates a depot_tools checkout and adds it to the Python import path.

  Returns:
    The path to depot_tools.
  """
  # find_depot_tools lives in the build subdirectory of the root.
  build_dir = os.path.join(_ROOT_DIRECTORY, 'build')
  import_util.AddImportPath(build_dir)
  import find_depot_tools
  return find_depot_tools.add_depot_tools_to_path()
def download_file(basename, version, tools_directory):
  """Fetches |basename| of release |version| from the public mojo bucket
  into PREBUILT_FILE_PATH.
  """
  sys.path.insert(0, os.path.join(CURRENT_PATH, tools_directory))
  # pylint: disable=F0401
  import find_depot_tools
  depot_tools_path = find_depot_tools.add_depot_tools_to_path()
  remote_path = "/".join(["gs://mojo/file", version, basename])
  local_path = os.path.join(PREBUILT_FILE_PATH, basename)
  gs.download_from_public_bucket(remote_path, local_path, depot_tools_path)
def DownloadVsToolchain():
  """Download the Visual Studio toolchain on Windows.

  If on Windows, request that depot_tools install/update the automatic
  toolchain, and then use it (unless opted-out) and return a tuple containing
  the x64 and x86 paths. Otherwise return None.
  """
  vs2013_runtime_dll_dirs = None
  depot_tools_win_toolchain = \
      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # Temporary file for the toolchain script to write its JSON result into.
    temp_handle, data_file = tempfile.mkstemp(suffix='.json')
    os.close(temp_handle)
    get_toolchain_args = [
        sys.executable,
        os.path.join(depot_tools_path, 'win_toolchain',
                     'get_toolchain_if_necessary.py'),
        '--output-json', data_file,
    ] + GetDesiredVsToolchainHashes()
    subprocess.check_call(get_toolchain_args)
    with open(data_file, 'r') as tempf:
      toolchain_data = json.load(tempf)
    os.unlink(data_file)

    toolchain = toolchain_data['path']
    version = toolchain_data['version']
    # NOTE(review): computed but never used in this function — confirm whether
    # it can be deleted or was meant to feed a decision below.
    version_is_pro = version[-1] != 'e'
    win8sdk = toolchain_data['win8sdk']
    wdk = toolchain_data['wdk']
    # TODO(scottmg): The order unfortunately matters in these. They should be
    # split into separate keys for x86 and x64. (See CopyVsRuntimeDlls call
    # below). http://crbug.com/345992
    vs2013_runtime_dll_dirs = toolchain_data['runtime_dirs']

    os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
    os.environ['GYP_MSVS_VERSION'] = version
    # We need to make sure windows_sdk_path is set to the automated toolchain
    # values in GYP_DEFINES, but don't want to override any other values there.
    gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
    gyp_defines_dict['windows_sdk_path'] = win8sdk
    os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
        for k, v in gyp_defines_dict.iteritems())
    os.environ['WINDOWSSDKDIR'] = win8sdk
    os.environ['WDK_DIR'] = wdk
    # Include the VS runtime in the PATH in case it's not machine-installed.
    runtime_path = ';'.join(vs2013_runtime_dll_dirs)
    os.environ['PATH'] = runtime_path + ';' + os.environ['PATH']
  return vs2013_runtime_dll_dirs
def _get_gsutil_exe():
  """Get the path to gsutil executable."""
  config = Config(target_os=Config.OS_ANDROID, is_debug=False,
                  is_official_build=True)
  tools_dir = os.path.join(Paths(config).src_root, "tools")
  sys.path.insert(0, tools_dir)
  # pylint: disable=F0401
  import find_depot_tools
  depot_tools = find_depot_tools.add_depot_tools_to_path()
  return os.path.join(depot_tools, "third_party", "gsutil", "gsutil")
def roll(target_version):
  """Rolls the network service mojoms to |target_version|.

  Args:
    target_version: a Chromium origin/master commit hash.

  Returns:
    0 on success, 1 on failure.
  """
  find_depot_tools_path = os.path.join(mojo_root_dir, "tools")
  sys.path.insert(0, find_depot_tools_path)
  # pylint: disable=F0401
  import find_depot_tools
  depot_tools_path = find_depot_tools.add_depot_tools_to_path()

  try:
    chromium_rev = chromium_rev_number(target_version)
  except urllib2.HTTPError:
    print ("Failed to identify a Chromium revision associated with %s. "
           "Ensure that target_version is a Chromium origin/master "
           "commit.") % (target_version)
    return 1

  mojoms_gs_path = "gs://mojo/network_service/%s/mojoms.zip" % (target_version,)
  network_service_path = os.path.join(
      mojo_root_dir, "mojo", "services", "network")
  mojoms_path = os.path.join(network_service_path, "public", "interfaces")
  mojo_public_tools_path = os.path.join(
      mojo_root_dir, "mojo", "public", "tools")
  version_path = os.path.join(mojo_public_tools_path, "NETWORK_SERVICE_VERSION")

  try:
    with tempfile.NamedTemporaryFile() as temp_zip_file:
      gs.download_from_public_bucket(mojoms_gs_path, temp_zip_file.name,
                                     depot_tools_path)
      try:
        # Remove the checked-in mojoms before unpacking the new set.
        system(["git", "rm", "-r", mojoms_path], cwd=mojo_root_dir)
      except subprocess.CalledProcessError:
        print ("Could not remove %s. "
               "Ensure your local tree is in a clean state." % mojoms_path)
        return 1
      with zipfile.ZipFile(temp_zip_file.name) as z:
        z.extractall(mojoms_path)
  # Deliberate best-effort catch-all around the download/extract step.
  # pylint: disable=C0302,bare-except
  except:
    print ("Failed to download the mojom files associated with %s. Ensure that "
           "the corresponding network service artifacts were uploaded to "
           "Google Storage.") % (target_version)
    return 1

  # Stamp the rolled version and stage everything for the commit.
  with open(version_path, 'w') as stamp_file:
    stamp_file.write(target_version)
  system(["git", "add", "public"], cwd=network_service_path)
  system(["git", "add", "NETWORK_SERVICE_VERSION"], cwd=mojo_public_tools_path)
  commit("Roll the network service to https://crrev.com/" + chromium_rev,
         cwd=mojo_root_dir)
  return 0
def upload(config, source, dest, dry_run):
  """Copies |source| to |dest| with gsutil; in dry_run mode only prints the
  command that would have been run.
  """
  paths = Paths(config)
  sys.path.insert(0, os.path.join(paths.src_root, "tools"))
  # pylint: disable=F0401
  import find_depot_tools
  depot_tools = find_depot_tools.add_depot_tools_to_path()
  gsutil = os.path.join(depot_tools, "third_party", "gsutil", "gsutil")
  command = [gsutil, "cp", source, dest]
  if dry_run:
    sys.stdout.write(str(command) + "\n")
  else:
    subprocess.check_call(command)
def GetGsutilPath():
  """Ensures depot_tools is importable and returns the path to gsutil.py.

  Exits the process when depot_tools cannot be located.
  """
  if 'find_depot_tools' not in sys.modules:
    sys.path.insert(0, os.path.join(CHROMIUM_DIR, 'build'))
    global find_depot_tools
    import find_depot_tools
  depot_path = find_depot_tools.add_depot_tools_to_path()
  if depot_path is None:
    print ('depot_tools are not found in PATH. '
           'Follow the instructions in this document '
           'http://dev.chromium.org/developers/how-tos/install-depot-tools'
           ' to install depot_tools and then try again.')
    sys.exit(1)
  return os.path.join(depot_path, 'gsutil.py')
def upload(config, source, dest, dry_run, gzip=False):
  """Uploads |source| to |dest| via gsutil, optionally asking gsutil to
  gzip-encode files with |source|'s extension.
  """
  paths = Paths(config)
  sys.path.insert(0, os.path.join(paths.src_root, "tools"))
  # pylint: disable=F0401
  import find_depot_tools
  depot_tools = find_depot_tools.add_depot_tools_to_path()
  gsutil = os.path.join(depot_tools, "third_party", "gsutil", "gsutil")
  command = [gsutil, "cp"]
  if gzip and "." in source:
    # gsutil -z compresses files whose extension matches.
    command.extend(["-z", source.split(".")[-1]])
  command.extend([source, dest])
  check_call(command, dry_run)
def AddDepotToolsToPath():
  """Locates a depot_tools checkout and adds it to the Python import path.

  Returns:
    The path to depot_tools.
  """
  # Fallback copy: the one vendored under import/, usable even when this
  # checkout has not been synced yet.
  fallback_dir = os.path.join(_CURRENT_DIRECTORY, 'import')
  AddImportPath(fallback_dir)
  # Preferred copy: the one synced into build/, likely more up-to-date.
  # Added afterwards so it takes priority over the fallback above.
  if _InSourceTree():
    synced_dir = os.path.join(_ROOT_DIRECTORY, 'build')
    AddImportPath(synced_dir)
  # Import whichever copy wins the path resolution.
  import find_depot_tools
  return find_depot_tools.add_depot_tools_to_path()
def download_version_for_platform(version, platform, tools_directory):
  """Downloads and extracts the prebuilt shell binary for |platform| at
  |version| into PREBUILT_FILE_PATH, preserving its executable bit.
  """
  find_depot_tools_path = os.path.join(CURRENT_PATH, tools_directory)
  sys.path.insert(0, find_depot_tools_path)
  # pylint: disable=F0401
  import find_depot_tools
  basename = platform + ".zip"
  gs_path = "gs://mojo/shell/" + version + "/" + basename
  depot_tools_path = find_depot_tools.add_depot_tools_to_path()
  gsutil_exe = os.path.join(depot_tools_path, "third_party", "gsutil",
                            "gsutil")
  with tempfile.NamedTemporaryFile() as temp_zip_file:
    # We're downloading from a public bucket which does not need authentication,
    # but the user might have busted credential files somewhere such as ~/.boto
    # that the gsutil script will try (and fail) to use. Setting these
    # environment variables convinces gsutil not to attempt to use these, but
    # also generates a useless warning about failing to load the file. We want
    # to discard this warning but still preserve all output in the case of an
    # actual failure. So, we run the script and capture all output and then
    # throw the output away if the script succeeds (return code 0).
    env = os.environ.copy()
    env["AWS_CREDENTIAL_FILE"] = ""
    env["BOTO_CONFIG"] = ""
    try:
      subprocess.check_output(
          [gsutil_exe, "--bypass_prodaccess", "cp", gs_path,
           temp_zip_file.name],
          stderr=subprocess.STDOUT,
          env=env)
    except subprocess.CalledProcessError as e:
      print e.output
      sys.exit(1)
    binary_name = BINARY_FOR_PLATFORM[platform]
    with zipfile.ZipFile(temp_zip_file.name) as z:
      zi = z.getinfo(binary_name)
      # The upper 16 bits of external_attr hold the Unix permission bits.
      mode = zi.external_attr >> 16
      z.extract(zi, PREBUILT_FILE_PATH)
      os.chmod(os.path.join(PREBUILT_FILE_PATH, binary_name), mode)
def download_version_for_platform(version, platform, tools_directory):
  """Downloads the prebuilt shell binary for |platform| at |version| and
  extracts it (with its executable bit restored) under PREBUILT_FILE_PATH.
  """
  sys.path.insert(0, os.path.join(CURRENT_PATH, tools_directory))
  # pylint: disable=F0401
  import find_depot_tools
  depot_tools_path = find_depot_tools.add_depot_tools_to_path()
  archive_name = platform + ".zip"
  remote_path = "gs://mojo/shell/" + version + "/" + archive_name
  with tempfile.NamedTemporaryFile() as temp_zip_file:
    gs.download_from_public_bucket(remote_path, temp_zip_file.name,
                                   depot_tools_path)
    binary_name = BINARY_FOR_PLATFORM[platform]
    output_dir = os.path.join(PREBUILT_FILE_PATH, platform)
    with zipfile.ZipFile(temp_zip_file.name) as archive:
      entry = archive.getinfo(binary_name)
      # The upper 16 bits of external_attr carry the Unix mode bits.
      unix_mode = entry.external_attr >> 16
      archive.extract(entry, output_dir)
      os.chmod(os.path.join(output_dir, binary_name), unix_mode)
def download_app_for_platform(app, version, platform, tools_directory):
  """Fetches the prebuilt |app|.mojo binary for |platform| at |version| and
  extracts it (with its executable bit restored) under prebuilt/.
  """
  sys.path.insert(0, os.path.join(_CURRENT_PATH, tools_directory))
  # pylint: disable=F0401
  import find_depot_tools
  depot_tools_path = find_depot_tools.add_depot_tools_to_path()
  binary_name = app + ".mojo"
  remote_path = "gs://mojo/%s/%s/%s/%s.zip" % (app, version, platform,
                                               binary_name)
  destination = os.path.join(script_dir,
                             "prebuilt/%s/%s" % (app, platform))
  with tempfile.NamedTemporaryFile() as temp_zip_file:
    gs.download_from_public_bucket(remote_path, temp_zip_file.name,
                                   depot_tools_path)
    with zipfile.ZipFile(temp_zip_file.name) as archive:
      entry = archive.getinfo(binary_name)
      # The upper 16 bits of external_attr carry the Unix mode bits.
      unix_mode = entry.external_attr >> 16
      archive.extract(entry, destination)
      os.chmod(os.path.join(destination, binary_name), unix_mode)
def Update():
  """Requests an update of the toolchain to the hashes pinned at this
  revision.

  Writes a .json describing the toolchain configuration, consumed later by
  |GetToolchainDir()| and passed to gyp.
  """
  use_depot_toolchain = bool(
      int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  if sys.platform in ('win32', 'cygwin') and use_depot_toolchain:
    import find_depot_tools
    depot_tools = find_depot_tools.add_depot_tools_to_path()
    helper = os.path.join(depot_tools, 'win_toolchain',
                          'get_toolchain_if_necessary.py')
    cmd = [sys.executable, helper, '--output-json', json_data_file]
    cmd.extend(_GetDesiredVsToolchainHashes())
    subprocess.check_call(cmd)
  return 0
def download_from_public_bucket(gs_path, output_path):
  """Copies |gs_path| to |output_path| using depot_tools' bundled gsutil.

  Exits the process with status 1 (after printing gsutil's output) when the
  copy fails.
  """
  depot_tools_path = find_depot_tools.add_depot_tools_to_path()
  gsutil_exe = os.path.join(depot_tools_path, "third_party", "gsutil",
                            "gsutil")
  # We're downloading from a public bucket which does not need authentication,
  # but the user might have busted credential files somewhere such as ~/.boto
  # that the gsutil script will try (and fail) to use. Setting these
  # environment variables convinces gsutil not to attempt to use these, but
  # also generates a useless warning about failing to load the file. We want
  # to discard this warning but still preserve all output in the case of an
  # actual failure. So, we run the script and capture all output and then
  # throw the output away if the script succeeds (return code 0).
  env = os.environ.copy()
  env["AWS_CREDENTIAL_FILE"] = ""
  env["BOTO_CONFIG"] = ""
  try:
    subprocess.check_output(
        [gsutil_exe, "--bypass_prodaccess", "cp", gs_path, output_path],
        stderr=subprocess.STDOUT,
        env=env)
  except subprocess.CalledProcessError as e:
    print e.output
    sys.exit(1)
import argparse
import collections
import logging
import os
import re
import subprocess
import sys
import time

# Absolute locations of this script and the source root above it.
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SRC_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))

# depot_tools must be on sys.path before the imports below can resolve.
sys.path.insert(0, os.path.join(SRC_DIR, 'build'))
import find_depot_tools
find_depot_tools.add_depot_tools_to_path()
import rietveld
import roll_dep_svn
from gclient import GClientKeywords
from third_party import upload

# Avoid depot_tools/third_party/upload.py print verbose messages.
upload.verbosity = 0  # Errors only.

CHROMIUM_GIT_URL = 'https://chromium.googlesource.com/chromium/src.git'
# Extracts the numeric commit position, e.g. from
# 'Cr-Original-Commit-Position: refs/heads/master#12345'.
COMMIT_POSITION_RE = re.compile('^Cr-Original-Commit-Position: .*#([0-9]+).*$')
# Extracts the issue number and URL from 'Issue number: NNN (url)'.
CL_ISSUE_RE = re.compile('^Issue number: ([0-9]+) \((.*)\)$')
# Splits a Rietveld URL into host and trailing path component.
RIETVELD_URL_RE = re.compile('^https?://(.*)/(.*)')
ROLL_BRANCH_NAME = 'special_webrtc_roll_branch'
TRYJOB_STATUS_SLEEP_SECONDS = 30
import os import shutil import subprocess import sys import tarfile import tempfile # Path constants. (All of these should be absolute paths.) THIS_DIR = os.path.abspath(os.path.dirname(__file__)) MOJO_DIR = os.path.abspath(os.path.join(THIS_DIR, '..', '..')) INSTALL_DIR = os.path.join(MOJO_DIR, 'third_party', 'rust') sys.path.insert(0, os.path.join(MOJO_DIR, 'tools')) import find_depot_tools DEPOT_PATH = find_depot_tools.add_depot_tools_to_path() GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py') def RunCommand(command, env=None): """Run command and return success (True) or failure.""" print 'Running %s' % (str(command)) if subprocess.call(command, shell=False, env=env) == 0: return True print 'Failed.' return False def HashFile(filename): """Computes SHA1 hash of a given file by chunking it to avoid loading
import shutil import subprocess import sys import tarfile import optparse # Path constants. (All of these should be absolute paths.) THIS_DIR = os.path.abspath(os.path.dirname(__file__)) MOJO_DIR = os.path.abspath(os.path.join(THIS_DIR, '..', '..')) # Should be the same as in download.py. INSTALL_DIR = os.path.join(MOJO_DIR, 'third_party', 'android_tools') sys.path.insert(0, os.path.join(MOJO_DIR, 'tools')) import find_depot_tools DEPOT_PATH = find_depot_tools.add_depot_tools_to_path() GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py') def RunCommand(command, env=None): """Run command and return success (True) or failure.""" print 'Running %s' % (str(command)) if subprocess.call(command, shell=False, env=env) == 0: return True print 'Failed.' return False def VersionStampName(tools_name): if sys.platform.startswith('linux'): return 'VERSION_LINUX_' + tools_name.upper() elif sys.platform == 'darwin':
def download(tools_directory):
  """Downloads the prebuilt mojo_shell for the pinned version, if needed.

  A VERSION stamp file next to the binary records the installed version; when
  it matches ../VERSION the download is skipped.

  Returns:
    0 on success or when the binary is already up to date (exits the process
    with status 1 on a failed gsutil copy).
  """
  current_path = os.path.dirname(os.path.realpath(__file__))
  find_depot_tools_path = os.path.join(current_path, tools_directory)
  sys.path.insert(0, find_depot_tools_path)
  # pylint: disable=F0401
  import find_depot_tools
  prebuilt_file_path = os.path.join(current_path, "prebuilt")
  stamp_path = os.path.join(prebuilt_file_path, "VERSION")
  depot_tools_path = find_depot_tools.add_depot_tools_to_path()
  gsutil_exe = os.path.join(depot_tools_path, "third_party", "gsutil",
                            "gsutil")

  version_path = os.path.join(current_path, "../VERSION")
  with open(version_path) as version_file:
    version = version_file.read().strip()

  try:
    with open(stamp_path) as stamp_file:
      current_version = stamp_file.read().strip()
      if current_version == version:
        return 0  # Already have the right version.
  except IOError:
    pass  # If the stamp file does not exist we need to download a new binary.

  platform = "linux-x64"  # TODO: configurate
  basename = platform + ".zip"
  gs_path = "gs://mojo/shell/" + version + "/" + basename

  with tempfile.NamedTemporaryFile() as temp_zip_file:
    # We're downloading from a public bucket which does not need authentication,
    # but the user might have busted credential files somewhere such as ~/.boto
    # that the gsutil script will try (and fail) to use. Setting these
    # environment variables convinces gsutil not to attempt to use these, but
    # also generates a useless warning about failing to load the file. We want
    # to discard this warning but still preserve all output in the case of an
    # actual failure. So, we run the script and capture all output and then
    # throw the output away if the script succeeds (return code 0).
    env = os.environ.copy()
    env["AWS_CREDENTIAL_FILE"] = ""
    env["BOTO_CONFIG"] = ""
    try:
      subprocess.check_output(
          [gsutil_exe, "--bypass_prodaccess", "cp", gs_path,
           temp_zip_file.name],
          stderr=subprocess.STDOUT,
          env=env)
    except subprocess.CalledProcessError as e:
      print e.output
      sys.exit(1)
    with zipfile.ZipFile(temp_zip_file.name) as z:
      zi = z.getinfo("mojo_shell")
      # The upper 16 bits of external_attr hold the Unix permission bits.
      mode = zi.external_attr >> 16
      z.extract(zi, prebuilt_file_path)
      os.chmod(os.path.join(prebuilt_file_path, "mojo_shell"), mode)

  # Record the freshly-installed version so the next run can skip the fetch.
  with open(stamp_path, 'w') as stamp_file:
    stamp_file.write(version)
  return 0
# Copyright 2019 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Finds depot tools path and prints it with forward slashes""" import sys from find_depot_tools import add_depot_tools_to_path DEPOT_TOOLS_PATH = add_depot_tools_to_path() def main(): if DEPOT_TOOLS_PATH is None: return 1 print DEPOT_TOOLS_PATH.replace('\\', '/') return 0 if __name__ == '__main__': sys.exit(main())
def AddDepotToolsToPath():
  """Makes find_depot_tools importable and puts depot_tools on the path."""
  build_dir = os.path.join(CHECKOUT_SRC_DIR, 'build')
  sys.path.append(build_dir)
  import find_depot_tools
  find_depot_tools.add_depot_tools_to_path()
def main():
  """Drives a gyp generation run: assembles arguments from argv and the
  environment, invokes gyp, and (for non-analyzer runs) copies the VS
  runtime DLLs into the output directory. Exits with gyp's return code.
  """
  # Disabling garbage collection saves about 1 second out of 16 on a Linux
  # z620 workstation. Since this is a short-lived process it's not a problem to
  # leak a few cyclyc references in order to spare the CPU cycles for
  # scanning the heap.
  gc.disable()

  args = sys.argv[1:]

  # Analyzer mode: the next two positional args are consumed as the analyzer
  # config and output paths.
  use_analyzer = len(args) and args[0] == '--analyzer'
  if use_analyzer:
    args.pop(0)
    os.environ['GYP_GENERATORS'] = 'analyzer'
    args.append('-Gconfig_path=' + args.pop(0))
    args.append('-Ganalyzer_output_path=' + args.pop(0))

  if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
    print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.'
    sys.exit(0)

  # Use the Psyco JIT if available.
  if psyco:
    psyco.profile()
    print "Enabled Psyco JIT."

  # Fall back on hermetic python if we happen to get run under cygwin.
  # TODO(bradnelson): take this out once this issue is fixed:
  #    http://code.google.com/p/gyp/issues/detail?id=177
  if sys.platform == 'cygwin':
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    python_dir = sorted(glob.glob(os.path.join(depot_tools_path,
                                               'python2*_bin')))[-1]
    env = os.environ.copy()
    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
    cmd = [os.path.join(python_dir, 'python.exe')] + sys.argv
    sys.exit(subprocess.call(cmd, env=env))

  # This could give false positives since it doesn't actually do real option
  # parsing. Oh well.
  gyp_file_specified = any(arg.endswith('.gyp') for arg in args)

  gyp_environment.SetEnvironment()

  # If we didn't get a file, check an env var, and then fall back to
  # assuming 'all.gyp' from the same directory as the script.
  if not gyp_file_specified:
    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
    if gyp_file:
      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
      # path separators even on Windows due to the use of shlex.split().
      args.extend(shlex.split(gyp_file))
    else:
      args.append(os.path.join(script_dir, 'all.gyp'))

  supplemental_includes = GetSupplementalFiles()
  gyp_vars_dict = GetGypVars(supplemental_includes)

  # There shouldn't be a circular dependency relationship between .gyp files,
  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
  # currently exist. The check for circular dependencies is currently
  # bypassed on other platforms, but is left enabled on iOS, where a violation
  # of the rule causes Xcode to misbehave badly.
  # TODO(mark): Find and kill remaining circular dependencies, and remove this
  # option. http://crbug.com/35878.
  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
  # list.
  if gyp_vars_dict.get('OS') != 'ios':
    args.append('--no-circular-check')

  # libtool on Mac warns about duplicate basenames in static libraries, so
  # they're disallowed in general by gyp. We are lax on this point, so disable
  # this check other than on Mac. GN does not use static libraries as heavily,
  # so over time this restriction will mostly go away anyway, even on Mac.
  # https://code.google.com/p/gyp/issues/detail?id=384
  if sys.platform != 'darwin':
    args.append('--no-duplicate-basename-check')

  # We explicitly don't support the make gyp generator (crbug.com/348686). Be
  # nice and fail here, rather than choking in gyp.
  if re.search(r'(^|,|\s)make($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
    print 'Error: make gyp generator not supported (check GYP_GENERATORS).'
    sys.exit(1)

  # We explicitly don't support the native msvs gyp generator. Be nice and
  # fail here, rather than generating broken projects.
  if re.search(r'(^|,|\s)msvs($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
    print 'Error: msvs gyp generator not supported (check GYP_GENERATORS).'
    print 'Did you mean to use the `msvs-ninja` generator?'
    sys.exit(1)

  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
  # to enfore syntax checking.
  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
  if syntax_check and int(syntax_check):
    args.append('--check')

  # TODO(dmikurube): Remove these checks and messages after a while.
  if ('linux_use_tcmalloc' in gyp_vars_dict or
      'android_use_tcmalloc' in gyp_vars_dict):
    print '*****************************************************************'
    print '"linux_use_tcmalloc" and "android_use_tcmalloc" are deprecated!'
    print '-----------------------------------------------------------------'
    print 'You specify "linux_use_tcmalloc" or "android_use_tcmalloc" in'
    print 'your GYP_DEFINES. Please switch them into "use_allocator" now.'
    print 'See http://crbug.com/345554 for the details.'
    print '*****************************************************************'

  # Automatically turn on crosscompile support for platforms that need it.
  # (The Chrome OS build sets CC_host / CC_target which implicitly enables
  # this mode.)
  if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
          gyp_vars_dict.get('OS') in ['android', 'ios'],
          'GYP_CROSSCOMPILE' not in os.environ)):
    os.environ['GYP_CROSSCOMPILE'] = '1'
  if gyp_vars_dict.get('OS') == 'android':
    args.append('--check')

  args.extend(
      ['-I' + i for i in
       additional_include_files(supplemental_includes, args)])

  args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])

  if not use_analyzer:
    print 'Updating projects from gyp files...'
    sys.stdout.flush()

  # Off we go...
  gyp_rc = gyp.main(args)

  if not use_analyzer:
    vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
    if vs2013_runtime_dll_dirs:
      x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
      vs_toolchain.CopyVsRuntimeDlls(
          os.path.join(chrome_src, GetOutputDirectory()),
          (x86_runtime, x64_runtime))

  sys.exit(gyp_rc)
def roll(target_version, custom_build): find_depot_tools_path = os.path.join(mojo_root_dir, "tools") sys.path.insert(0, find_depot_tools_path) # pylint: disable=F0401 import find_depot_tools depot_tools_path = find_depot_tools.add_depot_tools_to_path() if custom_build: match = re.search( "^custom_build_base_([^_]+)_issue_([0-9]+)_patchset_([0-9]+)$", target_version) if not match: print "Failed to parse the version name." return 1 chromium_commit_hash = match.group(1) rietveld_issue = match.group(2) rietveld_patchset = match.group(3) else: chromium_commit_hash = target_version try: chromium_rev = chromium_rev_number(chromium_commit_hash) except urllib2.HTTPError: print ("Failed to identify a Chromium revision associated with %s. " "Ensure that it is a Chromium origin/master " "commit.") % (chromium_commit_hash) return 1 mojoms_gs_path = "gs://mojo/network_service/%s/mojoms.zip" % (target_version,) network_service_path = os.path.join( mojo_root_dir, "mojo", "services", "network") mojoms_path = os.path.join(network_service_path, "public", "interfaces") mojo_public_tools_path = os.path.join( mojo_root_dir, "mojo", "public", "tools") version_path = os.path.join(mojo_public_tools_path, "NETWORK_SERVICE_VERSION") try: with tempfile.NamedTemporaryFile() as temp_zip_file: gs.download_from_public_bucket(mojoms_gs_path, temp_zip_file.name, depot_tools_path) try: system(["git", "rm", "-r", mojoms_path], cwd=mojo_root_dir) except subprocess.CalledProcessError: print ("Could not remove %s. " "Ensure your local tree is in a clean state." % mojoms_path) return 1 with zipfile.ZipFile(temp_zip_file.name) as z: z.extractall(mojoms_path) # pylint: disable=C0302,bare-except except: print ("Failed to download the mojom files associated with %s. 
Ensure that " "the corresponding network service artifacts were uploaded to " "Google Storage.") % (target_version) return 1 with open(version_path, 'w') as stamp_file: stamp_file.write(target_version) system(["git", "add", "public"], cwd=network_service_path) system(["git", "add", "NETWORK_SERVICE_VERSION"], cwd=mojo_public_tools_path) if custom_build: commit_message = ("Roll the network service to a custom build created from " "https://crrev.com/%s/#ps%s") % (rietveld_issue, rietveld_patchset) else: commit_message = ("Roll the network service to " "https://crrev.com/%s") % chromium_rev commit(commit_message, cwd=mojo_root_dir) return 0
def main():
  """Entry point: sets up the environment and runs gyp over Chromium's files.

  Parses a small amount of argv by hand (no real option parsing), applies a
  number of environment-variable-driven switches, then invokes gyp.main() and
  exits with its return code.
  """
  # Disabling garbage collection saves about 1 second out of 16 on a Linux
  # z620 workstation. Since this is a short-lived process it's not a problem to
  # leak a few cyclic references in order to spare the CPU cycles for
  # scanning the heap.
  gc.disable()

  args = sys.argv[1:]

  # --analyzer mode: consume the flag plus its two positional arguments
  # (config path and output path) and force the 'analyzer' generator.
  use_analyzer = len(args) and args[0] == '--analyzer'
  if use_analyzer:
    args.pop(0)
    os.environ['GYP_GENERATORS'] = 'analyzer'
    args.append('-Gconfig_path=' + args.pop(0))
    args.append('-Ganalyzer_output_path=' + args.pop(0))

  # Escape hatch: do nothing at all when GYP_CHROMIUM_NO_ACTION is set.
  if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
    print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.'
    sys.exit(0)

  # Use the Psyco JIT if available.
  if psyco:
    psyco.profile()
    print "Enabled Psyco JIT."

  # Fall back on hermetic python if we happen to get run under cygwin.
  # TODO(bradnelson): take this out once this issue is fixed:
  #    http://code.google.com/p/gyp/issues/detail?id=177
  if sys.platform == 'cygwin':
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # Pick the newest hermetic python2 shipped with depot_tools and re-exec
    # this script under it.
    python_dir = sorted(glob.glob(os.path.join(depot_tools_path,
                                               'python2*_bin')))[-1]
    env = os.environ.copy()
    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
    cmd = [os.path.join(python_dir, 'python.exe')] + sys.argv
    sys.exit(subprocess.call(cmd, env=env))

  # This could give false positives since it doesn't actually do real option
  # parsing. Oh well.
  gyp_file_specified = any(arg.endswith('.gyp') for arg in args)

  gyp_environment.SetEnvironment()

  # If we didn't get a file, check an env var, and then fall back to
  # assuming 'all.gyp' from the same directory as the script.
  if not gyp_file_specified:
    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
    if gyp_file:
      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
      # path separators even on Windows due to the use of shlex.split().
      args.extend(shlex.split(gyp_file))
    else:
      args.append(os.path.join(script_dir, 'all.gyp'))

  supplemental_includes = GetSupplementalFiles()
  gyp_vars_dict = GetGypVars(supplemental_includes)

  # There shouldn't be a circular dependency relationship between .gyp files,
  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
  # currently exist. The check for circular dependencies is currently
  # bypassed on other platforms, but is left enabled on iOS, where a violation
  # of the rule causes Xcode to misbehave badly.
  # TODO(mark): Find and kill remaining circular dependencies, and remove this
  # option. http://crbug.com/35878.
  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
  # list.
  if gyp_vars_dict.get('OS') != 'ios':
    args.append('--no-circular-check')

  # libtool on Mac warns about duplicate basenames in static libraries, so
  # they're disallowed in general by gyp. We are lax on this point, so disable
  # this check other than on Mac. GN does not use static libraries as heavily,
  # so over time this restriction will mostly go away anyway, even on Mac.
  # https://code.google.com/p/gyp/issues/detail?id=384
  if sys.platform != 'darwin':
    args.append('--no-duplicate-basename-check')

  # We explicitly don't support the make gyp generator (crbug.com/348686). Be
  # nice and fail here, rather than choking in gyp.
  if re.search(r'(^|,|\s)make($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
    print 'Error: make gyp generator not supported (check GYP_GENERATORS).'
    sys.exit(1)

  # We explicitly don't support the native msvs gyp generator. Be nice and
  # fail here, rather than generating broken projects.
  if re.search(r'(^|,|\s)msvs($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
    print 'Error: msvs gyp generator not supported (check GYP_GENERATORS).'
    print 'Did you mean to use the `msvs-ninja` generator?'
    sys.exit(1)

  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
  # to enforce syntax checking.
  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
  if syntax_check and int(syntax_check):
    args.append('--check')

  # TODO(dmikurube): Remove these checks and messages after a while.
  if ('linux_use_tcmalloc' in gyp_vars_dict or
      'android_use_tcmalloc' in gyp_vars_dict):
    print '*****************************************************************'
    print '"linux_use_tcmalloc" and "android_use_tcmalloc" are deprecated!'
    print '-----------------------------------------------------------------'
    print 'You specify "linux_use_tcmalloc" or "android_use_tcmalloc" in'
    print 'your GYP_DEFINES. Please switch them into "use_allocator" now.'
    print 'See http://crbug.com/345554 for the details.'
    print '*****************************************************************'

  # Automatically turn on crosscompile support for platforms that need it.
  # (The Chrome OS build sets CC_host / CC_target which implicitly enables
  # this mode.)
  if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
          gyp_vars_dict.get('OS') in ['android', 'ios'],
          'GYP_CROSSCOMPILE' not in os.environ)):
    os.environ['GYP_CROSSCOMPILE'] = '1'
  if gyp_vars_dict.get('OS') == 'android':
    args.append('--check')

  args.extend(
      ['-I' + i for i in additional_include_files(supplemental_includes, args)])

  args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])

  if not use_analyzer:
    print 'Updating projects from gyp files...'
    sys.stdout.flush()

  # Off we go...
  gyp_rc = gyp.main(args)

  if not use_analyzer:
    # After generation, copy the VS runtime DLLs next to the build outputs so
    # locally-built binaries can run on machines without VS installed.
    vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
    if vs2013_runtime_dll_dirs:
      x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
      vs_toolchain.CopyVsRuntimeDlls(
          os.path.join(chrome_src, GetOutputDirectory()),
          (x86_runtime, x64_runtime))

  sys.exit(gyp_rc)