def _fetch_and_cache_package(self):
  """Downloads an update file to a temp directory, and manages replacing
  the final directory with the stage directory contents."""
  try:
    # Clean out the cache unpack location.
    logging.info('%s: Cleaning %s', self._name, self._unpacked_cache_path)
    file_util.rmtree(self._unpacked_cache_path, ignore_errors=True)
    file_util.makedirs_safely(self._unpacked_cache_path)

    # Set up the temporary location for the download.
    tmp_dir = tempfile.mkdtemp()
    try:
      downloaded_package_path = os.path.join(tmp_dir, self._name)

      # Download the package.
      logging.info('%s: Downloading %s', self._name,
                   downloaded_package_path)
      self._download_package_with_retries(self._url, downloaded_package_path)

      # Unpack it.
      logging.info('%s: Unpacking %s to %s', self._name,
                   downloaded_package_path, self._unpacked_cache_path)
      self._unpack_method(downloaded_package_path, self._unpacked_cache_path)
    finally:
      file_util.rmtree(tmp_dir, ignore_errors=True)
  except:
    file_util.rmtree(self._unpacked_cache_path, ignore_errors=True)
    raise
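# _download_package_with_retries is called above but not shown in this
# section. The following is a minimal, hypothetical sketch of it, assuming a
# fixed-delay retry loop around urllib.urlretrieve; the retry policy and the
# keyword parameters are assumptions, not the actual implementation.
# Requires: import logging, time, urllib.
def _download_package_with_retries(self, url, destination_path,
                                   max_tries=3, retry_delay_seconds=2):
  for attempt in xrange(max_tries):
    try:
      # urlretrieve writes the response body directly to destination_path.
      urllib.urlretrieve(url, destination_path)
      return
    except IOError:
      logging.warning('%s: Download attempt %d of %d failed', self._name,
                      attempt + 1, max_tries)
      if attempt + 1 == max_tries:
        raise
      time.sleep(retry_delay_seconds)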
def _configure_build_options():
  if OPTIONS.parse(sys.argv[1:]):
    print 'Args error'
    return False

  # Write out the configure file early so all other scripts can use
  # the options passed into configure (e.g., sync_chrome).
  OPTIONS.write_configure_file()

  # Target directory is replaced. If an old directory, out/target/<target>,
  # exists, move it to the new place, out/target/<target>_<opt>.
  old_path = os.path.join('out/target', OPTIONS.target())
  new_path = build_common.get_build_dir()
  if os.path.lexists(old_path):
    if os.path.isdir(old_path) and not os.path.islink(old_path):
      if os.path.exists(new_path):
        file_util.rmtree(old_path)
      else:
        shutil.move(old_path, new_path)
    else:
      os.remove(old_path)

  # Create an empty directory as a placeholder if necessary.
  file_util.makedirs_safely(new_path)

  # Create a symlink from the new place to the old place to stay as
  # compatible as possible.
  os.symlink(os.path.basename(new_path), old_path)

  # Write out the configure file to a target specific location, which can be
  # queried later to find out what the config for a target was.
  OPTIONS.write_configure_file(
      build_common.get_target_configure_options_file())

  OPTIONS.set_up_goma()
  return True
@contextlib.contextmanager
def _persisted_cache_history(name, base_path, history_size):
  """Persists the cache history using a context manager."""
  # Ensure we have a cache directory.
  file_util.makedirs_safely(base_path)
  cache_contents_path = os.path.join(base_path, 'contents.json')

  # Load in the existing cache content history.
  cache_contents = {}
  if os.path.exists(cache_contents_path):
    with open(cache_contents_path) as cache_contents_file:
      try:
        cache_contents = json.load(cache_contents_file)
      except ValueError:
        pass

  # Get the history for this particular download, and yield it for use by
  # the caller.
  history = CacheHistory(
      name, base_path, history_size,
      cache_contents.setdefault('cache', {}).setdefault(name, []))

  # If the user of this context manager raises an exception, this yield
  # effectively reraises it, and the rest of this function is not executed
  # since we do not have anything like a try...finally block here.
  yield history

  history.clean_old()

  # Save out the modified cache content history.
  with open(cache_contents_path, 'w') as cache_contents_file:
    json.dump(cache_contents, cache_contents_file, indent=2, sort_keys=True)
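# With the contextlib.contextmanager decorator in place, callers wrap their
# download work in a with block. An illustrative usage sketch; the package
# name, path, and history size below are made-up values.
with _persisted_cache_history('example-package', 'out/download_cache',
                              history_size=3) as history:
  # ... download, unpack, and record cache entries on |history| here ...
  pass
# On a clean exit, clean_old() has trimmed the history and the updated
# contents.json has been written back out.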
def _extract_symbols_from_one_binary(binary):
  # If the marker is already written, we should already have the
  # extracted symbols.
  marker_path = _get_symbol_marker(binary)
  if os.path.exists(marker_path):
    logging.info('Skip extracting symbols from: %s', binary)
    return

  logging.info('Extracting symbols from: %s', binary)
  dump_syms_tool = build_common.get_build_path_for_executable(
      'dump_syms', is_host=True)
  p = concurrent_subprocess.Popen([dump_syms_tool, binary])
  my_filter = _DumpSymsFilter()
  p.handle_output(my_filter)
  syms = ''.join(my_filter.stdout_result)

  # The first line should look like:
  # MODULE Linux arm 0222CE01F27D6870B1FA991F84B9E0460 libc.so
  symhash = syms.splitlines()[0].split()[3]
  base = os.path.basename(binary)
  sympath = os.path.join(_SYMBOL_OUT_DIR, base, symhash, base + '.sym')
  file_util.makedirs_safely(os.path.dirname(sympath))
  with open(sympath, 'w') as f:
    f.write(syms)

  # Create the marker directory so we will not need to extract symbols
  # next time.
  file_util.makedirs_safely(marker_path)
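# _get_symbol_marker is referenced above but not shown. A hypothetical
# sketch, assuming the marker path is derived from a content hash of the
# binary so that unchanged binaries are skipped on later runs; the real
# implementation may compute the path differently.
# Requires: import hashlib.
def _get_symbol_marker(binary):
  with open(binary, 'rb') as f:
    digest = hashlib.sha1(f.read()).hexdigest()
  # A directory (not a file), so the caller can use makedirs_safely on it.
  return os.path.join(_SYMBOL_OUT_DIR, 'markers', digest)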
def make_download_args_list(builders_info, outdir, number_of_logs):
  download_args_list = []
  for builder, build_number in builders_info.iteritems():
    logs_dir = os.path.join(outdir, builder)
    file_util.makedirs_safely(logs_dir)
    # Cover the last |number_of_logs| builds, clamped at build 0. Use a
    # distinct loop variable so the comprehension does not shadow
    # |build_number|.
    build_range = range(max(build_number - number_of_logs + 1, 0),
                        build_number + 1)
    download_args_list += [(builder, number, logs_dir)
                           for number in build_range]
  return download_args_list
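# An illustration of the output shape, assuming a single builder at build
# 120 with three logs requested (all values are made up). Note that the call
# also creates out/logs/linux-bot as a side effect.
args = make_download_args_list({'linux-bot': 120}, 'out/logs', 3)
assert args == [('linux-bot', 118, 'out/logs/linux-bot'),
                ('linux-bot', 119, 'out/logs/linux-bot'),
                ('linux-bot', 120, 'out/logs/linux-bot')]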
def populate_final_directory(self):
  """Sets up the final location for the download from the cache."""
  logging.info('%s: Setting up %s from cache %s', self._name,
               self._unpacked_final_path, self.unpacked_linked_cache_path)

  file_util.makedirs_safely(self._unpacked_final_path)

  # We create a directory, and make symbolic links for the first level
  # of contents for backwards compatibility with an older version of
  # this code, which could only handle FINAL_DIR being a directory.
  for child in os.listdir(self.unpacked_linked_cache_path):
    file_util.create_link(
        os.path.join(self._unpacked_final_path, child),
        os.path.join(self.unpacked_linked_cache_path, child),
        overwrite=True)
def _run_chrome(parsed_args, **kwargs):
  if parsed_args.logcat is not None:
    # The adb process will be terminated in the atexit handler, registered
    # in signal_util.setup().
    subprocess.Popen(
        [toolchain.get_tool('host', 'adb'), 'logcat'] + parsed_args.logcat)

  params = _compute_chrome_params(parsed_args)
  gdb_util.create_or_remove_bare_metal_gdb_lock_dir(parsed_args.gdb)

  # Similar to the adb subprocess, using atexit has a timing issue. See the
  # above comment for details.
  chrome_timeout = _select_chrome_timeout(parsed_args)
  for i in xrange(parsed_args.chrome_flakiness_retry + 1):
    if i > 0:
      logging.error('Chrome is flaky. Retrying...: %d', i)

    p = chrome_process.ChromeProcess(params, timeout=chrome_timeout)
    atexit.register(_terminate_chrome, p)

    gdb_util.maybe_launch_gdb(parsed_args.gdb, parsed_args.gdb_type, p.pid)
    jdb_util.maybe_launch_jdb(parsed_args.jdb_port, parsed_args.jdb_type)

    # Write the PID to a file, so that other launch_chrome processes sharing
    # the same user data can find this process. In the common case, the file
    # will be removed by _terminate_chrome() registered above.
    file_util.makedirs_safely(_USER_DATA_DIR)
    with open(_CHROME_PID_PATH, 'w') as pid_file:
      pid_file.write('%d\n' % p.pid)

    stats = startup_stats.StartupStats()
    handler = _select_output_handler(parsed_args, stats, p, **kwargs)

    # Wait for the process to finish, or for us to be interrupted.
    try:
      returncode = p.handle_output(handler)
    except output_handler.ChromeFlakinessError:
      # Chrome was terminated due to flakiness. Retry.
      continue

    if returncode:
      sys.exit(returncode)
    return stats

  # Chrome's flakiness failures have continued past the retry limit.
  # Terminate the script.
  logging.error('Chrome is too flaky; it hit the retry limit.')
  sys.exit(1)
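# _terminate_chrome is registered with atexit above but not shown. A
# hypothetical sketch, assuming it stops the Chrome child process and
# removes the shared PID file; the real handler may do more cleanup.
def _terminate_chrome(chrome_process):
  try:
    chrome_process.terminate()
  except OSError:
    pass  # The process has already exited.
  if os.path.exists(_CHROME_PID_PATH):
    os.remove(_CHROME_PID_PATH)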
def main():
  stamp_content = ('\n'.join(
      '%s %s' % (checksum, url) for url, checksum in _GDB_PACKAGES) + '\n')
  stamp = build_common.StampFile(
      stamp_content,
      posixpath.join(build_common.get_gdb_multiarch_dir(), 'STAMP'))
  if stamp.is_up_to_date():
    return 0

  print 'Need to download gdb-multiarch'
  file_util.makedirs_safely(build_common.get_gdb_multiarch_dir())
  with _change_directory(build_common.get_gdb_multiarch_dir()):
    for url, checksum in _GDB_PACKAGES:
      deb_filename = posixpath.basename(url)
      _download_and_verify_package(url, checksum, deb_filename)
      _extract_package(deb_filename)

  stamp.update()
  return 0
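# _change_directory is used as a context manager above but not shown. A
# minimal sketch, assuming it simply chdirs into the directory and restores
# the original working directory on exit.
# Requires: import contextlib, os.
@contextlib.contextmanager
def _change_directory(path):
  original_dir = os.getcwd()
  os.chdir(path)
  try:
    yield path
  finally:
    os.chdir(original_dir)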
def _create_symlink_tree(mods_root, third_party_root, staging_root):
  """Creates a symlink tree of mods_root overlaid on third_party_root.

  This method creates a symlink tree of the mods_root directory (working
  the same as a recursive copy, except that all files are symlinked rather
  than actually copied). If third_party_root is given, each created
  directory is overlaid on the corresponding directory in third_party_root
  (if it exists).

  For example: suppose mods_root is "mods/", third_party_root is
  "third_party/" and staging_root is "out/staging/". Then the symlink tree
  of mods/android/... will be created at out/staging/android/...,
  overlaying third_party/android/...
  """
  staging_root_parent = os.path.dirname(staging_root)
  file_util.makedirs_safely(staging_root_parent)

  if os.path.exists('mods/chromium-ppapi/base'):
    # See comments in _create_overlay_base.
    raise Exception('Putting headers in mods/chromium-ppapi/base will '
                    'cause code in chromium_org libbase implementation to '
                    'include headers from chromium-ppapi libbase and will '
                    'result in compilation errors or worse.')

  for dirpath, dirs, fnames in os.walk(mods_root):
    # Do not track the .git directory.
    if _GIT_DIR in dirs:
      dirs.remove(_GIT_DIR)

    relpath = os.path.relpath(dirpath, mods_root)
    dest_dir = os.path.normpath(os.path.join(staging_root, relpath))
    if not os.path.exists(dest_dir):
      os.mkdir(dest_dir)

    # Create symlinks for files.
    for name in fnames:
      _create_symlink(os.path.join(dirpath, name), dest_dir)

    if third_party_root:
      _create_overlay_base(
          os.path.join(third_party_root, relpath), dirs + fnames, dest_dir)
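# _create_symlink is called above but not shown. A hypothetical sketch,
# assuming it links the staging entry back to the source file, replacing any
# stale link; the real helper may differ (e.g., in how it handles existing
# non-link files).
def _create_symlink(source_path, dest_dir):
  link_path = os.path.join(dest_dir, os.path.basename(source_path))
  if os.path.lexists(link_path):
    os.remove(link_path)
  os.symlink(os.path.abspath(source_path), link_path)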
def _write_args_if_needed(self, args):
  """Writes args to a file if the list is too long, and returns new args."""
  # Do not rewrite args for commands other than launch_chrome, because
  # other commands do not necessarily support the syntax of reading
  # arguments from a file.
  if not launch_chrome_util.is_launch_chrome_command(args):
    return args

  remaining_args = launch_chrome_util.remove_leading_launch_chrome_args(args)
  args_string = '\n'.join(remaining_args)

  # Do not rewrite args to a file if the argument list is short enough.
  if len(args_string) < SuiteRunnerBase.WRITE_ARGS_MIN_LENGTH:
    return args

  args_dir = os.path.join(build_common.get_build_dir(), 'integration_tests')
  file_util.makedirs_safely(args_dir)
  args_file = os.path.join(args_dir, self._name + '_args')
  with open(args_file, 'w') as f:
    f.write(args_string)
  return args[:-len(remaining_args)] + ['@' + args_file]
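# The consuming side of the '@<path>' convention is not shown here. A
# hypothetical sketch of how a command could expand such arguments,
# reversing the rewrite above; the helper name is illustrative only.
def _expand_args_files(args):
  expanded = []
  for arg in args:
    if arg.startswith('@'):
      # Each line of the file is one argument (see the '\n'.join above).
      with open(arg[1:]) as f:
        expanded.extend(f.read().splitlines())
    else:
      expanded.append(arg)
  return expanded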
def check_and_perform_update(self):
  with open(self._deps_file_path) as f:
    deps_file_contents = f.read()

  stamp_file_path = os.path.join(self._unpacked_final_path, 'STAMP')
  stamp_file = build_common.StampFile(deps_file_contents, stamp_file_path)
  if stamp_file.is_up_to_date():
    # Nothing has changed.
    return

  logging.info('%s: Updating npm package.', self._name)
  file_util.makedirs_safely(self._unpacked_final_path)

  package_json_file_path = os.path.join(self._unpacked_final_path,
                                        'package.json')
  shutil.copy(self._deps_file_path, package_json_file_path)

  # The Ubuntu npm package uses ~/tmp as its temporary directory
  # (https://github.com/npm/npm/issues/2936). On the buildbots we do not
  # have write access to this dir. Set a different dir to use here.
  npm_env = os.environ.copy()
  npm_env['TMP'] = '/tmp'

  # npm installs packages listed in the package.json file of the CWD to a
  # directory named 'node_modules' in the CWD.
  npm_command = ['npm', 'install']
  try:
    subprocess.check_output(npm_command, stderr=subprocess.STDOUT,
                            cwd=self._unpacked_final_path, env=npm_env)
  except subprocess.CalledProcessError as e:
    print 'Output of failed npm install:'
    print e.output
    # Re-raise with the original traceback preserved.
    raise

  stamp_file.update()
def _populate_cache_from_non_symlinked_files(self, history):
  final_url_path = os.path.join(self._unpacked_final_path, 'URL')

  # See if there is an existing URL file.
  if not os.path.isfile(final_url_path):
    return

  # Read the content of the URL file in the subdirectory to figure out
  # how to move it into the cache (the DEPS hash may not match!)
  url_file_content = file_util.read_metadata_file(final_url_path)
  cache_path = self._get_cache_entry_path(url_file_content)
  cache_link = os.path.abspath(os.path.join(cache_path, self._link_subdir))

  # Ensure that this cache path is in our history as the most recent entry.
  history.ensure_recent(cache_path)

  # If there appears to be something already cached, then we do not need to
  # do anything.
  if os.path.isdir(cache_path):
    return

  # Move the existing unpacked download into the cache directory.
  file_util.makedirs_safely(os.path.dirname(cache_link))
  os.rename(self._unpacked_final_path, cache_link)
def setup_output_directory(output_dir):
  """Creates a directory to put all test log files."""
  if os.path.exists(output_dir):
    file_util.rmtree(output_dir)
  file_util.makedirs_safely(output_dir)
def get_remote_binaries_dir():
  """Gets a directory for storing remote binaries like nacl_helper."""
  path = os.path.join(_get_temp_dir(), _TEMP_REMOTE_BINARIES_DIR)
  file_util.makedirs_safely(path)
  return path
def setup_work_root():
  file_util.makedirs_safely(ArtTestRunner.get_work_root())
def create_or_remove_bare_metal_gdb_lock_dir(gdb_target_list):
  file_util.rmtree(_BARE_METAL_GDB_LOCK_DIR, ignore_errors=True)
  if 'plugin' in gdb_target_list and OPTIONS.is_bare_metal_build():
    file_util.makedirs_safely(_BARE_METAL_GDB_LOCK_DIR)
def _save_dict_to_file(data, path):
  # Note: the parameter is named |data| to avoid shadowing the builtin dict.
  file_util.makedirs_safely(os.path.dirname(path))
  file_util.generate_file_atomically(path, lambda f: marshal.dump(data, f))
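# file_util.makedirs_safely appears throughout this section but its source
# is not shown. A plausible sketch, assuming it behaves like os.makedirs
# while tolerating an already-existing directory; the actual file_util
# implementation may differ.
# Requires: import errno, os.
def makedirs_safely(path):
  try:
    os.makedirs(path)
  except OSError as e:
    # Only swallow the error if the directory already exists.
    if e.errno != errno.EEXIST or not os.path.isdir(path):
      raise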