def setup_asan_if_needed():
  """Sets the asan.options device property.

  Runs the ASan device setup script against the attached Android device so
  that non-sanitizer builds can load the ASan runtime. No-op unless the job
  explicitly enables ASAN_DEVICE_SETUP, and skipped entirely for sanitizer
  builds (incompatible) or when there is no app directory to work with.
  """
  if not environment.get_value('ASAN_DEVICE_SETUP'):
    # Only do this step if explicitly enabled in the job type. This cannot be
    # determined from libraries in application directory since they can go
    # missing in a bad build, so we want to catch that.
    return

  if get_sanitizer_tool_name():
    # If this is a sanitizer build, no need to setup ASAN (incompatible).
    return

  app_directory = environment.get_value('APP_DIR')
  if not app_directory:
    # No app directory -> No ASAN runtime library. No work to do, bail out.
    return

  # Initialize variables.
  android_directory = environment.get_platform_resources_directory()
  asan_rt_arch32_lib = ASAN_RT_LIB.format(arch=ARCH32_ID)
  asan_rt_arch64_lib = ASAN_RT_LIB.format(arch=ARCH64_ID)
  cpu_arch = get_cpu_arch()
  device_id = environment.get_value('ANDROID_SERIAL')
  file_list = os.listdir(app_directory)

  # Hack for missing arm64 lib in older builds: if the build ships only the
  # 32-bit ASan runtime on an arm64 device, supply the 64-bit runtime from
  # our local platform resources so the setup script can push it.
  if (cpu_arch.startswith('arm64') and asan_rt_arch32_lib in file_list and
      asan_rt_arch64_lib not in file_list):
    # Copy arm64 library from local copy.
    source_asan_rt_arch64_lib = os.path.join(android_directory,
                                             asan_rt_arch64_lib)
    dest_asan_rt_arch64_lib = os.path.join(app_directory, asan_rt_arch64_lib)
    shell.copy_file(source_asan_rt_arch64_lib, dest_asan_rt_arch64_lib)

  # Execute the script.
  logs.log('Executing ASan device setup script.')
  asan_device_setup_script_path = os.path.join(android_directory,
                                               'third_party',
                                               'asan_device_setup.sh')
  asan_runtime_library_argument = '--lib %s' % app_directory
  device_argument = '--device %s' % device_id
  # Point ASan at our per-run options file via include_if_exists so options
  # can change between runs without re-running device setup.
  asan_options_file_path = get_sanitizer_options_file_path('ASAN')
  extra_asan_options = ('--extra-options include_if_exists=%s' %
                        asan_options_file_path)
  command = '%s %s %s %s' % (asan_device_setup_script_path, device_argument,
                             asan_runtime_library_argument, extra_asan_options)
  adb.execute_command(command, timeout=ASAN_SCRIPT_TIMEOUT)
def mock_get_corpus(self, corpus_directory, _):
  """Copy the training corpus into the given temporary directory.

  Test double for the real corpus fetch: mirrors every file from
  self.corpus_dir into corpus_directory, asserting each copy succeeds.
  Always returns True.
  """
  source_pattern = os.path.join(self.corpus_dir, '*')
  for source_path in glob.glob(source_pattern):
    destination = os.path.join(corpus_directory,
                               os.path.basename(source_path))
    self.assertTrue(shell.copy_file(source_path, destination))

  return True
def recreate_gce_device():
  """Recreate gce device, restoring from backup images.

  Stops the running cuttlefish virtual device, restores every image file
  from the backup directory under CVD_DIR, then relaunches the device as
  a daemon.
  """
  logs.log('Reimaging gce device.')
  cvd_dir = environment.get_value('CVD_DIR')
  assert cvd_dir, 'CVD_DIR needs to be set in environment.'

  # Resolve the cuttlefish control binaries.
  cvd_bin_dir = os.path.join(cvd_dir, 'bin')
  launch_cvd_path = os.path.join(cvd_bin_dir, 'launch_cvd')
  stop_cvd_path = os.path.join(cvd_bin_dir, 'stop_cvd')

  # Bring the device down before touching its images.
  execute_command(stop_cvd_path, timeout=RECOVERY_CMD_TIMEOUT)

  # Restore every backed-up image over the (possibly corrupted) live copy.
  image_dir = cvd_dir
  backup_image_dir = os.path.join(cvd_dir, 'backup')
  for backup_filename in os.listdir(backup_image_dir):
    shell.copy_file(
        os.path.join(backup_image_dir, backup_filename),
        os.path.join(image_dir, backup_filename))

  execute_command(launch_cvd_path + ' -daemon')
def recreate_gce_device():
  """Recreate gce device, restoring from backup images.

  Stops the device, deletes all current *.img files under CVD_DIR,
  copies pristine images back from the backup directory, then restarts
  the device.
  """
  logs.log('Reimaging gce device.')
  cvd_dir = environment.get_value('CVD_DIR')

  stop_gce_device()

  # Delete all existing images.
  image_dir = cvd_dir
  stale_images = glob.glob(os.path.join(image_dir, '*.img'))
  for stale_image_path in stale_images:
    shell.remove_file(stale_image_path)

  # Restore images from backup.
  backup_image_dir = os.path.join(cvd_dir, 'backup')
  for backup_filename in os.listdir(backup_image_dir):
    source_path = os.path.join(backup_image_dir, backup_filename)
    destination_path = os.path.join(image_dir, backup_filename)
    shell.copy_file(source_path, destination_path)

  start_gce_device()
def copy_file_to(self, local_path_or_handle, remote_path, metadata=None):
  """Copy file from a local path to a remote path.

  Args:
    local_path_or_handle: Either a path string or an open binary file-like
        object whose contents should be written to the remote location.
    remote_path: Remote destination path (converted to a filesystem path).
    metadata: Optional metadata to record alongside the copied file.

  Returns:
    False if a path-based copy fails; True otherwise.
  """
  fs_path = self.convert_path_for_write(remote_path)

  # NOTE(review): `basestring` exists only in Python 2; on Python 3 this
  # raises NameError. Confirm the file's target Python version — if this
  # runs under Python 3, it should be `str` instead.
  if isinstance(local_path_or_handle, basestring):
    # Path given: delegate to shell.copy_file and propagate its failure.
    if not shell.copy_file(local_path_or_handle, fs_path):
      return False
  else:
    # File-like object given: stream its contents to the destination.
    with open(fs_path, 'wb') as f:
      shutil.copyfileobj(local_path_or_handle, f)

  self._write_metadata(remote_path, metadata)
  return True
def get_file_from_cache_if_exists(file_path,
                                  update_modification_time_on_access=True):
  """Get file from nfs cache if available.

  Copies the cached counterpart of |file_path| into place and verifies the
  copy by comparing the local size against the size recorded in the cache
  metadata. Returns True only on a verified copy.
  """
  cached_path = get_cache_file_path(file_path)
  if not cached_path or not file_exists_in_cache(cached_path):
    # Nothing cached for this file; bail out.
    return False

  # Record the expected size before copying so we can validate afterwards.
  expected_size = get_cache_file_size_from_metadata(cached_path)

  # Copy file from cache to local.
  if not shell.copy_file(cached_path, file_path):
    return False

  if update_modification_time_on_access:
    # Refresh the cache entry's timestamp to help later LRU eviction.
    update_access_and_modification_timestamp(cached_path)

  # Success only if the local file exists and matches the recorded size.
  return (os.path.exists(file_path) and
          os.path.getsize(file_path) == expected_size)
def store_file_in_cache(file_path,
                        cached_files_per_directory_limit=True,
                        force_update=False):
  """Store a local file in the nfs cache, guarded by a cross-bot lock.

  Args:
    file_path: Local file to store. Must exist and be under CACHE_SIZE_LIMIT.
    cached_files_per_directory_limit: If True, evict least-recently-modified
        cached files so the directory stays within
        MAX_CACHED_FILES_PER_DIRECTORY entries.
    force_update: If True, replace an existing cache entry instead of
        returning early.
  """
  if not os.path.exists(file_path):
    logs.log_error(
        'Local file %s does not exist, nothing to store in cache.' % file_path)
    return

  if os.path.getsize(file_path) > CACHE_SIZE_LIMIT:
    logs.log('File %s is too large to store in cache, skipping.' % file_path)
    return

  nfs_root = environment.get_value('NFS_ROOT')
  if not nfs_root:
    # No NFS, nothing to store in cache.
    return

  # If NFS server is not available due to heavy load, skip storage operation
  # altogether as we would fail to store file.
  if not os.path.exists(os.path.join(nfs_root, '.')):  # Use . to iterate mount.
    logs.log_warn('Cache %s not available.' % nfs_root)
    return

  cache_file_path = get_cache_file_path(file_path)
  cache_directory = os.path.dirname(cache_file_path)
  filename = os.path.basename(file_path)

  if not os.path.exists(cache_directory):
    if not shell.create_directory(cache_directory, create_intermediates=True):
      logs.log_error('Failed to create cache directory %s.' % cache_directory)
      return

  # Check if the file already exists in cache.
  if file_exists_in_cache(cache_file_path):
    if not force_update:
      return

    # If we are forcing update, we need to remove current cached file and its
    # metadata.
    remove_cache_file_and_metadata(cache_file_path)

  # Delete old cached files beyond our maximum storage limit.
  if cached_files_per_directory_limit:
    # Get a list of cached files, skipping metadata sidecar files.
    cached_files_list = []
    for cached_filename in os.listdir(cache_directory):
      if cached_filename.endswith(CACHE_METADATA_FILE_EXTENSION):
        continue
      cached_file_path = os.path.join(cache_directory, cached_filename)
      cached_files_list.append(cached_file_path)

    # Sort newest-first by modification time, then evict everything past the
    # limit (leaving room for the file we are about to store).
    mtime = lambda f: os.stat(f).st_mtime
    last_used_cached_files_list = list(
        sorted(cached_files_list, key=mtime, reverse=True))
    for cached_file_path in (
        last_used_cached_files_list[MAX_CACHED_FILES_PER_DIRECTORY - 1:]):
      remove_cache_file_and_metadata(cached_file_path)

  # Start storing the actual file in cache now.
  logs.log('Started storing file %s into cache.' % filename)

  # Fetch lock to store this file. Try only once since if any other bot has
  # started to store it, we don't need to do it ourselves. Just bail out.
  lock_name = 'store:cache_file:%s' % utils.string_hash(cache_file_path)
  if not locks.acquire_lock(
      lock_name, max_hold_seconds=CACHE_LOCK_TIMEOUT, retries=1,
      by_zone=True):
    logs.log_warn(
        'Unable to fetch lock to update cache file %s, skipping.' % filename)
    return

  # Check if another bot already updated it.
  if file_exists_in_cache(cache_file_path):
    locks.release_lock(lock_name, by_zone=True)
    return

  shell.copy_file(file_path, cache_file_path)
  write_cache_file_metadata(cache_file_path, file_path)
  # Give NFS a moment to propagate the write before verifying it landed.
  time.sleep(CACHE_COPY_WAIT_TIME)
  error_occurred = not file_exists_in_cache(cache_file_path)
  locks.release_lock(lock_name, by_zone=True)

  if error_occurred:
    logs.log_error('Failed to store file %s into cache.' % filename)
  else:
    logs.log('Completed storing file %s into cache.' % filename)
def copy_blob(self, remote_source, remote_target):
  """Copy a remote file to another remote location."""
  source = self.convert_path(remote_source)
  target = self.convert_path_for_write(remote_target)
  return shell.copy_file(source, target)
def copy_file_from(self, remote_path, local_path):
  """Copy file from a remote path to a local path."""
  source = self.convert_path(remote_path)
  return shell.copy_file(source, local_path)
def _setup_application_path(self,
                            build_dir=None,
                            app_path='APP_PATH',
                            build_update=False):
  """Sets up APP_PATH environment variables for revision build.

  Args:
    build_dir: Build directory to search; defaults to self.build_dir.
    app_path: Environment variable name to populate with the app path
        (APP_PATH or APP_PATH_DEBUG).
    build_update: Whether this call follows a fresh build download, which
        triggers platform-specific post-install steps.
  """
  logs.log('Setup application path.')

  if not build_dir:
    build_dir = self.build_dir

  # Make sure to initialize so that we don't carry stale values
  # in case of errors. app_path can be APP_PATH or APP_PATH_DEBUG.
  environment.set_value(app_path, '')
  environment.set_value('APP_DIR', '')
  environment.set_value('BUILD_DIR', build_dir)
  environment.set_value('GN_ARGS_PATH', '')
  environment.set_value('LLVM_SYMBOLIZER_PATH',
                        environment.get_default_tool_path('llvm-symbolizer'))

  # Initialize variables.
  fuzzer_directory = environment.get_value('FUZZER_DIR')
  search_directories = [build_dir]
  if fuzzer_directory:
    search_directories.append(fuzzer_directory)

  set_environment_vars(search_directories, app_path=app_path)

  absolute_file_path = environment.get_value(app_path)
  app_directory = environment.get_value('APP_DIR')
  if not absolute_file_path:
    return

  # Set the symlink if needed.
  symbolic_link_target = environment.get_value('SYMBOLIC_LINK')
  if symbolic_link_target:
    os.system('mkdir --parents %s' % os.path.dirname(symbolic_link_target))
    os.system('rm %s' % symbolic_link_target)
    os.system('ln -s %s %s' % (app_directory, symbolic_link_target))

  # Android specific initialization.
  if environment.platform() == 'ANDROID':
    # Prepare device for app install.
    android.device.initialize_device()

    # On Android, we may need to write a command line file. We do this in
    # advance so that we do not have to write this to the device multiple
    # times.
    # TODO(mbarbella): Build code should not depend on fuzzing.
    from fuzzing import tests
    tests.get_command_line_for_application(write_command_line_file=True)

    # Install the app if it does not exist.
    android.device.install_application_if_needed(absolute_file_path,
                                                 build_update)
    return

  if not build_update:
    return

  # The following hacks are only applicable in Chromium.
  # BUG FIX: the guard was inverted (`if utils.is_chromium(): return`),
  # which skipped the Chromium-only ICU workaround for Chromium builds and
  # ran it for everything else. Return early for non-Chromium instead.
  if not utils.is_chromium():
    return

  # Chromium specific workaround for missing ICU data file in root directory.
  # Copy it from relative folders. See crbug.com/741603.
  root_icu_data_file_path = os.path.join(app_directory, ICU_DATA_FILENAME)
  find_icu_data_file_path = utils.find_binary_path(app_directory,
                                                   ICU_DATA_FILENAME)
  if find_icu_data_file_path and not os.path.exists(root_icu_data_file_path):
    shell.copy_file(find_icu_data_file_path, root_icu_data_file_path)