def throttle(summary, max_result_size_KB,
             file_size_threshold_byte=DEFAULT_FILE_SIZE_THRESHOLD_BYTE,
             skip_autotest_log=False):
    """Compress qualifying result files until the size cap is met.

    Candidate files are compressed one at a time; after each compression
    the total result size is re-checked, so work stops as soon as the
    summary fits under the given max_result_size_KB (or all candidates
    have been processed).

    @param summary: A ResultInfo object containing result summary.
    @param max_result_size_KB: Maximum test result size in KB.
    @param file_size_threshold_byte: Threshold of file size in byte for it
            to be qualified for compression.
    @param skip_autotest_log: True to skip shrink Autotest logs, default
            is False.
    """
    sorted_infos, _ = throttler_lib.sort_result_files(summary)
    if skip_autotest_log:
        skip_patterns = [throttler_lib.AUTOTEST_LOG_PATTERN]
    else:
        skip_patterns = []
    candidates = throttler_lib.get_throttleable_files(sorted_infos,
                                                      skip_patterns)
    candidates = _get_zippable_files(candidates, file_size_threshold_byte)
    for candidate in candidates:
        _zip_file(candidate)
        # Stop early once the summary is small enough.
        if throttler_lib.check_throttle_limit(summary, max_result_size_KB):
            return
def throttle(summary, max_result_size_KB,
             file_size_limit_byte=DEFAULT_FILE_SIZE_LIMIT_BYTE,
             skip_autotest_log=False):
    """Trim oversized result files until the size cap is met.

    Each shrinkable file is trimmed down to file_size_limit_byte, one at a
    time; the total result size is re-checked after every trim so that
    processing stops as soon as the summary is under max_result_size_KB
    (or all candidates have been processed).

    @param summary: A ResultInfo object containing result summary.
    @param max_result_size_KB: Maximum test result size in KB.
    @param file_size_limit_byte: Limit each file's size in the summary to
            be under the given threshold, until all files are processed
            or the result size is under the given max_result_size_KB.
    @param skip_autotest_log: True to skip shrink Autotest logs, default
            is False.
    """
    sorted_infos, _ = throttler_lib.sort_result_files(summary)
    if skip_autotest_log:
        skip_patterns = [throttler_lib.AUTOTEST_LOG_PATTERN]
    else:
        skip_patterns = []
    candidates = throttler_lib.get_throttleable_files(sorted_infos,
                                                      skip_patterns)
    candidates = _get_shrinkable_files(candidates, file_size_limit_byte)
    for candidate in candidates:
        _trim_file(candidate, file_size_limit_byte)
        # Stop early once the summary is small enough.
        if throttler_lib.check_throttle_limit(summary, max_result_size_KB):
            return
def throttle(summary, max_result_size_KB,
             file_size_threshold_byte=DEFAULT_FILE_SIZE_THRESHOLD_BYTE,
             exclude_file_patterns=None):
    """Throttle the files in summary by deleting oversized files.

    Stop throttling until all files are processed or the result size is
    already reduced to be under the given max_result_size_KB.

    @param summary: A ResultInfo object containing result summary.
    @param max_result_size_KB: Maximum test result size in KB.
    @param file_size_threshold_byte: Threshold of file size in byte for a
            file to be qualified for deletion. All qualified files will be
            deleted, until all files are processed or the result size is
            under the given max_result_size_KB.
    @param exclude_file_patterns: A list of regex pattern for files not to
            be throttled. Default is None, treated as an empty list.
    """
    # Use a None sentinel instead of a mutable default argument ([]), so
    # the default list can never be shared/mutated across calls.
    if exclude_file_patterns is None:
        exclude_file_patterns = []
    file_infos, _ = throttler_lib.sort_result_files(summary)
    file_infos = throttler_lib.get_throttleable_files(
            file_infos,
            exclude_file_patterns + NON_DELETABLE_FILE_PATH_PATTERNS)
    for info in file_infos:
        # Only files larger than the threshold are deleted.
        if info.trimmed_size > file_size_threshold_byte:
            _delete_file(info)
            # Stop early once the summary is small enough.
            if throttler_lib.check_throttle_limit(summary,
                                                  max_result_size_KB):
                return
def throttle(summary, max_result_size_KB):
    """Throttle the files in summary by de-duplicating files.

    Files sharing the same parent directory and name prefix are grouped;
    groups larger than the keep-count are de-duplicated, keeping the
    oldest and newest files. Stop throttling until all files are
    processed or the result size is already reduced to be under the given
    max_result_size_KB.

    @param summary: A ResultInfo object containing result summary.
    @param max_result_size_KB: Maximum test result size in KB.
    """
    _, grouped_files = throttler_lib.sort_result_files(summary)
    # Hoisted loop invariant: minimum group size worth de-duplicating.
    keep_count = OLDEST_FILES_TO_KEEP_COUNT + NEWEST_FILES_TO_KEEP_COUNT
    for pattern in throttler_lib.RESULT_THROTTLE_PRIORITY:
        throttable_files = list(
                throttler_lib.get_throttleable_files(grouped_files[pattern],
                                                     NO_DEDUPE_FILE_PATTERNS))
        for info in throttable_files:
            info.parent_dir = os.path.dirname(info.path)
            # NOTE(review): assumes PREFIX_PATTERN matches every file name;
            # a non-matching name would raise AttributeError here — confirm.
            info.prefix = re.match(PREFIX_PATTERN, info.name).group(1)

        # Group files for each parent directory
        grouped_infos = _group_by(throttable_files, ['parent_dir', 'prefix'])

        for infos in grouped_infos.values():
            if len(infos) <= keep_count:
                # No need to dedupe if the count of file is too few.
                continue

            # Remove files can be deduped
            utils_lib.LOG('De-duplicating files in %s with the same prefix of '
                          '"%s"' % (infos[0].parent_dir, infos[0].prefix))
            _dedupe_files(summary, infos, max_result_size_KB)

            # Stop early once the summary is small enough.
            if throttler_lib.check_throttle_limit(summary,
                                                  max_result_size_KB):
                return