def download_live_logs(args):
    """Find and download the live (on-slave) log files for every task to check.

    For each task with a known slave hostname, queues an async HTTP download
    for every matching log file in the task sandbox's base directory and its
    ``logs/`` subdirectory, runs the downloads in parallel via grequests,
    unpacks any gzipped files, and returns the list of local log paths.
    """
    tasks = tasks_to_check(args)
    async_requests = []
    zipped_files = []
    all_logs = []
    sys.stderr.write(colored('Finding current live log files', 'cyan') + '\n')

    def queue_download(task, metadata, uri, log_file, subdir):
        # Shared per-file logic for both the base directory (subdir='') and
        # the logs directory (subdir='logs/'): queue a download when the file
        # matches the requested logtype and isn't already present, and record
        # the local path it will land at.
        logfile_name = '{0}-{1}'.format(task, log_file)
        if not args.logtype or (args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace('logs/', ''))):
            if should_download(args, logfile_name, task):
                async_requests.append(
                    grequests.AsyncRequest('GET', uri,
                        callback=generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose),
                        params={'path': '{0}/{1}/{2}{3}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], subdir, log_file)},
                        headers=args.headers
                    )
                )
            # Track gzipped files separately so they can be unpacked below.
            if logfile_name.endswith('.gz'):
                zipped_files.append('{0}/{1}'.format(args.dest, logfile_name))
            else:
                all_logs.append('{0}/{1}'.format(args.dest, logfile_name.replace('.gz', '.log')))
        elif args.logtype and args.verbose:
            sys.stderr.write(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n')

    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            if args.verbose:
                sys.stderr.write(colored('Finding logs in base directory on {0}'.format(metadata['slaveHostname']), 'magenta') + '\n')
            for log_file in base_directory_files(args, task, metadata):
                queue_download(task, metadata, uri, log_file, '')
            if args.verbose:
                sys.stderr.write(colored('Finding logs in logs directory on {0}'.format(metadata['slaveHostname']), 'magenta') + '\n')
            for log_file in logs_folder_files(args, task):
                queue_download(task, metadata, uri, log_file, 'logs/')

    if async_requests:
        sys.stderr.write(colored('Starting live logs downloads\n', 'cyan'))
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    if zipped_files:
        sys.stderr.write(colored('\nUnpacking logs\n', 'cyan'))
        all_logs = all_logs + logfetch_base.unpack_logs(args, zipped_files)
    return all_logs
def download_live_logs(args):
    """Find and download the live (on-slave) log files for every task to check.

    Shows a progress bar while scanning tasks, queues an async download for
    every matching log file in each task sandbox's base directory and its
    ``logs/`` subdirectory, then runs the downloads in parallel via grequests.
    Returns the list of local paths the logs are downloaded to.
    """
    logfetch_base.log(colored('Finding current live log files', 'cyan') + '\n', args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0  # reset shared download-progress counter
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)

    def queue_download(task, metadata, uri, log_file, subdir):
        # Shared per-file logic for the base directory (subdir='') and the
        # logs directory (subdir='logs/'): queue a download when the file
        # matches the requested logtype and isn't already present.
        logfile_name = '{0}-{1}'.format(task, log_file)
        if not args.logtype or (args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace('logs/', ''))):
            if should_download(args, logfile_name, task):
                async_requests.append(
                    grequests.AsyncRequest('GET', uri,
                        callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                        params={'path': '{0}/{1}/{2}{3}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], subdir, log_file)},
                        headers=args.headers
                    )
                )
            all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
        elif args.logtype:
            logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)

    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            for log_file in base_directory_files(args, task, metadata):
                queue_download(task, metadata, uri, log_file, '')
            for log_file in logs_folder_files(args, task):
                queue_download(task, metadata, uri, log_file, 'logs/')
        tasks_check_progress += 1
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, 'Log Finder', args.silent or args.verbose)

    if async_requests:
        logfetch_base.log(colored('\nStarting {0} live logs downloads\n'.format(len(async_requests)), 'cyan'), args, False)
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs
def download_live_logs(args):
    """Find and download the live (on-slave) log files for every task to check.

    Shows a progress bar while scanning tasks, queues an async download for
    every matching log file in each task sandbox's base directory and its
    ``logs/`` subdirectory, then runs the downloads in parallel via grequests.
    Returns the list of local paths the logs are downloaded to.
    """
    logfetch_base.log(colored('Finding current live log files', 'cyan') + '\n', args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0  # reset shared download-progress counter
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)

    def queue_download(task, metadata, uri, log_file, subdir):
        # Shared per-file logic for the base directory (subdir='') and the
        # logs directory (subdir='logs/'): queue a download when the file
        # matches the requested logtype and isn't already present.
        logfile_name = '{0}-{1}'.format(task, log_file)
        if not args.logtype or (args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace('logs/', ''))):
            if should_download(args, logfile_name, task):
                async_requests.append(
                    grequests.AsyncRequest('GET', uri,
                        callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                        params={'path': '{0}/{1}/{2}{3}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], subdir, log_file)},
                        headers=args.headers
                    )
                )
            all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
        elif args.logtype:
            logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)

    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            for log_file in base_directory_files(args, task, metadata):
                queue_download(task, metadata, uri, log_file, '')
            for log_file in logs_folder_files(args, task):
                queue_download(task, metadata, uri, log_file, 'logs/')
        tasks_check_progress += 1
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, 'Log Finder', args.silent)

    if async_requests:
        logfetch_base.log(colored('\nStarting {0} live logs downloads\n'.format(len(async_requests)), 'cyan'), args, False)
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs
def log_matches(args, filename):
    """Decide whether the s3 log *filename* should be fetched.

    When the configured s3 file pattern contains a 'filename' token, names
    can be matched against ``args.logtype`` directly. Otherwise matching is
    impossible, so fall back to fetching everything — or nothing when
    ``--no-name-fetch-off`` is set.
    """
    if 'filename' in args.file_pattern:
        return logfetch_base.log_matches(filename, '*{0}*'.format(args.logtype.replace('logs/', '')))
    else:
        sys.stderr.write(colored('Cannot match on log file names for s3 logs when filename is not in s3 pattern', 'red'))
        if args.no_name_fetch_off:
            # Fixed typo in the user-facing message: 'beacuse' -> 'because'.
            sys.stderr.write(colored('Will not fetch any s3 logs because --no-name-fetch-off is set, remove this setting to fetch all for this case instead', 'red'))
            return False
        else:
            sys.stderr.write(colored('Will fetch all s3 logs, set --no-name-fetch-off to skip s3 logs instead for this case', 'red'))
            return True
def log_matches(args, filename): if "filename" in args.file_pattern: return logfetch_base.log_matches(filename, "*{0}*".format(args.logtype.replace("logs/", ""))) else: sys.stderr.write( colored("Cannot match on log file names for s3 logs when filename is not in s3 pattern", "red") ) if args.no_name_fetch_off: sys.stderr.write( colored( "Will not fetch any s3 logs beacuse --no-name-fetch-off is set, remove this setting to fetch all for this case instead", "red", ) ) return False else: sys.stderr.write( colored("Will fetch all s3 logs, set --no-name-fetch-off to skip s3 logs instead for this case", "red") ) return True
def download_live_logs(args):
    """Find and download the live (on-slave) log files for every task to check.

    For each task with a known slave hostname, queues an async HTTP download
    for every matching log file in the task sandbox's base directory and its
    ``logs/`` subdirectory, runs the downloads in parallel via grequests,
    unpacks any gzipped files, and returns the list of local log paths.
    """
    tasks = tasks_to_check(args)
    async_requests = []
    zipped_files = []
    all_logs = []
    sys.stderr.write(colored('Finding current live log files', 'cyan') + '\n')

    def enqueue(task, metadata, uri, log_file, subdir):
        # Shared per-file logic for both directory scans (subdir is '' for
        # the base directory and 'logs/' for the logs directory): queue a
        # download when the file matches the requested logtype and isn't
        # already present, and record the local path it will land at.
        logfile_name = '{0}-{1}'.format(task, log_file)
        if not args.logtype or (args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace('logs/', ''))):
            if should_download(args, logfile_name, task):
                async_requests.append(
                    grequests.AsyncRequest('GET', uri,
                        callback=generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose),
                        params={'path': '{0}/{1}/{2}{3}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], subdir, log_file)},
                        headers=args.headers
                    )
                )
            # Track gzipped files separately so they can be unpacked below.
            if logfile_name.endswith('.gz'):
                zipped_files.append('{0}/{1}'.format(args.dest, logfile_name))
            else:
                all_logs.append('{0}/{1}'.format(args.dest, logfile_name.replace('.gz', '.log')))
        elif args.logtype and args.verbose:
            sys.stderr.write(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n')

    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            if args.verbose:
                sys.stderr.write(colored('Finding logs in base directory on {0}'.format(metadata['slaveHostname']), 'magenta') + '\n')
            for log_file in base_directory_files(args, task, metadata):
                enqueue(task, metadata, uri, log_file, '')
            if args.verbose:
                sys.stderr.write(colored('Finding logs in logs directory on {0}'.format(metadata['slaveHostname']), 'magenta') + '\n')
            for log_file in logs_folder_files(args, task):
                enqueue(task, metadata, uri, log_file, 'logs/')

    if async_requests:
        sys.stderr.write(colored('Starting live logs downloads\n', 'cyan'))
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    if zipped_files:
        sys.stderr.write(colored('\nUnpacking logs\n', 'cyan'))
        all_logs = all_logs + logfetch_base.unpack_logs(args, zipped_files)
    return all_logs
def download_live_logs(args):
    """Find and download the live (on-slave) log files for every task to check.

    Shows a progress bar while scanning tasks, queues an async download for
    every matching log file in each task sandbox's base directory and its
    ``logs/`` subdirectory, then runs the downloads in parallel via grequests.
    Returns the list of local paths the logs are downloaded to.
    """
    logfetch_base.log(colored("Finding current live log files", "cyan") + "\n", args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0  # reset shared download-progress counter
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)

    def enqueue(task, metadata, uri, log_file, subdir):
        # Shared per-file logic for both directory scans (subdir is "" for
        # the base directory and "logs/" for the logs directory): queue a
        # download when the file matches the requested logtype and isn't
        # already present.
        logfile_name = "{0}-{1}".format(task, log_file)
        if not args.logtype or (
            args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace("logs/", ""))
        ):
            if should_download(args, logfile_name, task):
                async_requests.append(
                    grequests.AsyncRequest(
                        "GET",
                        uri,
                        callback=callbacks.generate_callback(
                            uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent
                        ),
                        params={
                            "path": "{0}/{1}/{2}{3}".format(
                                metadata["fullPathToRoot"], metadata["currentDirectory"], subdir, log_file
                            )
                        },
                        headers=args.headers,
                    )
                )
            all_logs.append("{0}/{1}".format(args.dest, logfile_name))
        elif args.logtype:
            logfetch_base.log(
                colored("Excluding log {0}, doesn't match {1}".format(log_file, args.logtype), "magenta") + "\n",
                args,
                True,
            )

    for task in tasks:
        metadata = files_json(args, task)
        if "slaveHostname" in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata["slaveHostname"])
            for log_file in base_directory_files(args, task, metadata):
                enqueue(task, metadata, uri, log_file, "")
            for log_file in logs_folder_files(args, task):
                enqueue(task, metadata, uri, log_file, "logs/")
        tasks_check_progress += 1
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, "Log Finder", args.silent)

    if async_requests:
        logfetch_base.log(
            colored("\nStarting {0} live logs downloads\n".format(len(async_requests)), "cyan"), args, False
        )
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs