Beispiel #1
0
def download_live_logs(args):
  """Download current live log files for every task matched by `args`.

  Queues one async GET per log file found in each task's base directory and
  its logs/ subdirectory on the slave host, downloads them in parallel, then
  unpacks any gzipped files.

  Args:
    args: parsed CLI namespace (uses dest, logtype, verbose, headers,
      chunk_size, num_parallel_fetches).

  Returns:
    List of local paths to the downloaded (and unpacked) log files.
  """
  tasks = tasks_to_check(args)
  async_requests = []
  zipped_files = []
  all_logs = []
  sys.stderr.write(colored('Finding current live log files', 'cyan') + '\n')
  for task in tasks:
    metadata = files_json(args, task)
    if 'slaveHostname' not in metadata:
      continue
    uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
    # The base directory and the logs/ subdirectory were handled by two
    # near-identical loops; they differ only in the listing call and the
    # remote path template. The lambdas defer each listing so the request
    # ordering matches the original two-loop version.
    for label, list_files, path_format in (
        ('base', lambda: base_directory_files(args, task, metadata), '{0}/{1}/{2}'),
        ('logs', lambda: logs_folder_files(args, task), '{0}/{1}/logs/{2}')):
      if args.verbose:
        sys.stderr.write(colored('Finding logs in {0} directory on {1}'.format(label, metadata['slaveHostname']), 'magenta') + '\n')
      for log_file in list_files():
        logfile_name = '{0}-{1}'.format(task, log_file)
        # `args.logtype and ...` in the original condition was redundant:
        # `not args.logtype or match` is logically equivalent.
        if not args.logtype or logfetch_base.log_matches(log_file, args.logtype.replace('logs/', '')):
          if should_download(args, logfile_name, task):
            async_requests.append(
              grequests.AsyncRequest('GET', uri,
                callback=generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose),
                params={'path': path_format.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
                headers=args.headers
              )
            )
          if logfile_name.endswith('.gz'):
            zipped_files.append('{0}/{1}'.format(args.dest, logfile_name))
          else:
            all_logs.append('{0}/{1}'.format(args.dest, logfile_name.replace('.gz', '.log')))
        elif args.logtype and args.verbose:
          sys.stderr.write(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n')

  if async_requests:
    sys.stderr.write(colored('Starting live logs downloads\n', 'cyan'))
    grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
  if zipped_files:
    sys.stderr.write(colored('\nUnpacking logs\n', 'cyan'))
    all_logs = all_logs + logfetch_base.unpack_logs(args, zipped_files)
  return all_logs
Beispiel #2
0
def download_live_logs(args):
    """Download live log files for each matched task, with progress output.

    Queues async GETs for log files in every task's base directory and
    logs/ subdirectory, updates a progress bar per task checked, then runs
    the downloads in parallel.

    Args:
        args: parsed CLI namespace (uses dest, logtype, verbose, silent,
            headers, chunk_size, num_parallel_fetches).

    Returns:
        List of local paths where the matched logs are written.
    """
    logfetch_base.log(colored('Finding current live log files', 'cyan') + '\n', args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)
    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            # The two directory scans were duplicated verbatim except for the
            # listing call and the remote path template; the lambdas defer
            # each listing so request ordering is unchanged.
            for list_files, path_format in (
                (lambda: base_directory_files(args, task, metadata), '{0}/{1}/{2}'),
                (lambda: logs_folder_files(args, task), '{0}/{1}/logs/{2}'),
            ):
                for log_file in list_files():
                    logfile_name = '{0}-{1}'.format(task, log_file)
                    # `args.logtype and ...` was redundant in the original
                    # condition; `not args.logtype or match` is equivalent.
                    if not args.logtype or logfetch_base.log_matches(log_file, args.logtype.replace('logs/', '')):
                        if should_download(args, logfile_name, task):
                            async_requests.append(
                                grequests.AsyncRequest('GET', uri,
                                    callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                                    params={'path': path_format.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
                                    headers=args.headers
                                )
                            )
                        all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
                    elif args.logtype:
                        logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)
        tasks_check_progress += 1
        # Bar is suppressed when silent OR verbose (verbose output would
        # interleave with the bar).
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, 'Log Finder', args.silent or args.verbose)

    if async_requests:
        logfetch_base.log(colored('\nStarting {0} live logs downloads\n'.format(len(async_requests)), 'cyan'), args, False)
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs
Beispiel #3
0
def download_live_logs(args):
    """Download live log files for each matched task, with progress output.

    Queues async GETs for log files in every task's base directory and
    logs/ subdirectory, updates a progress bar per task checked, then runs
    the downloads in parallel.

    Args:
        args: parsed CLI namespace (uses dest, logtype, verbose, silent,
            headers, chunk_size, num_parallel_fetches).

    Returns:
        List of local paths where the matched logs are written.
    """
    logfetch_base.log(colored('Finding current live log files', 'cyan') + '\n', args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)
    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            # The two directory scans were duplicated verbatim except for the
            # listing call and the remote path template; the lambdas defer
            # each listing so request ordering is unchanged.
            for list_files, path_format in (
                (lambda: base_directory_files(args, task, metadata), '{0}/{1}/{2}'),
                (lambda: logs_folder_files(args, task), '{0}/{1}/logs/{2}'),
            ):
                for log_file in list_files():
                    logfile_name = '{0}-{1}'.format(task, log_file)
                    # `args.logtype and ...` was redundant in the original
                    # condition; `not args.logtype or match` is equivalent.
                    if not args.logtype or logfetch_base.log_matches(log_file, args.logtype.replace('logs/', '')):
                        if should_download(args, logfile_name, task):
                            async_requests.append(
                                grequests.AsyncRequest('GET', uri,
                                    callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                                    params={'path': path_format.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
                                    headers=args.headers
                                )
                            )
                        all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
                    elif args.logtype:
                        logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)
        tasks_check_progress += 1
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, 'Log Finder', args.silent)

    if async_requests:
        logfetch_base.log(colored('\nStarting {0} live logs downloads\n'.format(len(async_requests)), 'cyan'), args, False)
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs
Beispiel #4
0
def download_s3_logs(args):
  """Fetch in-date-range S3 log archives in parallel, unpack them, and
  return the local paths of the resulting log files."""
  sys.stderr.write(colored('Checking for S3 log files', 'cyan') + '\n')
  queued = []
  archives = []
  results = []
  for entry in logs_for_all_requests(args):
    name = entry['key'].rsplit("/", 1)[1]
    if not logfetch_base.is_in_date_range(args, time_from_filename(name)):
      if args.verbose:
        sys.stderr.write(colored('Excluding {0}, not in date range'.format(name), 'magenta') + '\n')
      continue
    if already_downloaded(args.dest, name):
      if args.verbose:
        sys.stderr.write(colored('Log already downloaded {0}'.format(name), 'magenta') + '\n')
      results.append('{0}/{1}'.format(args.dest, name.replace('.gz', '.log')))
    else:
      queued.append(
        grequests.AsyncRequest('GET', entry['getUrl'], callback=generate_callback(entry['getUrl'], args.dest, name, args.chunk_size, args.verbose))
      )
    archives.append('{0}/{1}'.format(args.dest, name))
  if queued:
    sys.stderr.write(colored('Starting S3 Downloads with {0} parallel fetches'.format(args.num_parallel_fetches), 'cyan'))
    grequests.map(queued, stream=True, size=args.num_parallel_fetches)
  else:
    sys.stderr.write(colored('No S3 logs to download', 'cyan'))
  sys.stderr.write(colored('\nUnpacking S3 logs\n', 'cyan'))
  results = results + logfetch_base.unpack_logs(args, archives)
  sys.stderr.write(colored('All S3 logs up to date', 'cyan') + '\n')
  return results
Beispiel #5
0
def download_s3_logs(args):
    """Download S3 logs that match the date range and logtype filters in
    parallel, then return the (possibly modified) local path list."""
    if not args.silent:
        sys.stderr.write(colored('Checking for S3 log files', 'cyan') + '\n')
    callbacks.progress = 0
    queued = []
    collected = []
    for entry in logs_for_all_requests(args):
        filename = entry['key'].rsplit("/", 1)[1]
        if not log_file_in_date_range(args, entry):
            logfetch_base.log(colored('Excluding {0}, not in date range'.format(filename), 'magenta') + '\n', args, True)
            continue
        # De Morgan of the original `not args.logtype or log_matches(...)`.
        if args.logtype and not log_matches(args, filename):
            logfetch_base.log(colored('Excluding {0} log does not match logtype argument {1}'.format(filename, args.logtype), 'magenta') + '\n', args, True)
            continue
        logfetch_base.log(colored('Including log {0}'.format(filename), 'blue') + '\n', args, True)
        if already_downloaded(args.dest, filename):
            logfetch_base.log(colored('Log already downloaded {0}'.format(filename), 'blue') + '\n', args, True)
        else:
            queued.append(
                grequests.AsyncRequest('GET', entry['getUrl'], callback=callbacks.generate_callback(entry['getUrl'], args.dest, filename, args.chunk_size, args.verbose, args.silent), headers=args.headers)
            )
        collected.append('{0}/{1}'.format(args.dest, filename))
    if queued:
        logfetch_base.log(colored('Starting {0} S3 Downloads with {1} parallel fetches\n'.format(len(queued), args.num_parallel_fetches), 'cyan'), args, False)
        callbacks.goal = len(queued)
        grequests.map(queued, stream=True, size=args.num_parallel_fetches)
    else:
        logfetch_base.log(colored('No S3 logs to download\n', 'cyan'), args, False)
    logfetch_base.log(colored('All S3 logs up to date\n', 'cyan'), args, False)
    return modify_download_list(collected)
Beispiel #6
0
def download_s3_logs(args):
    """Download S3 logs that match the date range and logtype filters in
    parallel, then return the (possibly modified) local path list."""
    if not args.silent:
        sys.stderr.write(colored('Checking for S3 log files', 'cyan') + '\n')
    callbacks.progress = 0
    queued = []
    collected = []
    for entry in logs_for_all_requests(args):
        filename = entry['key'].rsplit("/", 1)[1]
        # lastModified is truncated by 3 trailing digits before the range
        # check — presumably epoch millis to seconds; TODO confirm upstream.
        if not logfetch_base.is_in_date_range(args, int(str(entry['lastModified'])[0:-3])):
            logfetch_base.log(colored('Excluding {0}, not in date range'.format(filename), 'magenta') + '\n', args, True)
            continue
        # De Morgan of the original `not args.logtype or log_matches(...)`.
        if args.logtype and not log_matches(args, filename):
            logfetch_base.log(colored('Excluding {0} log does not match logtype argument {1}'.format(filename, args.logtype), 'magenta') + '\n', args, True)
            continue
        logfetch_base.log(colored('Including log {0}'.format(filename), 'blue') + '\n', args, True)
        if already_downloaded(args.dest, filename):
            logfetch_base.log(colored('Log already downloaded {0}'.format(filename), 'blue') + '\n', args, True)
        else:
            queued.append(
                grequests.AsyncRequest('GET', entry['getUrl'], callback=callbacks.generate_callback(entry['getUrl'], args.dest, filename, args.chunk_size, args.verbose, args.silent), headers=args.headers)
            )
        collected.append('{0}/{1}'.format(args.dest, filename))
    if queued:
        logfetch_base.log(colored('Starting {0} S3 Downloads with {1} parallel fetches\n'.format(len(queued), args.num_parallel_fetches), 'cyan'), args, False)
        callbacks.goal = len(queued)
        grequests.map(queued, stream=True, size=args.num_parallel_fetches)
    else:
        logfetch_base.log(colored('No S3 logs to download\n', 'cyan'), args, False)
    logfetch_base.log(colored('All S3 logs up to date\n', 'cyan'), args, False)
    return modify_download_list(collected)
Beispiel #7
0
def download_s3_logs(args):
    """Download S3 logs matching the date range and logtype filters in
    parallel; return the local paths of all matched logs."""
    if not args.silent:
        sys.stderr.write(colored("Checking for S3 log files", "cyan") + "\n")
    callbacks.progress = 0
    queued = []
    results = []
    for entry in logs_for_all_requests(args):
        filename = entry["key"].rsplit("/", 1)[1]
        # lastModified is truncated by 3 trailing digits before the range
        # check — presumably epoch millis to seconds; TODO confirm upstream.
        if not logfetch_base.is_in_date_range(args, int(str(entry["lastModified"])[0:-3])):
            logfetch_base.log(colored("Excluding {0}, not in date range".format(filename), "magenta") + "\n", args, True)
            continue
        # De Morgan of the original `not args.logtype or log_matches(...)`.
        if args.logtype and not log_matches(args, filename):
            logfetch_base.log(colored("Excluding {0} log does not match logtype argument {1}".format(filename, args.logtype), "magenta") + "\n", args, True)
            continue
        logfetch_base.log(colored("Including log {0}".format(filename), "blue") + "\n", args, True)
        if already_downloaded(args.dest, filename):
            logfetch_base.log(colored("Log already downloaded {0}".format(filename), "blue") + "\n", args, True)
        else:
            queued.append(
                grequests.AsyncRequest(
                    "GET",
                    entry["getUrl"],
                    callback=callbacks.generate_callback(entry["getUrl"], args.dest, filename, args.chunk_size, args.verbose, args.silent),
                    headers=args.headers,
                )
            )
        results.append("{0}/{1}".format(args.dest, filename))
    if queued:
        logfetch_base.log(colored("Starting {0} S3 Downloads with {1} parallel fetches\n".format(len(queued), args.num_parallel_fetches), "cyan"), args, False)
        callbacks.goal = len(queued)
        grequests.map(queued, stream=True, size=args.num_parallel_fetches)
    else:
        logfetch_base.log(colored("No S3 logs to download\n", "cyan"), args, False)
    logfetch_base.log(colored("All S3 logs up to date\n", "cyan"), args, False)
    return results
Beispiel #8
0
def download_s3_logs(args):
    """Fetch matching S3 log archives in parallel, unpack them, and return
    the local paths of the resulting log files."""
    sys.stderr.write(colored('Checking for S3 log files', 'cyan') + '\n')
    queued = []
    archives = []
    results = []
    for entry in logs_for_all_requests(args):
        filename = entry['key'].rsplit("/", 1)[1]
        # lastModified is truncated by 3 trailing digits before the range
        # check — presumably epoch millis to seconds; TODO confirm upstream.
        if not logfetch_base.is_in_date_range(args, int(str(entry['lastModified'])[0:-3])):
            if args.verbose:
                sys.stderr.write(colored('Excluding {0}, not in date range'.format(filename), 'magenta') + '\n')
            continue
        # De Morgan of the original `not args.logtype or log_matches(...)`.
        if args.logtype and not log_matches(args, filename):
            if args.verbose:
                sys.stderr.write(colored('Excluding {0} log does not match logtype argument {1}'.format(filename, args.logtype), 'magenta') + '\n')
            continue
        if already_downloaded(args.dest, filename):
            if args.verbose:
                sys.stderr.write(colored('Log already downloaded {0}'.format(filename), 'magenta') + '\n')
            results.append('{0}/{1}'.format(args.dest, filename.replace('.gz', '.log')))
        else:
            queued.append(
                grequests.AsyncRequest('GET', entry['getUrl'],
                                       callback=generate_callback(entry['getUrl'], args.dest, filename, args.chunk_size, args.verbose),
                                       headers=args.headers))
        archives.append('{0}/{1}'.format(args.dest, filename))
    if queued:
        sys.stderr.write(colored('Starting S3 Downloads with {0} parallel fetches'.format(args.num_parallel_fetches), 'cyan'))
        grequests.map(queued, stream=True, size=args.num_parallel_fetches)
    else:
        sys.stderr.write(colored('No S3 logs to download', 'cyan'))
    sys.stderr.write(colored('\nUnpacking S3 logs\n', 'cyan'))
    results = results + logfetch_base.unpack_logs(args, archives)
    sys.stderr.write(colored('All S3 logs up to date', 'cyan') + '\n')
    return results
Beispiel #9
0
def download_s3_logs(args):
  """Fetch matching S3 log archives in parallel; unpack them unless
  --download-only is set; return the local log paths."""
  # Invariant for all the per-file status lines below.
  chatty = args.verbose and not args.silent
  if not args.silent:
    sys.stderr.write(colored('Checking for S3 log files', 'cyan') + '\n')
  callbacks.progress = 0
  queued = []
  archives = []
  results = []
  for entry in logs_for_all_requests(args):
    filename = entry['key'].rsplit("/", 1)[1]
    # lastModified is truncated by 3 trailing digits before the range
    # check — presumably epoch millis to seconds; TODO confirm upstream.
    if not logfetch_base.is_in_date_range(args, int(str(entry['lastModified'])[0:-3])):
      if chatty:
        sys.stderr.write(colored('Excluding {0}, not in date range'.format(filename), 'magenta') + '\n')
      continue
    # De Morgan of the original `not args.logtype or log_matches(...)`.
    if args.logtype and not log_matches(args, filename):
      if chatty:
        sys.stderr.write(colored('Excluding {0} log does not match logtype argument {1}'.format(filename, args.logtype), 'magenta') + '\n')
      continue
    if chatty:
      sys.stderr.write(colored('Including log {0}'.format(filename), 'blue') + '\n')
    if already_downloaded(args.dest, filename):
      if chatty:
        sys.stderr.write(colored('Log already downloaded {0}'.format(filename), 'blue') + '\n')
      results.append('{0}/{1}'.format(args.dest, filename.replace('.gz', '.log')))
    else:
      queued.append(
        grequests.AsyncRequest('GET', entry['getUrl'], callback=callbacks.generate_callback(entry['getUrl'], args.dest, filename, args.chunk_size, args.verbose, args.silent), headers=args.headers)
      )
    archives.append('{0}/{1}'.format(args.dest, filename))
  if queued:
    if not args.silent:
      sys.stderr.write(colored('Starting {0} S3 Downloads with {1} parallel fetches\n'.format(len(queued), args.num_parallel_fetches), 'cyan'))
    callbacks.goal = len(queued)
    grequests.map(queued, stream=True, size=args.num_parallel_fetches)
    if not args.silent and not args.download_only:
      sys.stderr.write(colored('\nUnpacking {0} S3 log(s)\n'.format(len(queued)), 'cyan'))
  else:
    if not args.silent:
      sys.stderr.write(colored('No S3 logs to download\n', 'cyan'))
  if not args.download_only:
    results = results + logfetch_base.unpack_logs(args, archives)
  if not args.silent:
    sys.stderr.write(colored('All S3 logs up to date\n', 'cyan'))
  return results
Beispiel #10
0
def download_s3_logs(args):
  """Download in-date-range S3 logs, unpack every listed archive, and
  return the expected .log paths for the in-range files."""
  sys.stderr.write(colored('Checking for S3 log files', 'cyan') + '\n')
  s3_logs = logs_for_all_requests(args)
  queued = []
  results = []
  for entry in s3_logs:
    filename = entry['key'].rsplit("/", 1)[1]
    if not logfetch_base.is_in_date_range(args, time_from_filename(filename)):
      continue
    if not already_downloaded(args.dest, filename):
      queued.append(
        grequests.AsyncRequest('GET', entry['getUrl'], callback=generate_callback(entry['getUrl'], args.dest, filename, args.chunk_size))
      )
    results.append('{0}/{1}'.format(args.dest, filename.replace('.gz', '.log')))
  if queued:
    sys.stderr.write(colored('Starting S3 Downloads', 'cyan'))
    grequests.map(queued, stream=True, size=args.num_parallel_fetches)
  # Every listed key is handed to unpack, not only the in-range ones —
  # this mirrors the original behavior exactly.
  archives = ['{0}/{1}'.format(args.dest, entry['key'].rsplit("/", 1)[1]) for entry in s3_logs]
  sys.stderr.write(colored('Unpacking S3 logs\n', 'cyan'))
  logfetch_base.unpack_logs(archives)
  sys.stderr.write(colored('All S3 logs up to date', 'cyan') + '\n')
  return results
Beispiel #11
0
def download_s3_logs(args):
  """Download in-date-range S3 logs listed by the Singularity API, unpack
  every listed archive, and return the expected .log paths."""
  sys.stderr.write(colored('Checking for S3 log files', 'blue') + '\n')
  s3_logs = get_json_response(singularity_s3logs_uri(args))
  queued = []
  results = []
  for entry in s3_logs:
    filename = entry['key'].rsplit("/", 1)[1]
    unpacked_path = '{0}/{1}'.format(args.dest, filename.replace('.gz', '.log'))
    packed_path = '{0}/{1}'.format(args.dest, filename)
    if not in_date_range(args, filename):
      continue
    # Skip the fetch when either the unpacked or the gzipped copy exists.
    if not (os.path.isfile(unpacked_path) or os.path.isfile(packed_path)):
      queued.append(
        grequests.AsyncRequest('GET', entry['getUrl'],
          callback=generate_callback(entry['getUrl'], args.dest, filename, args.chunk_size)
        )
      )
    results.append(unpacked_path)
  grequests.map(queued, stream=True, size=args.num_parallel_fetches)
  # Every listed key is handed to unpack, mirroring the original behavior.
  archives = ['{0}/{1}'.format(args.dest, entry['key'].rsplit("/", 1)[1]) for entry in s3_logs]
  logfetch_base.unpack_logs(archives)
  sys.stderr.write(colored('All S3 logs up to date', 'blue') + '\n')
  return results
Beispiel #12
0
def download_live_logs(args):
    """Download current live log files for every task matched by `args`.

    Queues one async GET per log file found in each task's base directory
    and its logs/ subdirectory on the slave host, downloads them in
    parallel, then unpacks any gzipped files.

    Args:
        args: parsed CLI namespace (uses dest, logtype, verbose, headers,
            chunk_size, num_parallel_fetches).

    Returns:
        List of local paths to the downloaded (and unpacked) log files.
    """
    tasks = tasks_to_check(args)
    async_requests = []
    zipped_files = []
    all_logs = []
    sys.stderr.write(colored('Finding current live log files', 'cyan') + '\n')
    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' not in metadata:
            continue
        uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
        # The base directory and the logs/ subdirectory were handled by two
        # near-identical loops; they differ only in the listing call and the
        # remote path template. The lambdas defer each listing so request
        # ordering matches the original two-loop version.
        for label, list_files, path_format in (
                ('base', lambda: base_directory_files(args, task, metadata), '{0}/{1}/{2}'),
                ('logs', lambda: logs_folder_files(args, task), '{0}/{1}/logs/{2}')):
            if args.verbose:
                sys.stderr.write(
                    colored(
                        'Finding logs in {0} directory on {1}'.format(
                            label, metadata['slaveHostname']), 'magenta') + '\n')
            for log_file in list_files():
                logfile_name = '{0}-{1}'.format(task, log_file)
                # `args.logtype and ...` in the original condition was
                # redundant: `not args.logtype or match` is equivalent.
                if not args.logtype or logfetch_base.log_matches(
                        log_file, args.logtype.replace('logs/', '')):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest(
                                'GET',
                                uri,
                                callback=generate_callback(
                                    uri, args.dest, logfile_name,
                                    args.chunk_size, args.verbose),
                                params={
                                    'path': path_format.format(
                                        metadata['fullPathToRoot'],
                                        metadata['currentDirectory'], log_file)
                                },
                                headers=args.headers))
                    if logfile_name.endswith('.gz'):
                        zipped_files.append('{0}/{1}'.format(
                            args.dest, logfile_name))
                    else:
                        all_logs.append('{0}/{1}'.format(
                            args.dest, logfile_name.replace('.gz', '.log')))
                elif args.logtype and args.verbose:
                    sys.stderr.write(
                        colored(
                            'Excluding log {0}, doesn\'t match {1}'.format(
                                log_file, args.logtype), 'magenta') + '\n')

    if async_requests:
        sys.stderr.write(colored('Starting live logs downloads\n', 'cyan'))
        grequests.map(async_requests,
                      stream=True,
                      size=args.num_parallel_fetches)
    if zipped_files:
        sys.stderr.write(colored('\nUnpacking logs\n', 'cyan'))
        all_logs = all_logs + logfetch_base.unpack_logs(args, zipped_files)
    return all_logs
Beispiel #13
0
def download_live_logs(args):
    """Download live log files for each matched task, with progress output.

    Queues async GETs for log files in every task's base directory and
    logs/ subdirectory, updates a progress bar per task checked, then runs
    the downloads in parallel.

    Args:
        args: parsed CLI namespace (uses dest, logtype, verbose, silent,
            headers, chunk_size, num_parallel_fetches).

    Returns:
        List of local paths where the matched logs are written.
    """
    logfetch_base.log(colored("Finding current live log files", "cyan") + "\n", args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)
    for task in tasks:
        metadata = files_json(args, task)
        if "slaveHostname" in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata["slaveHostname"])
            # The two directory scans were duplicated verbatim except for the
            # listing call and the remote path template; the lambdas defer
            # each listing so request ordering is unchanged.
            for list_files, path_format in (
                (lambda: base_directory_files(args, task, metadata), "{0}/{1}/{2}"),
                (lambda: logs_folder_files(args, task), "{0}/{1}/logs/{2}"),
            ):
                for log_file in list_files():
                    logfile_name = "{0}-{1}".format(task, log_file)
                    # `args.logtype and ...` was redundant in the original
                    # condition; `not args.logtype or match` is equivalent.
                    if not args.logtype or logfetch_base.log_matches(
                        log_file, args.logtype.replace("logs/", "")
                    ):
                        if should_download(args, logfile_name, task):
                            async_requests.append(
                                grequests.AsyncRequest(
                                    "GET",
                                    uri,
                                    callback=callbacks.generate_callback(
                                        uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent
                                    ),
                                    params={
                                        "path": path_format.format(
                                            metadata["fullPathToRoot"], metadata["currentDirectory"], log_file
                                        )
                                    },
                                    headers=args.headers,
                                )
                            )
                        all_logs.append("{0}/{1}".format(args.dest, logfile_name))
                    elif args.logtype:
                        logfetch_base.log(
                            colored("Excluding log {0}, doesn't match {1}".format(log_file, args.logtype), "magenta")
                            + "\n",
                            args,
                            True,
                        )
        tasks_check_progress += 1
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, "Log Finder", args.silent)

    if async_requests:
        logfetch_base.log(
            colored("\nStarting {0} live logs downloads\n".format(len(async_requests)), "cyan"), args, False
        )
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs