Example #1
def logs_for_all_requests(args):
    s3_params = {
        'start': int(time.mktime(args.start.timetuple()) * 1000),
        'end': int(time.mktime(args.end.timetuple()) * 1000)
    }
    if args.taskId:
        return get_json_response(s3_task_logs_uri(args, args.taskId), args,
                                 s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = get_json_response(s3_task_logs_uri(args, task), args,
                                        s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal,
                                              'S3 Log Finder', args.silent)
        if not args.silent:
            sys.stderr.write(
                colored('\nAlso searching s3 history...\n', 'cyan'))
        for request in logfetch_base.all_requests(args):
            s3_logs = get_json_response(s3_request_logs_uri(args, request),
                                        args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        return [dict(t) for t in set([tuple(l.items())
                                      for l in logs])]  # remove any duplicates
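The s3_params dictionary above converts the parsed start and end datetimes into epoch milliseconds, which is apparently what the S3 log search endpoint expects. A self-contained sketch of the same conversion, with made-up datetimes standing in for args.start and args.end:

import time
from datetime import datetime

# Hypothetical stand-ins for the parsed args.start / args.end values.
start = datetime(2015, 6, 1)
end = datetime(2015, 6, 2)

# time.mktime() treats the struct_time as local time and returns epoch
# seconds; multiplying by 1000 gives the millisecond values sent as the
# 'start' and 'end' query parameters.
s3_params = {
    'start': int(time.mktime(start.timetuple()) * 1000),
    'end': int(time.mktime(end.timetuple()) * 1000)
}
print(s3_params)  # e.g. {'start': 1433116800000, 'end': 1433203200000} (exact values depend on the local timezone)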
Example #2
def logs_for_all_requests(args):
  if args.taskId:
    return get_json_response(singularity_s3logs_uri(args, args.taskId))
  else:
    tasks = logfetch_base.tasks_for_requests(args)
    logs = []
    for task in tasks:
      s3_logs = get_json_response(singularity_s3logs_uri(args, task))
      logs = logs + s3_logs if s3_logs else logs
    return logs
Example #3
def logs_for_all_requests(args):
  if args.taskId:
    return get_json_response(s3_task_logs_uri(args, args.taskId))
  else:
    tasks = logfetch_base.tasks_for_requests(args)
    logs = []
    for task in tasks:
      s3_logs = get_json_response(s3_task_logs_uri(args, task))
      logs = logs + s3_logs if s3_logs else logs
    sys.stderr.write(colored('Also searching s3 history...\n', 'magenta'))
    for request in logfetch_base.all_requests(args):
      s3_logs = get_json_response(s3_request_logs_uri(args, request))
      logs = logs + s3_logs if s3_logs else logs
    return [dict(t) for t in set([tuple(l.items()) for l in logs])]
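Several of these examples finish by deduplicating the accumulated log entries: each dict is flattened into a hashable tuple of items, a set drops exact repeats, and dict() rebuilds the survivors. A small standalone illustration with invented entries:

logs = [
    {'key': 'a/service.log.gz', 'size': 100},
    {'key': 'b/service.log.gz', 'size': 200},
    {'key': 'a/service.log.gz', 'size': 100},  # exact duplicate of the first entry
]

# Dicts are unhashable, so each one is converted to a tuple of (key, value)
# pairs before going into the set.
deduped = [dict(t) for t in set([tuple(l.items()) for l in logs])]
print(len(deduped))  # 2

Note that this only collapses exact duplicates, requires every value to be hashable, and does not preserve the original ordering.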
Example #4
def all_tasks_for_request(args, request):
  uri = '{0}{1}'.format(base_uri(args), ACTIVE_TASKS_FORMAT.format(request))
  active_tasks = get_json_response(uri, args)
  if hasattr(args, 'start'):
    uri = '{0}{1}'.format(base_uri(args), REQUEST_TASKS_FORMAT.format(request))
    historical_tasks = get_json_response(uri, args)
    if len(historical_tasks) == 0:
      return active_tasks
    elif len(active_tasks) == 0:
      return historical_tasks
    else:
      return active_tasks + [h for h in historical_tasks if is_in_date_range(args, int(str(h['updatedAt'])[0:-3]))]
  else:
    return active_tasks
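The updatedAt field on each historical task is treated as an epoch-millisecond timestamp, so int(str(h['updatedAt'])[0:-3]) drops the last three digits to get epoch seconds before passing the value to is_in_date_range. A quick illustration with a made-up timestamp:

updated_at_ms = 1433134800123            # epoch milliseconds, as returned by the API
updated_at_s = int(str(updated_at_ms)[0:-3])
print(updated_at_s)                      # 1433134800

# Integer division is equivalent and avoids the string round-trip:
print(updated_at_ms // 1000)             # 1433134800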
Example #5
def logs_for_all_requests(args):
    if args.taskId:
        return get_json_response(s3_task_logs_uri(args, args.taskId))
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        for task in tasks:
            s3_logs = get_json_response(s3_task_logs_uri(args, task))
            logs = logs + s3_logs if s3_logs else logs
        sys.stderr.write(colored('Also searching s3 history...\n', 'cyan'))
        for request in logfetch_base.all_requests(args):
            s3_logs = get_json_response(s3_request_logs_uri(args, request))
            logs = logs + s3_logs if s3_logs else logs
        return [dict(t) for t in set([tuple(l.items())
                                      for l in logs])]  # remove any duplicates
Example #6
def logs_folder_files(args, task):
  uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
  files_json = get_json_response(uri, args, {'path' : '{0}/logs'.format(task)})
  if 'files' in files_json:
    files = files_json['files']
    return [f['name'] for f in files if logfetch_base.is_in_date_range(args, f['mtime'])]
  else:
    return [f['path'].rsplit('/')[-1] for f in files_json if logfetch_base.is_in_date_range(args, f['mtime'])]
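This helper, like base_directory_files further down, copes with two possible response shapes from the folder-browse endpoint: a dict wrapping a 'files' list whose entries carry a 'name', or a bare list of entries whose 'path' has to be split to recover the file name. A toy illustration of both branches, with invented payloads:

# Shape 1: a dict wrapping the file list.
files_json = {'files': [{'name': 'service.log', 'mtime': 1433134800}]}
if 'files' in files_json:
    names = [f['name'] for f in files_json['files']]
print(names)  # ['service.log']

# Shape 2: a bare list of entries keyed by full path.
files_json = [{'path': '/mnt/mesos/sandbox/logs/service.log', 'mtime': 1433134800}]
names = [f['path'].rsplit('/')[-1] for f in files_json]
print(names)  # ['service.log']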
Example #7
def all_requests(args):
  uri = '{0}{1}'.format(base_uri(args), ALL_REQUESTS)
  requests = get_json_response(uri, args)
  included_requests = []
  for request in requests:
    if fnmatch.fnmatch(request['request']['id'], args.requestId):
      included_requests.append(request['request']['id'])
  return included_requests
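all_requests filters the full request list with fnmatch, so the requestId argument is treated as a shell-style wildcard pattern rather than an exact ID. For example, with invented request IDs:

import fnmatch

request_ids = ['billing-worker', 'billing-api', 'search-api']

# Shell-style matching: 'billing-*' selects both billing requests.
matched = [r for r in request_ids if fnmatch.fnmatch(r, 'billing-*')]
print(matched)  # ['billing-worker', 'billing-api']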
Example #8
def all_requests(args):
    uri = '{0}{1}'.format(base_uri(args), ALL_REQUESTS)
    requests = get_json_response(uri)
    included_requests = []
    for request in requests:
        if fnmatch.fnmatch(request['request']['id'], args.requestId):
            included_requests.append(request['request']['id'])
    return included_requests
Example #9
def base_directory_files(args, task):
  uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
  files_json = get_json_response(uri, args)
  if 'files' in files_json:
    files = files_json['files']
    return [f['name'] for f in files if valid_logfile(f)]
  else:
    return [f['path'].rsplit('/')[-1] for f in files_json if valid_logfile(f)]
Example #10
def logs_for_all_requests(args):
  s3_params = {'start': int(time.mktime(args.start.timetuple()) * 1000), 'end': int(time.mktime(args.end.timetuple()) * 1000)}
  if args.taskId:
    return get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
  else:
    tasks = logfetch_base.tasks_for_requests(args)
    logs = []
    tasks_progress = 0
    tasks_goal = len(tasks)
    for task in tasks:
      s3_logs = get_json_response(s3_task_logs_uri(args, task), args, s3_params)
      logs = logs + s3_logs if s3_logs else logs
      tasks_progress += 1
      logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder')
    sys.stderr.write(colored('\nAlso searching s3 history...\n', 'cyan'))
    for request in logfetch_base.all_requests(args):
      s3_logs = get_json_response(s3_request_logs_uri(args, request), args, s3_params)
      logs = logs + s3_logs if s3_logs else logs
    return [dict(t) for t in set([tuple(l.items()) for l in logs])] # remove any duplicates
Example #11
def base_directory_files(args, task):
    uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
    files_json = get_json_response(uri, args)
    if 'files' in files_json:
        files = files_json['files']
        return [f['name'] for f in files if valid_logfile(f)]
    else:
        return [
            f['path'].rsplit('/')[-1] for f in files_json if valid_logfile(f)
        ]
Example #12
def logs_folder_files(args, task):
    uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
    files_json = get_json_response(uri, {'path': '{0}/logs'.format(task)})
    if 'files' in files_json:
        files = files_json['files']
        return [
            f['name'] for f in files
            if logfetch_base.is_in_date_range(args, f['mtime'])
        ]
    else:
        return [
            f['path'].rsplit('/')[-1] for f in files_json
            if logfetch_base.is_in_date_range(args, f['mtime'])
        ]
Example #13
def download_s3_logs(args):
  sys.stderr.write(colored('Checking for S3 log files', 'blue') + '\n')
  logs = get_json_response(singularity_s3logs_uri(args))
  async_requests = []
  all_logs = []
  for log_file in logs:
    filename = log_file['key'].rsplit("/", 1)[1]
    full_log_path = '{0}/{1}'.format(args.dest, filename.replace('.gz', '.log'))
    full_gz_path = '{0}/{1}'.format(args.dest, filename)
    if in_date_range(args, filename):
      if not (os.path.isfile(full_log_path) or os.path.isfile(full_gz_path)):
        async_requests.append(
          grequests.AsyncRequest('GET', log_file['getUrl'],
            callback=generate_callback(log_file['getUrl'], args.dest, filename, args.chunk_size)
          )
        )
      all_logs.append('{0}/{1}'.format(args.dest, filename.replace('.gz', '.log')))
  grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
  zipped_files = ['{0}/{1}'.format(args.dest, log_file['key'].rsplit("/", 1)[1]) for log_file in logs]
  logfetch_base.unpack_logs(zipped_files)
  sys.stderr.write(colored('All S3 logs up to date', 'blue') + '\n')
  return all_logs
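The download example above hands each S3 URL to grequests along with a callback built by generate_callback, which is not shown here. A hypothetical sketch of such a factory, assuming it simply streams the response body to the destination file in chunks (the real helper may also report progress or handle errors differently):

import os

def generate_callback(request_url, destination, filename, chunk_size):
    # Hypothetical sketch: return a requests response hook that writes the
    # streamed body to <destination>/<filename> in chunk_size pieces.
    path = os.path.join(destination, filename)

    def callback(response, **kwargs):
        with open(path, 'wb') as output_file:
            for chunk in response.iter_content(chunk_size):
                if chunk:
                    output_file.write(chunk)
        return response

    return callback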
Example #14
def files_json(args, task):
    uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
    return get_json_response(uri, args)
Example #15
def files_json(args, task):
  uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
  return get_json_response(uri, args)
Example #16
def task_history(args, task):
  uri = TASK_HISTORY_FORMAT.format(logfetch_base.base_uri(args), task)
  return get_json_response(uri, args)
Example #17
def task_history(args, task):
    uri = TASK_HISTORY_FORMAT.format(logfetch_base.base_uri(args), task)
    return get_json_response(uri, args)
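None of the examples define get_json_response itself. Judging only from the call sites above (a bare URI; a URI plus the parsed args; or a URI, args, and extra query parameters), a minimal sketch of what such a helper might look like is below. The header handling and error behavior are assumptions for illustration, not the actual logfetch implementation:

import sys
import requests

def get_json_response(uri, args=None, params=None):
    # Hypothetical sketch: perform the GET and return parsed JSON, or an
    # empty dict when the request fails.
    headers = getattr(args, 'headers', {}) if args else {}
    try:
        response = requests.get(uri, params=params or {}, headers=headers)
    except requests.RequestException as exc:
        sys.stderr.write('Could not reach {0}: {1}\n'.format(uri, exc))
        return {}
    if response.status_code != 200:
        sys.stderr.write('{0} returned status {1}\n'.format(uri, response.status_code))
        return {}
    return response.json()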