Esempio n. 1
0
def logs_for_all_requests(args):
    """Collect S3 log listings for the requests/tasks selected by ``args``.

    When ``args.taskId`` is set, return the raw JSON listing for that one
    task. Otherwise fetch listings for every matching task (with a progress
    bar), then each request's S3 history, and return the combined list with
    duplicate entries removed.
    """
    # Search window in epoch milliseconds, as the S3 endpoint expects.
    window = {
        'start': int(time.mktime(args.start.timetuple()) * 1000),
        'end': int(time.mktime(args.end.timetuple()) * 1000)
    }
    if args.taskId:
        return get_json_response(s3_task_logs_uri(args, args.taskId), args,
                                 window)
    task_list = logfetch_base.tasks_for_requests(args)
    goal = len(task_list)
    collected = []
    for done, task in enumerate(task_list, start=1):
        batch = get_json_response(s3_task_logs_uri(args, task), args, window)
        if batch:
            collected = collected + batch
        logfetch_base.update_progress_bar(done, goal,
                                          'S3 Log Finder', args.silent)
    if not args.silent:
        sys.stderr.write(
            colored('\nAlso searching s3 history...\n', 'cyan'))
    for request in logfetch_base.all_requests(args):
        batch = get_json_response(s3_request_logs_uri(args, request), args,
                                  window)
        if batch:
            collected = collected + batch
    # Deduplicate by converting each dict to a hashable tuple of items.
    unique = set(tuple(entry.items()) for entry in collected)
    return [dict(pairs) for pairs in unique]
Esempio n. 2
0
def logs_for_all_requests(args):
    """Fetch S3 log metadata for everything matched by ``args``.

    When ``args.taskId`` is given, return the raw JSON listing for that
    single task. Otherwise collect listings for every matching task and for
    each request's S3 history, then return them de-duplicated by S3 key
    (first occurrence wins).
    """
    # Search window in epoch milliseconds, as the S3 endpoint expects.
    s3_params = {'start': int(time.mktime(args.start.timetuple()) * 1000), 'end': int(time.mktime(args.end.timetuple()) * 1000)}
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            # BUG FIX: s3_params was previously omitted from this call, so
            # per-task queries ignored the start/end window that every other
            # get_json_response call here applies.
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent or args.verbose)
        logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        # Single de-duplication pass over the full list; the earlier
        # pre-history pass was redundant (this pass re-covered those
        # entries). A set gives O(1) membership checks on the keys.
        found_logs = []
        seen_keys = set()
        for log in logs:
            if log['key'] not in seen_keys:
                found_logs.append(log)
                seen_keys.add(log['key'])
        return found_logs
Esempio n. 3
0
def logs_for_all_requests(args):
    """Return S3 log listings for the matched tasks/requests, deduped by key.

    A single-task lookup (``args.taskId``) short-circuits and returns the
    raw JSON response; otherwise all matching tasks and each request's S3
    history are searched within the start/end window.
    """
    # Window bounds in epoch milliseconds for the S3 endpoint.
    window = {
        'start': int(time.mktime(args.start.timetuple()) * 1000),
        'end': int(time.mktime(args.end.timetuple()) * 1000)
    }
    if args.taskId:
        return logfetch_base.get_json_response(
            s3_task_logs_uri(args, args.taskId), args, window)
    task_list = logfetch_base.tasks_for_requests(args)
    goal = len(task_list)
    collected = []
    for done, task in enumerate(task_list, start=1):
        chunk = logfetch_base.get_json_response(
            s3_task_logs_uri(args, task), args, window)
        if chunk:
            collected = collected + chunk
        logfetch_base.update_progress_bar(done, goal, 'S3 Log Finder',
                                          args.silent)
    logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'),
                      args, False)
    for request in logfetch_base.all_requests(args):
        chunk = logfetch_base.get_json_response(
            s3_request_logs_uri(args, request), args, window)
        if chunk:
            collected = collected + chunk
    # Keep only the first entry seen for each S3 key.
    deduped = []
    seen = []
    for entry in collected:
        if entry['key'] not in seen:
            deduped.append(entry)
            seen.append(entry['key'])
    return deduped
Esempio n. 4
0
def logs_for_all_requests(args):
  """Gather S3 log listings for every matching task plus request history.

  Returns the raw JSON for a single task when ``args.taskId`` is set;
  otherwise returns the combined listings with duplicates removed.
  """
  if args.taskId:
    return get_json_response(s3_task_logs_uri(args, args.taskId))
  found = []
  for task in logfetch_base.tasks_for_requests(args):
    batch = get_json_response(s3_task_logs_uri(args, task))
    if batch:
      found = found + batch
  sys.stderr.write(colored('Also searching s3 history...\n', 'magenta'))
  for request in logfetch_base.all_requests(args):
    batch = get_json_response(s3_request_logs_uri(args, request))
    if batch:
      found = found + batch
  # Deduplicate via hashable tuple form of each dict entry.
  return [dict(items) for items in set(tuple(entry.items()) for entry in found)]
Esempio n. 5
0
def logs_for_all_requests(args):
    """Collect S3 log listings for all matched tasks and request histories.

    Short-circuits to a single task's raw JSON listing when
    ``args.taskId`` is provided; otherwise returns the merged listings
    with duplicate dicts removed.
    """
    if args.taskId:
        return get_json_response(s3_task_logs_uri(args, args.taskId))
    collected = []
    for task in logfetch_base.tasks_for_requests(args):
        batch = get_json_response(s3_task_logs_uri(args, task))
        if batch:
            collected = collected + batch
    sys.stderr.write(colored('Also searching s3 history...\n', 'cyan'))
    for request in logfetch_base.all_requests(args):
        batch = get_json_response(s3_request_logs_uri(args, request))
        if batch:
            collected = collected + batch
    # Deduplicate by converting each dict to a hashable tuple of items.
    unique = set(tuple(entry.items()) for entry in collected)
    return [dict(pairs) for pairs in unique]
Esempio n. 6
0
def logs_for_all_requests(args):
  """Find S3 log listings for all matching tasks and request histories.

  Returns a single task's raw JSON listing when ``args.taskId`` is set;
  otherwise merges listings from every matching task and each request's
  S3 history, deduplicated, restricted to the start/end window.
  """
  # Window bounds in epoch milliseconds for the S3 endpoint.
  window = {
      'start': int(time.mktime(args.start.timetuple()) * 1000),
      'end': int(time.mktime(args.end.timetuple()) * 1000)
  }
  if args.taskId:
    return get_json_response(s3_task_logs_uri(args, args.taskId), args, window)
  task_list = logfetch_base.tasks_for_requests(args)
  goal = len(task_list)
  collected = []
  for done, task in enumerate(task_list, start=1):
    batch = get_json_response(s3_task_logs_uri(args, task), args, window)
    if batch:
      collected = collected + batch
    logfetch_base.update_progress_bar(done, goal, 'S3 Log Finder')
  sys.stderr.write(colored('\nAlso searching s3 history...\n', 'cyan'))
  for request in logfetch_base.all_requests(args):
    batch = get_json_response(s3_request_logs_uri(args, request), args, window)
    if batch:
      collected = collected + batch
  # Deduplicate via hashable tuple form of each dict entry.
  return [dict(pairs) for pairs in set(tuple(entry.items()) for entry in collected)]
Esempio n. 7
0
def logs_for_all_requests(args):
    """Return deduplicated S3 log listings for the matched tasks/requests.

    When ``args.taskId`` is present, the raw JSON listing for that single
    task is returned directly. Otherwise listings are fetched for every
    matching task (with progress reporting) and each request's S3 history,
    bounded by the start/end window.
    """
    # Window bounds in epoch milliseconds, as the S3 endpoint expects.
    window = {
        "start": int(time.mktime(args.start.timetuple()) * 1000),
        "end": int(time.mktime(args.end.timetuple()) * 1000),
    }
    if args.taskId:
        return logfetch_base.get_json_response(
            s3_task_logs_uri(args, args.taskId), args, window)
    task_list = logfetch_base.tasks_for_requests(args)
    goal = len(task_list)
    collected = []
    for done, task in enumerate(task_list, start=1):
        batch = logfetch_base.get_json_response(
            s3_task_logs_uri(args, task), args, window)
        if batch:
            collected = collected + batch
        logfetch_base.update_progress_bar(done, goal, "S3 Log Finder",
                                          args.silent)
    logfetch_base.log(colored("\nAlso searching s3 history...\n", "cyan"),
                      args, False)
    for request in logfetch_base.all_requests(args):
        batch = logfetch_base.get_json_response(
            s3_request_logs_uri(args, request), args, window)
        if batch:
            collected = collected + batch
    # Deduplicate by converting each dict to a hashable tuple of items.
    return [dict(pairs) for pairs in set(tuple(entry.items()) for entry in collected)]