Example #1
def start_tail(args):
    if args.requestId:
        if not args.silent:
            sys.stderr.write('Fetching tasks\n')
        tasks = [str(t) for t in logfetch_base.tasks_for_requests(args)]
    else:
        tasks = [args.taskId]
    if args.verbose:
        sys.stderr.write(colored('Tailing logs for tasks:\n', 'green'))
        if not args.silent:
            for t in tasks:
                sys.stderr.write(colored('{0}\n'.format(t), 'yellow'))
    if not args.silent:
        sys.stderr.write(colored('ctrl+c to exit\n', 'cyan'))
    try:
        threads = []
        for task in tasks:
            thread = LogStreamer(args, task)
            threads.append(thread)
            thread.start()
        for t in threads:
            t.join(THREAD_TIMEOUT)  # Need a timeout, otherwise the threads can't be killed by ctrl+c
            if not t.is_alive():
                break
    except KeyboardInterrupt:
        if not args.silent:
            sys.stderr.write(colored('Stopping tail', 'magenta') + '\n')
        sys.exit(0)
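This example assumes a LogStreamer class and a THREAD_TIMEOUT constant defined elsewhere in the logfetch package. A minimal sketch of a compatible streamer, with the class name and constructor signature taken from the call above and everything else (the daemon flag, the placeholder timeout, the stubbed run body) an assumption:

import threading

THREAD_TIMEOUT = 100000  # assumed placeholder; the real constant lives in the logfetch module


class LogStreamer(threading.Thread):
    """One daemon thread per task, so ctrl+c in the main thread ends the process."""

    def __init__(self, args, task):
        threading.Thread.__init__(self)
        self.daemon = True  # do not block interpreter exit
        self.args = args
        self.task = task

    def run(self):
        # The real implementation follows the task's log output; stubbed here
        # because it is not part of the excerpt above.
        pass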
Example #2
def logs_for_all_requests(args):
    s3_params = {
        'start': int(time.mktime(args.start.timetuple()) * 1000),
        'end': int(time.mktime(args.end.timetuple()) * 1000)
    }
    if args.taskId:
        return get_json_response(s3_task_logs_uri(args, args.taskId), args,
                                 s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = get_json_response(s3_task_logs_uri(args, task), args,
                                        s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal,
                                              'S3 Log Finder', args.silent)
        if not args.silent:
            sys.stderr.write(
                colored('\nAlso searching s3 history...\n', 'cyan'))
        for request in logfetch_base.all_requests(args):
            s3_logs = get_json_response(s3_request_logs_uri(args, request),
                                        args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        return [dict(t) for t in set(tuple(l.items()) for l in logs)]  # remove any duplicates
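The final line removes duplicate log entries by turning each dict into a hashable tuple of its items and round-tripping through a set. A standalone sketch of the idiom, assuming (as here) that every value in the dicts is itself hashable:

logs = [
    {'key': 'a/service.log', 'size': 10},
    {'key': 'a/service.log', 'size': 10},  # exact duplicate
    {'key': 'b/service.log', 'size': 20},
]
unique = [dict(t) for t in set(tuple(l.items()) for l in logs)]
print(len(unique))  # 2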
Example #3
def start_tail(args):
  if args.requestId:
    if not args.silent:
      sys.stderr.write('Fetching tasks\n')
    tasks = [str(t) for t in logfetch_base.tasks_for_requests(args)]
  else:
    tasks = [args.taskId]
  if args.verbose:
    sys.stderr.write(colored('Tailing logs for tasks:\n', 'green'))
    if not args.silent:
      for t in tasks:
        sys.stderr.write(colored('{0}\n'.format(t), 'yellow'))
  if not args.silent:
    sys.stderr.write(colored('ctrl+c to exit\n', 'cyan'))
  try:
    threads = []
    for task in tasks:
      thread = LogStreamer(args, task)
      threads.append(thread)
      thread.start()
    for t in threads:
      t.join(THREAD_TIMEOUT)  # Need a timeout, otherwise the threads can't be killed by ctrl+c
      if not t.is_alive():
        break
  except KeyboardInterrupt:
    if not args.silent:
      sys.stderr.write(colored('Stopping tail', 'magenta') + '\n')
    sys.exit(0)
Example #4
def logs_for_all_requests(args):
    s3_params = {'start': int(time.mktime(args.start.timetuple()) * 1000), 'end': int(time.mktime(args.end.timetuple()) * 1000)}
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent or args.verbose)
        found_logs = []
        keys = []
        for log in logs:
            if not log['key'] in keys:
                found_logs.append(log)
                keys.append(log['key'])
        logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        for log in logs:
            if not log['key'] in keys:
                found_logs.append(log)
                keys.append(log['key'])
        return found_logs
Example #5
def logs_for_all_requests(args):
    s3_params = {'start': int(time.mktime(args.start.timetuple()) * 1000), 'end': int(time.mktime(args.end.timetuple()) * 1000)}
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent)
        logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        found_logs = []
        keys = []
        for log in logs:
            if not log['key'] in keys:
                found_logs.append(log)
                keys.append(log['key'])
        return found_logs
Example #6
def logs_for_all_requests(args):
  if args.taskId:
    return get_json_response(singularity_s3logs_uri(args, args.taskId))
  else:
    tasks = logfetch_base.tasks_for_requests(args)
    logs = []
    for task in tasks:
      s3_logs = get_json_response(singularity_s3logs_uri(args, task))
      logs = logs + s3_logs if s3_logs else logs
    return logs
Example #7
def logs_for_all_requests(args):
  if args.taskId:
    return get_json_response(s3_task_logs_uri(args, args.taskId))
  else:
    tasks = logfetch_base.tasks_for_requests(args)
    logs = []
    for task in tasks:
      s3_logs = get_json_response(s3_task_logs_uri(args, task))
      logs = logs + s3_logs if s3_logs else logs
    sys.stderr.write(colored('Also searching s3 history...\n', 'magenta'))
    for request in logfetch_base.all_requests(args):
      s3_logs = get_json_response(s3_request_logs_uri(args, request))
      logs = logs + s3_logs if s3_logs else logs
    return [dict(t) for t in set([tuple(l.items()) for l in logs])]
Example #8
def logs_for_all_requests(args):
    if args.taskId:
        return get_json_response(s3_task_logs_uri(args, args.taskId))
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        for task in tasks:
            s3_logs = get_json_response(s3_task_logs_uri(args, task))
            logs = logs + s3_logs if s3_logs else logs
        sys.stderr.write(colored('Also searching s3 history...\n', 'cyan'))
        for request in logfetch_base.all_requests(args):
            s3_logs = get_json_response(s3_request_logs_uri(args, request))
            logs = logs + s3_logs if s3_logs else logs
        return [dict(t) for t in set(tuple(l.items()) for l in logs)]  # remove any duplicates
Example #9
def logs_for_all_requests(args):
  s3_params = {'start': int(time.mktime(args.start.timetuple()) * 1000), 'end': int(time.mktime(args.end.timetuple()) * 1000)}
  if args.taskId:
    return get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
  else:
    tasks = logfetch_base.tasks_for_requests(args)
    logs = []
    tasks_progress = 0
    tasks_goal = len(tasks)
    for task in tasks:
      s3_logs = get_json_response(s3_task_logs_uri(args, task), args, s3_params)
      logs = logs + s3_logs if s3_logs else logs
      tasks_progress += 1
      logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder')
    sys.stderr.write(colored('\nAlso searching s3 history...\n', 'cyan'))
    for request in logfetch_base.all_requests(args):
      s3_logs = get_json_response(s3_request_logs_uri(args, request), args, s3_params)
      logs = logs + s3_logs if s3_logs else logs
    return [dict(t) for t in set([tuple(l.items()) for l in logs])] # remove any duplicates
Example #10
def logs_for_all_requests(args):
    s3_params = {
        "start": int(time.mktime(args.start.timetuple()) * 1000),
        "end": int(time.mktime(args.end.timetuple()) * 1000),
    }
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, "S3 Log Finder", args.silent)
        logfetch_base.log(colored("\nAlso searching s3 history...\n", "cyan"), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        return [dict(t) for t in set(tuple(l.items()) for l in logs)]  # remove any duplicates
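The s3_params dictionary converts the parsed start and end datetimes to epoch milliseconds via time.mktime, which interprets the datetime in local time. A quick standalone check of the conversion:

import time
from datetime import datetime

start = datetime(2021, 6, 1)
start_ms = int(time.mktime(start.timetuple()) * 1000)  # local-time epoch in milliseconds
print(start_ms)  # e.g. 1622505600000 on a machine whose local timezone is UTC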
Example #11
def tasks_to_check(args):
  if args.taskId:
    return [args.taskId]
  else:
    return logfetch_base.tasks_for_requests(args)
Example #12
def tasks_to_check(args):
    if args.taskId:
        return [args.taskId]
    else:
        return logfetch_base.tasks_for_requests(args)
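All of the functions above read an args object produced by the tool's command-line parser (presumably argparse). A hypothetical invocation of tasks_to_check with a hand-built namespace; the fields shown are only the subset this excerpt actually reads, and the values are made up for illustration:

from argparse import Namespace

# Hypothetical args; the real CLI defines many more fields (start, end, silent, verbose, ...).
args = Namespace(taskId='my-task-id', requestId=None)
print(tasks_to_check(args))  # ['my-task-id']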