Example 1
def logs_for_all_requests(args):
    s3_params = {
        'start': int(time.mktime(args.start.timetuple()) * 1000),
        'end': int(time.mktime(args.end.timetuple()) * 1000)
    }
    if args.taskId:
        return get_json_response(s3_task_logs_uri(args, args.taskId), args,
                                 s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = get_json_response(s3_task_logs_uri(args, task), args,
                                        s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal,
                                              'S3 Log Finder', args.silent)
        if not args.silent:
            sys.stderr.write(
                colored('\nAlso searching s3 history...\n', 'cyan'))
        for request in logfetch_base.all_requests(args):
            s3_logs = get_json_response(s3_request_logs_uri(args, request),
                                        args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        return [dict(t) for t in set([tuple(l.items())
                                      for l in logs])]  # remove any duplicates
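Note that each of these snippets is lifted from a larger logfetch module and leans on names it does not define: time, sys, and colored (from termcolor) at minimum, plus package helpers such as logfetch_base.tasks_for_requests, logfetch_base.all_requests, logfetch_base.update_progress_bar, and the s3_*_uri builders. A minimal sketch of the missing context, assuming get_json_response is a thin requests wrapper (the real helper may differ):

import sys
import time

import requests
from termcolor import colored

def get_json_response(uri, args, params=None):
    # Hypothetical stand-in for the logfetch helper: fetch uri with the
    # given query params and the caller's headers; return the parsed JSON
    # body, or an empty list on a non-200 response so callers can skip it.
    response = requests.get(uri, params=params or {}, headers=args.headers)
    if response.status_code != 200:
        return []
    return response.json()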
Example 2
def logs_for_all_requests(args):
    s3_params = {'start': int(time.mktime(args.start.timetuple()) * 1000), 'end': int(time.mktime(args.end.timetuple()) * 1000)}
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent)
        logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        found_logs = []
        keys = []
        for log in logs:
            if log['key'] not in keys:
                found_logs.append(log)
                keys.append(log['key'])
        return found_logs
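The dedup loop above answers membership queries against a growing list, which is quadratic in the number of logs. A sketch of the same order-preserving, first-wins dedup using a set for O(1) lookups:

def deduplicate_by_key(logs):
    # Keep the first occurrence of each S3 key, in original order.
    seen = set()
    found_logs = []
    for log in logs:
        if log['key'] not in seen:
            seen.add(log['key'])
            found_logs.append(log)
    return found_logs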
Example 3
def logs_for_all_requests(args):
    s3_params = {'start': int(time.mktime(args.start.timetuple()) * 1000), 'end': int(time.mktime(args.end.timetuple()) * 1000)}
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent or args.verbose)
        found_logs = []
        keys = []
        for log in logs:
            if log['key'] not in keys:
                found_logs.append(log)
                keys.append(log['key'])
        logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        for log in logs:
            if log['key'] not in keys:
                found_logs.append(log)
                keys.append(log['key'])
        return found_logs
Example 4
def download_live_logs(args):
    logfetch_base.log(colored('Finding current live log files', 'cyan') + '\n', args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)
    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            for log_file in base_directory_files(args, task, metadata):
                logfile_name = '{0}-{1}'.format(task, log_file)
                if not args.logtype or (args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace('logs/', ''))):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest('GET', uri,
                                callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                                params={'path': '{0}/{1}/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
                                headers=args.headers
                            )
                        )
                    all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
                elif args.logtype:
                    logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)
            for log_file in logs_folder_files(args, task):
                logfile_name = '{0}-{1}'.format(task, log_file)
                if not args.logtype or (args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace('logs/', ''))):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest('GET', uri,
                                callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                                params={'path': '{0}/{1}/logs/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
                                headers=args.headers
                            )
                        )
                    all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
                elif args.logtype:
                    logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)
        tasks_check_progress += 1
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, 'Log Finder', args.silent or args.verbose)

    if async_requests:
        logfetch_base.log(colored('\nStarting {0} live logs downloads\n'.format(len(async_requests)), 'cyan'), args, False)
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs
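callbacks.generate_callback is defined elsewhere in the package; judging by how it is called, it must return a per-request response hook. A plausible sketch under that assumption (the progress/goal counters mirror how download_live_logs resets and sets them; this is an illustration, not the project's actual implementation):

import os
import sys

progress = 0
goal = 0

def generate_callback(uri, destination, filename, chunk_size, verbose, silent):
    # Hypothetical: build a hook for grequests that streams the response
    # body to destination/filename and then advances the shared counter.
    def callback(response, **kwargs):
        global progress
        path = os.path.join(destination, filename)
        with open(path, 'wb') as f:
            for chunk in response.iter_content(chunk_size=chunk_size):
                if chunk:
                    f.write(chunk)
        progress += 1
        if verbose and not silent:
            sys.stderr.write('Downloaded {0} ({1} of {2})\n'.format(path, progress, goal))
    return callback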
Example 5
def download_live_logs(args):
    logfetch_base.log(colored('Finding current live log files', 'cyan') + '\n', args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)
    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            for log_file in base_directory_files(args, task, metadata):
                logfile_name = '{0}-{1}'.format(task, log_file)
                if not args.logtype or (args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace('logs/', ''))):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest('GET', uri,
                                callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                                params={'path': '{0}/{1}/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
                                headers=args.headers
                            )
                        )
                    all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
                elif args.logtype:
                    logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)
            for log_file in logs_folder_files(args, task):
                logfile_name = '{0}-{1}'.format(task, log_file)
                if not args.logtype or (args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace('logs/', ''))):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest('GET', uri,
                                callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                                params={'path': '{0}/{1}/logs/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
                                headers=args.headers
                            )
                        )
                    all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
                elif args.logtype:
                    logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)
        tasks_check_progress += 1
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, 'Log Finder', args.silent)

    if async_requests:
        logfetch_base.log(colored('\nStarting {0} live logs downloads\n'.format(len(async_requests)), 'cyan'), args, False)
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs
Example 6
def logs_for_all_requests(args):
  s3_params = {'start': int(time.mktime(args.start.timetuple()) * 1000), 'end': int(time.mktime(args.end.timetuple()) * 1000)}
  if args.taskId:
    return get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
  else:
    tasks = logfetch_base.tasks_for_requests(args)
    logs = []
    tasks_progress = 0
    tasks_goal = len(tasks)
    for task in tasks:
      s3_logs = get_json_response(s3_task_logs_uri(args, task), args, s3_params)
      logs = logs + s3_logs if s3_logs else logs
      tasks_progress += 1
      logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder')
    sys.stderr.write(colored('\nAlso searching s3 history...\n', 'cyan'))
    for request in logfetch_base.all_requests(args):
      s3_logs = get_json_response(s3_request_logs_uri(args, request), args, s3_params)
      logs = logs + s3_logs if s3_logs else logs
    return [dict(t) for t in set([tuple(l.items()) for l in logs])] # remove any duplicates
Example 7
def logs_for_all_requests(args):
    s3_params = {
        "start": int(time.mktime(args.start.timetuple()) * 1000),
        "end": int(time.mktime(args.end.timetuple()) * 1000),
    }
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, "S3 Log Finder", args.silent)
        logfetch_base.log(colored("\nAlso searching s3 history...\n", "cyan"), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        return [dict(t) for t in set(tuple(l.items()) for l in logs)]  # remove any duplicates
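The one-liner works because every value in these log dicts is hashable (flat JSON of strings and numbers); it would raise TypeError on nested lists or dicts, and it does not preserve order. In isolation:

logs = [
    {'key': 'a/1.gz', 'size': 10},
    {'key': 'b/2.gz', 'size': 20},
    {'key': 'a/1.gz', 'size': 10},  # exact duplicate
]
unique = [dict(t) for t in set(tuple(l.items()) for l in logs)]
assert len(unique) == 2  # duplicates collapse; original order is lost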
Example 8
def download_live_logs(args):
    logfetch_base.log(colored("Finding current live log files", "cyan") + "\n", args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)
    for task in tasks:
        metadata = files_json(args, task)
        if "slaveHostname" in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata["slaveHostname"])
            for log_file in base_directory_files(args, task, metadata):
                logfile_name = "{0}-{1}".format(task, log_file)
                if not args.logtype or (
                    args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace("logs/", ""))
                ):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest(
                                "GET",
                                uri,
                                callback=callbacks.generate_callback(
                                    uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent
                                ),
                                params={
                                    "path": "{0}/{1}/{2}".format(
                                        metadata["fullPathToRoot"], metadata["currentDirectory"], log_file
                                    )
                                },
                                headers=args.headers,
                            )
                        )
                    all_logs.append("{0}/{1}".format(args.dest, logfile_name))
                elif args.logtype:
                    logfetch_base.log(
                        colored("Excluding log {0}, doesn't match {1}".format(log_file, args.logtype), "magenta")
                        + "\n",
                        args,
                        True,
                    )
            for log_file in logs_folder_files(args, task):
                logfile_name = "{0}-{1}".format(task, log_file)
                if not args.logtype or (
                    args.logtype and logfetch_base.log_matches(log_file, args.logtype.replace("logs/", ""))
                ):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest(
                                "GET",
                                uri,
                                callback=callbacks.generate_callback(
                                    uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent
                                ),
                                params={
                                    "path": "{0}/{1}/logs/{2}".format(
                                        metadata["fullPathToRoot"], metadata["currentDirectory"], log_file
                                    )
                                },
                                headers=args.headers,
                            )
                        )
                    all_logs.append("{0}/{1}".format(args.dest, logfile_name))
                elif args.logtype:
                    logfetch_base.log(
                        colored("Excluding log {0}, doesn't match {1}".format(log_file, args.logtype), "magenta")
                        + "\n",
                        args,
                        True,
                    )
        tasks_check_progress += 1
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, "Log Finder", args.silent)

    if async_requests:
        logfetch_base.log(
            colored("\nStarting {0} live logs downloads\n".format(len(async_requests)), "cyan"), args, False
        )
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs
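Both download_live_logs variants follow the same grequests pattern: build AsyncRequest objects up front, then let grequests.map execute them concurrently with bounded parallelism while the response hooks do the writing. Stripped to its essentials (placeholder URLs):

import grequests

urls = ['https://example.com/logs/{0}'.format(i) for i in range(3)]
reqs = [grequests.AsyncRequest('GET', url) for url in urls]
# stream=True defers reading each body to the callback/hook;
# size caps how many requests are in flight at once.
responses = grequests.map(reqs, stream=True, size=2)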