Example 1
def logs_for_all_requests(args):
    s3_params = {
        'start': int(time.mktime(args.start.timetuple()) * 1000),
        'end': int(time.mktime(args.end.timetuple()) * 1000)
    }
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent)
        logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        # De-duplicate on the S3 object key, preserving discovery order.
        found_logs = []
        keys = []
        for log in logs:
            if log['key'] not in keys:
                found_logs.append(log)
                keys.append(log['key'])
        return found_logs
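All of the logs_for_all_requests variants on this page come from Singularity's logfetch CLI and assume a parsed-argument namespace plus module-level helpers (s3_task_logs_uri, s3_request_logs_uri, logfetch_base) that the excerpts do not show. A minimal sketch of the context they rely on, with hypothetical values:

import time
from argparse import Namespace
from datetime import datetime, timedelta

# Hypothetical stand-in for the argparse result these snippets read;
# the field names are taken from the attribute accesses in the examples.
args = Namespace(
    start=datetime.now() - timedelta(days=7),  # beginning of the search window
    end=datetime.now(),                        # end of the search window
    taskId=None,    # set to a task id string to fetch logs for a single task
    silent=False,   # True suppresses the progress bar
    verbose=False,  # read by the variant in Example 3
)

# The search window is converted to epoch milliseconds exactly as above;
# note that time.mktime interprets the struct_time in local time.
s3_params = {
    'start': int(time.mktime(args.start.timetuple()) * 1000),
    'end': int(time.mktime(args.end.timetuple()) * 1000),
}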
Example 2
def logs_for_all_requests(args):
    s3_params = {
        'start': int(time.mktime(args.start.timetuple()) * 1000),
        'end': int(time.mktime(args.end.timetuple()) * 1000)
    }
    if args.taskId:
        return logfetch_base.get_json_response(
            s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(
                s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal,
                                              'S3 Log Finder', args.silent)
        logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'),
                          args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(
                s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        return [dict(t) for t in set(tuple(l.items())
                                     for l in logs)]  # remove any duplicates
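The one-liner that closes Example 2 is a compact way to drop duplicate dicts: each dict is flattened to a hashable tuple of its items, the set discards repeats, and dict() rebuilds the survivors. This requires all values to be hashable and does not preserve the original order, unlike the key-based loop in Example 1. A standalone illustration with made-up entries:

logs = [
    {'key': 'a/log1.gz', 'size': 10},
    {'key': 'b/log2.gz', 'size': 20},
    {'key': 'a/log1.gz', 'size': 10},  # exact duplicate
]
unique = [dict(t) for t in set(tuple(l.items()) for l in logs)]
assert len(unique) == 2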
Example 3
def logs_for_all_requests(args):
    s3_params = {
        'start': int(time.mktime(args.start.timetuple()) * 1000),
        'end': int(time.mktime(args.end.timetuple()) * 1000)
    }
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent or args.verbose)
        # First de-duplication pass over the per-task logs.
        found_logs = []
        keys = []
        for log in logs:
            if log['key'] not in keys:
                found_logs.append(log)
                keys.append(log['key'])
        logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        # Second pass picks up the request-history results appended above;
        # keys persists from the first pass, so nothing is added twice.
        for log in logs:
            if log['key'] not in keys:
                found_logs.append(log)
                keys.append(log['key'])
        return found_logs
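The key-based loop in Examples 1 and 3 preserves the order in which logs were found, but testing membership against a list is O(n) per lookup. A set gives the same result with constant-time checks; a sketch:

def dedupe_by_key(logs):
    # Order-preserving de-duplication on the S3 object key, as in
    # Examples 1 and 3, using a set for O(1) membership tests.
    seen = set()
    found_logs = []
    for log in logs:
        if log['key'] not in seen:
            found_logs.append(log)
            seen.add(log['key'])
    return found_logs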
Example 4
def base_directory_files(args, task):
    uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
    files_json = get_json_response(uri, args)
    if 'files' in files_json:
        files = files_json['files']
        return [f['name'] for f in files if valid_logfile(f)]
    else:
        return [f['path'].rsplit('/')[-1] for f in files_json if valid_logfile(f)]
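Example 4 (and the logs_folder_files variants below) handles two shapes of browse response: a dict wrapping a 'files' list whose entries carry a 'name', and a bare list of entries that only carry a full 'path'. Hypothetical payloads showing why both branches yield plain file names:

new_style = {'files': [{'name': 'service.log', 'mtime': 1500000000}]}
old_style = [{'path': 'task-id/logs/service.log', 'mtime': 1500000000}]

names_new = [f['name'] for f in new_style['files']]
names_old = [f['path'].rsplit('/')[-1] for f in old_style]  # last path segment
assert names_new == names_old == ['service.log']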
Example 5
def logs_folder_files(args, task):
    uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
    files_json = get_json_response(uri, args, {'path': '{0}/logs'.format(task)})
    if 'files' in files_json:
        files = files_json['files']
        return [f['name'] for f in files if is_valid_tail_log(f)]
    else:
        return [f['path'].rsplit('/')[-1] for f in files_json if is_valid_tail_log(f)]
Example 6
def logs_folder_files(args, task):
    uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
    files_json = logfetch_base.get_json_response(uri, args, {'path': '{0}/logs'.format(task)}, True)
    if 'files' in files_json:
        files = files_json['files']
        return [f['name'] for f in files if logfetch_base.is_in_date_range(args, f['mtime'])]
    else:
        return [f['path'].rsplit('/')[-1] for f in files_json if logfetch_base.is_in_date_range(args, f['mtime'])]
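Example 6 filters sandbox files on their 'mtime' field, which the code treats as an epoch timestamp. The real check lives in logfetch_base; a plausible sketch of such a filter, assuming mtime is in epoch seconds (not the actual implementation):

import time

def is_in_date_range(args, mtime):
    # Hypothetical sketch: keep a file whose modification time falls
    # inside the window given by args.start and args.end.
    start = time.mktime(args.start.timetuple())
    end = time.mktime(args.end.timetuple())
    return start <= mtime <= end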
Example 7
def logs_folder_files(args, task):
    uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
    files_json = logfetch_base.get_json_response(uri, args, {'path': '{0}/logs'.format(task)}, True)
    if 'files' in files_json:
        files = files_json['files']
        return [f['name'] for f in files if is_valid_live_log(args, f)]
    else:
        return [f['path'].rsplit('/')[-1] for f in files_json if is_valid_live_log(args, f)]
Example 8
def logs_for_all_requests(args):
    s3_params = {
        'start': int(time.mktime(args.start.timetuple()) * 1000),
        'end': int(time.mktime(args.end.timetuple()) * 1000)
    }
    if args.taskId:
        return logfetch_base.get_json_response(s3_task_logs_uri(args, args.taskId), args, s3_params)
    else:
        tasks = logfetch_base.tasks_for_requests(args)
        logs = []
        tasks_progress = 0
        tasks_goal = len(tasks)
        for task in tasks:
            s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
            tasks_progress += 1
            logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent)
        logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'), args, False)
        for request in logfetch_base.all_requests(args):
            s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
            logs = logs + s3_logs if s3_logs else logs
        return [dict(t) for t in set(tuple(l.items()) for l in logs)]  # remove any duplicates
Example 9
def files_json(args, task):
    uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
    return logfetch_base.get_json_response(uri, args, {}, True)
Example 10
def task_history(args, task):
    uri = TASK_HISTORY_FORMAT.format(logfetch_base.base_uri(args), task)
    return logfetch_base.get_json_response(uri, args)
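The URI templates used throughout (BROWSE_FOLDER_FORMAT, TASK_HISTORY_FORMAT) are module constants formatted with the Singularity base URI and a task id. The exact strings are not shown in these excerpts; plausible shapes consistent with Singularity's sandbox-browse and task-history endpoints would be:

BROWSE_FOLDER_FORMAT = '{0}/sandbox/{1}/browse'  # assumed shape
TASK_HISTORY_FORMAT = '{0}/history/task/{1}'     # assumed shape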