Example #1
def download_live_logs(args):
    logfetch_base.log(colored('Finding current live log files', 'cyan') + '\n', args, False)
    tasks = tasks_to_check(args)
    async_requests = []
    all_logs = []
    callbacks.progress = 0
    tasks_check_progress = 0
    tasks_check_goal = len(tasks)
    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            for log_file in base_directory_files(args, task, metadata):
                logfile_name = '{0}-{1}'.format(task, log_file)
                if not args.logtype or logfetch_base.log_matches(log_file, args.logtype.replace('logs/', '')):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest('GET', uri,
                                callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                                params={'path': '{0}/{1}/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
                                headers=args.headers
                            )
                        )
                    all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
                elif args.logtype:
                    logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)
            for log_file in logs_folder_files(args, task):
                logfile_name = '{0}-{1}'.format(task, log_file)
                if not args.logtype or logfetch_base.log_matches(log_file, args.logtype.replace('logs/', '')):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest('GET', uri,
                                callback=callbacks.generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose, args.silent),
                                params={'path': '{0}/{1}/logs/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
                                headers=args.headers
                            )
                        )
                    all_logs.append('{0}/{1}'.format(args.dest, logfile_name))
                elif args.logtype:
                    logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)
        tasks_check_progress += 1
        logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, 'Log Finder', args.silent)

    if async_requests:
        logfetch_base.log(colored('\nStarting {0} live logs downloads\n'.format(len(async_requests)), 'cyan'), args, False)
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    return all_logs
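All of the logfetch examples here delegate the actual writing of each response to a callback built by callbacks.generate_callback(...), whose body is not shown. In grequests, the callback keyword argument is installed as a requests response hook, so the factory must return a function that accepts the response. A minimal sketch of such a factory, assuming it simply streams the body to a file in the destination directory (the code below is illustrative, not the actual Singularity logfetch implementation):

import os
import sys

def generate_callback(uri, destination, filename, chunk_size, verbose):
    """Build a response hook that streams one download to disk."""
    def callback(response, **kwargs):
        path = os.path.join(destination, filename)
        with open(path, 'wb') as fh:
            # iter_content() only streams because map() was called with stream=True
            for chunk in response.iter_content(chunk_size):
                if chunk:
                    fh.write(chunk)
        if verbose:
            sys.stderr.write('Downloaded {0} to {1}\n'.format(uri, path))
        return response
    return callback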
Example #2
def download_s3_logs(args):
    if not args.silent:
        sys.stderr.write(colored('Checking for S3 log files', 'cyan') + '\n')
    callbacks.progress = 0
    logs = logs_for_all_requests(args)
    async_requests = []
    all_logs = []
    for log_file in logs:
        filename = log_file['key'].rsplit("/", 1)[1]
        if logfetch_base.is_in_date_range(args, int(str(log_file['lastModified'])[0:-3])):
            if not args.logtype or log_matches(args, filename):
                logfetch_base.log(colored('Including log {0}'.format(filename), 'blue') + '\n', args, True)
                if not already_downloaded(args.dest, filename):
                    async_requests.append(
                        grequests.AsyncRequest('GET', log_file['getUrl'], callback=callbacks.generate_callback(log_file['getUrl'], args.dest, filename, args.chunk_size, args.verbose, args.silent), headers=args.headers)
                    )
                else:
                    logfetch_base.log(colored('Log already downloaded {0}'.format(filename), 'blue') + '\n', args, True)
                all_logs.append('{0}/{1}'.format(args.dest, filename))
            else:
                logfetch_base.log(colored('Excluding {0}, log does not match logtype argument {1}'.format(filename, args.logtype), 'magenta') + '\n', args, True)
        else:
            logfetch_base.log(colored('Excluding {0}, not in date range'.format(filename), 'magenta') + '\n', args, True)
    if async_requests:
        logfetch_base.log(colored('Starting {0} S3 Downloads with {1} parallel fetches\n'.format(len(async_requests), args.num_parallel_fetches), 'cyan'), args, False)
        callbacks.goal = len(async_requests)
        grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
    else:
        logfetch_base.log(colored('No S3 logs to download\n', 'cyan'), args, False)
    logfetch_base.log(colored('All S3 logs up to date\n', 'cyan'), args, False)
    all_logs = modify_download_list(all_logs)
    return all_logs
Example #3
def download_s3_logs(args):
    sys.stderr.write(colored('Checking for S3 log files', 'cyan') + '\n')
    logs = logs_for_all_requests(args)
    async_requests = []
    zipped_files = []
    all_logs = []
    for log_file in logs:
        filename = log_file['key'].rsplit("/", 1)[1]
        if logfetch_base.is_in_date_range(
                args, int(str(log_file['lastModified'])[0:-3])):
            if not args.logtype or log_matches(args, filename):
                if not already_downloaded(args.dest, filename):
                    async_requests.append(
                        grequests.AsyncRequest('GET',
                                               log_file['getUrl'],
                                               callback=generate_callback(
                                                   log_file['getUrl'],
                                                   args.dest, filename,
                                                   args.chunk_size,
                                                   args.verbose),
                                               headers=args.headers))
                else:
                    if args.verbose:
                        sys.stderr.write(
                            colored(
                                'Log already downloaded {0}'.format(filename),
                                'magenta') + '\n')
                    all_logs.append('{0}/{1}'.format(
                        args.dest, filename.replace('.gz', '.log')))
                zipped_files.append('{0}/{1}'.format(args.dest, filename))
            else:
                if args.verbose:
                    sys.stderr.write(
                        colored(
                            'Excluding {0}, log does not match logtype argument {1}'
                            .format(filename, args.logtype), 'magenta') + '\n')
        else:
            if args.verbose:
                sys.stderr.write(
                    colored(
                        'Excluding {0}, not in date range'.format(filename),
                        'magenta') + '\n')
    if async_requests:
        sys.stderr.write(
            colored(
                'Starting S3 Downloads with {0} parallel fetches'.format(
                    args.num_parallel_fetches), 'cyan'))
        grequests.map(async_requests,
                      stream=True,
                      size=args.num_parallel_fetches)
    else:
        sys.stderr.write(colored('No S3 logs to download', 'cyan'))
    sys.stderr.write(colored('\nUnpacking S3 logs\n', 'cyan'))
    all_logs = all_logs + logfetch_base.unpack_logs(args, zipped_files)
    sys.stderr.write(colored('All S3 logs up to date', 'cyan') + '\n')
    return all_logs
Example #4
File: beacon.py Project: tjirab/decipher3
    def do(self, action, name, args, asynchronous=False):
        "Perform action"
        if args.pop('__meta', None):
            return (dict(api='/api/%s/%s' % (self.version, name),
                         method=action.upper(),
                         args=args))

        self._ensureKey()
        url = '%s/api/%s/%s' % (self.host, self.version, name)
        self._debug('> %s %s' % (action.upper(), url))

        kw = {}
        if action == 'get':
            body = kw['params'] = args
        else:
            body = kw['data'] = json.dumps(args, indent=1, cls=JSONEncoder)
        if self.timeout: kw['timeout'] = self.timeout

        headers = {'content-type': 'application/json'}
        headers.update(self._requestAuthHeaders)
        for k, v in list(self._requestAuthHeaders.items()):
            self._debug('>> %s: %s' % (k, v))
        if body:
            self._debug("\n%s\n" % body)

        if self.xml:
            headers['accept'] = 'application/xml'
        kw['verify'] = self.verifySSL

        headers.update(self.headers)
        if asynchronous:
            import grequests
            return grequests.AsyncRequest(action, url, headers=headers, **kw)

        try:
            r = self.session.request(action, url, headers=headers, **kw)
        except requests.ConnectionError as e:
            raise BeaconAPIException(
                code=500,
                message="Could not connect to server (%s): %s" % (url, e))
        self._debug('<< %s %s' % (r.status_code, r.reason))
        if 'x-typehint' in r.headers:
            self._debug('< x-typehint: %s' % r.headers['x-typehint'])
        if r.status_code != 200:
            raise BeaconAPIException(code=r.status_code,
                                     message=r.reason,
                                     body=r.content)
        if r.headers['content-type'] == 'application/json':
            return r.json()
        return r.content
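With asynchronous=True, do() returns an unsent grequests.AsyncRequest instead of performing the call, so a caller can batch several of them and dispatch the whole set at once with grequests.map. A hedged usage sketch ('api' and the endpoint names below are placeholders, not part of the example above):

import grequests

# 'api' is assumed to be a configured instance of the client class above.
pending = [api.do('get', name, {}, asynchronous=True)
           for name in ('status', 'users', 'surveys')]

# map() sends them concurrently; a failed request comes back as None.
for req, resp in zip(pending, grequests.map(pending)):
    if resp is None:
        print('request to %s failed' % req.url)
    else:
        print('%s %s' % (resp.status_code, req.url))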
Example #5
def main(parser, args):
    if args.taskId:
        singularity_path = "task/%s" % args.taskId
    elif args.deployId:
        if not args.requestId:
            exit(parser, "RequestId requires DeployId")
        singularity_path = "request/%s/deploy/%s" % (args.requestId,
                                                     args.deployId)
    elif args.requestId:
        singularity_path = "request/%s" % args.requestId
    else:
        exit(parser,
             "Specify one of taskId, requestId and deployId, or requestId")

    if not args.singularity_uri_base:
        exit(parser, "Specify a base uri for Singularity")

    uri_prefix = ""

    if not args.singularity_uri_base.startswith(("http://", "https://")):
        uri_prefix = "http://"

    singularity_uri = "%s%s/logs/%s" % (uri_prefix, args.singularity_uri_base,
                                        singularity_path)

    print "fetching log metadata from %s" % singularity_uri

    singularity_response = requests.get(singularity_uri)

    if singularity_response.status_code < 200 or singularity_response.status_code > 299:
        exit(
            None, "Singularity responded with an invalid status code (%s)" %
            singularity_response.status_code)

    s_json = singularity_response.json()

    print "found %s log files" % len(s_json)

    async_requests = [
        grequests.AsyncRequest('GET',
                               log_file['getUrl'],
                               callback=generate_callback(
                                   log_file['getUrl'], args.dest,
                                   log_file['key'][log_file['key'].rfind('/') +
                                                   1:], args.chunk_size))
        for log_file in s_json
    ]

    grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
Example #6
def download_s3_logs(args):
  sys.stderr.write(colored('Checking for S3 log files', 'blue') + '\n')
  logs = get_json_response(singularity_s3logs_uri(args))
  async_requests = []
  all_logs = []
  for log_file in logs:
    filename = log_file['key'].rsplit("/", 1)[1]
    full_log_path = '{0}/{1}'.format(args.dest, filename.replace('.gz', '.log'))
    full_gz_path = '{0}/{1}'.format(args.dest, filename)
    if in_date_range(args, filename):
      if not (os.path.isfile(full_log_path) or os.path.isfile(full_gz_path)):
        async_requests.append(
          grequests.AsyncRequest('GET', log_file['getUrl'],
            callback=generate_callback(log_file['getUrl'], args.dest, filename, args.chunk_size)
          )
        )
      all_logs.append('{0}/{1}'.format(args.dest, filename.replace('.gz', '.log')))
  grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
  zipped_files = ['{0}/{1}'.format(args.dest, log_file['key'].rsplit("/", 1)[1]) for log_file in logs]
  logfetch_base.unpack_logs(zipped_files)
  sys.stderr.write(colored('All S3 logs up to date', 'blue') + '\n')
  return all_logs
Example #7
def download_live_logs(args):
    tasks = tasks_to_check(args)
    async_requests = []
    zipped_files = []
    all_logs = []
    sys.stderr.write(colored('Finding current live log files', 'cyan') + '\n')
    for task in tasks:
        metadata = files_json(args, task)
        if 'slaveHostname' in metadata:
            uri = DOWNLOAD_FILE_FORMAT.format(metadata['slaveHostname'])
            if args.verbose:
                sys.stderr.write(
                    colored(
                        'Finding logs in base directory on {0}'.format(
                            metadata['slaveHostname']), 'magenta') + '\n')
            for log_file in base_directory_files(args, task, metadata):
                logfile_name = '{0}-{1}'.format(task, log_file)
                if not args.logtype or logfetch_base.log_matches(
                        log_file, args.logtype.replace('logs/', '')):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest(
                                'GET',
                                uri,
                                callback=generate_callback(
                                    uri, args.dest, logfile_name,
                                    args.chunk_size, args.verbose),
                                params={
                                    'path':
                                    '{0}/{1}/{2}'.format(
                                        metadata['fullPathToRoot'],
                                        metadata['currentDirectory'], log_file)
                                },
                                headers=args.headers))
                    if logfile_name.endswith('.gz'):
                        zipped_files.append('{0}/{1}'.format(
                            args.dest, logfile_name))
                    else:
                        all_logs.append('{0}/{1}'.format(
                            args.dest, logfile_name.replace('.gz', '.log')))
                elif args.logtype and args.verbose:
                    sys.stderr.write(
                        colored(
                            'Excluding log {0}, doesn\'t match {1}'.format(
                                log_file, args.logtype), 'magenta') + '\n')

            if args.verbose:
                sys.stderr.write(
                    colored(
                        'Finding logs in logs directory on {0}'.format(
                            metadata['slaveHostname']), 'magenta') + '\n')
            for log_file in logs_folder_files(args, task):
                logfile_name = '{0}-{1}'.format(task, log_file)
                if not args.logtype or logfetch_base.log_matches(
                        log_file, args.logtype.replace('logs/', '')):
                    if should_download(args, logfile_name, task):
                        async_requests.append(
                            grequests.AsyncRequest(
                                'GET',
                                uri,
                                callback=generate_callback(
                                    uri, args.dest, logfile_name,
                                    args.chunk_size, args.verbose),
                                params={
                                    'path':
                                    '{0}/{1}/logs/{2}'.format(
                                        metadata['fullPathToRoot'],
                                        metadata['currentDirectory'], log_file)
                                },
                                headers=args.headers))
                    if logfile_name.endswith('.gz'):
                        zipped_files.append('{0}/{1}'.format(
                            args.dest, logfile_name))
                    else:
                        all_logs.append('{0}/{1}'.format(
                            args.dest, logfile_name.replace('.gz', '.log')))
                elif args.logtype and args.verbose:
                    sys.stderr.write(
                        colored(
                            'Excluding log {0}, doesn\'t match {1}'.format(
                                log_file, args.logtype), 'magenta') + '\n')

    if async_requests:
        sys.stderr.write(colored('Starting live logs downloads\n', 'cyan'))
        grequests.map(async_requests,
                      stream=True,
                      size=args.num_parallel_fetches)
    if zipped_files:
        sys.stderr.write(colored('\nUnpacking logs\n', 'cyan'))
        all_logs = all_logs + logfetch_base.unpack_logs(args, zipped_files)
    return all_logs
Example #8
# -*- coding: utf-8 -*-

"""
grequests is gevent + requests
"""

from urls import url_list
import grequests
from sfm.timer import DateTimeTimer as Timer

with Timer(title="use grequests"):
    req_list = [
        grequests.AsyncRequest(method="GET", url=url)
        for url in url_list
    ]
    res_list = grequests.map(req_list)
    results = [len(res.text) for res in res_list if res is not None]  # map() returns None for failed requests
    print(results)
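One caveat with the comprehension above: grequests.map() returns None in place of any response whose request raised, which is why the listing filters those entries out. To see why a request failed, map() also accepts an exception_handler; a small sketch reusing url_list from the example:

import grequests
from urls import url_list

def on_error(request, exception):
    # Called once per failed request; whatever it returns replaces the response.
    print('{0} failed: {1}'.format(request.url, exception))

req_list = [grequests.AsyncRequest(method="GET", url=url) for url in url_list]
res_list = grequests.map(req_list, exception_handler=on_error)
results = [len(res.text) for res in res_list if res is not None]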