Example #1
0
def main():
    """Entry point: stress-test uploads to an isolate server.

    Parses command-line flags, generates random payloads, and uploads them
    either a fixed number of times (--items) or until a total byte count is
    reached (--max-size). Prints per-item timing results as a histogram and
    optionally dumps them to a JSON file.

    Returns:
      0 on success. Flag errors exit the process via parser.error().
    """
    colorama.init()

    parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
    parser.add_option('-I',
                      '--isolate-server',
                      metavar='URL',
                      default='',
                      help='Isolate server to use')
    # The default namespace embeds the current time so that consecutive runs
    # do not reuse previously uploaded (cached) items.
    parser.add_option('--namespace',
                      default='temporary%d-gzip' % time.time(),
                      metavar='XX',
                      help='Namespace to use on the server, default: %default')
    parser.add_option('--threads',
                      type='int',
                      default=16,
                      metavar='N',
                      help='Parallel worker threads to use, default:%default')
    graph.unit_option(parser,
                      '--items',
                      default=0,
                      help='Number of items to upload')
    graph.unit_option(parser,
                      '--max-size',
                      default=0,
                      help='Loop until this amount of data was transferred')
    graph.unit_option(parser,
                      '--mid-size',
                      default=100 * 1024,
                      help='Rough average size of each item, default:%default')
    parser.add_option('--columns',
                      type='int',
                      default=graph.get_console_width(),
                      metavar='N',
                      help='For histogram display, default:%default')
    parser.add_option(
        '--buckets',
        type='int',
        default=20,
        metavar='N',
        help='Number of buckets for histogram display, default:%default')
    parser.add_option('--dump', metavar='FOO.JSON', help='Dumps to json file')
    parser.add_option('--dry-run',
                      action='store_true',
                      help='Do not send anything')
    parser.add_option('-v',
                      '--verbose',
                      action='store_true',
                      help='Enable logging')
    options, args = parser.parse_args()

    logging.basicConfig(
        level=logging.INFO if options.verbose else logging.FATAL)
    if args:
        parser.error('Unsupported args: %s' % args)
    # Exactly one of the two stop conditions must be set; both-zero or
    # both-nonzero is ambiguous.
    if bool(options.max_size) == bool(options.items):
        parser.error(
            'Use one of --max-size or --items.\n'
            '  Use --max-size if you want to run it until NN bytes were '
            'transferred.\n'
            '  Otherwise use --items to run it for NN items.')
    # A server URL is only required when we actually send data.
    if not options.dry_run:
        options.isolate_server = options.isolate_server.rstrip('/')
        if not options.isolate_server:
            parser.error('--isolate-server is required.')

    print(' - Using %d threads,  items=%d,  max-size=%d,  mid-size=%d' %
          (options.threads, options.items, options.max_size, options.mid_size))
    if options.dry_run:
        print(' - %sDRY RUN MODE%s' %
              (colorama.Fore.GREEN, colorama.Fore.RESET))

    start = time.time()

    random_pool = Randomness()
    print(' - Generated pool after %.1fs' % (time.time() - start))

    columns = [('index', 0), ('data', 0), ('size', options.items)]
    progress = Progress(columns)
    api = isolateserver.get_storage_api(options.isolate_server,
                                        options.namespace)
    # Bind all run-invariant arguments once; the thread pool only supplies
    # the per-item payload size.
    do_item = functools.partial(
        send_and_receive, random_pool, options.dry_run,
        isolateserver.is_namespace_with_compression(options.namespace), api,
        progress)

    # TODO(maruel): Handle Ctrl-C should:
    # - Stop adding tasks.
    # - Stop scheduling tasks in ThreadPool.
    # - Wait for the remaining ungoing tasks to complete.
    # - Still print details and write the json file.
    with threading_utils.ThreadPoolWithProgress(progress, options.threads,
                                                options.threads, 0) as pool:
        if options.items:
            for _ in xrange(options.items):
                pool.add_task(0, do_item, gen_size(options.mid_size))
                progress.print_update()
        elif options.max_size:
            # This one is approximate: sizes are random, so we stop once the
            # cumulative total crosses the threshold.
            total = 0
            while True:
                size = gen_size(options.mid_size)
                progress.update_item('', size=1)
                progress.print_update()
                pool.add_task(0, do_item, size)
                total += size
                if total >= options.max_size:
                    break
        results = sorted(pool.join())

    print('')
    print(' - Took %.1fs.' % (time.time() - start))
    print('')
    print_results(results, options.columns, options.buckets)
    if options.dump:
        with open(options.dump, 'w') as f:
            # Compact separators keep the dump small.
            json.dump(results, f, separators=(',', ':'))
    return 0
def main():
  """Entry point: stress-test uploads to an isolate server.

  Parses command-line flags, generates random payloads, and uploads them
  either a fixed number of times (--items) or until a total byte count is
  reached (--max-size). Prints per-item timing results as a histogram and
  optionally dumps them to a JSON file.

  Returns:
    0 on success. Flag errors exit the process via parser.error().
  """
  colorama.init()

  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-I', '--isolate-server',
      metavar='URL', default='',
      help='Isolate server to use')
  # The default namespace embeds the current time so that consecutive runs
  # do not reuse previously uploaded (cached) items.
  parser.add_option(
      '--namespace', default='temporary%d-gzip' % time.time(), metavar='XX',
      help='Namespace to use on the server, default: %default')
  parser.add_option(
      '--threads', type='int', default=16, metavar='N',
      help='Parallel worker threads to use, default:%default')

  data_group = optparse.OptionGroup(parser, 'Amount of data')
  graph.unit_option(
      data_group, '--items', default=0, help='Number of items to upload')
  graph.unit_option(
      data_group, '--max-size', default=0,
      help='Loop until this amount of data was transferred')
  graph.unit_option(
      data_group, '--mid-size', default=100*1024,
      help='Rough average size of each item, default:%default')
  parser.add_option_group(data_group)

  ui_group = optparse.OptionGroup(parser, 'Result histogram')
  ui_group.add_option(
      '--columns', type='int', default=graph.get_console_width(), metavar='N',
      help='Width of histogram, default:%default')
  ui_group.add_option(
      '--buckets', type='int', default=20, metavar='N',
      help='Number of histogram\'s buckets, default:%default')
  parser.add_option_group(ui_group)

  log_group = optparse.OptionGroup(parser, 'Logging')
  log_group.add_option(
      '--dump', metavar='FOO.JSON', help='Dumps to json file')
  log_group.add_option(
      '-v', '--verbose', action='store_true', help='Enable logging')
  parser.add_option_group(log_group)

  options, args = parser.parse_args()

  logging.basicConfig(level=logging.INFO if options.verbose else logging.FATAL)
  if args:
    parser.error('Unsupported args: %s' % args)
  # Exactly one of the two stop conditions must be set; both-zero or
  # both-nonzero is ambiguous.
  if bool(options.max_size) == bool(options.items):
    parser.error(
        'Use one of --max-size or --items.\n'
        '  Use --max-size if you want to run it until NN bytes were '
        'transferred.\n'
        '  Otherwise use --items to run it for NN items.')
  options.isolate_server = options.isolate_server.rstrip('/')
  if not options.isolate_server:
    parser.error('--isolate-server is required.')

  print(
      ' - Using %d threads,  items=%d,  max-size=%d,  mid-size=%d' % (
      options.threads, options.items, options.max_size, options.mid_size))

  start = time.time()

  random_pool = Randomness()
  print(' - Generated pool after %.1fs' % (time.time() - start))

  columns = [('index', 0), ('data', 0), ('size', options.items)]
  progress = Progress(columns)
  storage = isolateserver.get_storage(options.isolate_server, options.namespace)
  # Bind all run-invariant arguments once; the thread pool only supplies the
  # per-item payload size.
  do_item = functools.partial(
      send_and_receive,
      random_pool,
      storage,
      progress)

  # TODO(maruel): Handle Ctrl-C should:
  # - Stop adding tasks.
  # - Stop scheduling tasks in ThreadPool.
  # - Wait for the remaining ungoing tasks to complete.
  # - Still print details and write the json file.
  with threading_utils.ThreadPoolWithProgress(
      progress, options.threads, options.threads, 0) as pool:
    if options.items:
      for _ in xrange(options.items):
        pool.add_task(0, do_item, gen_size(options.mid_size))
        progress.print_update()
    elif options.max_size:
      # This one is approximate: sizes are random, so we stop once the
      # cumulative total crosses the threshold.
      total = 0
      while True:
        size = gen_size(options.mid_size)
        progress.update_item('', size=1)
        progress.print_update()
        pool.add_task(0, do_item, size)
        total += size
        if total >= options.max_size:
          break
    results = sorted(pool.join())

  print('')
  print(' - Took %.1fs.' % (time.time() - start))
  print('')
  print_results(results, options.columns, options.buckets)
  if options.dump:
    with open(options.dump, 'w') as f:
      # Compact separators keep the dump small.
      json.dump(results, f, separators=(',',':'))
  return 0