Code example #1
def main():
  """CLI frontend to validate arguments."""
  tools.disable_buffering()
  parser = run_test_cases.OptionParserWithTestShardingAndFiltering(
      usage='%prog <options> [gtest]')

  # Override the default seed value to 0.
  parser.set_defaults(seed=0)

  options, args = parser.parse_args()
  if not args:
    parser.error('Please provide the executable to run')

  cmd = tools.fix_python_path(args)
  try:
    tests = run_test_cases.chromium_list_test_cases(
        cmd,
        os.getcwd(),
        index=options.index,
        shards=options.shards,
        seed=options.seed,
        disabled=options.disabled,
        fails=options.fails,
        flaky=options.flaky,
        pre=False,
        manual=options.manual)
    for test in tests:
      print test
  except run_test_cases.Failure, e:
    print e.args[0]
    return e.args[1]
Code example #2
def main():
    """CLI frontend to validate arguments."""
    tools.disable_buffering()
    parser = run_test_cases.OptionParserWithTestShardingAndFiltering(
        usage='%prog <options> [gtest]')

    # Override the default seed value to 0.
    parser.set_defaults(seed=0)

    options, args = parser.parse_args()
    if not args:
        parser.error('Please provide the executable to run')

    cmd = tools.fix_python_path(args)
    try:
        tests = run_test_cases.chromium_list_test_cases(
            cmd,
            os.getcwd(),
            index=options.index,
            shards=options.shards,
            seed=options.seed,
            disabled=options.disabled,
            fails=options.fails,
            flaky=options.flaky,
            pre=False,
            manual=options.manual)
        for test in tests:
            print test
    except run_test_cases.Failure, e:
        print e.args[0]
        return e.args[1]
Code example #3
def main():
    """CLI frontend to validate arguments."""
    tools.disable_buffering()
    parser = run_test_cases.OptionParserTestCases(
        usage='%prog <options> --isolated <.isolated>')
    parser.format_description = lambda *_: parser.description
    isolate.add_variable_option(parser)
    isolate.add_trace_option(parser)

    # TODO(maruel): Add support for options.timeout.
    parser.remove_option('--timeout')

    options, args = parser.parse_args()
    if args:
        parser.error('Unsupported arg: %s' % args)
    isolate.parse_isolated_option(parser, options, os.getcwd(), True)
    isolate.parse_variable_option(options)

    try:
        config, command, test_cases = safely_load_isolated(parser, options)
        if not command:
            parser.error('A command must be defined')
        if not test_cases:
            parser.error('No test case to run with command: %s' %
                         ' '.join(command))

        config.saved_state.variables.update(options.variables)
        return isolate_test_cases(
            command, test_cases, options.jobs, config.isolated_filepath,
            config.saved_state.isolate_filepath, config.root_dir,
            config.saved_state.relative_cwd, config.saved_state.variables,
            options.trace_blacklist)
    except isolate.ExecutionError, e:
        print >> sys.stderr, str(e)
        return 1
Code example #4
File: run_isolated.py Project: WHS-TechOps/Aviator
def main(args):
    tools.disable_buffering()
    parser = tools.OptionParserWithLogging(usage="%prog <options>", version=__version__, log_file=RUN_ISOLATED_LOG_FILE)

    data_group = optparse.OptionGroup(parser, "Data source")
    data_group.add_option("-s", "--isolated", metavar="FILE", help="File/url describing what to map or run")
    data_group.add_option("-H", "--hash", help="Hash of the .isolated to grab from the hash table")
    isolateserver.add_isolate_server_options(data_group, True)
    parser.add_option_group(data_group)

    cache_group = optparse.OptionGroup(parser, "Cache management")
    cache_group.add_option("--cache", default="cache", metavar="DIR", help="Cache directory, default=%default")
    cache_group.add_option(
        "--max-cache-size",
        type="int",
        metavar="NNN",
        default=20 * 1024 * 1024 * 1024,
        help="Trim if the cache gets larger than this value, default=%default",
    )
    cache_group.add_option(
        "--min-free-space",
        type="int",
        metavar="NNN",
        default=2 * 1024 * 1024 * 1024,
        help="Trim if disk free space becomes lower than this value, " "default=%default",
    )
    cache_group.add_option(
        "--max-items",
        type="int",
        metavar="NNN",
        default=100000,
        help="Trim if more than this number of items are in the cache " "default=%default",
    )
    parser.add_option_group(cache_group)

    auth.add_auth_options(parser)
    options, args = parser.parse_args(args)
    auth.process_auth_options(parser, options)
    isolateserver.process_isolate_server_options(data_group, options)

    if bool(options.isolated) == bool(options.hash):
        logging.debug("One and only one of --isolated or --hash is required.")
        parser.error("One and only one of --isolated or --hash is required.")

    options.cache = os.path.abspath(options.cache)
    policies = CachePolicies(options.max_cache_size, options.min_free_space, options.max_items)

    try:
        # |options.cache| path may not exist until DiskCache() instance is created.
        cache = DiskCache(options.cache, policies, isolateserver.get_hash_algo(options.namespace))
        remote = options.isolate_server or options.indir
        with isolateserver.get_storage(remote, options.namespace) as storage:
            # Hashing schemes used by |storage| and |cache| MUST match.
            assert storage.hash_algo == cache.hash_algo
            return run_tha_test(options.isolated or options.hash, storage, cache, args)
    except Exception as e:
        # Make sure any exception is logged.
        tools.report_error(e)
        logging.exception(e)
        return 1
Code example #5
File: run_isolated.py Project: maruel/luci-py
def main(args):
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--json',
      help='dump output metadata to json file. When used, run_isolated returns '
           'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='int', help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='int',
      help='Grace period between SIGTERM and SIGKILL')
  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
          '[default: %default]')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, options.json,
        options.root_dir, options.hard_timeout, options.grace_period, args)
Code example #6
def main():
    tools.disable_buffering()
    parser = optparse.OptionParser()
    parser.add_option('-s',
                      '--isolated',
                      help='.isolated file to profile with.')
    parser.add_option('--largest_files',
                      type='int',
                      help='If this is set, instead of compressing all the '
                      'files, only the large n files will be compressed')
    options, args = parser.parse_args()

    if args:
        parser.error('Unknown args passed in; %s' % args)
    if not options.isolated:
        parser.error('The .isolated file must be given.')

    temp_dir = None
    try:
        temp_dir = tempfile.mkdtemp(prefix=u'zip_profiler')

        # Create a directory of the required files
        subprocess.check_call([
            os.path.join(ROOT_DIR, 'isolate.py'), 'remap', '-s',
            options.isolated, '--outdir', temp_dir
        ])

        file_set = tree_files(temp_dir)

        if options.largest_files:
            sorted_by_size = sorted(file_set.iteritems(),
                                    key=lambda x: x[1],
                                    reverse=True)
            files_to_compress = sorted_by_size[:options.largest_files]

            for filename, size in files_to_compress:
                print('Compressing %s, uncompressed size %d' %
                      (filename, size))

                profile_compress('zlib', zlib.compressobj, range(10), zip_file,
                                 filename)
                profile_compress('bz2', bz2.BZ2Compressor, range(1, 10),
                                 zip_file, filename)
        else:
            print('Number of files: %s' % len(file_set))
            print('Total size: %s' % sum(file_set.itervalues()))

            # Profile!
            profile_compress('zlib', zlib.compressobj, range(10),
                             zip_directory, temp_dir)
            profile_compress('bz2', bz2.BZ2Compressor, range(1, 10),
                             zip_directory, temp_dir)
    finally:
        file_path.rmtree(temp_dir)
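The helpers tree_files, profile_compress, zip_file and zip_directory used above are defined elsewhere in zip_profiler.py and are not reproduced in these excerpts. As a rough orientation only, a compression-timing helper with the signature implied by the call sites could look like the following sketch (the body is an assumption, not the project's implementation):

import time


def profile_compress(name, compressor_factory, levels, zip_fn, target):
    # Hypothetical sketch: time zip_fn at each compression level and report
    # how many compressed bytes it produced for the given target.
    for level in levels:
        start = time.time()
        compressed_size = zip_fn(compressor_factory(level), target)
        print('%s level %d: %d bytes in %.2fs' % (
            name, level, compressed_size, time.time() - start))


def zip_file(compressor, filename):
    # Hypothetical sketch: stream one file through the compressor and return
    # the compressed size. zlib.compressobj(level) and bz2.BZ2Compressor(level)
    # both fit the compressor_factory shape used in the examples.
    total = 0
    with open(filename, 'rb') as f:
        while True:
            chunk = f.read(64 * 1024)
            if not chunk:
                break
            total += len(compressor.compress(chunk))
    return total + len(compressor.flush())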
Code example #7
def main():
  """CLI frontend to validate arguments."""
  tools.disable_buffering()
  parser = run_test_cases.OptionParserTestCases(
      usage='%prog <options> [gtest]')
  parser.format_description = lambda *_: parser.description
  parser.add_option(
      '-o', '--out',
      help='output file, defaults to <executable>.test_cases')
  parser.add_option(
      '-r', '--root-dir',
      help='Root directory under which file access should be noted')
  parser.add_option(
      '--trace-blacklist', action='append', default=[],
      help='List of regexp to use as blacklist filter')
  # TODO(maruel): Add support for options.timeout.
  parser.remove_option('--timeout')
  options, args = parser.parse_args()

  if not args:
    parser.error(
        'Please provide the executable line to run, if you need fancy things '
        'like xvfb, start this script from *inside* xvfb, it\'ll be much faster'
        '.')

  cmd = tools.fix_python_path(args)
  cmd[0] = os.path.abspath(cmd[0])
  if not os.path.isfile(cmd[0]):
    parser.error('Tracing failed for: %s\nIt doesn\'t exist' % ' '.join(cmd))

  if not options.out:
    options.out = '%s.test_cases' % cmd[-1]
  options.out = os.path.abspath(options.out)
  if options.root_dir:
    options.root_dir = os.path.abspath(options.root_dir)
  logname = options.out + '.log'

  test_cases = parser.process_gtest_options(cmd, os.getcwd(), options)

  # Then run them.
  print('Tracing...')
  results = trace_test_cases(
      cmd,
      os.getcwd(),
      test_cases,
      options.jobs,
      logname)
  print('Reading trace logs...')
  blacklist = tools.gen_blacklist(options.trace_blacklist)
  write_details(logname, options.out, options.root_dir, blacklist, results)
  return 0
Code example #8
File: run_isolated.py Project: eakuefner/luci-py
def main(args):
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--json',
      help='dump output metadata to json file. When used, run_isolated returns '
           'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='int', help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='int',
      help='Grace period between SIGTERM and SIGKILL')
  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
          '[default: %default]')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, options.json,
        options.root_dir, options.hard_timeout, options.grace_period, args)
Code example #9
File: zip_profiler.py Project: WHS-TechOps/Aviator
def main():
  tools.disable_buffering()
  parser = optparse.OptionParser()
  parser.add_option('-s', '--isolated', help='.isolated file to profile with.')
  parser.add_option('--largest_files', type='int',
                    help='If this is set, instead of compressing all the '
                    'files, only the large n files will be compressed')
  options, args = parser.parse_args()

  if args:
    parser.error('Unknown args passed in; %s' % args)
  if not options.isolated:
    parser.error('The .isolated file must be given.')

  temp_dir = None
  try:
    temp_dir = tempfile.mkdtemp(prefix='zip_profiler')

    # Create a directory of the required files
    subprocess.check_call([os.path.join(ROOT_DIR, 'isolate.py'),
                           'remap',
                           '-s', options.isolated,
                           '--outdir', temp_dir])

    file_set = tree_files(temp_dir)

    if options.largest_files:
      sorted_by_size = sorted(file_set.iteritems(),  key=lambda x: x[1],
                              reverse=True)
      files_to_compress = sorted_by_size[:options.largest_files]

      for filename, size in files_to_compress:
        print('Compressing %s, uncompressed size %d' % (filename, size))

        profile_compress('zlib', zlib.compressobj, range(10), zip_file,
                         filename)
        profile_compress('bz2', bz2.BZ2Compressor, range(1, 10), zip_file,
                         filename)
    else:
      print('Number of files: %s' % len(file_set))
      print('Total size: %s' % sum(file_set.itervalues()))

      # Profile!
      profile_compress('zlib', zlib.compressobj, range(10), zip_directory,
                       temp_dir)
      profile_compress('bz2', bz2.BZ2Compressor, range(1, 10), zip_directory,
                       temp_dir)
  finally:
    shutil.rmtree(temp_dir)
Code example #10
def main(args=None):
    tools.disable_buffering()
    parser = tools.OptionParserWithLogging(usage="%prog <options> [file1] [file2] ...")
    parser.add_option("-o", "--output", help="Output to file instead of stdout")

    options, args = parser.parse_args(args)

    configs = load_isolates(args)
    data = configs.make_isolate_file()
    if options.output:
        with open(options.output, "wb") as f:
            print_all(configs.file_comment, data, f)
    else:
        print_all(configs.file_comment, data, sys.stdout)
    return 0
Code example #11
File: fix_test_cases.py Project: rzr/Tizen_Crosswalk
def main():
    tools.disable_buffering()
    parser = run_test_cases.OptionParserTestCases(usage="%prog <options> -s <something.isolated>")
    isolate.add_trace_option(parser)
    parser.add_option("-s", "--isolated", help="The isolated file")
    options, args = parser.parse_args()

    if args:
        parser.error("Unsupported arg: %s" % args)
    isolate.parse_isolated_option(parser, options, os.getcwd(), True)

    _, command, test_cases = isolate_test_cases.safely_load_isolated(parser, options)
    if not command:
        parser.error("A command must be defined")
    if not test_cases:
        parser.error("No test case to run")
    return not fix_all(options.isolated, test_cases, options.trace_blacklist, options.verbose)
Code example #12
File: isolate_merge.py Project: WHS-TechOps/Aviator
def main(args=None):
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options> [file1] [file2] ...')
  parser.add_option(
      '-o', '--output', help='Output to file instead of stdout')

  options, args = parser.parse_args(args)

  configs = load_isolates(args)
  data = configs.make_isolate_file()
  if options.output:
    with open(options.output, 'wb') as f:
      isolate_format.print_all(configs.file_comment, data, f)
  else:
    isolate_format.print_all(configs.file_comment, data, sys.stdout)
  return 0
Code example #13
File: zip_profiler.py Project: qlb7707/webrtc_src
def main():
    tools.disable_buffering()
    parser = optparse.OptionParser()
    parser.add_option("-s", "--isolated", help=".isolated file to profile with.")
    parser.add_option(
        "--largest_files",
        type="int",
        help="If this is set, instead of compressing all the " "files, only the large n files will be compressed",
    )
    options, args = parser.parse_args()

    if args:
        parser.error("Unknown args passed in; %s" % args)
    if not options.isolated:
        parser.error("The .isolated file must be given.")

    temp_dir = None
    try:
        temp_dir = tempfile.mkdtemp(prefix=u"zip_profiler")

        # Create a directory of the required files
        subprocess.check_call(
            [os.path.join(ROOT_DIR, "isolate.py"), "remap", "-s", options.isolated, "--outdir", temp_dir]
        )

        file_set = tree_files(temp_dir)

        if options.largest_files:
            sorted_by_size = sorted(file_set.iteritems(), key=lambda x: x[1], reverse=True)
            files_to_compress = sorted_by_size[: options.largest_files]

            for filename, size in files_to_compress:
                print("Compressing %s, uncompressed size %d" % (filename, size))

                profile_compress("zlib", zlib.compressobj, range(10), zip_file, filename)
                profile_compress("bz2", bz2.BZ2Compressor, range(1, 10), zip_file, filename)
        else:
            print("Number of files: %s" % len(file_set))
            print("Total size: %s" % sum(file_set.itervalues()))

            # Profile!
            profile_compress("zlib", zlib.compressobj, range(10), zip_directory, temp_dir)
            profile_compress("bz2", bz2.BZ2Compressor, range(1, 10), zip_directory, temp_dir)
    finally:
        shutil.rmtree(temp_dir)
Code example #14
def main(args=None):
    tools.disable_buffering()
    parser = tools.OptionParserWithLogging(
        usage='%prog <options> [file1] [file2] ...')
    parser.add_option('-o',
                      '--output',
                      help='Output to file instead of stdout')

    options, args = parser.parse_args(args)

    configs = load_isolates(args)
    data = configs.make_isolate_file()
    if options.output:
        with open(options.output, 'wb') as f:
            isolate_format.print_all(configs.file_comment, data, f)
    else:
        isolate_format.print_all(configs.file_comment, data, sys.stdout)
    return 0
Code example #15
def main():
  """CLI frontend to validate arguments."""
  tools.disable_buffering()
  parser = run_test_cases.OptionParserTestCases(
      usage='%prog <options> --isolated <.isolated>')
  parser.format_description = lambda *_: parser.description
  isolate.add_variable_option(parser)
  isolate.add_trace_option(parser)

  # TODO(maruel): Add support for options.timeout.
  parser.remove_option('--timeout')

  options, args = parser.parse_args()
  if args:
    parser.error('Unsupported arg: %s' % args)
  isolate.parse_isolated_option(parser, options, os.getcwd(), True)
  isolate.parse_variable_option(options)

  try:
    config, command, test_cases = safely_load_isolated(parser, options)
    if not command:
      parser.error('A command must be defined')
    if not test_cases:
      parser.error('No test case to run with command: %s' % ' '.join(command))

    config.saved_state.config_variables.update(options.config_variables)
    config.saved_state.extra_variables.update(options.extra_variables)
    config.saved_state.path_variables.update(options.path_variables)
    return isolate_test_cases(
        command,
        test_cases,
        options.jobs,
        config.isolated_filepath,
        config.saved_state.isolate_filepath,
        config.root_dir,
        config.saved_state.relative_cwd,
        config.saved_state.path_variables,
        config.saved_state.config_variables,
        config.saved_state.extra_variables,
        options.trace_blacklist)
  except isolate.ExecutionError, e:
    print >> sys.stderr, str(e)
    return 1
Code example #16
def main():
  tools.disable_buffering()
  parser = optparse.OptionParser(usage='%prog <options> [gtest]')
  parser.disable_interspersed_args()
  parser.add_option(
      '-I', '--index',
      type='int',
      default=os.environ.get('GTEST_SHARD_INDEX'),
      help='Shard index to run')
  parser.add_option(
      '-S', '--shards',
      type='int',
      default=os.environ.get('GTEST_TOTAL_SHARDS'),
      help='Total number of shards to calculate from the --index to run')
  options, args = parser.parse_args()
  env = os.environ.copy()
  env['GTEST_TOTAL_SHARDS'] = str(options.shards)
  env['GTEST_SHARD_INDEX'] = str(options.index)
  return subprocess.call(tools.fix_python_path(args), env=env)
Code example #17
def main():
    tools.disable_buffering()
    parser = optparse.OptionParser(usage='%prog <options> [gtest]')
    parser.disable_interspersed_args()
    parser.add_option('-I',
                      '--index',
                      type='int',
                      default=os.environ.get('GTEST_SHARD_INDEX'),
                      help='Shard index to run')
    parser.add_option(
        '-S',
        '--shards',
        type='int',
        default=os.environ.get('GTEST_TOTAL_SHARDS'),
        help='Total number of shards to calculate from the --index to run')
    options, args = parser.parse_args()
    env = os.environ.copy()
    env['GTEST_TOTAL_SHARDS'] = str(options.shards)
    env['GTEST_SHARD_INDEX'] = str(options.index)
    return subprocess.call(tools.fix_python_path(args), env=env)
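Examples #16 and #17 only forward GTEST_SHARD_INDEX and GTEST_TOTAL_SHARDS to the child process; the test binary itself decides which cases to run. As a minimal sketch of the usual index-modulo sharding convention (illustrative only, not code from these projects):

def select_shard(test_cases, shard_index, total_shards):
    # Illustrative sketch: test i runs on the shard where
    # i % total_shards == shard_index, so each test runs exactly once
    # across the whole shard set.
    return [t for i, t in enumerate(test_cases)
            if i % total_shards == shard_index]


# Example: five tests split across two shards.
tests = ['A.a', 'A.b', 'B.a', 'B.b', 'C.a']
print(select_shard(tests, 0, 2))  # ['A.a', 'B.a', 'C.a']
print(select_shard(tests, 1, 2))  # ['A.b', 'B.b']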
Code example #18
File: fix_test_cases.py Project: danfengzi/soui_gyp
def main():
    tools.disable_buffering()
    parser = run_test_cases.OptionParserTestCases(
        usage='%prog <options> -s <something.isolated>')
    isolate.add_trace_option(parser)
    parser.add_option('-s', '--isolated', help='The isolated file')
    options, args = parser.parse_args()

    if args:
        parser.error('Unsupported arg: %s' % args)
    isolate.parse_isolated_option(parser, options, os.getcwd(), True)

    _, command, test_cases = isolate_test_cases.safely_load_isolated(
        parser, options)
    if not command:
        parser.error('A command must be defined')
    if not test_cases:
        parser.error('No test case to run')
    return not fix_all(options.isolated, test_cases, options.trace_blacklist,
                       options.verbose)
Code example #19
File: run_isolated.py Project: misscache/luci-py
def main(args):
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  data_group.add_option(
      '-H', dest='isolated', help=optparse.SUPPRESS_HELP)
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
          '[default: %default]')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, args)
Code example #20
File: run_isolated.py Project: pombreda/client-py
def main(args):
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  data_group.add_option(
      '-H', dest='isolated', help=optparse.SUPPRESS_HELP)
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
          '[default: %default]')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, args)
Code example #21
def main(args):
    tools.disable_buffering()
    parser = logging_utils.OptionParserWithLogging(
        usage="%prog <options>", version=__version__, log_file=RUN_ISOLATED_LOG_FILE
    )
    parser.add_option(
        "--json",
        help="dump output metadata to json file. When used, run_isolated returns " "non-zero only on internal failure",
    )
    data_group = optparse.OptionGroup(parser, "Data source")
    data_group.add_option("-s", "--isolated", help="Hash of the .isolated to grab from the isolate server")
    isolateserver.add_isolate_server_options(data_group)
    parser.add_option_group(data_group)

    isolateserver.add_cache_options(parser)
    parser.set_defaults(cache="cache")

    debug_group = optparse.OptionGroup(parser, "Debugging")
    debug_group.add_option(
        "--leak-temp-dir",
        action="store_true",
        help="Deliberately leak isolate's temp dir for later examination " "[default: %default]",
    )
    debug_group.add_option("--root-dir", help="Use a directory instead of a random one")
    parser.add_option_group(debug_group)

    auth.add_auth_options(parser)
    options, args = parser.parse_args(args)
    if not options.isolated:
        parser.error("--isolated is required.")
    auth.process_auth_options(parser, options)
    isolateserver.process_isolate_server_options(parser, options, True)

    cache = isolateserver.process_cache_options(options)
    with isolateserver.get_storage(options.isolate_server, options.namespace) as storage:
        # Hashing schemes used by |storage| and |cache| MUST match.
        assert storage.hash_algo == cache.hash_algo
        return run_tha_test(
            options.isolated, storage, cache, options.leak_temp_dir, options.json, options.root_dir, args
        )
Code example #22
        parser.error(
            'Must pass one python script to run. Use --help for more details')

    # 1. Query the bots list.
    bots = get_bot_list(options.swarming, options.dimensions)
    print('Found %d bots to process' % len(bots))
    if not bots:
        return 1

    # 2. Archive the script to run.
    isolated_hash = archive(options.isolate_server, args[0])
    print('Running %s' % isolated_hash)

    # 3. Trigger the tasks.
    name = os.path.basename(args[0])
    if options.serial:
        return run_serial(options.swarming, options.isolate_server,
                          str(options.priority), str(options.deadline),
                          options.repeat, isolated_hash, name, bots)

    return run_parallel(options.swarming, options.isolate_server,
                        str(options.priority), str(options.deadline),
                        options.repeat, isolated_hash, name, bots)


if __name__ == '__main__':
    fix_encoding.fix_encoding()
    tools.disable_buffering()
    colorama.init()
    sys.exit(main())
Code example #23
def main(args):
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      metavar='FILE',
      help='File/url describing what to map or run')
  data_group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(data_group, True)
  parser.add_option_group(data_group)

  cache_group = optparse.OptionGroup(parser, 'Cache management')
  cache_group.add_option(
      '--cache',
      default='cache',
      metavar='DIR',
      help='Cache directory, default=%default')
  cache_group.add_option(
      '--max-cache-size',
      type='int',
      metavar='NNN',
      default=20*1024*1024*1024,
      help='Trim if the cache gets larger than this value, default=%default')
  cache_group.add_option(
      '--min-free-space',
      type='int',
      metavar='NNN',
      default=2*1024*1024*1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  cache_group.add_option(
      '--max-items',
      type='int',
      metavar='NNN',
      default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(cache_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(data_group, options)

  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  algo = isolateserver.get_hash_algo(options.namespace)

  try:
    # |options.cache| may not exist until DiskCache() instance is created.
    cache = DiskCache(options.cache, policies, algo)
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      return run_tha_test(
          options.isolated or options.hash, storage, cache, algo, args)
  except Exception as e:
    # Make sure any exception is logged.
    tools.report_error(e)
    logging.exception(e)
    return 1
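Several of these examples construct CachePolicies(max_cache_size, min_free_space, max_items) and hand it to DiskCache; both are defined in run_isolated.py, and CachePolicies is essentially a value object holding the three trimming thresholds. A stand-in of that shape, shown only as a sketch under that assumption:

import collections

# Sketch of a CachePolicies-shaped value object (not the project's exact
# definition): the three fields mirror the --max-cache-size, --min-free-space
# and --max-items options parsed above.
CachePolicies = collections.namedtuple(
    'CachePolicies', ['max_cache_size', 'min_free_space', 'max_items'])

policies = CachePolicies(
    max_cache_size=20 * 1024 * 1024 * 1024,  # 20 GiB
    min_free_space=2 * 1024 * 1024 * 1024,   # 2 GiB
    max_items=100000)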
Code example #24
File: run_isolated.py Project: rzr/Tizen_Crosswalk
def main():
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  group = optparse.OptionGroup(parser, 'Data source')
  group.add_option(
      '-s', '--isolated',
      metavar='FILE',
      help='File/url describing what to map or run')
  group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  group.add_option(
      '-I', '--isolate-server',
      metavar='URL', default='',
      help='Isolate server to use')
  group.add_option(
      '-n', '--namespace',
      default='default-gzip',
      help='namespace to use when using isolateserver, default: %default')
  parser.add_option_group(group)

  group = optparse.OptionGroup(parser, 'Cache management')
  group.add_option(
      '--cache',
      default='cache',
      metavar='DIR',
      help='Cache directory, default=%default')
  group.add_option(
      '--max-cache-size',
      type='int',
      metavar='NNN',
      default=20*1024*1024*1024,
      help='Trim if the cache gets larger than this value, default=%default')
  group.add_option(
      '--min-free-space',
      type='int',
      metavar='NNN',
      default=2*1024*1024*1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  group.add_option(
      '--max-items',
      type='int',
      metavar='NNN',
      default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(group)

  options, args = parser.parse_args()

  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')
  if args:
    logging.debug('Unsupported args %s' % ' '.join(args))
    parser.error('Unsupported args %s' % ' '.join(args))
  if not options.isolate_server:
    parser.error('--isolate-server is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  storage = isolateserver.get_storage(options.isolate_server, options.namespace)
  algo = isolateserver.get_hash_algo(options.namespace)

  try:
    # |options.cache| may not exist until DiskCache() instance is created.
    cache = DiskCache(options.cache, policies, algo)
    outdir = make_temp_dir('run_tha_test', options.cache)
    return run_tha_test(
        options.isolated or options.hash, storage, cache, algo, outdir)
  except Exception as e:
    # Make sure any exception is logged.
    logging.exception(e)
    return 1
Code example #25
  if options.os:
    if options.os not in oses:
      parser.error(
          '--os %s is unknown. Valid values are %s' % (
            options.os, ', '.join(sorted(oses))))
    oses = [options.os]

  if sys.platform in ('win32', 'cygwin'):
    # If we are on Windows, don't generate the tests for Linux and Mac since
    # they use symlinks and we can't create symlinks on windows.
    oses = ['Windows']
    if options.os != 'win32':
      print('Linux and Mac tests skipped since running on Windows.')

  return run_swarming_tests_on_swarming(
      options.swarming,
      options.isolate_server,
      options.priority,
      oses,
      tests,
      options.logs,
      options.no_idempotent)


if __name__ == '__main__':
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  colorama.init()
  sys.exit(main())
Code example #26
File: run_test_cases.py Project: WHS-TechOps/Aviator
def main(argv):
  """CLI frontend to validate arguments."""
  tools.disable_buffering()
  parser, options, cmd = process_args(argv)

  if options.gtest_list_tests:
    # Special case, return the output of the target unmodified.
    return subprocess42.call(cmd + ['--gtest_list_tests'])

  cwd = os.getcwd()
  test_cases = parser.process_gtest_options(cmd, cwd, options)

  if options.no_dump:
    result_file = None
  else:
    result_file = options.result
    if not result_file:
      if cmd[0] == sys.executable:
        result_file = '%s.run_test_cases' % cmd[1]
      else:
        result_file = '%s.run_test_cases' % cmd[0]

  if not test_cases:
    # The fact of not running any test is considered a failure. This is to
    # prevent silent failure with an invalid --gtest_filter argument or because
    # of a misconfigured unit test.
    if test_cases is not None:
      print('Found no test to run')
    if result_file:
      dump_results_as_json(result_file, {
        'test_cases': [],
        'expected': 0,
        'success': [],
        'flaky': [],
        'fail': [],
        'missing': [],
        'duration': 0,
      })
    return 1

  if options.disabled:
    cmd.append('--gtest_also_run_disabled_tests')
  if options.manual:
    cmd.append('--run-manual')

  try:
    return run_test_cases(
        cmd,
        cwd,
        test_cases,
        options.jobs,
        options.timeout,
        options.clusters,
        options.retries,
        options.run_all,
        options.max_failures,
        options.no_cr,
        options.gtest_output,
        result_file,
        options.verbose)
  except Failure as e:
    print >> sys.stderr, e.args[0]
    return 1
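dump_results_as_json above persists the run summary so later tooling can inspect which test cases passed, failed or went missing; run_test_cases.py defines it elsewhere. A plausible minimal version, written here only as a sketch (the real helper may differ):

import json


def dump_results_as_json(result_file, results):
    # Sketch: write the summary dict (test_cases, expected, success, flaky,
    # fail, missing, duration) to result_file as JSON.
    with open(result_file, 'w') as f:
        json.dump(results, f, sort_keys=True, indent=2)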
Code example #27
def main(args):
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      metavar='FILE',
      help='File/url describing what to map or run')
  data_group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(data_group, True)
  parser.add_option_group(data_group)

  cache_group = optparse.OptionGroup(parser, 'Cache management')
  cache_group.add_option(
      '--cache',
      default='cache',
      metavar='DIR',
      help='Cache directory, default=%default')
  cache_group.add_option(
      '--max-cache-size',
      type='int',
      metavar='NNN',
      default=20*1024*1024*1024,
      help='Trim if the cache gets larger than this value, default=%default')
  cache_group.add_option(
      '--min-free-space',
      type='int',
      metavar='NNN',
      default=2*1024*1024*1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  cache_group.add_option(
      '--max-items',
      type='int',
      metavar='NNN',
      default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(cache_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(data_group, options)

  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)

  try:
    # |options.cache| path may not exist until DiskCache() instance is created.
    cache = DiskCache(
        options.cache, policies, isolateserver.get_hash_algo(options.namespace))
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      # Hashing schemes used by |storage| and |cache| MUST match.
      assert storage.hash_algo == cache.hash_algo
      return run_tha_test(
          options.isolated or options.hash, storage, cache, args)
  except Exception as e:
    # Make sure any exception is logged.
    tools.report_error(e)
    logging.exception(e)
    return 1
Code example #28
File: run_isolated.py Project: bbmjja8123/chromium-1
def main():
    tools.disable_buffering()
    parser = tools.OptionParserWithLogging(usage='%prog <options>',
                                           version=__version__,
                                           log_file=RUN_ISOLATED_LOG_FILE)

    group = optparse.OptionGroup(parser, 'Data source')
    group.add_option('-s',
                     '--isolated',
                     metavar='FILE',
                     help='File/url describing what to map or run')
    group.add_option('-H',
                     '--hash',
                     help='Hash of the .isolated to grab from the hash table')
    group.add_option('-I',
                     '--isolate-server',
                     metavar='URL',
                     default='',
                     help='Isolate server to use')
    group.add_option(
        '-n',
        '--namespace',
        default='default-gzip',
        help='namespace to use when using isolateserver, default: %default')
    parser.add_option_group(group)

    group = optparse.OptionGroup(parser, 'Cache management')
    group.add_option('--cache',
                     default='cache',
                     metavar='DIR',
                     help='Cache directory, default=%default')
    group.add_option(
        '--max-cache-size',
        type='int',
        metavar='NNN',
        default=20 * 1024 * 1024 * 1024,
        help='Trim if the cache gets larger than this value, default=%default')
    group.add_option(
        '--min-free-space',
        type='int',
        metavar='NNN',
        default=2 * 1024 * 1024 * 1024,
        help='Trim if disk free space becomes lower than this value, '
        'default=%default')
    group.add_option(
        '--max-items',
        type='int',
        metavar='NNN',
        default=100000,
        help='Trim if more than this number of items are in the cache '
        'default=%default')
    parser.add_option_group(group)

    options, args = parser.parse_args()

    if bool(options.isolated) == bool(options.hash):
        logging.debug('One and only one of --isolated or --hash is required.')
        parser.error('One and only one of --isolated or --hash is required.')
    if args:
        logging.debug('Unsupported args %s' % ' '.join(args))
        parser.error('Unsupported args %s' % ' '.join(args))
    if not options.isolate_server:
        parser.error('--isolate-server is required.')

    options.cache = os.path.abspath(options.cache)
    policies = CachePolicies(options.max_cache_size, options.min_free_space,
                             options.max_items)
    storage = isolateserver.get_storage(options.isolate_server,
                                        options.namespace)
    algo = isolateserver.get_hash_algo(options.namespace)

    try:
        # |options.cache| may not exist until DiskCache() instance is created.
        cache = DiskCache(options.cache, policies, algo)
        outdir = make_temp_dir('run_tha_test', options.cache)
        return run_tha_test(options.isolated or options.hash, storage, cache,
                            algo, outdir)
    except Exception as e:
        # Make sure any exception is logged.
        logging.exception(e)
        return 1