Example #1
0
def main(args):
    parser = logging_utils.OptionParserWithLogging(
        usage='%prog <options>',
        version=__version__,
        log_file=RUN_ISOLATED_LOG_FILE)
    parser.add_option(
        '--json',
        help=
        'dump output metadata to json file. When used, run_isolated returns '
        'non-zero only on internal failure')
    parser.add_option('--hard-timeout',
                      type='float',
                      help='Enforce hard timeout in execution')
    parser.add_option('--grace-period',
                      type='float',
                      help='Grace period between SIGTERM and SIGKILL')
    data_group = optparse.OptionGroup(parser, 'Data source')
    data_group.add_option(
        '-s',
        '--isolated',
        help='Hash of the .isolated to grab from the isolate server')
    isolateserver.add_isolate_server_options(data_group)
    parser.add_option_group(data_group)

    isolateserver.add_cache_options(parser)
    parser.set_defaults(cache='cache')

    debug_group = optparse.OptionGroup(parser, 'Debugging')
    debug_group.add_option(
        '--leak-temp-dir',
        action='store_true',
        help='Deliberately leak isolate\'s temp dir for later examination '
        '[default: %default]')
    debug_group.add_option('--root-dir',
                           help='Use a directory instead of a random one')
    parser.add_option_group(debug_group)

    auth.add_auth_options(parser)
    options, args = parser.parse_args(args)
    if not options.isolated:
        parser.error('--isolated is required.')
    auth.process_auth_options(parser, options)
    isolateserver.process_isolate_server_options(parser, options, True)

    cache = isolateserver.process_cache_options(options)
    if options.root_dir:
        options.root_dir = unicode(os.path.abspath(options.root_dir))
    if options.json:
        options.json = unicode(os.path.abspath(options.json))
    with isolateserver.get_storage(options.isolate_server,
                                   options.namespace) as storage:
        # Hashing schemes used by |storage| and |cache| MUST match.
        assert storage.hash_algo == cache.hash_algo
        return run_tha_test(options.isolated, storage, cache,
                            options.leak_temp_dir, options.json,
                            options.root_dir, options.hard_timeout,
                            options.grace_period, args)
Example #2
0
def main(args):
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--json',
      help='dump output metadata to json file. When used, run_isolated returns '
           'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='int', help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='int',
      help='Grace period between SIGTERM and SIGKILL')
  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
          '[default: %default]')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, options.json,
        options.root_dir, options.hard_timeout, options.grace_period, args)
Example #3
0
def main(args):
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  data_group.add_option(
      '-H', dest='isolated', help=optparse.SUPPRESS_HELP)
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
          '[default: %default]')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, args)
Example #4
0
def main(args):
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  data_group.add_option(
      '-H', dest='isolated', help=optparse.SUPPRESS_HELP)
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
          '[default: %default]')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, args)
Example #5
0
def main(args):
    tools.disable_buffering()
    parser = logging_utils.OptionParserWithLogging(
        usage="%prog <options>", version=__version__, log_file=RUN_ISOLATED_LOG_FILE
    )
    parser.add_option(
        "--json",
        help="dump output metadata to json file. When used, run_isolated returns " "non-zero only on internal failure",
    )
    data_group = optparse.OptionGroup(parser, "Data source")
    data_group.add_option("-s", "--isolated", help="Hash of the .isolated to grab from the isolate server")
    isolateserver.add_isolate_server_options(data_group)
    parser.add_option_group(data_group)

    isolateserver.add_cache_options(parser)
    parser.set_defaults(cache="cache")

    debug_group = optparse.OptionGroup(parser, "Debugging")
    debug_group.add_option(
        "--leak-temp-dir",
        action="store_true",
        help="Deliberately leak isolate's temp dir for later examination " "[default: %default]",
    )
    debug_group.add_option("--root-dir", help="Use a directory instead of a random one")
    parser.add_option_group(debug_group)

    auth.add_auth_options(parser)
    options, args = parser.parse_args(args)
    if not options.isolated:
        parser.error("--isolated is required.")
    auth.process_auth_options(parser, options)
    isolateserver.process_isolate_server_options(parser, options, True)

    cache = isolateserver.process_cache_options(options)
    with isolateserver.get_storage(options.isolate_server, options.namespace) as storage:
        # Hashing schemes used by |storage| and |cache| MUST match.
        assert storage.hash_algo == cache.hash_algo
        return run_tha_test(
            options.isolated, storage, cache, options.leak_temp_dir, options.json, options.root_dir, args
        )
Example #6
0
def main(args):
    (parser, options, args) = parse_args(args)

    isolate_cache = isolateserver.process_cache_options(options, trim=False)
    named_cache_manager = named_cache.process_named_cache_options(
        parser, options)
    if options.clean:
        if options.isolated:
            parser.error('Can\'t use --isolated with --clean.')
        if options.isolate_server:
            parser.error('Can\'t use --isolate-server with --clean.')
        if options.json:
            parser.error('Can\'t use --json with --clean.')
        if options.named_caches:
            parser.error('Can\'t use --named-cache with --clean.')
        clean_caches(options, isolate_cache, named_cache_manager)
        return 0

    if not options.no_clean:
        clean_caches(options, isolate_cache, named_cache_manager)

    if not options.isolated and not args:
        parser.error('--isolated or command to run is required.')

    auth.process_auth_options(parser, options)

    isolateserver.process_isolate_server_options(parser, options, True, False)
    if not options.isolate_server:
        if options.isolated:
            parser.error('--isolated requires --isolate-server')
        if ISOLATED_OUTDIR_PARAMETER in args:
            parser.error('%s in args requires --isolate-server' %
                         ISOLATED_OUTDIR_PARAMETER)

    if options.root_dir:
        options.root_dir = unicode(os.path.abspath(options.root_dir))
    if options.json:
        options.json = unicode(os.path.abspath(options.json))

    cipd.validate_cipd_options(parser, options)

    install_packages_fn = noop_install_packages
    if options.cipd_enabled:
        install_packages_fn = lambda run_dir: install_client_and_packages(
            run_dir,
            cipd.parse_package_args(options.cipd_packages),
            options.cipd_server,
            options.cipd_client_package,
            options.cipd_client_version,
            cache_dir=options.cipd_cache)

    @contextlib.contextmanager
    def init_named_caches(run_dir):
        # WARNING: this function depends on "options" variable defined in the outer
        # function.
        with named_cache_manager.open():
            named_cache_manager.create_symlinks(run_dir, options.named_caches)
        try:
            yield
        finally:
            if not options.leak_temp_dir:
                named_cache_manager.delete_symlinks(run_dir,
                                                    options.named_caches)

    try:
        if options.isolate_server:
            storage = isolateserver.get_storage(options.isolate_server,
                                                options.namespace)
            with storage:
                # Hashing schemes used by |storage| and |isolate_cache| MUST match.
                assert storage.hash_algo == isolate_cache.hash_algo
                return run_tha_test(args, options.isolated, storage,
                                    isolate_cache, options.output,
                                    init_named_caches, options.leak_temp_dir,
                                    options.json, options.root_dir,
                                    options.hard_timeout, options.grace_period,
                                    options.bot_file, install_packages_fn,
                                    options.use_symlinks)
        return run_tha_test(args, options.isolated, None, isolate_cache,
                            options.output, init_named_caches,
                            options.leak_temp_dir, options.json,
                            options.root_dir, options.hard_timeout,
                            options.grace_period, options.bot_file,
                            install_packages_fn, options.use_symlinks)
    except (cipd.Error, named_cache.Error) as ex:
        print >> sys.stderr, ex.message
        return 1
Example #7
0
  def test_clean_caches(self):
    # Create an isolated cache and a named cache each with 2 items. Ensure that
    # one item from each is removed.
    fake_time = 1
    fake_free_space = [102400]
    np = self.temp_join('named_cache')
    ip = self.temp_join('isolated_cache')
    args = [
      '--named-cache-root', np, '--cache', ip, '--clean',
      '--min-free-space', '10240',
      '--log-file', self.temp_join('run_isolated.log'),
    ]
    self.mock(file_path, 'get_free_space', lambda _: fake_free_space[0])
    parser, options, _ = run_isolated.parse_args(args)
    isolate_cache = isolateserver.process_cache_options(
        options, trim=False, time_fn=lambda: fake_time)
    self.assertIsInstance(isolate_cache, isolateserver.DiskCache)
    named_cache_manager = named_cache.process_named_cache_options(
        parser, options)
    self.assertIsInstance(named_cache_manager, named_cache.CacheManager)

    # Add items to these caches.
    small = '0123456789'
    big = small * 1014
    small_digest = unicode(ALGO(small).hexdigest())
    big_digest = unicode(ALGO(big).hexdigest())
    with isolate_cache:
      fake_time = 1
      isolate_cache.write(big_digest, [big])
      fake_time = 2
      isolate_cache.write(small_digest, [small])
    with named_cache_manager.open(time_fn=lambda: fake_time):
      fake_time = 1
      put_to_named_cache(named_cache_manager, u'first', u'big', big)
      fake_time = 3
      put_to_named_cache(named_cache_manager, u'second', u'small', small)

    # Ensure the caches contain the expected data.
    actual = genTree(np)
    # Figure out the cache path names.
    cache_small = [
        os.path.dirname(n) for n in actual if os.path.basename(n) == 'small'][0]
    cache_big = [
        os.path.dirname(n) for n in actual if os.path.basename(n) == 'big'][0]
    expected = {
      os.path.join(cache_small, u'small'): small,
      os.path.join(cache_big, u'big'): big,
      u'state.json':
          '{"items":[["first",["%s",1]],["second",["%s",3]]],"version":2}' % (
          cache_big, cache_small),
    }
    self.assertEqual(expected, actual)
    expected = {
      big_digest: big,
      small_digest: small,
      u'state.json':
          '{"items":[["%s",[10140,1]],["%s",[10,2]]],"version":2}' % (
          big_digest, small_digest),
    }
    self.assertEqual(expected, genTree(ip))

    # Request trimming.
    fake_free_space[0] = 1020
    # Abuse the fact that the named cache is trimmed after the isolated cache.
    def rmtree(p):
      self.assertEqual(os.path.join(np, cache_big), p)
      fake_free_space[0] += 10240
      return old_rmtree(p)
    old_rmtree = self.mock(file_path, 'rmtree', rmtree)
    isolate_cache = isolateserver.process_cache_options(options, trim=False)
    named_cache_manager = named_cache.process_named_cache_options(
        parser, options)
    actual = run_isolated.clean_caches(
        options, isolate_cache, named_cache_manager)
    self.assertEqual(2, actual)
    # One of each entry should have been cleaned up. This only happens to work
    # because:
    # - file_path.get_free_space() is mocked
    # - DiskCache.trim() keeps its own internal counter while deleting files so
    #   it ignores get_free_space() output while deleting files.
    actual = genTree(np)
    expected = {
      os.path.join(cache_small, u'small'): small,
      u'state.json':
          '{"items":[["second",["%s",3]]],"version":2}' % cache_small,
    }
    self.assertEqual(expected, actual)
    expected = {
      small_digest: small,
      u'state.json':
          '{"items":[["%s",[10,2]]],"version":2}' % small_digest,
    }
    self.assertEqual(expected, genTree(ip))
Example #8
0
def main(args):
  (parser, options, args) = parse_args(args)

  isolate_cache = isolateserver.process_cache_options(options, trim=False)
  named_cache_manager = named_cache.process_named_cache_options(parser, options)
  if options.clean:
    if options.isolated:
      parser.error('Can\'t use --isolated with --clean.')
    if options.isolate_server:
      parser.error('Can\'t use --isolate-server with --clean.')
    if options.json:
      parser.error('Can\'t use --json with --clean.')
    if options.named_caches:
      parser.error('Can\'t use --named-cache with --clean.')
    clean_caches(options, isolate_cache, named_cache_manager)
    return 0

  if not options.no_clean:
    clean_caches(options, isolate_cache, named_cache_manager)

  if not options.isolated and not args:
    parser.error('--isolated or command to run is required.')

  auth.process_auth_options(parser, options)

  isolateserver.process_isolate_server_options(
    parser, options, True, False)
  if not options.isolate_server:
    if options.isolated:
      parser.error('--isolated requires --isolate-server')
    if ISOLATED_OUTDIR_PARAMETER in args:
      parser.error(
        '%s in args requires --isolate-server' % ISOLATED_OUTDIR_PARAMETER)

  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))

  cipd.validate_cipd_options(parser, options)

  install_packages_fn = noop_install_packages
  if options.cipd_enabled:
    install_packages_fn = lambda run_dir: install_client_and_packages(
        run_dir, cipd.parse_package_args(options.cipd_packages),
        options.cipd_server, options.cipd_client_package,
        options.cipd_client_version, cache_dir=options.cipd_cache)

  @contextlib.contextmanager
  def install_named_caches(run_dir):
    # WARNING: this function depends on "options" variable defined in the outer
    # function.
    caches = [
      (os.path.join(run_dir, unicode(relpath)), name)
      for name, relpath in options.named_caches
    ]
    with named_cache_manager.open():
      for path, name in caches:
        named_cache_manager.install(path, name)
    try:
      yield
    finally:
      # Uninstall each named cache, returning it to the cache pool. If an
      # uninstall fails for a given cache, it will remain in the task's
      # temporary space, get cleaned up by the Swarming bot, and be lost.
      #
      # If the Swarming bot cannot clean up the cache, it will handle it like
      # any other bot file that could not be removed.
      with named_cache_manager.open():
        for path, name in caches:
          try:
            named_cache_manager.uninstall(path, name)
          except named_cache.Error:
            logging.exception('Error while removing named cache %r at %r. '
                              'The cache will be lost.', path, name)

  try:
    if options.isolate_server:
      storage = isolateserver.get_storage(
          options.isolate_server, options.namespace)
      with storage:
        # Hashing schemes used by |storage| and |isolate_cache| MUST match.
        assert storage.hash_algo == isolate_cache.hash_algo
        return run_tha_test(
            args,
            options.isolated,
            storage,
            isolate_cache,
            options.output,
            install_named_caches,
            options.leak_temp_dir,
            options.json, options.root_dir,
            options.hard_timeout,
            options.grace_period,
            options.bot_file,
            options.switch_to_account,
            install_packages_fn,
            options.use_symlinks)
    return run_tha_test(
        args,
        options.isolated,
        None,
        isolate_cache,
        options.output,
        install_named_caches,
        options.leak_temp_dir,
        options.json,
        options.root_dir,
        options.hard_timeout,
        options.grace_period,
        options.bot_file,
        options.switch_to_account,
        install_packages_fn,
        options.use_symlinks)
  except (cipd.Error, named_cache.Error) as ex:
    print >> sys.stderr, ex.message
    return 1
Example #9
0
def main(args):
    parser = create_option_parser()
    options, args = parser.parse_args(args)

    cache = isolateserver.process_cache_options(options)
    if options.clean:
        if options.isolated:
            parser.error('Can\'t use --isolated with --clean.')
        if options.isolate_server:
            parser.error('Can\'t use --isolate-server with --clean.')
        if options.json:
            parser.error('Can\'t use --json with --clean.')
        cache.cleanup()
        return 0
    if not options.no_clean:
        cache.cleanup()

    if not options.isolated and not args:
        parser.error('--isolated or command to run is required.')

    auth.process_auth_options(parser, options)

    isolateserver.process_isolate_server_options(parser, options, True, False)
    if not options.isolate_server:
        if options.isolated:
            parser.error('--isolated requires --isolate-server')
        if ISOLATED_OUTDIR_PARAMETER in args:
            parser.error('%s in args requires --isolate-server' %
                         ISOLATED_OUTDIR_PARAMETER)

    if options.root_dir:
        options.root_dir = unicode(os.path.abspath(options.root_dir))
    if options.json:
        options.json = unicode(os.path.abspath(options.json))

    cipd.validate_cipd_options(parser, options)

    install_packages_fn = lambda run_dir: install_packages(
        run_dir,
        cipd.parse_package_args(options.cipd_packages),
        options.cipd_server,
        options.cipd_client_package,
        options.cipd_client_version,
        cache_dir=options.cipd_cache)

    try:
        command = [] if options.isolated else args
        if options.isolate_server:
            storage = isolateserver.get_storage(options.isolate_server,
                                                options.namespace)
            with storage:
                # Hashing schemes used by |storage| and |cache| MUST match.
                assert storage.hash_algo == cache.hash_algo
                return run_tha_test(command, options.isolated, storage, cache,
                                    options.leak_temp_dir, options.json,
                                    options.root_dir, options.hard_timeout,
                                    options.grace_period, options.bot_file,
                                    args, install_packages_fn,
                                    options.use_symlinks)
        return run_tha_test(command, options.isolated, None, cache,
                            options.leak_temp_dir, options.json,
                            options.root_dir, options.hard_timeout,
                            options.grace_period, options.bot_file, args,
                            install_packages_fn, options.use_symlinks)
    except cipd.Error as ex:
        print >> sys.stderr, ex.message
        return 1
Example #10
0
def main(args):
  # Warning: when --argsfile is used, the strings are unicode instances, when
  # parsed normally, the strings are str instances.
  (parser, options, args) = parse_args(args)

  if not file_path.enable_symlink():
    logging.error('Symlink support is not enabled')

  isolate_cache = isolateserver.process_cache_options(options, trim=False)
  named_cache_manager = named_cache.process_named_cache_options(parser, options)
  if options.clean:
    if options.isolated:
      parser.error('Can\'t use --isolated with --clean.')
    if options.isolate_server:
      parser.error('Can\'t use --isolate-server with --clean.')
    if options.json:
      parser.error('Can\'t use --json with --clean.')
    if options.named_caches:
      parser.error('Can\'t use --named-cache with --clean.')
    clean_caches(options, isolate_cache, named_cache_manager)
    return 0

  if not options.no_clean:
    clean_caches(options, isolate_cache, named_cache_manager)

  if not options.isolated and not args:
    parser.error('--isolated or command to run is required.')

  auth.process_auth_options(parser, options)

  isolateserver.process_isolate_server_options(
      parser, options, True, False)
  if not options.isolate_server:
    if options.isolated:
      parser.error('--isolated requires --isolate-server')
    if ISOLATED_OUTDIR_PARAMETER in args:
      parser.error(
        '%s in args requires --isolate-server' % ISOLATED_OUTDIR_PARAMETER)

  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))

  if any('=' not in i for i in options.env):
    parser.error(
        '--env requires key=value form. The value can be skipped to delete '
        'the variable')
  options.env = dict(i.split('=', 1) for i in options.env)

  prefixes = {}
  cwd = os.path.realpath(os.getcwd())
  for item in options.env_prefix:
    if '=' not in item:
      parser.error(
        '--env-prefix %r is malformed, must be in the form `VAR=./path`'
        % item)
    key, opath = item.split('=', 1)
    if os.path.isabs(opath):
      parser.error('--env-prefix %r path is bad, must be relative.' % opath)
    opath = os.path.normpath(opath)
    if not os.path.realpath(os.path.join(cwd, opath)).startswith(cwd):
      parser.error(
        '--env-prefix %r path is bad, must be relative and not contain `..`.'
        % opath)
    prefixes.setdefault(key, []).append(opath)
  options.env_prefix = prefixes

  cipd.validate_cipd_options(parser, options)

  install_packages_fn = noop_install_packages
  if options.cipd_enabled:
    install_packages_fn = lambda run_dir: install_client_and_packages(
        run_dir, cipd.parse_package_args(options.cipd_packages),
        options.cipd_server, options.cipd_client_package,
        options.cipd_client_version, cache_dir=options.cipd_cache)

  @contextlib.contextmanager
  def install_named_caches(run_dir):
    # WARNING: this function depends on "options" variable defined in the outer
    # function.
    caches = [
      (os.path.join(run_dir, unicode(relpath)), name)
      for name, relpath in options.named_caches
    ]
    with named_cache_manager.open():
      for path, name in caches:
        named_cache_manager.install(path, name)
    try:
      yield
    finally:
      # Uninstall each named cache, returning it to the cache pool. If an
      # uninstall fails for a given cache, it will remain in the task's
      # temporary space, get cleaned up by the Swarming bot, and be lost.
      #
      # If the Swarming bot cannot clean up the cache, it will handle it like
      # any other bot file that could not be removed.
      with named_cache_manager.open():
        for path, name in caches:
          try:
            named_cache_manager.uninstall(path, name)
          except named_cache.Error:
            logging.exception('Error while removing named cache %r at %r. '
                              'The cache will be lost.', path, name)

  extra_args = []
  command = []
  if options.raw_cmd:
    command = args
    if options.relative_cwd:
      a = os.path.normpath(os.path.abspath(options.relative_cwd))
      if not a.startswith(os.getcwd()):
        parser.error(
            '--relative-cwd must not try to escape the working directory')
  else:
    if options.relative_cwd:
      parser.error('--relative-cwd requires --raw-cmd')
    extra_args = args

  data = TaskData(
      command=command,
      relative_cwd=options.relative_cwd,
      extra_args=extra_args,
      isolated_hash=options.isolated,
      storage=None,
      isolate_cache=isolate_cache,
      outputs=options.output,
      install_named_caches=install_named_caches,
      leak_temp_dir=options.leak_temp_dir,
      root_dir=_to_unicode(options.root_dir),
      hard_timeout=options.hard_timeout,
      grace_period=options.grace_period,
      bot_file=options.bot_file,
      switch_to_account=options.switch_to_account,
      install_packages_fn=install_packages_fn,
      use_symlinks=options.use_symlinks,
      env=options.env,
      env_prefix=options.env_prefix)
  try:
    if options.isolate_server:
      storage = isolateserver.get_storage(
          options.isolate_server, options.namespace)
      with storage:
        data = data._replace(storage=storage)
        # Hashing schemes used by |storage| and |isolate_cache| MUST match.
        assert storage.hash_algo == isolate_cache.hash_algo
        return run_tha_test(data, options.json)
    return run_tha_test(data, options.json)
  except (cipd.Error, named_cache.Error) as ex:
    print >> sys.stderr, ex.message
    return 1