Example #1
    def test_clean_caches_disk(self):
        # Create an isolated cache and a named cache, each with 10 items.
        # Ensure that the oldest items are trimmed from both until enough
        # disk space is free.
        now = self._now
        self._free_disk = 100000

        # Setup caches.
        policies = _get_policies(min_free_space=1000)
        named_cache = local_caching.NamedCache(
            tempfile.mkdtemp(dir=self.tempdir, prefix='nc'), policies)
        short_names = self._prepare_named_cache(named_cache)

        isolated_cache = local_caching.DiskContentAddressedCache(
            tempfile.mkdtemp(dir=self.tempdir, prefix='ic'),
            policies,
            trim=False)
        self._prepare_isolated_cache(isolated_cache)
        self.assertEqual(now, self._now)

        # Request trimming.
        self._free_disk = 950
        trimmed = local_caching.trim_caches(
            [isolated_cache, named_cache],
            self.tempdir,
            min_free_space=policies.min_free_space,
            max_age_secs=policies.max_age_secs)
        # At least 50 bytes must be freed; the trimmed sizes below sum to 56.
        expected = [1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7]
        self.assertEqual(expected, trimmed)

        # Cache verification.
        self._verify_named_cache(named_cache, short_names, range(8, 11))
        self._verify_isolated_cache(isolated_cache, range(8, 11))
Example #2
    def test_clean_caches_memory_time(self):
        # Test that cleaning is distributed correctly, independently of the
        # cache location.
        caches = self._get_5_caches()
        self.mock(time, 'time', lambda: self._now)
        trimmed = local_caching.trim_caches(
            caches,
            self.tempdir,
            min_free_space=0,
            max_age_secs=10)
        # Only the last 10 items are kept. The first 90 items were trimmed.
        self.assertEqual(range(1, 91), trimmed)
Example #3
    def test_clean_caches_memory_size(self):
        # Test that cleaning is distributed correctly, independently of the
        # cache location.
        caches = self._get_5_caches()
        # 100 bytes must be freed.
        self._free_disk = 900
        trimmed = local_caching.trim_caches(
            caches,
            self.tempdir,
            min_free_space=1000,
            max_age_secs=0)
        # sum(range(1, 15)) == 105, the first cumulative total above 100.
        self.assertEqual(range(1, 15), trimmed)
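
Putting the three tests above together, the following is a minimal sketch (not from the source) of how the same call might be wired up outside a test harness. It only uses constructors and arguments that appear in the examples above; the trim_all helper name and the tempfile-based cache directories are illustrative assumptions.

import tempfile

import local_caching

def trim_all(cache_root, policies):
  # Illustrative helper (hypothetical name): build the two cache types shown
  # above and trim them together against a shared policy.
  named_cache = local_caching.NamedCache(
      tempfile.mkdtemp(dir=cache_root, prefix='nc'), policies)
  isolated_cache = local_caching.DiskContentAddressedCache(
      tempfile.mkdtemp(dir=cache_root, prefix='ic'), policies, trim=False)
  # trim_caches evicts the oldest items across all of the passed caches until
  # min_free_space and max_age_secs are satisfied; as the assertions above
  # show, it returns the sizes of the evicted items.
  return local_caching.trim_caches(
      [isolated_cache, named_cache],
      cache_root,
      min_free_space=policies.min_free_space,
      max_age_secs=policies.max_age_secs)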
Example #4
def main(args):
  # Warning: when --argsfile is used, the strings are unicode instances; when
  # parsed normally, they are str instances.
  (parser, options, args) = parse_args(args)

  if not file_path.enable_symlink():
    logging.error('Symlink support is not enabled')

  # TODO(maruel): CIPD caches should be defined at a higher level here too, so
  # they can be cleaned the same way.
  isolate_cache = isolateserver.process_cache_options(options, trim=False)
  named_cache = process_named_cache_options(parser, options)
  caches = []
  if isolate_cache:
    caches.append(isolate_cache)
  if named_cache:
    caches.append(named_cache)
  root = caches[0].cache_dir if caches else unicode(os.getcwd())
  if options.clean:
    if options.isolated:
      parser.error('Can\'t use --isolated with --clean.')
    if options.isolate_server:
      parser.error('Can\'t use --isolate-server with --clean.')
    if options.json:
      parser.error('Can\'t use --json with --clean.')
    if options.named_caches:
      parser.error('Can\'t use --named-cache with --clean.')
    # Trim first, then clean.
    local_caching.trim_caches(
        caches,
        root,
        min_free_space=options.min_free_space,
        max_age_secs=MAX_AGE_SECS)
    for c in caches:
      c.cleanup()
    return 0

  if not options.no_clean:
    # Trim but do not clean (which is slower).
    local_caching.trim_caches(
        caches,
        root,
        min_free_space=options.min_free_space,
        max_age_secs=MAX_AGE_SECS)

  if not options.isolated and not args:
    parser.error('--isolated or command to run is required.')

  auth.process_auth_options(parser, options)

  isolateserver.process_isolate_server_options(
      parser, options, True, False)
  if not options.isolate_server:
    if options.isolated:
      parser.error('--isolated requires --isolate-server')
    if ISOLATED_OUTDIR_PARAMETER in args:
      parser.error(
        '%s in args requires --isolate-server' % ISOLATED_OUTDIR_PARAMETER)

  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))

  if any('=' not in i for i in options.env):
    parser.error(
        '--env requires key=value form; the value can be left empty to '
        'delete the variable.')
  options.env = dict(i.split('=', 1) for i in options.env)

  prefixes = {}
  cwd = os.path.realpath(os.getcwd())
  for item in options.env_prefix:
    if '=' not in item:
      parser.error(
        '--env-prefix %r is malformed, must be in the form `VAR=./path`'
        % item)
    key, opath = item.split('=', 1)
    if os.path.isabs(opath):
      parser.error('--env-prefix %r path is bad, must be relative.' % opath)
    opath = os.path.normpath(opath)
    if not os.path.realpath(os.path.join(cwd, opath)).startswith(cwd):
      parser.error(
        '--env-prefix %r path is bad, must be relative and not contain `..`.'
        % opath)
    prefixes.setdefault(key, []).append(opath)
  options.env_prefix = prefixes

  cipd.validate_cipd_options(parser, options)

  install_packages_fn = noop_install_packages
  if options.cipd_enabled:
    install_packages_fn = lambda run_dir: install_client_and_packages(
        run_dir, cipd.parse_package_args(options.cipd_packages),
        options.cipd_server, options.cipd_client_package,
        options.cipd_client_version, cache_dir=options.cipd_cache)

  @contextlib.contextmanager
  def install_named_caches(run_dir):
    # WARNING: this function depends on the "options" and "named_cache"
    # variables defined in the outer function.
    assert unicode(run_dir), repr(run_dir)
    assert os.path.isabs(run_dir), run_dir
    caches = [
      (os.path.join(run_dir, unicode(relpath)), name)
      for name, relpath in options.named_caches
    ]
    for path, name in caches:
      named_cache.install(path, name)
    named_cache.trim()
    try:
      yield
    finally:
      # Uninstall each named cache, returning it to the cache pool. If an
      # uninstall fails for a given cache, it will remain in the task's
      # temporary space, get cleaned up by the Swarming bot, and be lost.
      #
      # If the Swarming bot cannot clean up the cache, it will handle it like
      # any other bot file that could not be removed.
      for path, name in caches:
        try:
          named_cache.uninstall(path, name)
        except local_caching.NamedCacheError:
          logging.exception('Error while removing named cache %r at %r. '
                            'The cache will be lost.', name, path)
      named_cache.trim()

  extra_args = []
  command = []
  if options.raw_cmd:
    command = args
    if options.relative_cwd:
      a = os.path.normpath(os.path.abspath(options.relative_cwd))
      if not a.startswith(os.getcwd()):
        parser.error(
            '--relative-cwd must not try to escape the working directory')
  else:
    if options.relative_cwd:
      parser.error('--relative-cwd requires --raw-cmd')
    extra_args = args

  data = TaskData(
      command=command,
      relative_cwd=options.relative_cwd,
      extra_args=extra_args,
      isolated_hash=options.isolated,
      storage=None,
      isolate_cache=isolate_cache,
      outputs=options.output,
      install_named_caches=install_named_caches,
      leak_temp_dir=options.leak_temp_dir,
      root_dir=_to_unicode(options.root_dir),
      hard_timeout=options.hard_timeout,
      grace_period=options.grace_period,
      bot_file=options.bot_file,
      switch_to_account=options.switch_to_account,
      install_packages_fn=install_packages_fn,
      use_symlinks=options.use_symlinks,
      env=options.env,
      env_prefix=options.env_prefix)
  try:
    if options.isolate_server:
      storage = isolateserver.get_storage(
          options.isolate_server, options.namespace)
      with storage:
        data = data._replace(storage=storage)
        # Hashing schemes used by |storage| and |isolate_cache| MUST match.
        assert storage.hash_algo == isolate_cache.hash_algo
        return run_tha_test(data, options.json)
    return run_tha_test(data, options.json)
  except (
      cipd.Error,
      local_caching.NamedCacheError,
      local_caching.NotFoundError) as ex:
    print >> sys.stderr, ex.message
    return 1