Example #1
  def test_async_push_upload_errors(self):
    chunk = 'data_chunk'

    def push_side_effect():
      raise IOError('Nope')

    content_sources = (
        # A plain function returning a list with the chunk.
        lambda: [chunk],
        # A lambda containing 'yield' is a generator function that yields
        # the chunk (see the note after this test).
        lambda: [(yield chunk)],
    )

    for use_zip in (False, True):
      for source in content_sources:
        item = FakeItem(chunk)
        self.mock(item, 'content', source)
        server_ref = isolate_storage.ServerRef(
            'http://localhost:1', 'default-gzip' if use_zip else 'default')
        storage_api = MockedStorageApi(
            server_ref, {item.digest: 'push_state'}, push_side_effect)
        storage = isolateserver.Storage(storage_api)
        channel = threading_utils.TaskChannel()
        storage._async_push(channel, item, self.get_push_state(storage, item))
        with self.assertRaises(IOError):
          next(channel)
        # The initial attempt plus all retries.
        attempts = 1 + storage.net_thread_pool.RETRIES
        # Single push attempt call arguments.
        expected_push = (
            item, 'push_state', item.zipped if use_zip else item.data)
        # Ensure all pushes are attempted.
        self.assertEqual(
            [expected_push] * attempts, storage_api.push_calls)
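
Note: the second content source above relies on a Python quirk: a 'yield'
inside a lambda turns the lambda into a generator function. A minimal
standalone sketch of the idiom (hypothetical names, not part of the test):

  # Calling gen_fn() returns a generator; iterating it yields the chunk,
  # so it behaves like the plain list-returning lambda above.
  gen_fn = lambda: [(yield 'data_chunk')]
  assert list(gen_fn()) == ['data_chunk']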
Example #2
  def _archive_smoke(self, size):
    self.server.store_hash_instead()
    files = {}
    for i in range(5):
      name = '512mb_%d.%s' % (i, isolateserver.ALREADY_COMPRESSED_TYPES[0])
      logging.info('Writing %s', name)
      p = os.path.join(self.tempdir, name)
      h = hashlib.sha1()
      data = os.urandom(1024)
      with open(p, 'wb') as f:
        # Write 512MiB.
        for _ in range(size // len(data)):
          f.write(data)
          h.update(data)
      os.chmod(p, 0o600)
      files[p] = h.hexdigest()

    server_ref = isolate_storage.ServerRef(self.server.url, 'default')
    with isolateserver.get_storage(server_ref) as storage:
      logging.info('Archiving')
      results, cold, hot = isolateserver.archive_files_to_storage(
          storage, list(files), None)
      logging.info('Done')

    expected = {'default': {h: h for h in files.values()}}
    self.assertEqual(expected, self.server.contents)
    self.assertEqual(files, dict(results))
    # Everything is cold.
    self.assertEqual(
        sorted(files.items()), sorted((f.path, f.digest) for f in cold))
    self.assertEqual([], [(f.path, f.digest) for f in hot])
Example #3
  def test_upload_items(self):
    server_ref = isolate_storage.ServerRef('http://localhost:1', 'default')
    items = [
      isolateserver.BufferItem('a'*12, server_ref.hash_algo),
      isolateserver.BufferItem('', server_ref.hash_algo),
      isolateserver.BufferItem('c'*1222, server_ref.hash_algo),
      isolateserver.BufferItem('d'*1223, server_ref.hash_algo),
    ]
    missing = {
      items[2]: 123,
      items[3]: 456,
    }
    storage_api = MockedStorageApi(
        server_ref,
        {item.digest: push_state for item, push_state in missing.items()})
    storage = isolateserver.Storage(storage_api)

    # Intentionally pass a generator, to confirm it works.
    result = storage.upload_items((i for i in items))
    self.assertEqual(sorted(missing), sorted(result))
    self.assertEqual(4, len(items))
    self.assertEqual(2, len(missing))
    self.assertEqual([items], storage_api.contains_calls)
    self.assertEqual(
        sorted(
          ((items[2], 123, items[2].content()[0]),
            (items[3], 456, items[3].content()[0]))),
        sorted(storage_api.push_calls))
Example #4
  def test_fetch_offset_bad_header(self):
    server_ref = isolate_storage.ServerRef('http://example.com', 'default')
    data = ''.join(str(x) for x in range(1000))
    item = isolateserver_fake.hash_content(data)
    offset = 200
    size = len(data)

    bad_content_range_headers = [
      # Missing header.
      None,
      '',
      # Bad format.
      'not bytes %d-%d/%d' % (offset, size - 1, size),
      'bytes %d-%d' % (offset, size - 1),
      # Bad offset.
      'bytes %d-%d/%d' % (offset - 1, size - 1, size),
      # Incomplete chunk.
      'bytes %d-%d/%d' % (offset, offset + 10, size),
    ]

    for content_range_header in bad_content_range_headers:
      self.expected_requests([
          self.mock_fetch_request(server_ref, item, offset=offset),
          self.mock_gs_request(
              server_ref, item, data, offset=offset,
              request_headers={'Range': 'bytes=%d-' % offset},
              response_headers={'Content-Range': content_range_header}),
      ])
      storage = isolate_storage.IsolateServer(server_ref)
      with self.assertRaises(IOError):
        _ = ''.join(storage.fetch(item, 0, offset))
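
For reference, the well-formed header these cases violate is RFC 7233's
"Content-Range: bytes <first>-<last>/<total>". A sketch of the kind of check
the client presumably performs (a hypothetical helper, not the actual
isolate_storage code):

  import re

  def check_content_range(header, offset, size):
    # Accept 'bytes <first>-<last>/<total>' (or '/*' when the total is
    # unknown), where the range starts at the requested offset and runs
    # to the end of the content.
    if not header:
      return False
    m = re.match(r'bytes (\d+)-(\d+)/(\d+|\*)$', header)
    if not m:
      return False
    first, last = int(m.group(1)), int(m.group(2))
    return first == offset and last == size - 1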
Example #5
 def test_contains_success(self):
   server_ref = isolate_storage.ServerRef('http://example.com', 'default')
   files = [
     FakeItem('1', high_priority=True),
     FakeItem('2' * 100),
     FakeItem('3' * 200),
   ]
   request = {'items': [
       {'digest': f.digest, 'is_isolated': not i, 'size': f.size}
       for i, f in enumerate(files)]}
   response = {
       'items': [{
           'index': str(i),
           'upload_ticket': 'ticket_%d' % i
       } for i in range(3)],
   }
   missing = [
       files[0],
       files[1],
       files[2],
   ]
   self._requests = [self.mock_contains_request(server_ref, request, response)]
   storage = isolate_storage.IsolateServer(server_ref)
   result = storage.contains(files)
   self.assertEqual(set(missing), set(result.keys()))
   for i, (_item, push_state) in enumerate(result.items()):
     self.assertEqual(
         push_state.upload_url, '_ah/api/isolateservice/v1/store_inline')
     self.assertEqual(push_state.finalize_url, None)
Example #6
 def test_push_failure_upload(self):
   server_ref = isolate_storage.ServerRef('http://example.com', 'default')
   data = ''.join(str(x) for x in range(1000))
   item = FakeItem(data)
   contains_request = {'items': [
       {'digest': item.digest, 'size': item.size, 'is_isolated': 0}]}
   contains_response = {'items': [{'index': 0, 'upload_ticket': 'ticket!'}]}
   requests = [
     self.mock_contains_request(
         server_ref, contains_request, contains_response),
     self.mock_upload_request(
         server_ref,
         base64.b64encode(data),
         contains_response['items'][0]['upload_ticket'],
     ),
   ]
   self.expected_requests(requests)
   storage = isolate_storage.IsolateServer(server_ref)
   missing = storage.contains([item])
   self.assertEqual([item], missing.keys())
   push_state = missing[item]
   with self.assertRaises(IOError):
     storage.push(item, push_state, [data])
   self.assertFalse(push_state.uploaded)
   self.assertFalse(push_state.finalized)
Example #7
def CMDarchive(parser, args):
  """Creates a .isolated file and uploads the tree to an isolate server.

  All the files listed in the .isolated file are put in the isolate server
  cache via isolateserver.py.
  """
  add_isolate_options(parser)
  add_subdir_option(parser)
  isolateserver.add_isolate_server_options(parser)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if args:
    parser.error('Unsupported argument: %s' % args)
  process_isolate_options(parser, options)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True, True)
  server_ref = isolate_storage.ServerRef(
      options.isolate_server, options.namespace)
  result = isolate_and_archive([(options, unicode(os.getcwd()))], server_ref)
  if result is None:
    return EXIT_CODE_UPLOAD_ERROR
  assert len(result) == 1, result
  if result.values()[0] is None:
    return EXIT_CODE_ISOLATE_ERROR
  return 0
Example #8
 def test_contains_format_failure(self):
   server_ref = isolate_storage.ServerRef('http://example.com', 'default')
   self.expected_requests(
       [self.mock_contains_request(server_ref, {'items': []}, None)])
   storage = isolate_storage.IsolateServer(server_ref)
   with self.assertRaises(isolated_format.MappingError):
     storage.contains([])
Example #9
  def run_push_and_fetch_test(self, namespace):
    storage = isolateserver.get_storage(
        isolate_storage.ServerRef(self.server.url, namespace))

    # Upload items.
    items = [
        isolateserver.BufferItem('item %d' % i, storage.server_ref.hash_algo)
        for i in range(10)
    ]
    uploaded = storage.upload_items(items)
    self.assertEqual(set(items), set(uploaded))

    # Fetch them all back into local memory cache.
    cache = local_caching.MemoryContentAddressedCache()
    queue = isolateserver.FetchQueue(storage, cache)

    # Start fetching.
    pending = set()
    for item in items:
      pending.add(item.digest)
      queue.add(item.digest)
      queue.wait_on(item.digest)

    # Wait for fetch to complete.
    while pending:
      fetched = queue.wait()
      pending.discard(fetched)

    # Ensure the fetched data matches what was pushed.
    actual = []
    for i in items:
      with cache.getfileobj(i.digest) as f:
        actual.append(f.read())

    self.assertEqual([''.join(i.content()) for i in items], actual)
Example #10
 def test_download_isolated_simple(self):
   # Test downloading an isolated tree.
   # It writes files to disk for real.
   server_ref = isolate_storage.ServerRef('http://example.com', 'default-gzip')
   files = {
     os.path.join('a', 'foo'): 'Content',
     'b': 'More content',
   }
   isolated = {
     'command': ['Absurb', 'command'],
     'relative_cwd': 'a',
     'files': {
       os.path.join('a', 'foo'): {
         'h': isolateserver_fake.hash_content('Content'),
         's': len('Content'),
          'm': 0o700,
       },
       'b': {
         'h': isolateserver_fake.hash_content('More content'),
         's': len('More content'),
          'm': 0o600,
       },
       'c': {
         'l': 'a/foo',
       },
     },
     'read_only': 1,
     'version': isolated_format.ISOLATED_FILE_VERSION,
   }
Example #11
  def _run_tha_test(self, isolated_hash=None, files=None, command=None):
    files = files or {}
    make_tree_call = []
    def add(i, _):
      make_tree_call.append(i)
    for i in ('make_tree_read_only', 'make_tree_files_read_only',
              'make_tree_deleteable', 'make_tree_writeable'):
      self.mock(file_path, i, functools.partial(add, i))

    server_ref = isolate_storage.ServerRef('http://localhost:1', 'default-gzip')
    data = run_isolated.TaskData(
        command=command or [],
        relative_cwd=None,
        extra_args=[],
        isolated_hash=isolated_hash,
        storage=StorageFake(files, server_ref),
        isolate_cache=local_caching.MemoryContentAddressedCache(),
        outputs=None,
        install_named_caches=init_named_caches_stub,
        leak_temp_dir=False,
        root_dir=None,
        hard_timeout=60,
        grace_period=30,
        bot_file=None,
        switch_to_account=False,
        install_packages_fn=run_isolated.noop_install_packages,
        use_symlinks=False,
        env={},
        env_prefix={})
    ret = run_isolated.run_tha_test(data, None)
    self.assertEqual(0, ret)
    return make_tree_call
Example #12
 def test_fetch_success(self):
   server_ref = isolate_storage.ServerRef('http://example.com', 'default')
   data = ''.join(str(x) for x in range(1000))
   item = isolateserver_fake.hash_content(data)
   self.expected_requests([self.mock_fetch_request(server_ref, item, data)])
   storage = isolate_storage.IsolateServer(server_ref)
   fetched = ''.join(storage.fetch(item, 0, 0))
   self.assertEqual(data, fetched)
Example #13
 def test_fetch_failure(self):
   server_ref = isolate_storage.ServerRef('http://example.com', 'default')
   item = isolateserver_fake.hash_content('something')
   self.expected_requests(
       [self.mock_fetch_request(server_ref, item)[:-1] + (None,)])
   storage = isolate_storage.IsolateServer(server_ref)
   with self.assertRaises(IOError):
     _ = ''.join(storage.fetch(item, 0, 0))
Example #14
  def test_archive_files_to_storage(self):
    # Create test files on disk; the storage layer is mocked below.
    files_content = {}
    def add(p, c):
      with open(os.path.join(self.tempdir, p), 'wb') as f:
        f.write(c)
      files_content[p] = c
    add(u'a', 'a'*100)
    add(u'b', 'b'*200)
    os.mkdir(os.path.join(self.tempdir, 'sub'))
    add(os.path.join(u'sub', u'c'), 'c'*300)
    add(os.path.join(u'sub', u'a_copy'), 'a'*100)

    files_hash = {
      p: hashlib.sha1(c).hexdigest() for p, c in files_content.items()
    }
    # 'a' and 'sub/c' are missing.
    missing = {
      files_hash[u'a']: u'a',
      files_hash[os.path.join(u'sub', u'c')]: os.path.join(u'sub', u'c'),
    }
    server_ref = isolate_storage.ServerRef(
        'http://localhost:1', 'some-namespace')
    storage_api = MockedStorageApi(server_ref, missing)
    storage = isolateserver.Storage(storage_api)
    with storage:
      results, cold, hot = isolateserver.archive_files_to_storage(
          storage, [os.path.join(self.tempdir, p) for p in files_content], None)
    self.assertEqual(
        {os.path.join(self.tempdir, f): h for f, h in files_hash.items()},
        dict(results))

    expected = [
      (os.path.join(self.tempdir, u'a'), files_hash['a']),
      (os.path.join(self.tempdir, u'sub', u'c'),
        files_hash[os.path.join(u'sub', u'c')]),
      (os.path.join(self.tempdir, u'sub', u'a_copy'),
        files_hash[os.path.join(u'sub', u'a_copy')]),
    ]
    self.assertEqual(expected, [(f.path, f.digest) for f in cold])
    self.assertEqual(
        [(os.path.join(self.tempdir, u'b'), files_hash['b'])],
        [(f.path, f.digest) for f in hot])
    # 'contains' checked for existence of all files.
    self.assertEqualIgnoringOrder(
        set(files_hash.values()),
        [i.digest for i in sum(storage_api.contains_calls, [])])
    # Pushed only missing files.
    self.assertEqualIgnoringOrder(
        list(missing),
        [call[0].digest for call in storage_api.push_calls])
    # Pushing with correct data, size and push state.
    for pushed_item, _push_state, pushed_content in storage_api.push_calls:
      filename = missing[pushed_item.digest]
      self.assertEqual(os.path.join(self.tempdir, filename), pushed_item.path)
      self.assertEqual(files_content[filename], pushed_content)
Example #15
 def test_archive_files_to_storage_tar(self):
   # Create 5 files, which is the minimum to create a tarball.
   for i in range(5):
     with open(os.path.join(self.tempdir, six.text_type(i)), 'wb') as f:
       f.write('fooo%d' % i)
   server_ref = isolate_storage.ServerRef('http://localhost:1', 'default')
   storage_api = MockedStorageApi(server_ref, {})
   storage = isolateserver.Storage(storage_api)
   results, cold, hot = isolateserver.archive_files_to_storage(
       storage, [self.tempdir], None)
   self.assertEqual([self.tempdir], results.keys())
   self.assertEqual([], cold)
   # 5 files plus the isolated file.
   self.assertEqual(6, len(hot))
Example #16
 def test_get_hash_algo(self):
   pairs = [
     ('default', hashlib.sha1),
     ('default-gzip', hashlib.sha1),
     ('sha1-flat', hashlib.sha1),
     ('sha1-deflate', hashlib.sha1),
     ('sha256-flat', hashlib.sha256),
     ('sha256-deflate', hashlib.sha256),
     ('sha512-flat', hashlib.sha512),
     ('sha512-deflate', hashlib.sha512),
   ]
   for namespace, expected in pairs:
     server_ref = isolate_storage.ServerRef('http://localhost:0', namespace)
     self.assertIs(expected, server_ref.hash_algo, namespace)
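
The pairs above suggest how ServerRef picks the algorithm: 'default' and
'default-gzip' fall back to SHA-1, otherwise the prefix before the first '-'
names a hashlib constructor. A sketch of that mapping (an assumption about
the behavior, not the actual ServerRef code):

  import hashlib

  def hash_algo_for_namespace(namespace):
    # Legacy namespaces imply SHA-1; 'sha256-flat' style namespaces name
    # the algorithm explicitly in their prefix.
    if namespace.startswith('default'):
      return hashlib.sha1
    return getattr(hashlib, namespace.split('-', 1)[0])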
Example #17
 def test_async_push(self):
   for use_zip in (False, True):
     item = FakeItem('1234567')
     server_ref = isolate_storage.ServerRef(
         'http://localhost:1', 'default-gzip' if use_zip else 'default')
     storage_api = MockedStorageApi(server_ref, {item.digest: 'push_state'})
     storage = isolateserver.Storage(storage_api)
     channel = threading_utils.TaskChannel()
     storage._async_push(channel, item, self.get_push_state(storage, item))
     # Wait for push to finish.
     pushed_item = next(channel)
     self.assertEqual(item, pushed_item)
     # StorageApi.push was called with correct arguments.
     self.assertEqual(
         [(item, 'push_state', item.zipped if use_zip else item.data)],
         storage_api.push_calls)
Example #18
 def test_download_two_files(self):
   # Test downloading two files.
   # It doesn't touch disk, 'file_write' is mocked.
   # It doesn't touch network, url_open() is mocked.
   actual = {}
   def out(key, generator):
     actual[key] = ''.join(generator)
   self.mock(local_caching, 'file_write', out)
   server_ref = isolate_storage.ServerRef('http://example.com', 'default-gzip')
   coucou_sha1 = hashlib.sha1('Coucou').hexdigest()
   byebye_sha1 = hashlib.sha1('Bye Bye').hexdigest()
   requests = [
     (
       '%s/_ah/api/isolateservice/v1/retrieve' % server_ref.url,
       {
           'data': {
               'digest': h.encode('utf-8'),
               'namespace': {
                   'namespace': 'default-gzip',
                   'digest_hash': 'sha-1',
                   'compression': 'flate',
               },
               'offset': 0,
           },
           'read_timeout': 60,
       },
       {'content': base64.b64encode(zlib.compress(v))},
     ) for h, v in [(coucou_sha1, 'Coucou'), (byebye_sha1, 'Bye Bye')]
   ]
   self.expected_requests(requests)
   cmd = [
     'download',
     '--isolate-server', server_ref.url,
     '--namespace', server_ref.namespace,
     '--target', test_env.CLIENT_DIR,
     '--file', coucou_sha1, 'path/to/a',
     '--file', byebye_sha1, 'path/to/b',
     # Even if everything is mocked, the cache directory will still be created.
     '--cache', self.tempdir,
   ]
   self.assertEqual(0, isolateserver.main(cmd))
   expected = {
     os.path.join(test_env.CLIENT_DIR, 'path/to/a'): 'Coucou',
     os.path.join(test_env.CLIENT_DIR, 'path/to/b'): 'Bye Bye',
   }
   self.assertEqual(expected, actual)
Example #19
  def test_fetch_offset_success(self):
    server_ref = isolate_storage.ServerRef('http://example.com', 'default')
    data = ''.join(str(x) for x in range(1000))
    item = isolateserver_fake.hash_content(data)
    offset = 200
    size = len(data)

    good_content_range_headers = [
      'bytes %d-%d/%d' % (offset, size - 1, size),
      'bytes %d-%d/*' % (offset, size - 1),
    ]

    for _content_range_header in good_content_range_headers:
      self.expected_requests(
          [self.mock_fetch_request(server_ref, item, data, offset=offset)])
      storage = isolate_storage.IsolateServer(server_ref)
      fetched = ''.join(storage.fetch(item, 0, offset))
      self.assertEqual(data[offset:], fetched)
Example #20
  def test_async_push_generator_errors(self):
    class FakeException(Exception):
      pass

    def faulty_generator():
      yield 'Hi!'
      raise FakeException('fake exception')

    for use_zip in (False, True):
      item = FakeItem('')
      self.mock(item, 'content', faulty_generator)
      server_ref = isolate_storage.ServerRef(
          'http://localhost:1', 'default-gzip' if use_zip else 'default')
      storage_api = MockedStorageApi(server_ref, {item.digest: 'push_state'})
      storage = isolateserver.Storage(storage_api)
      channel = threading_utils.TaskChannel()
      storage._async_push(channel, item, self.get_push_state(storage, item))
      with self.assertRaises(FakeException):
        next(channel)
      # StorageApi's push should never complete when data cannot be read.
      self.assertEqual(0, len(storage_api.push_calls))
Example #21
 def test_push_failure_finalize(self):
   server_ref = isolate_storage.ServerRef('http://example.com', 'default')
   data = ''.join(str(x) for x in range(1000))
   item = FakeItem(data)
   contains_request = {'items': [
       {'digest': item.digest, 'size': item.size, 'is_isolated': 0}]}
   contains_response = {'items': [
       {'index': 0,
        'gs_upload_url': '%s/FAKE_GCS/whatevs/1234' % server_ref.url,
        'upload_ticket': 'ticket!'}]}
   requests = [
     self.mock_contains_request(
         server_ref, contains_request, contains_response),
     (
       '%s/FAKE_GCS/whatevs/1234' % server_ref.url,
       {
         'data': data,
         'content_type': 'application/octet-stream',
         'method': 'PUT',
         'headers': {'Cache-Control': 'public, max-age=31536000'},
       },
       '',
       None,
     ),
     (
       '%s/_ah/api/isolateservice/v1/finalize_gs_upload' % server_ref.url,
       {'data': {'upload_ticket': 'ticket!'}},
       None,
     ),
   ]
   self.expected_requests(requests)
   storage = isolate_storage.IsolateServer(server_ref)
   missing = storage.contains([item])
   self.assertEqual([item], missing.keys())
   push_state = missing[item]
   with self.assertRaises(IOError):
     storage.push(item, push_state, [data])
   self.assertTrue(push_state.uploaded)
   self.assertFalse(push_state.finalized)
Example #22
 def test_archive_files_to_storage_symlink(self):
   link_path = os.path.join(self.tempdir, u'link')
   with open(os.path.join(self.tempdir, u'foo'), 'wb') as f:
     f.write('fooo')
   fs.symlink('foo', link_path)
   server_ref = isolate_storage.ServerRef('http://localhost:1', 'default')
   storage_api = MockedStorageApi(server_ref, {})
   storage = isolateserver.Storage(storage_api)
   results, cold, hot = isolateserver.archive_files_to_storage(
       storage, [self.tempdir], None)
   self.assertEqual([self.tempdir], results.keys())
   self.assertEqual([], cold)
   # isolated, symlink, foo file.
   self.assertEqual(3, len(hot))
   self.assertEqual(os.path.join(self.tempdir, u'foo'), hot[0].path)
   self.assertEqual(4, hot[0].size)
   # TODO(maruel): The symlink is reported as its destination. We should fix
   # this because it double counts the stats.
   self.assertEqual(os.path.join(self.tempdir, u'foo'), hot[1].path)
   self.assertEqual(4, hot[1].size)
   # The isolated file is purely in-memory.
   self.assertIsInstance(hot[2], isolateserver.BufferItem)
Example #23
  def run_upload_items_test(self, namespace):
    storage = isolateserver.get_storage(
        isolate_storage.ServerRef(self.server.url, namespace))

    # Items to upload.
    items = [
      isolateserver.BufferItem('item %d' % i, storage.server_ref.hash_algo)
      for i in xrange(10)
    ]

    # Do it.
    uploaded = storage.upload_items(items)
    self.assertEqual(set(items), set(uploaded))

    # Now ensure upload_items skips existing items.
    more = [
      isolateserver.BufferItem('more item %d' % i, storage.server_ref.hash_algo)
      for i in xrange(10)
    ]

    # Uploaded only |more|.
    uploaded = storage.upload_items(items + more)
    self.assertEqual(set(more), set(uploaded))
Example #24
 def get_server(self):
   s = isolate_storage.ServerRef(
       'https://luci.appspot.com', 'default-gzip')
   return isolate_storage.IsolateServerGrpc(
       s, 'https://luci.com/client/bob')
Example #25
def load_complete_state(options, cwd, subdir, skip_update):
  """Loads a CompleteState.

  This includes data from .isolate and .isolated.state files. Never reads the
  .isolated file.

  Arguments:
    options: Options instance generated with process_isolate_options. For both
             options.isolate and options.isolated, if the value is set, it is
             an absolute path.
    cwd: base directory to be used when loading the .isolate file.
    subdir: optional argument to only process files in the subdirectory,
            relative to CompleteState.root_dir.
    skip_update: Skip loading the .isolate file and processing the
                 dependencies. Useful when they are not needed, e.g. when
                 tracing.
  """
  assert not options.isolate or os.path.isabs(options.isolate)
  assert not options.isolated or os.path.isabs(options.isolated)
  cwd = file_path.get_native_path_case(unicode(cwd))
  # maruel: This is incorrect but it's not worth fixing.
  namespace = getattr(options, 'namespace', 'default')
  algo_name = isolate_storage.ServerRef('', namespace).hash_algo_name
  if options.isolated:
    # Load the previous state if it was present. Namely, "foo.isolated.state".
    # Note: this call doesn't load the .isolate file.
    complete_state = CompleteState.load_files(algo_name, options.isolated)
  else:
    # Constructs a dummy object that cannot be saved. Useful for temporary
    # commands like 'run'. There is no directory containing a .isolated file so
    # specify the current working directory as a valid directory.
    complete_state = CompleteState(None, SavedState(algo_name, os.getcwd()))

  if not options.isolate:
    if not complete_state.saved_state.isolate_file:
      if not skip_update:
        raise ExecutionError('A .isolate file is required.')
      isolate = None
    else:
      isolate = complete_state.saved_state.isolate_filepath
  else:
    isolate = options.isolate
    if complete_state.saved_state.isolate_file:
      rel_isolate = file_path.safe_relpath(
          options.isolate, complete_state.saved_state.isolated_basedir)
      if rel_isolate != complete_state.saved_state.isolate_file:
        # This happens if the .isolate file was moved for example. In this case,
        # discard the saved state.
        logging.warning(
            '--isolated %s != %s as saved in %s. Discarding saved state',
            rel_isolate,
            complete_state.saved_state.isolate_file,
            isolatedfile_to_state(options.isolated))
        complete_state = CompleteState(
            options.isolated,
            SavedState(algo_name, complete_state.saved_state.isolated_basedir))

  if not skip_update:
    # Then load the .isolate and expand directories.
    complete_state.load_isolate(
        cwd, isolate, options.path_variables, options.config_variables,
        options.extra_variables, options.blacklist, options.ignore_broken_items,
        options.collapse_symlinks)

  # Regenerate complete_state.saved_state.files.
  if subdir:
    subdir = unicode(subdir)
    # This is tricky here. If it is a path, take it from the root_dir. If
    # it is a variable, it must be keyed from the directory containing the
    # .isolate file. So translate all variables first.
    translated_path_variables = dict(
        (k,
          os.path.normpath(os.path.join(complete_state.saved_state.relative_cwd,
            v)))
        for k, v in complete_state.saved_state.path_variables.iteritems())
    subdir = isolate_format.eval_variables(subdir, translated_path_variables)
    subdir = subdir.replace('/', os.path.sep)

  if not skip_update:
    complete_state.files_to_metadata(subdir, options.collapse_symlinks)
  return complete_state
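
To make the subdir translation concrete: with a relative_cwd of 'src' and a
path variable DEPTH='..', the normalization above maps DEPTH to '.', so a
subdir expression keyed on DEPTH resolves against root_dir rather than the
.isolate file's directory (illustrative values only):

  import os
  # Hypothetical saved state: relative_cwd='src', path_variables={'DEPTH': '..'}.
  translated = {'DEPTH': os.path.normpath(os.path.join('src', '..'))}
  assert translated == {'DEPTH': '.'}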
Example #26
def CMDbatcharchive(parser, args):
  """Archives multiple isolated trees at once.

  Using a single command instead of multiple sequential invocations cuts
  redundant work when isolated trees share common files (e.g. file hashes are
  checked only once, their presence on the server is checked only once, and
  so on).

  Takes a list of paths to *.isolated.gen.json files that describe what trees to
  isolate. The format of each file is:
  {
    "version": 1,
    "dir": <absolute path to a directory all other paths are relative to>,
    "args": [list of command line arguments for single 'archive' command]
  }
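
  For illustration, a minimal hypothetical *.isolated.gen.json (the paths and
  args shown are examples only):
  {
    "version": 1,
    "dir": "/absolute/path/to/checkout",
    "args": ["--isolate", "foo.isolate", "--isolated", "foo.isolated"]
  }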
  """
  isolateserver.add_isolate_server_options(parser)
  isolateserver.add_archive_options(parser)
  auth.add_auth_options(parser)
  parser.add_option(
      '--dump-json',
      metavar='FILE',
      help='Write isolated hashes of archived trees to this file as JSON')
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True, True)

  # Validate all incoming options, prepare what needs to be archived as a list
  # of tuples (archival options, working directory).
  work_units = []
  for gen_json_path in args:
    # Validate JSON format of a *.isolated.gen.json file.
    try:
      data = tools.read_json(gen_json_path)
    except IOError as e:
      parser.error('Failed to open %s: %s' % (gen_json_path, e))
    if data.get('version') != ISOLATED_GEN_JSON_VERSION:
      parser.error('Invalid version in %s' % gen_json_path)
    cwd = data.get('dir')
    if not isinstance(cwd, unicode) or not fs.isdir(cwd):
      parser.error('Invalid dir in %s' % gen_json_path)
    args = data.get('args')
    if (not isinstance(args, list) or
        not all(isinstance(x, unicode) for x in args)):
      parser.error('Invalid args in %s' % gen_json_path)
    # Convert command line (embedded in JSON) to Options object.
    work_units.append((parse_archive_command_line(args, cwd), cwd))

  # Perform the archival, all at once.
  server_ref = isolate_storage.ServerRef(
      options.isolate_server, options.namespace)
  isolated_hashes = isolate_and_archive(work_units, server_ref)

  # TODO(vadimsh): isolate_and_archive returns None on upload failure; there's
  # currently no way to figure out which *.isolated files from a batch were
  # successfully uploaded, so consider them all failed (and emit an empty dict
  # as the JSON result).
  if options.dump_json:
    tools.write_json(options.dump_json, isolated_hashes or {}, False)

  if isolated_hashes is None:
    return EXIT_CODE_UPLOAD_ERROR

  # isolated_hashes[x] is None if 'x.isolate' contains an error.
  if not all(isolated_hashes.itervalues()):
    return EXIT_CODE_ISOLATE_ERROR

  return 0
Example #27
 def test_download_isolated_simple(self):
   # Test downloading an isolated tree.
   # It writes files to disk for real.
   server_ref = isolate_storage.ServerRef('http://example.com', 'default-gzip')
   files = {
     os.path.join('a', 'foo'): 'Content',
     'b': 'More content',
   }
   isolated = {
       'command': ['Absurb', 'command'],
       'relative_cwd': 'a',
       'files': {
           os.path.join('a', 'foo'): {
               'h': isolateserver_fake.hash_content('Content'),
               's': len('Content'),
               'm': 0o700,
           },
           'b': {
               'h': isolateserver_fake.hash_content('More content'),
               's': len('More content'),
               'm': 0o600,
           },
           'c': {
               'l': 'a/foo',
           },
       },
       'read_only': 1,
       'version': isolated_format.ISOLATED_FILE_VERSION,
   }
   isolated_data = json.dumps(isolated, sort_keys=True, separators=(',', ':'))
   isolated_hash = isolateserver_fake.hash_content(isolated_data)
   requests = [
     (v['h'], files[k]) for k, v in isolated['files'].items()
     if 'h' in v
   ]
   requests.append((isolated_hash, isolated_data))
   requests = [
     (
       '%s/_ah/api/isolateservice/v1/retrieve' % server_ref.url,
       {
           'data': {
               'digest': h.encode('utf-8'),
               'namespace': {
                   'namespace': 'default-gzip',
                   'digest_hash': 'sha-1',
                   'compression': 'flate',
               },
               'offset': 0,
           },
           'read_timeout': 60,
       },
       {'content': base64.b64encode(zlib.compress(v))},
     ) for h, v in requests
   ]
   cmd = [
     'download',
     '--isolate-server', server_ref.url,
     '--namespace', server_ref.namespace,
     '--target', os.path.join(self.tempdir, 'target'),
     '--isolated', isolated_hash,
     '--cache', os.path.join(self.tempdir, 'cache'),
   ]
   self.expected_requests(requests)
   self.assertEqual(0, isolateserver.main(cmd))
   expected = {
       os.path.join(self.tempdir, 'target', 'a', 'foo'): ('Content', 0o500),
       os.path.join(self.tempdir, 'target', 'b'): ('More content', 0o400),
       os.path.join(self.tempdir, 'target', 'c'): (u'a/foo', 0),
   }
   actual = self._get_actual()
   self.assertEqual(expected, actual)
   expected_stdout = (
       'To run this test please run from the directory %s:\n  Absurb command\n'
       % os.path.join(self.tempdir, 'target', 'a'))
   self.checkOutput(expected_stdout, '')
Example #28
  def test_download_isolated_tar_archive(self):
    # Test downloading an isolated tree.
    server_ref = isolate_storage.ServerRef('http://example.com', 'default-gzip')

    files = {
        os.path.join('a', 'foo'): ('Content', 0o500),
        'b': ('More content', 0o400),
        'c': ('Even more content!', 0o500),
    }

    # Generate a tar archive.
    tf = io.BytesIO()
    with tarfile.TarFile(mode='w', fileobj=tf) as tar:
      f1 = tarfile.TarInfo()
      f1.type = tarfile.REGTYPE
      f1.name = 'a/foo'
      f1.size = 7
      f1.mode = 0o570
      tar.addfile(f1, io.BytesIO('Content'))

      f2 = tarfile.TarInfo()
      f2.type = tarfile.REGTYPE
      f2.name = 'b'
      f2.size = 12
      f2.mode = 0o666
      tar.addfile(f2, io.BytesIO('More content'))
    archive = tf.getvalue()

    isolated = {
      'command': ['Absurb', 'command'],
      'relative_cwd': 'a',
      'files': {
        'archive1': {
          'h': isolateserver_fake.hash_content(archive),
          's': len(archive),
          't': 'tar',
        },
        'c': {
          'h': isolateserver_fake.hash_content(files['c'][0]),
          's': len(files['c'][0]),
        },
      },
      'read_only': 1,
      'version': isolated_format.ISOLATED_FILE_VERSION,
    }
    isolated_data = json.dumps(isolated, sort_keys=True, separators=(',', ':'))
    isolated_hash = isolateserver_fake.hash_content(isolated_data)
    requests = [
      (isolated['files']['archive1']['h'], archive),
      (isolated['files']['c']['h'], files['c'][0]),
    ]
    requests.append((isolated_hash, isolated_data))
    requests = [
      (
        '%s/_ah/api/isolateservice/v1/retrieve' % server_ref.url,
        {
            'data': {
                'digest': h.encode('utf-8'),
                'namespace': {
                    'namespace': 'default-gzip',
                    'digest_hash': 'sha-1',
                    'compression': 'flate',
                },
                'offset': 0,
            },
            'read_timeout': 60,
        },
        {'content': base64.b64encode(zlib.compress(v))},
      ) for h, v in requests
    ]
    cmd = [
      'download',
      '--isolate-server', server_ref.url,
      '--namespace', server_ref.namespace,
      '--target', os.path.join(self.tempdir, 'target'),
      '--isolated', isolated_hash,
      '--cache', os.path.join(self.tempdir, 'cache'),
    ]
    self.expected_requests(requests)
    self.assertEqual(0, isolateserver.main(cmd))
    expected = {
      os.path.join(self.tempdir, 'target', k): v for k, v in files.items()
    }
    actual = self._get_actual()
    self.assertEqual(expected, actual)
    expected_stdout = (
        'To run this test please run from the directory %s:\n  Absurb command\n'
        % os.path.join(self.tempdir, 'target', 'a'))
    self.checkOutput(expected_stdout, '')
Example #29
 def test_upload_items_empty(self):
   server_ref = isolate_storage.ServerRef('http://localhost:1', 'default')
   storage_api = MockedStorageApi(server_ref, {})
   storage = isolateserver.Storage(storage_api)
   result = storage.upload_items(())
   self.assertEqual([], result)
Example #30
 def test_server_capabilities_success(self):
   server_ref = isolate_storage.ServerRef('http://example.com', 'default')
   self.expected_requests([self.mock_server_details_request(server_ref)])
   storage = isolate_storage.IsolateServer(server_ref)
   caps = storage._server_capabilities
   self.assertEqual({'server_version': 'such a good version'}, caps)