Example #1

    def test_async_push_upload_errors(self):
        chunk = 'data_chunk'

        def _generator(_chunk_size):
            yield chunk

        def push_side_effect():
            raise IOError('Nope')

        # TODO(vadimsh): Retrying the push when fetching data from a generator
        # is currently broken (it reuses the same generator instance when
        # retrying).
        content_sources = (
            # _generator,
            lambda _chunk_size: [chunk],
        )

        for use_zip in (False, True):
            for source in content_sources:
                item = FakeItem(chunk)
                self.mock(item, 'content', source)
                storage_api = self.mock_push(push_side_effect)
                storage = isolateserver.Storage(storage_api, use_zip)
                channel = threading_utils.TaskChannel()
                storage.async_push(channel, 0, item)
                with self.assertRaises(IOError):
                    channel.pull()
                # The initial attempt + all retries.
                attempts = 1 + isolateserver.WorkerPool.RETRIES
                # Single push attempt parameters.
                expected_push = (item, item.zipped if use_zip else item.data)
                # Ensure all pushes are attempted.
                self.assertEqual([expected_push] * attempts,
                                 storage_api.pushed)
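Note: the TODO above is worth spelling out. A generator instance can only be
consumed once, so a retry loop that holds on to it re-reads an exhausted
stream on every retry. A minimal sketch of the factory-based fix (the names
push_with_retries and make_chunks are illustrative, not isolateserver APIs):

    def push_with_retries(content_factory, push, retries=4):
        # The factory is invoked anew on every attempt, so each retry
        # reads a fresh stream. Passing a generator *instance* instead
        # would yield data on the first attempt only.
        last_error = None
        for _ in range(1 + retries):
            try:
                return push(''.join(content_factory()))
            except IOError as error:
                last_error = error
        raise last_error

    def make_chunks():
        yield 'data_chunk'

    # Correct: pass the factory itself, not the result of make_chunks().
    push_with_retries(make_chunks, lambda data: len(data))

This is exactly why content_sources stores callables rather than generator
objects.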
Example #2
  def test_async_push_upload_errors(self):
    chunk = 'data_chunk'

    def push_side_effect():
      raise IOError('Nope')

    content_sources = (
        lambda: [chunk],
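        # A 'yield' inside a lambda makes it a generator function (valid in
        # Python 2, a SyntaxError since Python 3.8), so this second source
        # feeds chunks through a generator instead of a plain list.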
        lambda: [(yield chunk)],
    )

    for use_zip in (False, True):
      for source in content_sources:
        item = FakeItem(chunk)
        self.mock(item, 'content', source)
        server_ref = isolate_storage.ServerRef(
            'http://localhost:1', 'default-gzip' if use_zip else 'default')
        storage_api = MockedStorageApi(
            server_ref, {item.digest: 'push_state'}, push_side_effect)
        storage = isolateserver.Storage(storage_api)
        channel = threading_utils.TaskChannel()
        storage._async_push(channel, item, self.get_push_state(storage, item))
        with self.assertRaises(IOError):
          next(channel)
        # The initial attempt + all retries.
        attempts = 1 + storage.net_thread_pool.RETRIES
        # Single push attempt call arguments.
        expected_push = (
            item, 'push_state', item.zipped if use_zip else item.data)
        # Ensure all pushes are attempted.
        self.assertEqual(
            [expected_push] * attempts, storage_api.push_calls)
Example #3
  def test_upload_items(self):
    server_ref = isolate_storage.ServerRef('http://localhost:1', 'default')
    items = [
      isolateserver.BufferItem('a'*12, server_ref.hash_algo),
      isolateserver.BufferItem('', server_ref.hash_algo),
      isolateserver.BufferItem('c'*1222, server_ref.hash_algo),
      isolateserver.BufferItem('d'*1223, server_ref.hash_algo),
    ]
    missing = {
      items[2]: 123,
      items[3]: 456,
    }
    storage_api = MockedStorageApi(
        server_ref,
        {item.digest: push_state for item, push_state in missing.items()})
    storage = isolateserver.Storage(storage_api)

    # Intentionally pass a generator, to confirm it works.
    result = storage.upload_items((i for i in items))
    self.assertEqual(sorted(missing), sorted(result))
    self.assertEqual(4, len(items))
    self.assertEqual(2, len(missing))
    self.assertEqual([items], storage_api.contains_calls)
    self.assertEqual(
        sorted(
            ((items[2], 123, items[2].content()[0]),
             (items[3], 456, items[3].content()[0]))),
        sorted(storage_api.push_calls))
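Note: the flow exercised here is contains-then-push: the server is asked
which digests it is missing, and only those items are uploaded. A simplified,
single-threaded sketch of that contract (the real Storage.upload_items
pipelines the work through worker threads):

    def upload_items_sketch(storage_api, items):
        # contains() reports the absent items, mapped to an opaque
        # per-item push state that is handed back to push().
        missing = storage_api.contains(list(items))
        for item, push_state in missing.items():
            storage_api.push(item, push_state, item.content())
        # Callers get back the items that actually had to be uploaded.
        return list(missing)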
Example #4
  def test_archive_files_to_storage(self):
    # Mocked file contents, keyed by relative path.
    files_content = {}
    def add(p, c):
      with open(os.path.join(self.tempdir, p), 'wb') as f:
        f.write(c)
      files_content[p] = c
    add(u'a', 'a'*100)
    add(u'b', 'b'*200)
    os.mkdir(os.path.join(self.tempdir, 'sub'))
    add(os.path.join(u'sub', u'c'), 'c'*300)
    add(os.path.join(u'sub', u'a_copy'), 'a'*100)

    files_hash = {
      p: hashlib.sha1(c).hexdigest() for p, c in files_content.items()
    }
    # 'a' and 'sub/c' are missing.
    missing = {
      files_hash[u'a']: u'a',
      files_hash[os.path.join(u'sub', u'c')]: os.path.join(u'sub', u'c'),
    }
    server_ref = isolate_storage.ServerRef(
        'http://localhost:1', 'some-namespace')
    storage_api = MockedStorageApi(server_ref, missing)
    storage = isolateserver.Storage(storage_api)
    with storage:
      results, cold, hot = isolateserver.archive_files_to_storage(
          storage, [os.path.join(self.tempdir, p) for p in files_content], None)
    self.assertEqual(
        {os.path.join(self.tempdir, f): h for f, h in files_hash.items()},
        dict(results))

    expected = [
      (os.path.join(self.tempdir, u'a'), files_hash['a']),
      (os.path.join(self.tempdir, u'sub', u'c'),
        files_hash[os.path.join(u'sub', u'c')]),
      (os.path.join(self.tempdir, u'sub', u'a_copy'),
        files_hash[os.path.join(u'sub', u'a_copy')]),
    ]
    self.assertEqual(expected, [(f.path, f.digest) for f in cold])
    self.assertEqual(
        [(os.path.join(self.tempdir, u'b'), files_hash['b'])],
        [(f.path, f.digest) for f in hot])
    # 'contains' checked for existence of all files.
    self.assertEqualIgnoringOrder(
        set(files_hash.values()),
        [i.digest for i in sum(storage_api.contains_calls, [])])
    # Pushed only missing files.
    self.assertEqualIgnoringOrder(
        list(missing),
        [call[0].digest for call in storage_api.push_calls])
    # Pushing with correct data, size and push state.
    for pushed_item, _push_state, pushed_content in storage_api.push_calls:
      filename = missing[pushed_item.digest]
      self.assertEqual(os.path.join(self.tempdir, filename), pushed_item.path)
      self.assertEqual(files_content[filename], pushed_content)
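Note: in archive_files_to_storage's return value, 'cold' holds the items that
were absent from the server and had to be uploaded, while 'hot' holds the
items the server already had. An illustrative helper showing that split (not
part of isolateserver):

    def split_cold_hot(items, missing_digests):
        # 'cold': absent from the server, pushed during archiving.
        cold = [i for i in items if i.digest in missing_digests]
        # 'hot': already stored; only the 'contains' check touched them.
        hot = [i for i in items if i.digest not in missing_digests]
        return cold, hot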
Example #5

    def test_async_push(self):
        for use_zip in (False, True):
            item = FakeItem('1234567')
            storage_api = self.mock_push()
            storage = isolateserver.Storage(storage_api, use_zip)
            channel = threading_utils.TaskChannel()
            storage.async_push(channel, 0, item)
            # Wait for the push to finish.
            pushed_item = channel.pull()
            self.assertEqual(item, pushed_item)
            # StorageApi.push was called with the correct arguments.
            self.assertEqual([(item, item.zipped if use_zip else item.data)],
                             storage_api.pushed)
Example #6
  def test_archive_files_to_storage_tar(self):
    # Create 5 files, which is the minimum to create a tarball.
    for i in range(5):
      with open(os.path.join(self.tempdir, six.text_type(i)), 'wb') as f:
        f.write('fooo%d' % i)
    server_ref = isolate_storage.ServerRef('http://localhost:1', 'default')
    storage_api = MockedStorageApi(server_ref, {})
    storage = isolateserver.Storage(storage_api)
    results, cold, hot = isolateserver.archive_files_to_storage(
        storage, [self.tempdir], None)
    self.assertEqual([self.tempdir], results.keys())
    self.assertEqual([], cold)
    # 5 files, plus the isolated file.
    self.assertEqual(6, len(hot))
Example #7
  def test_async_push(self):
    for use_zip in (False, True):
      item = FakeItem('1234567')
      server_ref = isolate_storage.ServerRef(
          'http://localhost:1', 'default-gzip' if use_zip else 'default')
      storage_api = MockedStorageApi(server_ref, {item.digest: 'push_state'})
      storage = isolateserver.Storage(storage_api)
      channel = threading_utils.TaskChannel()
      storage._async_push(channel, item, self.get_push_state(storage, item))
      # Wait for push to finish.
      pushed_item = next(channel)
      self.assertEqual(item, pushed_item)
      # StorageApi.push was called with correct arguments.
      self.assertEqual(
          [(item, 'push_state', item.zipped if use_zip else item.data)],
          storage_api.push_calls)
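Note: all of the async push tests share the same producer/consumer shape: the
push runs on a worker thread and reports through a channel, which re-raises
any worker exception on the consuming thread. A rough, simplified stand-in
for threading_utils.TaskChannel (written for Python 3; the codebase under
test is Python 2):

    import queue

    class SimpleTaskChannel(object):
        # Minimal illustration, not the real TaskChannel implementation.
        def __init__(self):
            self._queue = queue.Queue()

        def send_result(self, result):
            self._queue.put((None, result))

        def send_exception(self, exc):
            self._queue.put((exc, None))

        def __next__(self):
            # Blocks until a worker posts something; an exception raised
            # on the worker thread surfaces here, on the consumer thread.
            exc, result = self._queue.get()
            if exc is not None:
                raise exc
            return result

This is why the tests can assert failures with
'with self.assertRaises(IOError): next(channel)'.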
Example #8

    def test_async_push_generator_errors(self):
        class FakeException(Exception):
            pass

        def faulty_generator(_chunk_size):
            yield 'Hi!'
            raise FakeException('fake exception')

        for use_zip in (False, True):
            item = FakeItem('')
            self.mock(item, 'content', faulty_generator)
            storage_api = self.mock_push()
            storage = isolateserver.Storage(storage_api, use_zip)
            channel = threading_utils.TaskChannel()
            storage.async_push(channel, 0, item)
            with self.assertRaises(FakeException):
                channel.pull()
            # StorageApi's push should never complete when the data cannot be read.
            self.assertEqual(0, len(storage_api.pushed))
Example #9

    def test_get_missing_items(self):
        items = [
            isolateserver.Item('foo', 12),
            isolateserver.Item('blow', 0),
            isolateserver.Item('bizz', 1222),
            isolateserver.Item('buzz', 1223),
        ]
        missing = [
            [items[2], items[3]],
        ]

        class MockedStorageApi(isolateserver.StorageApi):
            def contains(self, _items):
                return missing

        storage = isolateserver.Storage(MockedStorageApi(), use_zip=False)

        # 'get_missing_items' is a generator; materialize its result in a list.
        result = list(storage.get_missing_items(items))
        self.assertEqual(missing, result)
Example #10

  def test_get_missing_items(self):
    items = [
      isolateserver.Item('foo', 12),
      isolateserver.Item('blow', 0),
      isolateserver.Item('bizz', 1222),
      isolateserver.Item('buzz', 1223),
    ]
    missing = {
      items[2]: 123,
      items[3]: 456,
    }

    storage_api = MockedStorageApi(
        {item.digest: push_state for item, push_state in missing.iteritems()})
    storage = isolateserver.Storage(storage_api)

    # 'get_missing_items' is a generator yielding pairs; materialize its
    # result in a dict.
    result = dict(storage.get_missing_items(items))
    self.assertEqual(missing, result)
Example #11
  def test_async_push_generator_errors(self):
    class FakeException(Exception):
      pass

    def faulty_generator():
      yield 'Hi!'
      raise FakeException('fake exception')

    for use_zip in (False, True):
      item = FakeItem('')
      self.mock(item, 'content', faulty_generator)
      server_ref = isolate_storage.ServerRef(
          'http://localhost:1', 'default-gzip' if use_zip else 'default')
      storage_api = MockedStorageApi(server_ref, {item.digest: 'push_state'})
      storage = isolateserver.Storage(storage_api)
      channel = threading_utils.TaskChannel()
      storage._async_push(channel, item, self.get_push_state(storage, item))
      with self.assertRaises(FakeException):
        next(channel)
      # StorageApi's push should never complete when the data cannot be read.
      self.assertEqual(0, len(storage_api.push_calls))
Example #12
  def test_archive_files_to_storage_symlink(self):
    link_path = os.path.join(self.tempdir, u'link')
    with open(os.path.join(self.tempdir, u'foo'), 'wb') as f:
      f.write('fooo')
    fs.symlink('foo', link_path)
    server_ref = isolate_storage.ServerRef('http://localhost:1', 'default')
    storage_api = MockedStorageApi(server_ref, {})
    storage = isolateserver.Storage(storage_api)
    results, cold, hot = isolateserver.archive_files_to_storage(
        storage, [self.tempdir], None)
    self.assertEqual([self.tempdir], results.keys())
    self.assertEqual([], cold)
    # The isolated file, the symlink and the 'foo' file.
    self.assertEqual(3, len(hot))
    self.assertEqual(os.path.join(self.tempdir, u'foo'), hot[0].path)
    self.assertEqual(4, hot[0].size)
    # TODO(maruel): The symlink is reported as its destination. We should fix
    # this because it double-counts the stats.
    self.assertEqual(os.path.join(self.tempdir, u'foo'), hot[1].path)
    self.assertEqual(4, hot[1].size)
    # The isolated file is pure in-memory.
    self.assertIsInstance(hot[2], isolateserver.BufferItem)
Example #13
    def test_async_push_upload_errors(self):
        chunk = 'data_chunk'

        def _generator():
            yield chunk

        def push_side_effect():
            raise IOError('Nope')

        # TODO(vadimsh): Retrying the push when fetching data from a generator
        # is currently broken (it reuses the same generator instance when
        # retrying).
        content_sources = (
            # _generator,
            lambda: [chunk],
        )

        for use_zip in (False, True):
            for source in content_sources:
                item = FakeItem(chunk)
                self.mock(item, 'content', source)
                storage_api = MockedStorageApi(
                    {item.digest: 'push_state'},
                    push_side_effect,
                    namespace='default-gzip' if use_zip else 'default')
                storage = isolateserver.Storage(storage_api)
                channel = threading_utils.TaskChannel()
                storage.async_push(channel, item,
                                   self.get_push_state(storage, item))
                with self.assertRaises(IOError):
                    channel.pull()
                # The initial attempt + all retries.
                attempts = 1 + storage.net_thread_pool.RETRIES
                # Single push attempt call arguments.
                expected_push = (item, 'push_state',
                                 item.zipped if use_zip else item.data)
                # Ensure all pushes are attempted.
                self.assertEqual([expected_push] * attempts,
                                 storage_api.push_calls)
Example #14
    def test_upload_tree(self):
        files = {
            '/a': {
                's': 100,
                'h': 'hash_a',
            },
            '/some/dir/b': {
                's': 200,
                'h': 'hash_b',
            },
            '/another/dir/c': {
                's': 300,
                'h': 'hash_c',
            },
            '/a_copy': {
                's': 100,
                'h': 'hash_a',
            },
        }
        files_data = dict((k, 'x' * files[k]['s']) for k in files)
        all_hashes = set(f['h'] for f in files.itervalues())
        missing_hashes = {'hash_a': 'push a', 'hash_b': 'push b'}

        # Files read by mocked_file_read.
        read_calls = []

        def mocked_file_read(filepath, chunk_size=0, offset=0):
            self.assertIn(filepath, files_data)
            read_calls.append(filepath)
            return files_data[filepath]

        self.mock(isolateserver, 'file_read', mocked_file_read)

        storage_api = MockedStorageApi(missing_hashes)
        storage = isolateserver.Storage(storage_api)

        def mock_get_storage(base_url, namespace):
            self.assertEqual('base_url', base_url)
            self.assertEqual('some-namespace', namespace)
            return storage

        self.mock(isolateserver, 'get_storage', mock_get_storage)

        isolateserver.upload_tree('base_url', files.iteritems(),
                                  'some-namespace')

        # Only the missing files were read.
        self.assertEqualIgnoringOrder(
            missing_hashes, [files[path]['h'] for path in read_calls])
        # 'contains' checked for existence of all files.
        self.assertEqualIgnoringOrder(
            all_hashes,
            [i.digest for i in sum(storage_api.contains_calls, [])])
        # Pushed only missing files.
        self.assertEqualIgnoringOrder(
            missing_hashes,
            [call[0].digest for call in storage_api.push_calls])
        # Pushing with correct data, size and push state.
        for pushed_item, push_state, pushed_content in storage_api.push_calls:
            filenames = [
                name for name, metadata in files.iteritems()
                if metadata['h'] == pushed_item.digest
            ]
            # If multiple files map to the same hash, upload_tree picks the
            # first one.
            filename = filenames[0]
            self.assertEqual(filename, pushed_item.path)
            self.assertEqual(files_data[filename], pushed_content)
            self.assertEqual(missing_hashes[pushed_item.digest], push_state)
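Note: the final loop relies on de-duplication by digest: '/a' and '/a_copy'
carry identical content, so only one of them is read and pushed. An
illustrative reduction (not upload_tree's actual code; which duplicate wins
depends on iteration order):

    def first_path_per_digest(files):
        # Maps each content hash to one representative path; later
        # duplicates are dropped.
        chosen = {}
        for path, meta in files.items():
            chosen.setdefault(meta['h'], path)
        return chosen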
Example #15
  def test_upload_items_empty(self):
    server_ref = isolate_storage.ServerRef('http://localhost:1', 'default')
    storage_api = MockedStorageApi(server_ref, {})
    storage = isolateserver.Storage(storage_api)
    result = storage.upload_items(())
    self.assertEqual([], result)
Example #16

    def test_upload_tree(self):
        root = 'root'
        files = {
            'a': {
                's': 100,
                'h': 'hash_a',
            },
            'b': {
                's': 200,
                'h': 'hash_b',
            },
            'c': {
                's': 300,
                'h': 'hash_c',
            },
            'a_copy': {
                's': 100,
                'h': 'hash_a',
            },
        }
        files_data = dict((k, 'x' * files[k]['s']) for k in files)
        all_hashes = set(f['h'] for f in files.itervalues())
        missing_hashes = set(['hash_a', 'hash_b'])

        # Files read by mocked_file_read.
        read_calls = []
        # 'contains' calls.
        contains_calls = []
        # 'push' calls.
        push_calls = []

        def mocked_file_read(filepath, _chunk_size=0):
            self.assertEqual(root, os.path.dirname(filepath))
            filename = os.path.basename(filepath)
            self.assertIn(filename, files_data)
            read_calls.append(filename)
            return files_data[filename]

        self.mock(isolateserver, 'file_read', mocked_file_read)

        class MockedStorageApi(isolateserver.StorageApi):
            def contains(self, items):
                contains_calls.append(items)
                return [
                    i for i in items
                    if os.path.basename(i.digest) in missing_hashes
                ]

            def push(self, item, content):
                push_calls.append((item, ''.join(content)))

        storage_api = MockedStorageApi()
        storage = isolateserver.Storage(storage_api, use_zip=False)
        storage.upload_tree(root, files)

        # Only the missing files were read.
        self.assertEqualIgnoringOrder(
            missing_hashes, [files[path]['h'] for path in read_calls])
        # 'contains' checked for existence of all files.
        self.assertEqualIgnoringOrder(
            all_hashes, [i.digest for i in sum(contains_calls, [])])
        # Pushed only missing files.
        self.assertEqualIgnoringOrder(missing_hashes,
                                      [call[0].digest for call in push_calls])
        # Pushing with correct data, size and push URLs.
        for pushed_item, pushed_content in push_calls:
            filenames = [
                name for name, metadata in files.iteritems()
                if metadata['h'] == pushed_item.digest
            ]
            # If there are multiple files that map to same hash, upload_tree chooses
            # a first one.
            filename = filenames[0]
            self.assertEqual(os.path.join(root, filename), pushed_item.path)
            self.assertEqual(files_data[filename], pushed_content)