Example #1
0
  def test_upload_items(self):
    """upload_items() must return exactly the items the API reported missing."""
    server_ref = isolate_storage.ServerRef('http://localhost:1', 'default')
    payloads = ['a'*12, '', 'c'*1222, 'd'*1223]
    items = [
      isolateserver.BufferItem(p, server_ref.hash_algo) for p in payloads
    ]
    # Only the two larger items are "missing" on the mocked server.
    missing = {items[2]: 123, items[3]: 456}
    storage_api = MockedStorageApi(
        server_ref,
        dict((item.digest, state) for item, state in missing.items()))
    storage = isolateserver.Storage(storage_api)

    # Intentionally pass a generator, to confirm it works.
    result = storage.upload_items(item for item in items)
    self.assertEqual(sorted(missing), sorted(result))
    self.assertEqual(4, len(items))
    self.assertEqual(2, len(missing))
    self.assertEqual([items], storage_api.contains_calls)
    expected_pushes = sorted(
        (item, state, item.content()[0]) for item, state in missing.items())
    self.assertEqual(expected_pushes, sorted(storage_api.push_calls))
Example #2
0
  def zip_and_upload(self):
    """Zips up all the files necessary to run a shard and uploads to Swarming
    master.
    """
    assert not self._isolate_item

    start_time = time.time()
    self._isolate_item = isolateserver.BufferItem(
        self.bundle.zip_into_buffer(), self._algo, is_isolated=True)
    print 'Zipping completed, time elapsed: %f' % (time.time() - start_time)

    try:
      start_time = time.time()
      uploaded = self.storage.upload_items([self._isolate_item])
      elapsed = time.time() - start_time
    except (IOError, OSError) as exc:
      print >> sys.stderr, 'Failed to upload the zip file: %s' % exc
      return False

    if self._isolate_item in uploaded:
      print 'Upload complete, time elapsed: %f' % elapsed
    else:
      print 'Zip file already on server, time elapsed: %f' % elapsed

    return True
Example #3
0
    def run_push_and_fetch_test(self, namespace):
        """Round-trips ten small buffers through the isolate server."""
        storage = isolateserver.get_storage(self.server.url, namespace)

        # Upload ten distinct payloads; all should be new to the server.
        items = []
        for i in xrange(10):
            items.append(isolateserver.BufferItem('item %d' % i))
        uploaded = storage.upload_items(items)
        self.assertEqual(set(items), set(uploaded))

        # Fetch them all back into local memory cache.
        cache = isolateserver.MemoryCache()
        queue = isolateserver.FetchQueue(storage, cache)

        # Start fetching.
        pending = set()
        for item in items:
            digest = item.digest
            pending.add(digest)
            queue.add(digest)

        # Drain the queue until every digest has arrived.
        while pending:
            pending.discard(queue.wait(pending))

        # Fetched bytes must match what was pushed, in order.
        self.assertEqual([i.buffer for i in items],
                         [cache.read(i.digest) for i in items])
Example #4
0
def upload_zip_bundle(isolate_server, bundle):
    """Uploads a zip package to isolate storage.

    Args:
      isolate_server: URL of an isolate server.
      bundle: instance of ZipPackage to upload.

    Returns:
      Raw fetch URL to get the file from on success, None on failure.
    """
    # The swarming bot grabs the file with a raw HTTP GET, so store it in the
    # 'default' namespace where data is kept uncompressed (swarm_bot doesn't
    # understand compressed data yet). This namespace has nothing to do with
    # the |namespace| passed to run_isolated.py for isolated task files.
    logging.info('Zipping up and uploading files...')
    try:
        start_time = now()
        isolate_item = isolateserver.BufferItem(
            bundle.zip_into_buffer(), high_priority=True)
        with isolateserver.get_storage(isolate_server, 'default') as storage:
            uploaded = storage.upload_items([isolate_item])
            bundle_url = storage.get_fetch_url(isolate_item)
        elapsed = now() - start_time
    except (IOError, OSError) as exc:
        tools.report_error('Failed to upload the zip file: %s' % exc)
        return None
    # Absence from |uploaded| means the server already had the blob.
    if isolate_item in uploaded:
        logging.info('Upload complete, time elapsed: %f', elapsed)
    else:
        logging.info('Zip file already on server, time elapsed: %f', elapsed)
    return bundle_url
Example #5
0
  def run_push_and_fetch_test(self, namespace):
    """Uploads ten buffers then fetches them back and compares contents."""
    server_ref = isolate_storage.ServerRef(self.server.url, namespace)
    storage = isolateserver.get_storage(server_ref)
    hash_algo = storage.server_ref.hash_algo

    # Upload ten distinct payloads.
    items = [
        isolateserver.BufferItem('item %d' % i, hash_algo) for i in range(10)
    ]
    uploaded = storage.upload_items(items)
    self.assertEqual(set(items), set(uploaded))

    # Fetch them all back into local memory cache.
    cache = local_caching.MemoryContentAddressedCache()
    queue = isolateserver.FetchQueue(storage, cache)

    # Start fetching; wait_on() registers interest in each digest.
    pending = set()
    for item in items:
      digest = item.digest
      pending.add(digest)
      queue.add(digest)
      queue.wait_on(digest)

    # Drain the queue until every digest has arrived.
    while pending:
      pending.discard(queue.wait())

    # Fetched bytes must match what was pushed, in order.
    fetched = []
    for item in items:
      with cache.getfileobj(item.digest) as f:
        fetched.append(f.read())

    self.assertEqual([''.join(i.content()) for i in items], fetched)
 def isolate_item(self):
     """Lazily builds the zipped bundle as a BufferItem.

     Calling this property 'closes' the manifest and it can't be modified
     afterward.
     """
     cached = self._isolate_item
     if cached is None:
         cached = isolateserver.BufferItem(
             self.bundle.zip_into_buffer(), high_priority=True)
         self._isolate_item = cached
     return cached
  def run_upload_items_test(self, namespace):
    """Verifies upload_items() uploads new items and skips known ones."""
    storage = isolateserver.get_storage(self.server.url, namespace)

    # Ten distinct payloads to upload.
    items = [isolateserver.BufferItem('item %d' % i) for i in xrange(10)]

    # First round: the server has nothing, so everything is uploaded.
    self.assertEqual(set(items), set(storage.upload_items(items)))

    # All items are there now.
    self.assertFalse(dict(storage.get_missing_items(items)))

    # Second round mixes known items with fresh ones.
    more = [isolateserver.BufferItem('more item %d' % i) for i in xrange(10)]

    # Only |more| should actually be uploaded.
    self.assertEqual(set(more), set(storage.upload_items(items + more)))
  def run_upload_items_test(self, namespace):
    """Checks upload_items() transfers new items and skips existing ones."""
    storage = isolateserver.get_storage(
        isolate_storage.ServerRef(self.server.url, namespace))
    hash_algo = storage.server_ref.hash_algo

    # Ten distinct payloads to upload.
    items = [
      isolateserver.BufferItem('item %d' % i, hash_algo) for i in xrange(10)
    ]

    # First round: the server has nothing, so everything is uploaded.
    self.assertEqual(set(items), set(storage.upload_items(items)))

    # Second round mixes known items with fresh ones.
    more = [
      isolateserver.BufferItem('more item %d' % i, hash_algo)
      for i in xrange(10)
    ]

    # Only |more| should actually be uploaded.
    self.assertEqual(set(more), set(storage.upload_items(items + more)))
Example #9
0
    def run_synchronous_push_test(self, namespace):
        """Pushes missing items one at a time and verifies they all land."""
        storage = isolateserver.get_storage(self.server.url, namespace)

        # Ten distinct payloads to exercise the push path.
        items = []
        for i in xrange(10):
            items.append(isolateserver.BufferItem('item %d' % i))

        # Fresh storage: every item must be reported missing.
        missing = dict(storage.get_missing_items(items))
        self.assertEqual(set(items), set(missing))

        # Push each missing item synchronously, one by one.
        for item, push_state in missing.items():
            storage.push(item, push_state)

        # Nothing should be missing any more.
        self.assertFalse(dict(storage.get_missing_items(items)))
def send_and_receive(random_pool, storage, progress, size):
  """Sends a random file and gets it back.

  # TODO(maruel): Add a batching argument of value [1, 500] to batch requests.

  Returns (delay, size)
  """
  batch = 1
  start = time.time()
  # Create the payload(s) out of the random pool.
  items = [
    isolateserver.BufferItem(random_pool.gen(size), False)
    for _ in xrange(batch)
  ]
  try:
    # len(_uploaded) may be < len(items) when the items are not random enough
    # or the value of --mid-size is very low compared to --items.
    _uploaded = storage.upload_items(items)

    # Time only the fetch round-trip from here on.
    start = time.time()

    cache = isolateserver.MemoryCache()
    queue = isolateserver.FetchQueue(storage, cache)
    for item in items:
      queue.add(item.digest, item.size)

    waiting = [item.digest for item in items]
    while waiting:
      waiting.remove(queue.wait(waiting))

    # Every fetched blob must match what was pushed, and nothing extra.
    expected = dict((i.digest, ''.join(i.content())) for i in items)
    for digest in cache.cached_set():
      assert expected.pop(digest) == cache.read(digest)
    assert not expected, expected

    duration = max(0, time.time() - start)
  except isolateserver.MappingError as e:
    # A failed round-trip is reported as the error string instead of a delay.
    duration = str(e)
  if isinstance(duration, float):
    progress.update_item('', index=1, data=size)
  else:
    progress.update_item('', index=1)
  return (duration, size)