Example #1
0
  def test_ensures_same_server(self):
    """Shards referencing different isolate servers must not be mixed:
    only the first shard's outputs are fetched, the rest are ignored."""
    # Silence the error logged when the server mismatch is detected.
    self.mock(logging, 'error', lambda *_: None)
    recorded = []
    self.mock(
        isolateserver, 'fetch_isolated',
        lambda *args: recorded.append(args))

    def make_outputs_ref(server):
      # Same hash and namespace, differing only in the server URL.
      return {
        'isolatedserver': server,
        'namespace': 'namespace',
        'isolated': 'hash1',
      }

    shard_results = [
      gen_result_response(outputs_ref=make_outputs_ref('https://server1')),
      gen_result_response(outputs_ref=make_outputs_ref('https://server2')),
    ]

    # Run every shard result through the collector.
    collector = swarming.TaskOutputCollector(self.tempdir, 2)
    for shard_index, shard in enumerate(shard_results):
      collector.process_shard_result(shard_index, shard)
    collector.finalize()

    # Exactly one fetch happened; the mismatched second shard was skipped.
    self.assertEqual(1, len(recorded))
    isolated_hash, storage, _, outdir, _ = recorded[0]
    self.assertEqual('hash1', isolated_hash)
    self.assertEqual(os.path.join(self.tempdir, '0'), outdir)
    self.assertEqual('https://server1', storage.location)
Example #2
0
  def test_collect_multi(self):
    """Collects two shards from the same server and checks both the fetch
    calls and the summary (in-memory and dumped to summary.json)."""
    recorded = []

    def fetch_isolated(isolated_hash, storage, cache, outdir, use_symlinks):
      # The collector must hand us real Storage/MemoryCache instances
      # pointed at the server and namespace the shards declared.
      self.assertIs(storage.__class__, isolateserver.Storage)
      self.assertIs(cache.__class__, isolateserver.MemoryCache)
      self.assertEqual('https://localhost:2', storage.location)
      self.assertEqual('default', storage.namespace)
      self.assertEqual(False, use_symlinks)
      recorded.append((isolated_hash, outdir))

    self.mock(isolateserver, 'fetch_isolated', fetch_isolated)

    collector = swarming.TaskOutputCollector(self.tempdir, 2)
    for shard_index in xrange(2):
      ref = {
        'isolated': str(shard_index) * 40,
        'isolatedserver': 'https://localhost:2',
        'namespace': 'default',
      }
      collector.process_shard_result(
          shard_index, gen_result_response(outputs_ref=ref))
    summary = collector.finalize()

    self.assertEqual(
        [
          ('0'*40, os.path.join(self.tempdir, '0')),
          ('1'*40, os.path.join(self.tempdir, '1')),
        ],
        recorded)

    # Build the summary we expect: one outputs_ref per shard, each
    # augmented with its browse URL.
    def make_ref(digest):
      return {
        'isolated': digest,
        'isolatedserver': 'https://localhost:2',
        'namespace': 'default',
        'view_url':
            'https://localhost:2/browse?namespace=default&hash=' + digest,
      }

    expected = {
      'shards': [
        gen_result_response(outputs_ref=make_ref(d))
        for d in ('0'*40, '1'*40)
      ],
    }
    self.assertEqual(expected, summary)

    # The same summary must have been written to disk.
    with open(os.path.join(self.tempdir, 'summary.json'), 'r') as f:
      summary_dump = json.load(f)
    self.assertEqual(expected, summary_dump)
Example #3
0
  def test_works(self):
    """Three shards: the first two reference output files, the last does
    not. Verifies fetches, shared Storage instance, and the summary dump."""
    shard_logs = [
      gen_run_isolated_out_hack_log('https://server', 'namespace', 'hash1'),
      gen_run_isolated_out_hack_log('https://server', 'namespace', 'hash2'),
      SWARM_OUTPUT_SUCCESS,
    ]

    # Push each shard's output through the collector.
    collector = swarming.TaskOutputCollector(
        self.tempdir, 'task/name', len(shard_logs))
    for shard_index, shard_log in enumerate(shard_logs):
      collector.process_shard_result(shard_index, gen_data(shard_log, '0, 0'))
    collector.finalize()

    # Files are fetched only for the two shards that declared outputs.
    expected_calls = [
      ('hash1', None, None, os.path.join(self.tempdir, '0'), False),
      ('hash2', None, None, os.path.join(self.tempdir, '1'), False),
    ]
    self.assertEqual(len(expected_calls), len(self.fetch_isolated_calls))
    storage_instances = set()
    for expected, used in zip(expected_calls, self.fetch_isolated_calls):
      isolated_hash, storage, cache, outdir, require_command = used
      storage_instances.add(storage)
      # |storage| and |cache| are checked separately; mask them with None.
      self.assertEqual(
          expected, (isolated_hash, None, None, outdir, require_command))
      # A cache must always be provided.
      self.assertTrue(cache)

    # All fetches must share a single Storage instance...
    self.assertEqual(1, len(storage_instances))
    # ...pointed at the server and namespace from the logs.
    storage = storage_instances.pop()
    self.assertEqual('https://server', storage.location)
    self.assertEqual('namespace', storage.namespace)

    # The summary on disk mirrors exactly what was fed in.
    with open(os.path.join(self.tempdir, 'summary.json'), 'r') as f:
      summary = json.load(f)
    expected_summary = {
      'task_name': 'task/name',
      'shards': [gen_data(shard_log, '0, 0') for shard_log in shard_logs],
    }
    self.assertEqual(expected_summary, summary)
Example #4
0
  def test_ensures_same_server(self):
    """Two shards pointing at different servers: only the first shard's
    outputs are fetched, the conflicting second one is ignored."""
    def make_result(server, digest):
      return gen_data(
          gen_run_isolated_out_hack_log(server, 'namespace', digest),
          '0, 0')

    shard_results = [
      make_result('https://server1', 'hash1'),
      make_result('https://server2', 'hash2'),
    ]

    # Run both shard results through the collector.
    collector = swarming.TaskOutputCollector(self.tempdir, 'task/name', 2)
    for shard_index, shard in enumerate(shard_results):
      collector.process_shard_result(shard_index, shard)
    collector.finalize()

    # Exactly one fetch was issued, for the first shard only.
    self.assertEqual(1, len(self.fetch_isolated_calls))
    isolated_hash, storage, _, outdir, _ = self.fetch_isolated_calls[0]
    self.assertEqual('hash1', isolated_hash)
    self.assertEqual(os.path.join(self.tempdir, '0'), outdir)
    self.assertEqual('https://server1', storage.location)