Example #1
  def testPerhost(self, mock_benchmark_path):
    del mock_benchmark_path
    vm = MockVm()
    vm.RobustRemoteCommand.return_value = ('', '')
    results = list(omb.RunBenchmark(omb.RunRequest('igather', [vm], 1024)))
    self.assertIn('-npernode 2', results[0].full_cmd)
    self.assertIn('-m 1024:1024', results[1].full_cmd)
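These tests reference a MockVm helper that is not shown on this page. A minimal sketch of what the assertions imply (an internal_ip of 10.0.0.1 and a CPU count that drives the '-npernode 2' and 'slots=2' arguments); the real test module defines its own version, so the names here are assumptions:

import unittest.mock as mock

def MockVm(internal_ip: str = '10.0.0.1', num_cpus: int = 2) -> mock.Mock:
  # Hypothetical stand-in: a mock VM whose remote-command methods the
  # tests override via return_value / side_effect.
  vm = mock.Mock(internal_ip=internal_ip)
  vm.NumCpusForBenchmark.return_value = num_cpus  # assumed method name
  return vm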
Example #2
  def testRunResult(self):
    test_output = inspect.cleandoc("""
    [0] MPI startup(): Rank    Pid      Node name       Pin cpu
    [0] MPI startup(): 0       17442    pkb-a0b71860-0  {0,1}
    [0] MPI startup(): 1       3735     pkb-a0b71860-1  {0,
                                          1}
    # OSU MPI Multiple Bandwidth / Message Rate Test v5.7
    # [ pairs: 15 ] [ window size: 64 ]
    # Size                  MB/s        Messages/s
    1                       6.39        6385003.80
    """)
    vm = MockVm()
    mpitest_path = 'path/to/startup/osu_mbw_mr'
    vm.RemoteCommand.side_effect = [(mpitest_path, ''), (mpitest_path, '')]
    vm.RobustRemoteCommand.side_effect = [(test_output, ''), (test_output, '')]
    vms = [vm, mock.Mock(internal_ip='10.0.0.2')]
    results = list(omb.RunBenchmark(omb.RunRequest('mbw_mr', vms)))

    expected_result = omb.RunResult(
        name='mbw_mr',
        metadata={
            'pairs': '15',
            'window_size': '64'
        },
        data=[{
            'size': 1,
            'bandwidth': 6.39,
            'messages_per_second': 6385003.8
        }],
        full_cmd=('OMPI_MCA_btl=self,tcp '
                  'OMPI_MCA_hwloc_base_binding_policy=core '
                  'mpirun -x OMPI_MCA_btl '
                  '-x OMPI_MCA_hwloc_base_binding_policy '
                  '-report-bindings -display-map -n 2 -npernode 1 '
                  '--use-hwthread-cpus -host 10.0.0.1:slots=2,10.0.0.2:slots=2 '
                  f'{mpitest_path} --iterations 10'),
        units='MB/s',
        params={'--iterations': 10},
        mpi_vendor='openmpi',
        mpi_version='3.1.2',
        value_column='bandwidth',
        number_processes=2,
        run_time=0,
        pinning=['0:0:0,1', '1:1:0,1'],
        perhost=1,
        mpi_env={
            'OMPI_MCA_btl': 'self,tcp',
            'OMPI_MCA_hwloc_base_binding_policy': 'core',
        })
    self.assertEqual(expected_result, results[0])
    self.assertLen(results, 2)
    # Called twice, the second time with 4*2=8 processes
    self.assertEqual(8, results[1].number_processes)
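Examples #2 and #5 both expect pinning=['0:0:0,1', '1:1:0,1'] to be recovered from the '[0] MPI startup()' lines in the test output, including the pin list that wraps across two lines. A minimal sketch of such a parser, assuming each triple is rank:node-ordinal:cpu-list (the real omb module has its own implementation):

import re
from typing import Dict, List

def ParsePinning(startup_text: str) -> List[str]:
  """Sketch: extracts 'rank:node:cpus' triples from MPI startup lines."""
  # Re-join pin lists that wrap across lines, e.g. '{0,\n  1}' -> '{0,1}'.
  text = re.sub(r',\s*\n\s*', ',', startup_text)
  node_indexes: Dict[str, int] = {}
  pinning = []
  for rank, node, cpus in re.findall(
      r'MPI startup\(\):\s+(\d+)\s+\d+\s+(\S+)\s+\{([\d,]+)\}', text):
    # Assumption: the middle field is the node's ordinal, not its name.
    node_index = node_indexes.setdefault(node, len(node_indexes))
    pinning.append(f'{rank}:{node_index}:{cpus}')
  return pinning

Fed the test_output above, this yields ['0:0:0,1', '1:1:0,1'].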
Example #3
  def testRun(self, mock_run):
    bm_spec = MockBenchmarkSpec()
    samples = omb_benchmark.Run(bm_spec)

    self.assertSampleListsEqualUpToTimestamp(_EXPECTED_SAMPLES, samples[:2])
    self.assertLen(samples, 84)
    expected_calls = [
        mock.call(omb.RunRequest(name, bm_spec.vms))
        for name in omb.BENCHMARKS
    ]
    mock_run.assert_has_calls(expected_calls)
Example #4
  def testMessageSizeRequest(self, mock_run):
    bm_spec = MockBenchmarkSpec()

    omb_benchmark.Run(bm_spec)

    expected_calls = []
    for name, run_type in sorted(omb.BENCHMARKS.items()):
      if run_type.long_running:
        continue
      for size in (1024, 2048):
        expected_calls.append(
            mock.call(omb.RunRequest(name, bm_spec.vms, size)))
    mock_run.assert_has_calls(expected_calls)
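The loop above relies on omb.BENCHMARKS mapping benchmark names to a run-type object with a long_running attribute. A plausible shape, inferred only from the tests on this page (every field name other than long_running is illustrative, not confirmed):

import dataclasses
from typing import Tuple

@dataclasses.dataclass(frozen=True)
class RunType:
  columns: Tuple[str, ...]    # illustrative field
  value_column: str           # e.g. 'bandwidth', as in the expected results
  units: str                  # e.g. 'MB/s'
  long_running: bool = False  # the attribute the test actually checks

BENCHMARKS = {
    'mbw_mr': RunType(('size', 'bandwidth', 'messages_per_second'),
                      'bandwidth', 'MB/s'),
    'igather': RunType(('size', 'latency'), 'latency', 'usec'),
    # ...one entry per OSU micro-benchmark...
}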
Example #5
  def testRunResult(self):
    test_output = inspect.cleandoc("""
    [0] MPI startup(): Rank    Pid      Node name       Pin cpu
    [0] MPI startup(): 0       17442    pkb-a0b71860-0  {0,1}
    [0] MPI startup(): 1       3735     pkb-a0b71860-1  {0,
                                          1}
    # OSU MPI Multiple Bandwidth / Message Rate Test v5.7
    # [ pairs: 15 ] [ window size: 64 ]
    # Size                  MB/s        Messages/s
    1                       6.39        6385003.80
    """)
    vm = MockVm()
    mpitest_path = 'path/to/startup/osu_mbw_mr'
    vm.RemoteCommand.side_effect = [(mpitest_path, ''), (mpitest_path, '')]
    vm.RobustRemoteCommand.side_effect = [(test_output, ''), (test_output, '')]
    vms = [vm, mock.Mock(internal_ip='10.0.0.2')]
    results = list(omb.RunBenchmark(omb.RunRequest('mbw_mr', vms)))

    expected_result = omb.RunResult(
        name='mbw_mr',
        metadata={
            'pairs': '15',
            'window_size': '64'
        },
        data=[{
            'size': 1,
            'bandwidth': 6.39,
            'messages_per_second': 6385003.8
        }],
        full_cmd=('. mpivars.sh; IMPI_DEBUG=5 mpirun '
                  '-genv I_MPI_PIN=1 '
                  '-genv I_MPI_PIN_PROCESSOR_LIST=0 -perhost 1 -n 2 '
                  f'-hosts 10.0.0.1,10.0.0.2 {mpitest_path} --iterations 10'),
        units='MB/s',
        params={'--iterations': 10},
        mpi_vendor='intel',
        mpi_version='2019.6',
        value_column='bandwidth',
        number_processes=2,
        run_time=0,
        pinning=['0:0:0,1', '1:1:0,1'],
        perhost=1,
        mpi_env={
            'I_MPI_PIN_PROCESSOR_LIST': '0',
            'I_MPI_PIN': '1',
            'IMPI_DEBUG': '5'
        })
    self.assertEqual(expected_result, results[0])
    self.assertLen(results, 2)
    # Called twice, the second time with 4*2=8 processes
    self.assertEqual(8, results[1].number_processes)
def Run(bm_spec: benchmark_spec.BenchmarkSpec) -> List[sample.Sample]:
  """Run omb.

  Args:
    bm_spec: The benchmark specification. Contains all data that is required to
      run the benchmark.

  Returns:
    A list of sample.Sample objects.
  """
  vms = bm_spec.vms
  samples = []
  for benchmark in _GetBenchmarks():
    for message_size in _MESSAGE_SIZES.value or [None]:
      # Passing in message_size=None means to run all message sizes.
      request = omb.RunRequest(benchmark, vms, message_size)
      for result in omb.RunBenchmark(request):
        samples.extend(_CreateSamples(result))
  return samples
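Run iterates over _GetBenchmarks() and the _MESSAGE_SIZES flag, neither of which is shown on this page. A plausible sketch assuming absl flags (the flag name and helper body are assumptions, not confirmed by the source):

from typing import List
from absl import flags

# Hypothetical flag; the real module may use a different name/description.
_MESSAGE_SIZES = flags.DEFINE_multi_integer(
    'omb_message_sizes', None,
    'Message sizes to pass to the benchmark; when unset, all sizes run.')

def _GetBenchmarks() -> List[str]:
  # Sketch: run every benchmark omb defines.
  return sorted(omb.BENCHMARKS)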