def test_aggregate_results(self):
    """Verifies aggregation across multiple results for the same test id."""
    test_id = 'made.up.test_id'
    # Two samples carry 250 ms extras, the third carries 125 ms extras.
    extras_250 = [
        self._mock_extra_result(250, 'total_time', 'ms'),
        self._mock_extra_result(250, 'total_time', 'ms'),
    ]
    extras_125 = [
        self._mock_extra_result(125, 'total_time', 'ms'),
        self._mock_extra_result(125, 'total_time', 'ms'),
    ]
    results = [
        self._mock_result(test_id, 10.5, extra_results=extras_250),
        self._mock_result(test_id, 20, extra_results=extras_250),
        self._mock_result(test_id, .44444, extra_results=extras_125),
    ]

    agg = util.report_aggregate_results(results)

    self.assertEqual(agg['test_id'], test_id)
    self.assertEqual(agg['mean'], 10.314813333333333)
    self.assertEqual(agg['max'], 20)
    self.assertEqual(agg['min'], 0.44444)
    self.assertEqual(agg['std'], 7.9845977692276744)
    self.assertEqual(agg['samples'], 3)
    self.assertIn('config', agg)
    # Extra results are aggregated per index: (250 + 250 + 125) / 3.
    self.assertEqual(agg['extra_results'][0]['mean'], 208.33333333333334)
def test_aggregate_results_only_1(self):
    """Verifies aggregating a single result yields that result's stats."""
    test_id = 'made.up.test_id'
    results = [self._mock_result(test_id, 10.5)]

    agg = util.report_aggregate_results(results)

    self.assertEqual(agg['test_id'], test_id)
    # With one sample, mean/max/min collapse to the sample and std is 0.
    expected = {'mean': 10.5, 'max': 10.5, 'min': 10.5, 'std': 0, 'samples': 1}
    for key, value in expected.items():
        self.assertEqual(agg[key], value)
    self.assertIn('config', agg)
def process_folder(folder_path, report_config=None):
    """Aggregates results of a single test found under folder_path and uploads.

    Args:
      folder_path: Folder to recursively search for results files, e.g.
        worker_0_stdout.log.
      report_config: dict based config information normally passed down from a
        higher level harness with high level system information.
    """
    if report_config is None:
        report_config = {}
    agg_result = util.report_aggregate_results(_collect_results(folder_path))
    util.upload_results(
        report_config, agg_result, framework='pytorch', test_harness='pytorch')
def test_aggregate_results_no_imgs_sec(self):
    """Verifies aggregation when results carry no imgs/sec values."""
    test_id = 'made.up.test_id'
    results = [self._mock_result(test_id, None) for _ in range(3)]

    agg = util.report_aggregate_results(results)

    self.assertEqual(agg['test_id'], test_id)
    # Empty results contribute nothing: every statistic (including the
    # sample count) is reported as zero.
    for key in ('mean', 'max', 'min', 'std', 'samples'):
        self.assertEqual(agg[key], 0)
    self.assertIn('config', agg)
def process_folder(folder_path, report_config=None):
    """Aggregates results found under folder_path and uploads them.

    Args:
      folder_path: Folder to recursively search for results files, e.g.
        worker_0_stdout.log.
      report_config: dict config information normally passed down from a
        higher level harness with high level system information.
    """
    if report_config is None:
        report_config = {}
    agg_result = util.report_aggregate_results(_collect_results(folder_path))
    util.upload_results(
        report_config,
        agg_result,
        framework='tensorflow',
        test_harness='tf_cnn_benchmark')