def mx_resnet_print(key, val=None, metadata=None, deferred=False,
                    stack_offset=1, sync=False, uniq=True):
    """Emit one MLPerf log entry for the ResNet benchmark.

    When ``uniq`` is true only MPI rank 0 logs; otherwise every rank does.
    When ``sync`` is true all ranks barrier before and after logging.
    """
    rank = mpiwrapper.rank()
    if sync:
        mpiwrapper.barrier()
    # uniq=True restricts output to rank 0; uniq=False lets all ranks print.
    if not uniq or rank == 0:
        mlperf_print(
            key=key,
            value=val,
            metadata=metadata,
            deferred=deferred,
            benchmark=mlperf_constants.RESNET,
            stack_offset=stack_offset,
            root_dir=os.path.dirname(os.path.abspath(__file__)),
            extra_print=False)
    if sync:
        mpiwrapper.barrier()
def test_mlperf_print_with_value(self):
    """A key logged with a value carries that value in the JSON payload."""
    expected_left = ":::MLL 1558767599.999 eval_accuracy:"
    expected_right = ('{"value": 0.99, '
                      '"metadata": {"file": "mybenchmark/file.py", "lineno": 42}}')
    with _captured_stdout() as out:
        mlperf_log.mlperf_print(constants.EVAL_ACCURACY, value=0.99)
    first_line = out.getvalue().splitlines()[0]
    # ":::MLL <ts> <key>:" prefix, then the JSON payload as the 4th field.
    fields = first_line.split(" ", 3)
    self.assertEqual(" ".join(fields[0:3]), expected_left)
    self.assertDictEqual(json.loads(fields[3]), json.loads(expected_right))
def test_mlperf_print_simple(self):
    """A key logged without a value gets a null value in the JSON payload."""
    expected_left = ":::MLL 1558767599.999 run_start:"
    expected_right = ('{"value": null, '
                      '"metadata": {"file": "mybenchmark/file.py", "lineno": 42}}')
    with _captured_stdout() as out:
        mlperf_log.mlperf_print(constants.RUN_START)
    first_line = out.getvalue().splitlines()[0]
    # ":::MLL <ts> <key>:" prefix, then the JSON payload as the 4th field.
    fields = first_line.split(" ", 3)
    self.assertEqual(" ".join(fields[0:3]), expected_left)
    self.assertDictEqual(json.loads(fields[3]), json.loads(expected_right))
def test_mlperf_print_with_metadata(self):
    """Caller-supplied metadata is merged into the emitted metadata dict."""
    expected_left = ":::MLL 1558767599.999 epoch_stop:"
    expected_right = ('{"value": null, "metadata": '
                      '{"file": "mybenchmark/file.py", "lineno": 42, "first_epoch_num": 1}}')
    with _captured_stdout() as out:
        mlperf_log.mlperf_print(
            constants.EPOCH_STOP, metadata={"first_epoch_num": 1})
    first_line = out.getvalue().splitlines()[0]
    # ":::MLL <ts> <key>:" prefix, then the JSON payload as the 4th field.
    fields = first_line.split(" ", 3)
    self.assertEqual(" ".join(fields[0:3]), expected_left)
    self.assertDictEqual(json.loads(fields[3]), json.loads(expected_right))
def manual_test():
    """Exercise the logger by hand; output is inspected visually, not asserted."""
    repo_root = os.path.normpath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", ".."))
    mlperf_log.setdefault(
        benchmark="resnet", root_dir=repo_root, extra_print=True, prefix="TEST")
    # Same three sample entries as the unit tests above, emitted in order.
    for log_key, log_kwargs in (
            (constants.RUN_START, {}),
            (constants.EVAL_ACCURACY, {"value": 0.99}),
            (constants.EPOCH_STOP, {"metadata": {"first_epoch_num": 1}})):
        mlperf_log.mlperf_print(log_key, **log_kwargs)
def minigo_print(*args, stack_offset=1, **kwargs):
    """Thin pass-through to ``mlperf_log.mlperf_print`` for the MiniGo benchmark.

    TODO: add support for multinode logging here (only rank 0 etc.)
    """
    mlperf_log.mlperf_print(*args, stack_offset=stack_offset, **kwargs)