Example #1
def _create_request_sender(
    experiment_id=None,
    api=_USE_DEFAULT,
    allowed_plugins=_USE_DEFAULT,
    max_blob_size=_USE_DEFAULT,
    rpc_rate_limiter=_USE_DEFAULT,
    blob_rpc_rate_limiter=_USE_DEFAULT,
):
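    """Builds a `_BatchedRequestSender` for tests, substituting mock/test
    defaults for any argument left as `_USE_DEFAULT`."""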
    if api is _USE_DEFAULT:
        api = _create_mock_client()
    if allowed_plugins is _USE_DEFAULT:
        allowed_plugins = _SCALARS_ONLY
    if max_blob_size is _USE_DEFAULT:
        max_blob_size = 12345
    if rpc_rate_limiter is _USE_DEFAULT:
        rpc_rate_limiter = util.RateLimiter(0)
    if blob_rpc_rate_limiter is _USE_DEFAULT:
        blob_rpc_rate_limiter = util.RateLimiter(0)
    return uploader_lib._BatchedRequestSender(
        experiment_id=experiment_id,
        api=api,
        allowed_plugins=allowed_plugins,
        max_blob_size=max_blob_size,
        rpc_rate_limiter=rpc_rate_limiter,
        blob_rpc_rate_limiter=blob_rpc_rate_limiter,
    )
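
# The helper above references `_USE_DEFAULT`, `_SCALARS_ONLY`, and
# `_create_mock_client`, which are defined elsewhere in the test module. A
# minimal sketch of that sentinel pattern follows; the bodies here are
# illustrative guesses, not the project's actual definitions.
from unittest import mock

_USE_DEFAULT = object()  # unique sentinel: an `is` check never collides with
                         # any value a caller could legitimately pass

_SCALARS_ONLY = frozenset(["scalars"])  # illustrative placeholder allowlist


def _create_mock_client():
    # Illustrative stand-in for the write-service client; the tests in these
    # examples only inspect `WriteScalar.call_args_list` afterwards.
    return mock.Mock()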
Example #2
    def test_break_at_tag_boundary(self):
        mock_client = _create_mock_client()
        # Choose tag name sizes such that one tag fits, but not two. Note
        # that tag names appear in both `Tag.name` and the summary metadata.
        long_tag_1 = "a" * 384
        long_tag_2 = "b" * 384
        event = event_pb2.Event(step=1)
        event.summary.value.add(tag=long_tag_1, simple_value=1.0)
        event.summary.value.add(tag=long_tag_2, simple_value=2.0)
        run_to_events = {"train": [event]}

        builder = uploader_lib._BatchedRequestSender("123", mock_client,
                                                     util.RateLimiter(0))
        builder.send_requests(run_to_events)
        requests = [c[0][0] for c in mock_client.WriteScalar.call_args_list]
        for request in requests:
            _clear_wall_times(request)

        expected = [
            write_service_pb2.WriteScalarRequest(experiment_id="123"),
            write_service_pb2.WriteScalarRequest(experiment_id="123"),
        ]
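        # One request per tag: the two long tags cannot fit within a single
        # request's byte budget.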
        (expected[0].runs.add(name="train").tags.add(
            name=long_tag_1,
            metadata=test_util.scalar_metadata(long_tag_1)).points.add(
                step=1, value=1.0))
        (expected[1].runs.add(name="train").tags.add(
            name=long_tag_2,
            metadata=test_util.scalar_metadata(long_tag_2)).points.add(
                step=1, value=2.0))
        self.assertEqual(requests, expected)
Example #3
    def test_break_at_run_boundary(self):
        mock_client = _create_mock_client()
        # Choose run name sizes such that one run fits, but not two.
        long_run_1 = "A" * 768
        long_run_2 = "B" * 768
        event_1 = event_pb2.Event(step=1)
        event_1.summary.value.add(tag="foo", simple_value=1.0)
        event_2 = event_pb2.Event(step=2)
        event_2.summary.value.add(tag="bar", simple_value=-2.0)
        run_to_events = collections.OrderedDict([(long_run_1, [event_1]),
                                                 (long_run_2, [event_2])])

        builder = uploader_lib._BatchedRequestSender("123", mock_client,
                                                     util.RateLimiter(0))
        builder.send_requests(run_to_events)
        requests = [c[0][0] for c in mock_client.WriteScalar.call_args_list]

        for request in requests:
            _clear_wall_times(request)

        expected = [
            write_service_pb2.WriteScalarRequest(experiment_id="123"),
            write_service_pb2.WriteScalarRequest(experiment_id="123"),
        ]
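        # One request per run: the two long run names cannot fit within a
        # single request's byte budget.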
        (expected[0].runs.add(name=long_run_1).tags.add(
            name="foo",
            metadata=test_util.scalar_metadata("foo")).points.add(step=1,
                                                                  value=1.0))
        (expected[1].runs.add(name=long_run_2).tags.add(
            name="bar",
            metadata=test_util.scalar_metadata("bar")).points.add(step=2,
                                                                  value=-2.0))
        self.assertEqual(requests, expected)
Example #4
    def test_prunes_tags_and_runs(self):
        mock_client = _create_mock_client()
        event_1 = event_pb2.Event(step=1)
        event_1.summary.value.add(tag="foo", simple_value=1.0)
        event_2 = event_pb2.Event(step=2)
        event_2.summary.value.add(tag="bar", simple_value=-2.0)
        run_to_events = collections.OrderedDict(
            [("train", [event_1]), ("test", [event_2])]
        )

        real_create_point = (
            uploader_lib._ScalarBatchedRequestSender._create_point
        )

        create_point_call_count_box = [0]

        def mock_create_point(uploader_self, *args, **kwargs):
            # Simulate out-of-space error the first time that we try to store
            # the second point.
            create_point_call_count_box[0] += 1
            if create_point_call_count_box[0] == 2:
                raise uploader_lib._OutOfSpaceError()
            return real_create_point(uploader_self, *args, **kwargs)

        with mock.patch.object(
            uploader_lib._ScalarBatchedRequestSender,
            "_create_point",
            mock_create_point,
        ):
            builder = uploader_lib._BatchedRequestSender(
                "123", mock_client, util.RateLimiter(0)
            )
            builder.send_requests(run_to_events)
        requests = [c[0][0] for c in mock_client.WriteScalar.call_args_list]
        for request in requests:
            _clear_wall_times(request)

        expected = [
            write_service_pb2.WriteScalarRequest(experiment_id="123"),
            write_service_pb2.WriteScalarRequest(experiment_id="123"),
        ]
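        # One request per run: the simulated out-of-space error on the second
        # point forces a flush, and the second run's point is resent in a
        # fresh request with no empty runs or tags left over.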
        (
            expected[0]
            .runs.add(name="train")
            .tags.add(name="foo", metadata=test_util.scalar_metadata("foo"))
            .points.add(step=1, value=1.0)
        )
        (
            expected[1]
            .runs.add(name="test")
            .tags.add(name="bar", metadata=test_util.scalar_metadata("bar"))
            .points.add(step=2, value=-2.0)
        )
        self.assertEqual(expected, requests)
Example #5
    def test_no_room_for_single_point(self):
        mock_client = _create_mock_client()
        event = event_pb2.Event(step=1, wall_time=123.456)
        event.summary.value.add(tag="foo", simple_value=1.0)
        # A run name that alone exhausts the request byte budget can never
        # fit, so flushing does not help and the sender gives up.
        long_run_name = "A" * uploader_lib._MAX_REQUEST_LENGTH_BYTES
        run_to_events = {long_run_name: [event]}
        with self.assertRaises(RuntimeError) as cm:
            builder = uploader_lib._BatchedRequestSender(
                "123", mock_client, util.RateLimiter(0))
            builder.send_requests(run_to_events)
        self.assertEqual(str(cm.exception), "add_event failed despite flush")
Example #6
    def _populate_run_from_events(self, run_proto, events):
        """Sends `events` through a batched sender backed by a mock client
        and merges the resulting run, if any, into `run_proto`."""
        mock_client = _create_mock_client()
        builder = uploader_lib._BatchedRequestSender(
            experiment_id="123",
            api=mock_client,
            rpc_rate_limiter=util.RateLimiter(0),
        )
        builder.send_requests({"": events})
        requests = [c[0][0] for c in mock_client.WriteScalar.call_args_list]
        if requests:
            self.assertLen(requests, 1)
            self.assertLen(requests[0].runs, 1)
            run_proto.MergeFrom(requests[0].runs[0])
Example #7
    def test_no_budget_for_experiment_id(self):
        mock_client = _create_mock_client()
        event = event_pb2.Event(step=1, wall_time=123.456)
        event.summary.value.add(tag="foo", simple_value=1.0)
        run_to_events = {"run_name": [event]}
        # An experiment ID that alone exhausts the request byte budget leaves
        # no room for any payload at all.
        long_experiment_id = "A" * uploader_lib._MAX_REQUEST_LENGTH_BYTES
        with self.assertRaises(RuntimeError) as cm:
            builder = uploader_lib._BatchedRequestSender(
                long_experiment_id, mock_client, util.RateLimiter(0))
            builder.send_requests(run_to_events)
        self.assertEqual(
            str(cm.exception), "Byte budget too small for experiment ID"
        )
Example #8
    def test_break_at_scalar_point_boundary(self):
        mock_client = _create_mock_client()
        point_count = 2000  # comfortably saturates a single 1024-byte request
        events = []
        for step in range(point_count):
            summary = scalar_v2.scalar_pb("loss", -2.0 * step)
            if step > 0:
                summary.value[0].ClearField("metadata")
            events.append(event_pb2.Event(summary=summary, step=step))
        run_to_events = {"train": events}

        builder = uploader_lib._BatchedRequestSender(
            "123", mock_client, util.RateLimiter(0)
        )
        builder.send_requests(run_to_events)
        requests = [c[0][0] for c in mock_client.WriteScalar.call_args_list]
        for request in requests:
            _clear_wall_times(request)

        self.assertGreater(len(requests), 1)
        self.assertLess(len(requests), point_count)

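        # Points are split across requests but must stay contiguous and in
        # step order, and every request must respect the byte budget.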
        total_points_in_result = 0
        for request in requests:
            self.assertLen(request.runs, 1)
            run = request.runs[0]
            self.assertEqual(run.name, "train")
            self.assertLen(run.tags, 1)
            tag = run.tags[0]
            self.assertEqual(tag.name, "loss")
            for point in tag.points:
                self.assertEqual(point.step, total_points_in_result)
                self.assertEqual(point.value, -2.0 * point.step)
                total_points_in_result += 1
            self.assertLessEqual(
                request.ByteSize(), uploader_lib._MAX_REQUEST_LENGTH_BYTES
            )
        self.assertEqual(total_points_in_result, point_count)