def test_eq(self):
    """Datums with identical fields compare equal; any difference breaks it."""
    datum_a = provider.TensorDatum(
        step=12, wall_time=0.25, numpy=np.array([1.0, 2.0])
    )
    datum_b = provider.TensorDatum(
        step=12, wall_time=0.25, numpy=np.array([1.0, 2.0])
    )
    datum_c = provider.TensorDatum(
        step=23, wall_time=3.25, numpy=np.array([-0.5, -2.5])
    )
    self.assertEqual(datum_a, datum_b)
    self.assertNotEqual(datum_a, datum_c)
    # Comparison against an unrelated type must not raise, just be unequal.
    self.assertNotEqual(datum_a, object())
def test_eq_with_rank0_tensor(self):
    """Equality works for rank-0 (scalar) numpy payloads.

    Comparing rank-0 ndarrays with ``==`` yields a scalar rather than an
    array, so ``TensorDatum.__eq__`` needs a code path that this test
    exercises.
    """
    # Fix: the payload must actually be rank-0 -- `np.array(1.25)` has
    # shape `()`. The previous `np.array([1.25])` was rank-1 (shape
    # `(1,)`) and never hit the scalar-comparison path the test is
    # named for.
    x1 = provider.TensorDatum(step=12, wall_time=0.25, numpy=np.array(1.25))
    x2 = provider.TensorDatum(step=12, wall_time=0.25, numpy=np.array(1.25))
    x3 = provider.TensorDatum(step=23, wall_time=3.25, numpy=np.array(1.25))
    self.assertEqual(x1, x2)
    self.assertNotEqual(x1, x3)
    self.assertNotEqual(x1, object())
def read_tensors(
    self,
    ctx,
    *,
    experiment_id,
    plugin_name,
    downsample=None,
    run_tag_filter=None,
):
    """Read tensor time series from the remote data provider.

    Builds a `ReadTensorsRequest`, issues it over the gRPC stub (with
    gRPC errors translated via `_translate_grpc_error`), and converts
    the response into a `{run_name: {tag_name: [TensorDatum, ...]}}`
    mapping. Each phase is wrapped in `timing.log_latency`.
    """
    with timing.log_latency("build request"):
        request = data_provider_pb2.ReadTensorsRequest()
        request.experiment_id = experiment_id
        request.plugin_filter.plugin_name = plugin_name
        _populate_rtf(run_tag_filter, request.run_tag_filter)
        # NOTE(review): presumably callers always pass an integer
        # `downsample`; the proto assignment would reject the `None`
        # default -- confirm against call sites.
        request.downsample.num_points = downsample
    with timing.log_latency("_stub.ReadTensors"):
        with _translate_grpc_error():
            response = self._stub.ReadTensors(request)
    with timing.log_latency("build result"):
        result = {}
        for run_entry in response.runs:
            result[run_entry.run_name] = {
                tag_entry.tag_name: [
                    provider.TensorDatum(
                        step=step,
                        wall_time=wall_time,
                        numpy=tensor_util.make_ndarray(value),
                    )
                    for step, wall_time, value in zip(
                        tag_entry.data.step,
                        tag_entry.data.wall_time,
                        tag_entry.data.value,
                    )
                ]
                for tag_entry in run_entry.tags
            }
        return result
def _convert_tensor_event(event):
    """Helper for `read_tensors`: convert one event to a `TensorDatum`."""
    ndarray = tensor_util.make_ndarray(event.tensor_proto)
    return provider.TensorDatum(
        step=event.step, wall_time=event.wall_time, numpy=ndarray
    )
def test_read_tensors(self):
    """read_tensors converts the response and issues the expected request."""
    response = data_provider_pb2.ReadTensorsResponse()
    run = response.runs.add(run_name="test")
    tag = run.tags.add(tag_name="weights")
    series = [
        (0, 1234.0, [0.0, 0.0, 42.0]),
        (1, 1235.0, [1.0, 1.0, 43.0]),
        (2, 1236.0, [2.0, 2.0, 44.0]),
    ]
    for step, wall_time, values in series:
        tag.data.step.append(step)
        tag.data.wall_time.append(wall_time)
        tag.data.value.append(tensor_util.make_tensor_proto(values))
    self.stub.ReadTensors.return_value = response

    actual = self.provider.read_tensors(
        self.ctx,
        experiment_id="123",
        plugin_name="histograms",
        run_tag_filter=provider.RunTagFilter(runs=["test", "nope"]),
        downsample=3,
    )

    expected_data = [
        provider.TensorDatum(
            step=step, wall_time=wall_time, numpy=np.array(values)
        )
        for step, wall_time, values in series
    ]
    self.assertEqual(actual, {"test": {"weights": expected_data}})

    expected_request = data_provider_pb2.ReadTensorsRequest()
    expected_request.experiment_id = "123"
    expected_request.plugin_filter.plugin_name = "histograms"
    # Run names are sorted when the request filter is populated.
    expected_request.run_tag_filter.runs.names.extend(["nope", "test"])
    expected_request.downsample.num_points = 3
    self.stub.ReadTensors.assert_called_once_with(expected_request)
def test_repr(self):
    """repr() of a datum embeds the repr of each of its fields."""
    datum = provider.TensorDatum(
        step=123, wall_time=234.5, numpy=np.array(-0.25)
    )
    rendered = repr(datum)
    for field_value in (datum.step, datum.wall_time, datum.numpy):
        self.assertIn(repr(field_value), rendered)
def test_hash(self):
    """TensorDatum is unhashable (it wraps a mutable ndarray)."""
    x = provider.TensorDatum(step=12, wall_time=0.25, numpy=np.array([1.25]))
    # `six.assertRaisesRegex(self, ...)` is a Python 2 compatibility
    # shim; the native unittest method exists on all supported
    # Python 3 versions, so call it directly.
    with self.assertRaisesRegex(TypeError, "unhashable type"):
        hash(x)
def convert_tensor_event(event):
    """Convert a tensor event into a `provider.TensorDatum`.

    NOTE(review): byte-for-byte duplicate of `_convert_tensor_event`;
    presumably kept public for external callers -- consider
    consolidating the two.
    """
    return provider.TensorDatum(
        step=event.step,
        wall_time=event.wall_time,
        numpy=tensor_util.make_ndarray(event.tensor_proto),
    )