def stream_experiment_data(request, **kwargs):
    """Fake StreamExperimentData yielding tensor data for two runs.

    Emits one response per (run, tag) pair. Scalar-style tags carry
    [3, 2] float tensors; the "text/test" tag carries string tensors.
    """
    self.assertEqual(kwargs["metadata"], grpc_util.version_metadata())
    runs = ("train_1", "train_2")
    tags = ("dense_1/kernel", "dense_1/bias", "text/test")
    for run in runs:
        # Wall-time nanos distinguish the two runs; constant per run.
        nanos = 862939144 if run == "train_1" else 962939144
        for tag in tags:
            response = export_service_pb2.StreamExperimentDataResponse()
            response.run_name = run
            response.tag_name = tag
            display_name = "%s:%s" % (request.experiment_id, tag)
            response.tag_metadata.CopyFrom(
                test_util.scalar_metadata(display_name)
            )
            for step in range(2):
                response.tensors.steps.append(step)
                response.tensors.wall_times.add(
                    seconds=1571084520 + step, nanos=nanos
                )
                if tag == "text/test":
                    # String tensor whose payload length grows with step.
                    tensor = tensor_util.make_tensor_proto(
                        np.full([3], "a" * (step + 1))
                    )
                else:
                    tensor = tensor_util.make_tensor_proto(
                        np.ones([3, 2]) * step
                    )
                response.tensors.values.append(tensor)
            yield response
def stream_experiment_data(request, **kwargs):
    """Fake StreamExperimentData with two runs and two scalar tags.

    "train" series are positive with small timestamps; "test" series
    mirror them negated, offset by 600 seconds of wall time.
    """
    self.assertEqual(request.experiment_id, "789")
    self.assertEqual(kwargs["metadata"], grpc_util.version_metadata())
    for run in ("train", "test"):
        sign = 1.0 if run == "train" else -1.0
        base_seconds = 0 if run == "train" else 600
        for tag in ("accuracy", "loss"):
            response = export_service_pb2.StreamExperimentDataResponse()
            response.run_name = run
            response.tag_name = tag
            display_name = "%s:%s" % (request.experiment_id, tag)
            response.tag_metadata.CopyFrom(
                test_util.scalar_metadata(display_name)
            )
            for step in range(10):
                if tag == "loss":
                    value = sign / (step + 1)
                    seconds = base_seconds + step
                else:  # "accuracy"
                    value = sign / (10 - step)
                    seconds = base_seconds + step * 2
                response.points.steps.append(step)
                response.points.values.append(value)
                response.points.wall_times.add(seconds=seconds, nanos=0)
            yield response
def stream_experiment_data(request, **kwargs):
    """Fake StreamExperimentData: one "train" run with two loss tags."""
    self.assertEqual(request.experiment_id, "789")
    self.assertEqual(kwargs["metadata"], grpc_util.version_metadata())

    def make_response(tag, points):
        # Build a response for the "train" run from (step, value,
        # seconds) triples; every wall time has zero nanos.
        response = export_service_pb2.StreamExperimentDataResponse()
        response.run_name = "train"
        response.tag_name = tag
        for step, value, seconds in points:
            response.points.steps.append(step)
            response.points.values.append(value)
            response.points.wall_times.add(seconds=seconds, nanos=0)
        return response

    yield make_response("batch_loss", [(0, 0.5, 0), (1, 0.25, 1)])
    yield make_response("epoch_loss", [(0, 0.375, 2)])
def stream_experiment_data(request, **kwargs):
    """Fake StreamExperimentData: four uniform 10-point scalar series.

    Every (run, tag) pair gets the same values (2.0 * step) and the
    same wall-time progression, differing only in run/tag/metadata.
    """
    self.assertEqual(kwargs["metadata"], grpc_util.version_metadata())
    for run_name in ("train", "test"):
        for tag_name in ("accuracy", "loss"):
            response = export_service_pb2.StreamExperimentDataResponse()
            response.run_name = run_name
            response.tag_name = tag_name
            metadata = test_util.scalar_metadata(
                "%s:%s" % (request.experiment_id, tag_name)
            )
            response.tag_metadata.CopyFrom(metadata)
            for step in range(10):
                response.points.steps.append(step)
                response.points.values.append(2.0 * step)
                response.points.wall_times.add(
                    seconds=1571084520 + step, nanos=862939144
                )
            yield response
def test_handles_outdir_with_no_slash(self):
    """A bare relative outdir is created under the current directory."""
    original_cwd = os.getcwd()
    try:
        # Work inside a temp dir so the relative "outdir" is isolated.
        os.chdir(self.get_temp_dir())
        mock_api_client = self._create_mock_api_client()
        mock_api_client.StreamExperiments.return_value = iter(
            [_make_experiments_response(["123"])]
        )
        mock_api_client.StreamExperimentData.return_value = iter(
            [export_service_pb2.StreamExperimentDataResponse()]
        )
        exporter = exporter_lib.TensorBoardExporter(
            mock_api_client, "outdir"
        )
        self.assertEqual(list(exporter.export()), ["123"])
        self.assertTrue(os.path.isdir("outdir"))
    finally:
        # Always restore the working directory for subsequent tests.
        os.chdir(original_cwd)
def stream_experiment_data(request, **kwargs):
    """Fake StreamExperimentData yielding blob-sequence data.

    Emits one response per run under a single "__default_graph__" tag:
    the "train" run holds a finished blob followed by an unfinalized
    one, while the "test" run holds a single empty entry (a hole in
    the blob sequence).
    """
    # Fix: the original computed an unused `display_name` local
    # ("%s:%s" % (request.experiment_id, tag)); dead code removed.
    self.assertEqual(kwargs["metadata"], grpc_util.version_metadata())
    tag = "__default_graph__"
    for run in ("train", "test"):
        response = export_service_pb2.StreamExperimentDataResponse()
        response.run_name = run
        response.tag_name = tag
        response.tag_metadata.CopyFrom(
            summary_pb2.SummaryMetadata(
                data_class=summary_pb2.DATA_CLASS_BLOB_SEQUENCE
            )
        )
        # Single step (range(1)) kept to mirror the series shape used
        # by the other fakes.
        for step in range(1):
            response.blob_sequences.steps.append(step)
            response.blob_sequences.wall_times.add(
                seconds=1571084520 + step, nanos=862939144
            )
            blob_sequence = blob_pb2.BlobSequence()
            if run == "train":
                # A finished blob sequence.
                blob = blob_pb2.Blob(
                    blob_id="%s_blob" % run,
                    state=blob_pb2.BlobState.BLOB_STATE_CURRENT,
                )
                blob_sequence.entries.append(
                    blob_pb2.BlobSequenceEntry(blob=blob)
                )
                # An unfinished blob sequence.
                blob = blob_pb2.Blob(
                    state=blob_pb2.BlobState.BLOB_STATE_UNFINALIZED,
                )
                blob_sequence.entries.append(
                    blob_pb2.BlobSequenceEntry(blob=blob)
                )
            elif run == "test":
                blob_sequence.entries.append(
                    # `blob` unspecified: a hole in the blob sequence.
                    blob_pb2.BlobSequenceEntry()
                )
            response.blob_sequences.values.append(blob_sequence)
        yield response