def GetJobMetrics(self, request, context=None):
  """Return the metrics for a known job as a GetJobMetricsResponse.

  The job stores its metrics as JSON text; this decodes that text
  directly into the response proto.

  Raises:
    LookupError: if no job with the requested id is registered.
  """
  job_id = request.job_id
  if job_id not in self._jobs:
    raise LookupError("Job {} does not exist".format(job_id))
  response = beam_job_api_pb2.GetJobMetricsResponse()
  # Decode the job's JSON-encoded metrics straight into the proto.
  json_format.Parse(self._jobs[job_id].get_metrics(), response)
  return response
def GetJobMetrics(self, request, context=None):
  """Return the user-defined metrics of a job as committed MetricResults.

  Flattens the per-stage monitoring infos collected by the job's result
  and drops system metrics, keeping only user monitoring infos.

  Raises:
    LookupError: if no job with the requested id is registered.
  """
  if request.job_id not in self._jobs:
    raise LookupError("Job {} does not exist".format(request.job_id))
  result = self._jobs[request.job_id].result
  # Flatten the per-stage lists and keep only user metrics in one pass;
  # iteration order matches extending stage-by-stage over .values().
  user_infos = [
      info
      for stage_infos in result._monitoring_infos_by_stage.values()
      for info in stage_infos
      if monitoring_infos.is_user_monitoring_info(info)
  ]
  return beam_job_api_pb2.GetJobMetricsResponse(
      metrics=beam_job_api_pb2.MetricResults(committed=user_infos))
def test_get_job_metrics(self, http_mock):
  """GetJobMetrics should surface Flink accumulator metrics as a proto.

  Flink reports the Beam metrics container as one user-task accumulator
  whose value is JSON-encoded MetricResults; the job server must decode
  it into a GetJobMetricsResponse.
  """
  # JSON payload the Flink REST API would return for this job's metrics.
  accumulator_value = (
      '{"metrics": {"attempted": [{"urn": "metric_urn", '
      '"type": "beam:metrics:sum_int64:v1", "payload": "AA==", '
      '"labels": {"PTRANSFORM": "ptransform_id"}}]}}')
  http_mock.get(
      'http://flink/v1/jobs/flink_job_id/accumulators',
      json={
          "user-task-accumulators": [{
              "name": "__metricscontainers",
              "type": "MetricsAccumulator",
              "value": accumulator_value,
          }]
      })

  # Wire up a job server with one known job pointing at the mocked
  # Flink job id.
  options = pipeline_options.FlinkRunnerOptions()
  job_server = flink_uber_jar_job_server.FlinkUberJarJobServer(
      'http://flink', options)
  job = flink_uber_jar_job_server.FlinkBeamJob(
      'http://flink', None, 'job_id', 'job_name', None, options)
  job._flink_job_id = 'flink_job_id'
  job_server._jobs['job_id'] = job

  request = beam_job_api_pb2.GetJobMetricsRequest(job_id='job_id')
  # "AA==" base64-decodes to the single zero byte in the payload.
  expected = beam_job_api_pb2.GetJobMetricsResponse(
      metrics=beam_job_api_pb2.MetricResults(
          attempted=[{
              "urn": "metric_urn",
              "type": "beam:metrics:sum_int64:v1",
              "payload": b'\000',
              "labels": {
                  "PTRANSFORM": "ptransform_id"
              },
          }]))
  self.assertEqual(job_server.GetJobMetrics(request), expected)