def test_system_counters_set_labels_and_step_name(self):
  mock_client, mock_job_result = self.setup_mock_client_result(
      self.SYSTEM_COUNTERS_LIST)
  test_object = dataflow_metrics.DataflowMetrics(mock_client, mock_job_result)
  all_metrics = test_object.all_metrics()
  # System counters should surface their Dataflow labels (original_name,
  # output_user_name) and, for execution-time metrics, the step name.
  matchers = [
      MetricResultMatcher(
          name='ElementCount',
          labels={
              'original_name': 'ToIsmRecordForMultimap-out0-ElementCount',
              'output_user_name': 'ToIsmRecordForMultimap-out0'
          },
          attempted=42,
          committed=42),
      MetricResultMatcher(
          name='MeanByteCount',
          labels={
              'original_name': 'Read-out0-MeanByteCount',
              'output_user_name': 'GroupByKey/Read-out0'
          },
          attempted=31,
          committed=31),
      MetricResultMatcher(
          name='ExecutionTime_ProcessElement',
          step='write/Write/Write',
          attempted=1000,
          committed=1000)
  ]
  errors = metric_result_matchers.verify_all(all_metrics, matchers)
  self.assertFalse(errors, errors)
def test_streaming_pipeline_returns_expected_user_metrics_fnapi_it(self):
  """Runs a streaming Dataflow job and verifies that user metrics are
  reported correctly."""
  self._inject_words(self.input_topic, MESSAGES_TO_PUBLISH)
  result = self.run_pipeline()

  METRIC_NAMESPACE = (
      'apache_beam.runners.dataflow.'
      'dataflow_exercise_streaming_metrics_pipeline.StreamingUserMetricsDoFn')
  matchers = [
      # System metrics: one ElementCount per output collection.
      MetricResultMatcher(
          name='ElementCount',
          labels={
              'output_user_name': 'generate_metrics-out0',
              'original_name': 'generate_metrics-out0-ElementCount'
          },
          attempted=len(MESSAGES_TO_PUBLISH),
          committed=len(MESSAGES_TO_PUBLISH),
      ),
      MetricResultMatcher(
          name='ElementCount',
          labels={
              'output_user_name': 'ReadFromPubSub/Read-out0',
              'original_name': 'ReadFromPubSub/Read-out0-ElementCount'
          },
          attempted=len(MESSAGES_TO_PUBLISH),
          committed=len(MESSAGES_TO_PUBLISH),
      ),
      # User counter metrics: the DoFn increments the counter twice per
      # message.
      MetricResultMatcher(
          name='double_msg_counter_name',
          namespace=METRIC_NAMESPACE,
          step='generate_metrics',
          attempted=len(MESSAGES_TO_PUBLISH) * 2,
          committed=len(MESSAGES_TO_PUBLISH) * 2),
      # User distribution metric over the published message lengths; the
      # min/max expectations encode that the first published message is the
      # shortest and the second is the longest.
      MetricResultMatcher(
          name='msg_len_dist_metric_name',
          namespace=METRIC_NAMESPACE,
          step='generate_metrics',
          attempted=DistributionMatcher(
              sum_value=len(''.join(MESSAGES_TO_PUBLISH)),
              count_value=len(MESSAGES_TO_PUBLISH),
              min_value=len(MESSAGES_TO_PUBLISH[0]),
              max_value=len(MESSAGES_TO_PUBLISH[1])),
          committed=DistributionMatcher(
              sum_value=len(''.join(MESSAGES_TO_PUBLISH)),
              count_value=len(MESSAGES_TO_PUBLISH),
              min_value=len(MESSAGES_TO_PUBLISH[0]),
              max_value=len(MESSAGES_TO_PUBLISH[1]))),
  ]
  metrics = result.metrics().all_metrics()
  errors = metric_result_matchers.verify_all(metrics, matchers)
  self.assertFalse(errors, str(errors))
def test_metrics_fnapi_it(self):
  result = self.run_pipeline(experiment='beam_fn_api')
  errors = metric_result_matchers.verify_all(
      result.metrics().all_metrics(),
      dataflow_exercise_metrics_pipeline.fn_api_metric_matchers())
  self.assertFalse(errors, str(errors))
def test_metrics_it(self):
  result = self.run_pipeline()
  errors = metric_result_matchers.verify_all(
      result.metrics().all_metrics(),
      dataflow_exercise_metrics_pipeline.legacy_metric_matchers())
  self.assertFalse(errors, str(errors))
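# All of the tests above funnel through metric_result_matchers.verify_all.
# As a mental model, here is a minimal sketch of that verification pattern,
# written under the assumption that each MetricResultMatcher is a
# hamcrest-style matcher exposing matches(); it is NOT the real
# apache_beam.testing.metric_result_matchers implementation. The sketch
# returns a falsy (empty) string only when every matcher matched at least
# one MetricResult, which is why the tests assert with
# assertFalse(errors, str(errors)).


def sketch_verify_all(all_metrics, matchers):
  """Illustrative sketch: report every matcher that matched no metric."""
  errors = []
  for matcher in matchers:
    if not any(matcher.matches(metric) for metric in all_metrics):
      errors.append('Unable to match metrics for matcher %s\n' % matcher)
  if errors:
    # Include the actual results so a failure is easy to debug.
    errors.append('\nActual MetricResults:\n%s' %
                  '\n'.join(str(metric) for metric in all_metrics))
  return ''.join(errors)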