def test_string_gauge(self):
  """StringGauge cells should always report the most recently set value.

  Covers both an unlabeled gauge (cell addressed with no arguments) and a
  gauge declared with one label (cell addressed by label value).
  """
  plain_gauge = monitoring.StringGauge('test/gauge', 'test gauge')
  # Setting a new value must overwrite the previous one, not accumulate.
  for expected in ('left', 'right'):
    plain_gauge.get_cell().set(expected)
    self.assertEqual(plain_gauge.get_cell().value(), expected)
  # A gauge created with a label name requires the label value at lookup.
  labeled_gauge = monitoring.StringGauge('test/gauge1', 'test gauge1', 'label1')
  labeled_gauge.get_cell('foo').set('start')
  self.assertEqual(labeled_gauge.get_cell('foo').value(), 'start')
class TFLiteMetrics(metrics_interface.TFLiteMetricsInterface):
  """TFLite metrics helper for prod (borg) environment.

  Attributes:
    model_hash: A string containing the hash of the model binary.
    model_path: A string containing the path of the model for debugging
      purposes.
  """

  # Process-wide metric instruments, shared by all TFLiteMetrics instances.
  _counter_debugger_creation = monitoring.Counter(
      '/tensorflow/lite/quantization_debugger/created',
      'Counter for the number of debugger created.')

  _counter_interpreter_creation = monitoring.Counter(
      '/tensorflow/lite/interpreter/created',
      'Counter for number of interpreter created in Python.', 'language')

  # The following are conversion metrics. Attempt and success are kept
  # separated instead of using a single metric with a label because the
  # converter may raise exceptions if conversion failed. That may lead to
  # cases when we are unable to capture the conversion attempt. Increasing
  # attempt count at the beginning of conversion process and the success
  # count at the end is more suitable in these cases.
  _counter_conversion_attempt = monitoring.Counter(
      '/tensorflow/lite/convert/attempt',
      'Counter for number of conversion attempts.')

  _counter_conversion_success = monitoring.Counter(
      '/tensorflow/lite/convert/success',
      'Counter for number of successful conversions.')

  _gauge_conversion_params = monitoring.StringGauge(
      '/tensorflow/lite/convert/params',
      'Gauge for keeping conversion parameters.', 'name')

  def __init__(self,
               model_hash: Optional[Text] = None,
               model_path: Optional[Text] = None) -> None:
    del self  # Temporarily removing self until parameter logic is implemented.
    # The two pieces of model metadata must be supplied together or not at
    # all; reject exactly-one-of-them (boolean XOR on their truthiness).
    if bool(model_hash) != bool(model_path):
      raise ValueError('Both model metadata(model_hash, model_path) should be '
                       'given at the same time.')
    if model_hash:
      # TODO(b/180400857): Create stub once the service is implemented.
      pass

  def increase_counter_debugger_creation(self):
    """Records one quantization-debugger creation."""
    self._counter_debugger_creation.get_cell().increase_by(1)

  def increase_counter_interpreter_creation(self):
    """Records one Python interpreter creation under the 'python' label."""
    self._counter_interpreter_creation.get_cell('python').increase_by(1)

  def increase_counter_converter_attempt(self):
    """Records the start of a conversion attempt."""
    self._counter_conversion_attempt.get_cell().increase_by(1)

  def increase_counter_converter_success(self):
    """Records a conversion that completed successfully."""
    self._counter_conversion_success.get_cell().increase_by(1)

  def set_converter_param(self, name, value):
    """Stores one conversion parameter in the params gauge, keyed by name."""
    self._gauge_conversion_params.get_cell(name).set(value)
from tensorflow.python.eager import monitoring from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors from tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor from tensorflow.python.keras import backend as K from tensorflow.python.keras.engine import training_generator from tensorflow.python.keras.engine.base_layer import Layer from tensorflow.python.keras.utils import tf_utils from tensorflow.python.ops import sparse_ops from tensorflow.python.ops.ragged import ragged_tensor from tensorflow.python.util.tf_export import keras_export _kpl_gauge = monitoring.StringGauge( '/tensorflow/api/keras/layers/preprocessing', 'keras preprocessing layers usage', 'TFVersion') @keras_export('keras.layers.experimental.preprocessing.PreprocessingLayer') @six.add_metaclass(abc.ABCMeta) class PreprocessingLayer(Layer): """Base class for PreprocessingLayers.""" _must_restore_from_config = True def adapt(self, data, reset_state=True): # TODO(momernick): Add examples. """Fits the state of the preprocessing layer to the data being passed. Arguments: data: The data to train on. It can be passed either as a tf.data
# The following are conversion metrics. Attempt and success are kept separated # instead of using a single metric with a label because the converter may # raise exceptions if conversion failed. That may lead to cases when we are # unable to capture the conversion attempt. Increasing attempt count at the # beginning of conversion process and the success count at the end is more # suitable in these cases. _counter_conversion_attempt = monitoring.Counter( '/tensorflow/lite/convert/attempt', 'Counter for number of conversion attempts.') _counter_conversion_success = monitoring.Counter( '/tensorflow/lite/convert/success', 'Counter for number of successful conversions.') _gauge_conversion_params = monitoring.StringGauge( '/tensorflow/lite/convert/params', 'Gauge for keeping conversion parameters.', 'name') _gauge_conversion_errors = monitoring.StringGauge( '/tensorflow/lite/convert/errors', 'Gauge for collecting conversion errors. The value represents the error ' 'message.', 'component', 'subcomponent', 'op_name', 'error_code') _gauge_conversion_latency = monitoring.IntGauge( '/tensorflow/lite/convert/latency', 'Conversion latency in ms.') class TFLiteMetrics(metrics_interface.TFLiteMetricsInterface): """TFLite metrics helper for prod (borg) environment. Attributes:
# The following are conversion metrics. Attempt and success are kept separated # instead of using a single metric with a label because the converter may # raise exceptions if conversion failed. That may lead to cases when we are # unable to capture the conversion attempt. Increasing attempt count at the # beginning of conversion process and the success count at the end is more # suitable in these cases. _counter_conversion_attempt = monitoring.Counter( '/tensorflow/lite/convert/attempt', 'Counter for number of conversion attempts.') _counter_conversion_success = monitoring.Counter( '/tensorflow/lite/convert/success', 'Counter for number of successful conversions.') _gauge_conversion_params = monitoring.StringGauge( '/tensorflow/lite/convert/params', 'Gauge for keeping conversion parameters.', 'name') class TFLiteMetrics(metrics_interface.TFLiteMetricsInterface): """TFLite metrics helper for prod (borg) environment. Attributes: model_hash: A string containing the hash of the model binary. model_path: A string containing the path of the model for debugging purposes. """ def __init__(self, model_hash: Optional[Text] = None, model_path: Optional[Text] = None) -> None: del self # Temporarily removing self until parameter logic is implemented.