import pytest

from larq import metrics


def test_scope():
    # No training metrics are enabled outside of a scope.
    assert metrics.get_training_metrics() == set()
    with metrics.scope(["flip_ratio"]):
        assert metrics.get_training_metrics() == {"flip_ratio"}
    # Leaving the scope restores the empty default.
    assert metrics.get_training_metrics() == set()
    # Unknown metric names are rejected when entering a scope.
    with pytest.raises(ValueError, match=r".*unknown_metric.*"):
        with metrics.scope(["flip_ratio", "unknown_metric"]):
            pass
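# Usage sketch (illustrative, assuming the public `larq` and `tensorflow`
# packages): layers constructed inside `metrics.scope` adopt the scoped metric
# names as their default training metrics, via the `_custom_metrics` fallback
# in the layer constructors below.
import tensorflow as tf
import larq as lq

with lq.metrics.scope(["flip_ratio"]):
    model = tf.keras.models.Sequential(
        [
            lq.layers.QuantDense(
                32,
                kernel_quantizer="ste_sign",
                kernel_constraint="weight_clip",
                input_shape=(64,),
            )
        ]
    )
model.compile(optimizer="sgd", loss="mse")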
def __init__(
    self,
    *args,
    input_quantizer=None,
    depthwise_quantizer=None,
    pointwise_quantizer=None,
    metrics=None,
    **kwargs,
):
    # Resolve quantizer identifiers (names, configs, or callables) to callables.
    self.input_quantizer = quantizers.get(input_quantizer)
    self.depthwise_quantizer = quantizers.get(depthwise_quantizer)
    self.pointwise_quantizer = quantizers.get(pointwise_quantizer)
    # Fall back to the metrics enabled in the surrounding `metrics.scope`.
    self._custom_metrics = (
        metrics if metrics is not None else lq_metrics.get_training_metrics()
    )
    super().__init__(*args, **kwargs)
    # Quantized weights that are never clipped can drift into regions where the
    # straight-through gradient is zero; warn if the matching constraint is unset.
    if depthwise_quantizer and not self.depthwise_constraint:
        log.warning(
            "Using `depthwise_quantizer` without setting `depthwise_constraint` "
            "may result in starved weights (where the gradient is always zero)."
        )
    if pointwise_quantizer and not self.pointwise_constraint:
        log.warning(
            "Using `pointwise_quantizer` without setting `pointwise_constraint` "
            "may result in starved weights (where the gradient is always zero)."
        )
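# Usage sketch (illustrative): constructing a quantized separable convolution
# with both constraints set, so neither starved-weights warning above fires.
# Layer and argument names assume the public `larq` API.
import tensorflow as tf
import larq as lq

layer = lq.layers.QuantSeparableConv2D(
    64,
    (3, 3),
    input_quantizer="ste_sign",
    depthwise_quantizer="ste_sign",
    pointwise_quantizer="ste_sign",
    depthwise_constraint="weight_clip",
    pointwise_constraint="weight_clip",
)
features = layer(tf.random.uniform((1, 32, 32, 3)))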
def __init__(
    self, *args, input_quantizer=None, kernel_quantizer=None, metrics=None, **kwargs
):
    self.input_quantizer = quantizers.get(input_quantizer)
    self.kernel_quantizer = quantizers.get(kernel_quantizer)
    # Fall back to the metrics enabled in the surrounding `metrics.scope`.
    self._custom_metrics = (
        metrics if metrics is not None else lq_metrics.get_training_metrics()
    )
    super().__init__(*args, **kwargs)
    if kernel_quantizer and not self.kernel_constraint:
        log.warning(
            "Using a weight quantizer without setting `kernel_constraint` "
            "may result in starved weights (where the gradient is always zero)."
        )
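# Usage sketch (illustrative): the `metrics` argument overrides the scoped
# defaults for a single layer, since any non-None value (including an empty
# list) is used as-is by the `_custom_metrics` logic above. Names assume the
# public `larq` API.
import larq as lq

layer = lq.layers.QuantConv2D(
    32,
    (3, 3),
    input_quantizer="ste_sign",
    kernel_quantizer="ste_sign",
    kernel_constraint="weight_clip",
    metrics=["flip_ratio"],
)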