Example 1
    def __init__(
        self,
        *args,
        input_quantizer=None,
        depthwise_quantizer=None,
        pointwise_quantizer=None,
        **kwargs,
    ):
        # This is currently undocumented until we have explored better options
        self._custom_metrics = kwargs.pop("metrics", ["flip_ratio"])

        self.input_quantizer = quantizers.get(input_quantizer)
        self.depthwise_quantizer = quantizers.get(depthwise_quantizer)
        self.pointwise_quantizer = quantizers.get(pointwise_quantizer)
        self.quantized_latent_weights = []
        self.quantizers = []

        super().__init__(*args, **kwargs)
        if depthwise_quantizer and not self.depthwise_constraint:
            log.warning(
                "Using `depthwise_quantizer` without setting `depthwise_constraint` "
                "may result in starved weights (where the gradient is always zero)."
            )
        if pointwise_quantizer and not self.pointwise_constraint:
            log.warning(
                "Using `pointwise_quantizer` without setting `pointwise_constraint` "
                "may result in starved weights (where the gradient is always zero)."
            )
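A minimal usage sketch for this constructor. The excerpt does not name the class, so the sketch assumes it belongs to larq's quantized separable convolution (lq.layers.QuantSeparableConv2D) and that the "ste_sign" quantizer and "weight_clip" constraint aliases are available; pairing each quantizer with its matching constraint is what avoids the starved-weights warning above.

    import larq as lq

    # Assumed class name and string aliases; not taken from the excerpt itself.
    layer = lq.layers.QuantSeparableConv2D(
        64, (3, 3),
        input_quantizer="ste_sign",
        depthwise_quantizer="ste_sign",
        pointwise_quantizer="ste_sign",
        depthwise_constraint="weight_clip",   # keeps latent weights in [-1, 1]
        pointwise_constraint="weight_clip",
    )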
Example 2
    def __init__(
        self,
        *args,
        input_quantizer=None,
        depthwise_quantizer=None,
        pointwise_quantizer=None,
        metrics=None,
        **kwargs,
    ):
        self.input_quantizer = quantizers.get(input_quantizer)
        self.depthwise_quantizer = quantizers.get(depthwise_quantizer)
        self.pointwise_quantizer = quantizers.get(pointwise_quantizer)
        self._custom_metrics = (
            metrics if metrics is not None else lq_metrics.get_training_metrics()
        )

        super().__init__(*args, **kwargs)
        if depthwise_quantizer and not self.depthwise_constraint:
            log.warning(
                "Using `depthwise_quantizer` without setting `depthwise_constraint` "
                "may result in starved weights (where the gradient is always zero)."
            )
        if pointwise_quantizer and not self.pointwise_constraint:
            log.warning(
                "Using `pointwise_quantizer` without setting `pointwise_constraint` "
                "may result in starved weights (where the gradient is always zero)."
            )
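The difference from Example 1 is that the training metrics are now an explicit `metrics` keyword argument (defaulting to lq_metrics.get_training_metrics()) instead of being popped from **kwargs. A sketch of setting them per layer, under the same class-name and alias assumptions as before and assuming "flip_ratio" is a valid metric name, as Example 1's old default suggests:

    import larq as lq

    layer = lq.layers.QuantSeparableConv2D(
        64, (3, 3),
        depthwise_quantizer="ste_sign",
        pointwise_quantizer="ste_sign",
        depthwise_constraint="weight_clip",
        pointwise_constraint="weight_clip",
        metrics=["flip_ratio"],  # assumed metric name; overrides the scoped default
    )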
Example 3
    def __init__(
        self, *args, input_quantizer=None, kernel_quantizer=None, metrics=None, **kwargs
    ):
        self.input_quantizer = quantizers.get(input_quantizer)
        self.kernel_quantizer = quantizers.get(kernel_quantizer)
        self._custom_metrics = (
            metrics if metrics is not None else lq_metrics.get_training_metrics()
        )

        super().__init__(*args, **kwargs)
        if kernel_quantizer and not self.kernel_constraint:
            log.warning(
                "Using a weight quantizer without setting `kernel_constraint` "
                "may result in starved weights (where the gradient is always zero)."
            )
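The same pattern for a layer with a single weight tensor. A sketch assuming this constructor belongs to one of larq's kernel-based quantized layers, for example lq.layers.QuantDense; the "ste_sign" and "weight_clip" aliases are again assumptions rather than part of the excerpt:

    import larq as lq

    layer = lq.layers.QuantDense(
        10,
        input_quantizer="ste_sign",
        kernel_quantizer="ste_sign",
        kernel_constraint="weight_clip",  # pair the quantizer with a constraint to avoid starved weights
    )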
Example 4
    def __init__(self, *args, input_quantizer=None, **kwargs):
        self.input_quantizer = quantizers.get(input_quantizer)
        super().__init__(*args, **kwargs)
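This base case only resolves the input quantizer. A sketch of what quantizers.get accepts, assuming larq's quantizers module follows the usual Keras get() convention (None, a registered name, or a quantizer instance):

    import larq as lq

    no_op = lq.quantizers.get(None)          # None -> input is left un-quantized
    by_name = lq.quantizers.get("ste_sign")  # registered string alias (assumed)
    by_instance = lq.quantizers.get(lq.quantizers.SteSign(clip_value=1.0))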