def test_FLOPsEstimator():
    """FLOPsEstimator should report the exact FLOP count of a small graph.

    Builds a tiny network (depthwise conv -> fused BN -> affine ->
    sigmoid -> relu6 -> global average pooling) and checks the
    estimator's prediction against a known reference value.
    """
    inp = nn.Variable((1, 3, 12, 12))
    conv = PF.depthwise_convolution(inp, kernel=(5, 5), with_bias=True)
    bn = PF.fused_batch_normalization(conv)
    act = F.relu6(F.sigmoid(PF.affine(bn, (3, 3), base_axis=2) + 3))
    pooled = F.global_average_pooling(act)
    estimator = FLOPsEstimator()
    # 17644 is the hand-computed FLOP total for the graph above.
    assert estimator.predict(pooled) == 17644
def ref_activation(x, nonlinearity, nonlinearity_args):
    """Apply the named reference nonlinearity to *x*.

    ``'identity'``, an empty string, or ``None`` leaves *x* untouched.
    ``'leaky_relu'`` and ``'elu'`` consume ``nonlinearity_args[0]`` as
    their scalar parameter; all other supported names take no extra
    arguments.

    Raises:
        ValueError: if *nonlinearity* is not a recognized name.
    """
    if not nonlinearity or nonlinearity == 'identity':
        return x

    # Activations that take x alone.
    unary = {
        'relu': F.relu,
        'sigmoid': F.sigmoid,
        'tanh': F.tanh,
        'relu6': F.relu6,
    }
    if nonlinearity in unary:
        return unary[nonlinearity](x)

    # Activations that take one scalar argument.
    parameterized = {
        'leaky_relu': F.leaky_relu,
        'elu': F.elu,
    }
    if nonlinearity in parameterized:
        return parameterized[nonlinearity](x, nonlinearity_args[0])

    raise ValueError("unknown nonlinearity type {}".format(nonlinearity))
def call(self, input):
    """Forward pass: clamp *input* to [0, 6] via ReLU6."""
    clipped = F.relu6(input)
    return clipped
def hswish(self, x):
    """Hard-swish activation: ``x * relu6(x + 3) / 6``.

    A piecewise-linear approximation of swish; intermediates are named
    but the operation order matches the canonical formulation exactly.
    """
    shifted = x + 3.0
    gate = F.relu6(shifted)
    return x * gate / 6.0