def gluon_batchnorm(x, gamma, beta, moving_mean, moving_var,
                    momentum=0.9, axis=1, epsilon=1e-5, fix_gamma=False):
    """Apply native MXNet/Gluon batch normalization on x.

    Uses the supplied moving statistics and affine parameters, unwrapping
    any KerasSymbol arguments to raw MXNet symbols before dispatching to
    ``mx.sym.BatchNorm``.

    Parameters
    ----------
    x : keras.backend tensor/variable/symbol
        Input tensor/variable/symbol.
    gamma : keras.backend tensor/variable/symbol
        Tensor by which to scale the input.
    beta : keras.backend tensor/variable/symbol
        Tensor by which to center the input.
    moving_mean : keras.backend tensor/variable/symbol
        Moving mean.
    moving_var : keras.backend tensor/variable/symbol
        Moving variance.
    momentum : float, default 0.9
        Momentum for the moving average.
    axis : int, default 1
        Axis along which BatchNorm is applied; usually the 'channels'
        axis (MXNet follows 'channels_first').
    epsilon : float, default 1e-5
        Small float added to variance to avoid dividing by zero.
    fix_gamma : bool, default False
        Fix gamma while training.

    Returns
    -------
    keras.backend tensor/variable/symbol
        Resulted tensor/variable/symbol.
    """
    def _raw(sym):
        # KerasSymbol wraps an mx symbol in its .symbol attribute;
        # anything else is passed through untouched.
        return sym.symbol if isinstance(sym, KerasSymbol) else sym

    normalized = mx.sym.BatchNorm(data=_raw(x),
                                  gamma=_raw(gamma),
                                  beta=_raw(beta),
                                  moving_mean=_raw(moving_mean),
                                  moving_var=_raw(moving_var),
                                  momentum=momentum,
                                  axis=axis,
                                  eps=epsilon,
                                  fix_gamma=fix_gamma)
    return KerasSymbol(normalized)
def gluon_lrn(x, alpha, beta, k, n):
    """Apply native MXNet local response normalization (LRN) on x.

    Any KerasSymbol argument is unwrapped to its underlying MXNet symbol
    before calling ``mx.sym.LRN``, and the result is wrapped back into a
    KerasSymbol.

    Parameters
    ----------
    x : keras.backend tensor/variable/symbol
        Input tensor/variable/symbol.
    alpha : scalar or keras.backend symbol
        Forwarded to ``mx.sym.LRN`` as ``alpha``.
    beta : scalar or keras.backend symbol
        Forwarded to ``mx.sym.LRN`` as ``beta``.
    k : scalar or keras.backend symbol
        Forwarded to ``mx.sym.LRN`` as ``knorm``.
    n : scalar or keras.backend symbol
        Forwarded to ``mx.sym.LRN`` as ``nsize``.

    Returns
    -------
    keras.backend tensor/variable/symbol
        Resulted tensor/variable/symbol.
    """
    # Unwrap all arguments in one pass; non-KerasSymbol values pass through.
    x, alpha, beta, k, n = [
        arg.symbol if isinstance(arg, KerasSymbol) else arg
        for arg in (x, alpha, beta, k, n)
    ]
    return KerasSymbol(
        mx.sym.LRN(data=x, alpha=alpha, beta=beta, knorm=k, nsize=n))