def __init__(self, input_dim, output_dim, weight_bias_init, act_str,
             keep_prob=0.8, scale_coef=1.0, use_activation=True,
             convert_dtype=True, drop_out=False):
    """Construct a dense (fully connected) layer.

    Args:
        input_dim: size of the incoming feature dimension.
        output_dim: number of output units.
        weight_bias_init: pair of (weight_init, bias_init) initializer specs
            resolved through ``init_method``.
        act_str: activation name, resolved via ``self._init_activation``.
        keep_prob: Dropout keep probability.
        scale_coef: scaling coefficient stored for later use
            (presumably applied via ``mul``/``realDiv`` in the forward
            pass — confirm against ``construct``).
        use_activation: whether the activation is applied (flag only here).
        convert_dtype: whether inputs are cast (flag only here).
        drop_out: whether dropout is enabled (flag only here).
    """
    super(DenseLayer, self).__init__()

    # Parameters.
    w_init, b_init = weight_bias_init
    self.weight = init_method(w_init, [input_dim, output_dim], name="weight")
    self.bias = init_method(b_init, [output_dim], name="bias")

    # Operators used by the forward computation.
    self.act_func = self._init_activation(act_str)
    self.matmul = P.MatMul(transpose_b=False)
    self.bias_add = P.BiasAdd()
    self.cast = P.Cast()
    self.dropout = Dropout(keep_prob=keep_prob)
    self.mul = P.Mul()
    self.realDiv = P.RealDiv()

    # Behavior flags consulted elsewhere (forward pass not shown here).
    self.scale_coef = scale_coef
    self.use_activation = use_activation
    self.convert_dtype = convert_dtype
    self.drop_out = drop_out
def __init__(self, keep_prob, seed0, seed1, strategy=None):
    """Thin wrapper holding a single Dropout operator.

    Args:
        keep_prob: probability of keeping an element active.
        seed0: first random seed forwarded to Dropout.
        seed1: second random seed forwarded to Dropout.
        strategy: optional parallel/sharding strategy passed through
            to the Dropout op (semantics defined by the Dropout class).
    """
    super(Net, self).__init__()
    self.drop = Dropout(
        keep_prob,
        seed0,
        seed1,
        dtype=ms.float32,
        strategy=strategy,
    )
def __init__(self, in_channels, out_channels, dropout):
    """Double pre-activation conv block: Norm-Act-Conv, Norm-Act-[Dropout]-Conv.

    When ``dropout`` is truthy, a Dropout layer is placed immediately
    before the second convolution (same position the original achieved
    with ``layers.insert(5, ...)``).

    Args:
        in_channels: channels entering the block.
        out_channels: channels produced by both convolutions.
        dropout: dropout rate; falsy disables dropout entirely.
    """
    head = [
        Norm(in_channels),
        Act(),
        Conv2d(in_channels, out_channels, kernel_size=3),
        Norm(out_channels),
        Act(),
    ]
    if dropout:
        head.append(Dropout(dropout))
    head.append(Conv2d(out_channels, out_channels, kernel_size=3))
    super().__init__(head)
def __init__(self, in_channels, out_channels, stride, dropout):
    """Build the sub-modules of a residual-style block.

    Layout suggests a pre-activation residual block (Norm-Act-Conv twice,
    plus a 1x1 projection shortcut) — the forward pass is not visible
    here, so confirm against the class's forward/construct method.

    Args:
        in_channels: channels entering the block.
        out_channels: channels produced by the block.
        stride: stride of the first conv and of the shortcut projection.
        dropout: dropout rate; falsy substitutes an Identity no-op.
    """
    super().__init__()

    # First Norm-Act-Conv stage; the stride (possible downsampling)
    # happens here.
    self.norm1 = Norm(in_channels)
    self.act1 = Act()
    self.conv1 = Conv2d(in_channels, out_channels,
                        kernel_size=3, stride=stride)

    # Second Norm-Act-(Dropout)-Conv stage.
    self.norm2 = Norm(out_channels)
    self.act2 = Act()
    self.dropout = Identity() if not dropout else Dropout(dropout)
    self.conv2 = Conv2d(out_channels, out_channels, kernel_size=3)

    # 1x1 projection so the skip path matches channels and stride.
    self.shortcut = Conv2d(in_channels, out_channels,
                           kernel_size=1, stride=stride)