def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, pad_w=0, pad_h=0, with_bias=True, initW=xavier(), initB=const(0)):
    """Spatial-convolution layer constructor.

    Allocates the kernel weights and their gradient buffer with shape
    (n_output_plane, n_input_plane, k_h, k_w), plus an optional bias
    vector of length n_output_plane when ``with_bias`` is True.

    NOTE(review): the ``initW``/``initB`` defaults are evaluated once at
    function-definition time, so every instance shares the same
    initializer objects — confirm the initializers are stateless.
    """
    Module.__init__(self)
    # Layer geometry: plane counts, kernel size, stride, padding.
    self.n_input_plane = n_input_plane
    self.n_output_plane = n_output_plane
    self.k_w, self.k_h = k_w, k_h
    self.d_w, self.d_h = d_w, d_h
    self.pad_w, self.pad_h = pad_w, pad_h
    self.with_bias = with_bias

    kernel_area = k_w * k_h
    weight_shape = (n_output_plane, n_input_plane, k_h, k_w)
    # (fan_in, fan_out) pair handed to the weight initializer.
    weight_fan = (n_input_plane * kernel_area, n_output_plane * kernel_area)
    self.weight, self.grad_weight = create_param_and_grad(
        weight_shape,
        initW,
        fan=weight_fan,
        name='Wconv_{},{}@{}x{}'.format(n_input_plane, n_output_plane, k_w, k_h))
    if self.with_bias:
        self.bias, self.grad_bias = create_param_and_grad(
            n_output_plane,
            initB,
            name='bconv_{}'.format(n_output_plane))
def __init__(self, nin, nout, with_bias=True, initW=xavier(), initB=const(0)):
    """Fully-connected layer constructor.

    Creates an (nin, nout) weight matrix with its gradient buffer and,
    when ``with_bias`` is True, a bias vector of length nout.

    NOTE(review): ``initW``/``initB`` defaults are built once at
    definition time and shared across instances — confirm the
    initializers are stateless.
    """
    Module.__init__(self)
    self.nin = nin
    self.nout = nout
    self.with_bias = with_bias

    # For a dense layer the weight shape doubles as its (fan_in, fan_out).
    dims = (nin, nout)
    self.weight, self.grad_weight = create_param_and_grad(
        dims,
        initW,
        fan=dims,
        name='Wlin_{}x{}'.format(nin, nout))
    if self.with_bias:
        self.bias, self.grad_bias = create_param_and_grad(
            nout,
            initB,
            name='blin_{}'.format(nout))
def __init__(self, nin, nout, with_bias=True, initW=xavier(), initB=const(0)):
    """Linear (dense) layer setup.

    Builds the (nin, nout) weight parameter plus gradient, and an
    optional length-nout bias pair when ``with_bias`` is set.

    NOTE(review): default arguments ``xavier()`` and ``const(0)`` are
    evaluated a single time at definition — verify they carry no
    per-instance state.
    """
    Module.__init__(self)
    self.nin = nin
    self.nout = nout
    self.with_bias = with_bias

    weight_dims = (nin, nout)
    weight_name = 'Wlin_{}x{}'.format(nin, nout)
    self.weight, self.grad_weight = create_param_and_grad(
        weight_dims, initW, fan=weight_dims, name=weight_name)

    if not self.with_bias:
        return
    bias_name = 'blin_{}'.format(nout)
    self.bias, self.grad_bias = create_param_and_grad(nout, initB, name=bias_name)
def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, pad_w=0, pad_h=0, with_bias=True, initW=xavier(), initB=const(0)):
    """Convolution layer setup.

    Registers the kernel weights of shape
    (n_output_plane, n_input_plane, k_h, k_w) together with their
    gradient buffer, and optionally a bias vector of length
    n_output_plane.

    NOTE(review): ``initW``/``initB`` are shared default objects created
    at definition time — confirm the initializers are stateless.
    """
    Module.__init__(self)
    # Persist the full convolution geometry on the instance.
    self.n_input_plane = n_input_plane
    self.n_output_plane = n_output_plane
    self.k_w = k_w
    self.k_h = k_h
    self.d_w = d_w
    self.d_h = d_h
    self.pad_w = pad_w
    self.pad_h = pad_h
    self.with_bias = with_bias

    taps = k_w * k_h
    kernel_shape = (n_output_plane, n_input_plane, k_h, k_w)
    fan_pair = (n_input_plane * taps, n_output_plane * taps)
    label = 'Wconv_{},{}@{}x{}'.format(n_input_plane, n_output_plane, k_w, k_h)
    self.weight, self.grad_weight = create_param_and_grad(
        kernel_shape, initW, fan=fan_pair, name=label)

    if self.with_bias:
        self.bias, self.grad_bias = create_param_and_grad(
            n_output_plane, initB, name='bconv_{}'.format(n_output_plane))
def prelu(gain=1):
    """Return a Xavier initializer whose gain is scaled by sqrt(2).

    The extra sqrt(2) factor matches the variance correction commonly
    used for rectifier-style units; ``gain`` scales it further.
    """
    scaled_gain = gain * _np.sqrt(2)
    return xavier(scaled_gain)