def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, pad_w=0, pad_h=0, with_bias=True, initW=xavier(), initB=const(0)):
    """Construct a 2-D convolution layer.

    Allocates the kernel weight (and optional per-output-plane bias)
    together with matching gradient accumulators.

    Args:
        n_input_plane: number of input feature maps.
        n_output_plane: number of output feature maps.
        k_w, k_h: kernel width and height.
        d_w, d_h: horizontal/vertical stride.
        pad_w, pad_h: horizontal/vertical zero-padding.
        with_bias: whether to allocate a bias parameter.
        initW, initB: weight/bias initializers.
            NOTE(review): these defaults are evaluated once at `def` time;
            if `xavier()`/`const(0)` return stateful objects they are shared
            across all instances — confirm the initializers are stateless.
    """
    Module.__init__(self)

    # Keep all hyper-parameters around for the forward/backward passes.
    self.n_input_plane = n_input_plane
    self.n_output_plane = n_output_plane
    self.k_w = k_w
    self.k_h = k_h
    self.d_w = d_w
    self.d_h = d_h
    self.pad_w = pad_w
    self.pad_h = pad_h
    self.with_bias = with_bias

    # Kernel layout: (out_planes, in_planes, kernel_h, kernel_w).
    kernel_shape = (n_output_plane, n_input_plane, k_h, k_w)
    # (fan_in, fan_out) as seen by the initializer: connections per output
    # unit and per input unit, respectively.
    kernel_fan = (n_input_plane * k_w * k_h, n_output_plane * k_w * k_h)

    self.weight, self.grad_weight = create_param_and_grad(
        kernel_shape, initW, fan=kernel_fan,
        name='Wconv_{},{}@{}x{}'.format(n_input_plane, n_output_plane, k_w, k_h))

    if self.with_bias:
        self.bias, self.grad_bias = create_param_and_grad(
            n_output_plane, initB,
            name='bconv_{}'.format(n_output_plane))
def __init__(self, nin, nout, with_bias=True, initW=xavier(), initB=const(0)):
    """Construct a fully-connected (linear) layer.

    Allocates a (nin, nout) weight matrix — and, optionally, a bias of
    length `nout` — each paired with a gradient accumulator.

    Args:
        nin: input dimensionality.
        nout: output dimensionality.
        with_bias: whether to allocate a bias parameter.
        initW, initB: weight/bias initializers.
            NOTE(review): evaluated once at `def` time; confirm they are
            stateless so sharing the default across instances is safe.
    """
    Module.__init__(self)

    self.nin = nin
    self.nout = nout
    self.with_bias = with_bias

    # The (nin, nout) tuple doubles as both the weight shape and the
    # (fan_in, fan_out) pair handed to the initializer.
    self.weight, self.grad_weight = create_param_and_grad(
        (nin, nout), initW, fan=(nin, nout),
        name='Wlin_{}x{}'.format(nin, nout))

    if self.with_bias:
        self.bias, self.grad_bias = create_param_and_grad(
            nout, initB,
            name='blin_{}'.format(nout))
def __init__(self, nin, nout, with_bias=True, initW=xavier(), initB=const(0)):
    """Build a linear layer: allocate the weight matrix and optional bias.

    Args:
        nin: number of input units.
        nout: number of output units.
        with_bias: if True, also allocate a length-`nout` bias.
        initW, initB: weight/bias initializers.
            NOTE(review): defaults evaluated once at `def` time — confirm
            the initializer objects are stateless.
    """
    Module.__init__(self)

    self.nin, self.nout = nin, nout
    self.with_bias = with_bias

    # Name the parameters after their dimensions for easier debugging.
    w_name = 'Wlin_{}x{}'.format(nin, nout)
    b_name = 'blin_{}'.format(nout)

    # Weight shape and initializer fan are both (nin, nout) here.
    self.weight, self.grad_weight = create_param_and_grad(
        (nin, nout), initW, fan=(nin, nout), name=w_name)

    if self.with_bias:
        self.bias, self.grad_bias = create_param_and_grad(
            nout, initB, name=b_name)
def __init__(self, n_features, eps=None):
    """Construct a batch-normalization layer.

    Allocates the learnable scale/shift parameters, a second pair of
    parameters used at inference time, and buffers that accumulate
    minibatch statistics.

    Args:
        n_features: number of features (channels) to normalize.
        eps: small constant for numerical stability; defaults to 1e-5
            when None is passed.
    """
    Module.__init__(self)

    # Learnable affine parameters (scale initialized to 1, shift to 0).
    self.weight, self.grad_weight = create_param_and_grad(n_features, const(1), name='W_BN_{}'.format(n_features))
    self.bias, self.grad_bias = create_param_and_grad(n_features, const(0), name='b_BN_{}'.format(n_features))

    # Separate parameters used at inference time (no gradients allocated).
    self.inference_weight = create_param(n_features, const(1), name='W_BN_{}_inf'.format(n_features))
    self.inference_bias = create_param(n_features, const(0), name='b_BN_{}_inf'.format(n_features))

    # These are buffers for collecting the minibatch statistics.
    self.buffer_variance = create_param(n_features, const(1), name='BN_var_{}'.format(n_features))
    self.buffer_mean = create_param(n_features, const(0), name='BN_mean_{}'.format(n_features))
    # Scalar shared variable counting how many samples the buffers cover.
    self.buffer_counts = _th.shared(_np.asarray(0, dtype=_th.config.floatX), name='BN_count_{}'.format(n_features))

    # FIX: the original `eps or 1e-5` silently replaced an explicit
    # eps=0 / eps=0.0 with the default because 0 is falsy; only treat
    # None as "use the default".
    self.eps = 1e-5 if eps is None else eps

    # Filled in during the training-time forward pass.
    self.batch_mean = None
    self.batch_var = None
def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, pad_w=0, pad_h=0, with_bias=True, initW=xavier(), initB=const(0)):
    """Build a 2-D convolution layer: kernel weights plus optional bias.

    Args:
        n_input_plane: input feature maps.
        n_output_plane: output feature maps.
        k_w, k_h: kernel width/height.
        d_w, d_h: stride along width/height.
        pad_w, pad_h: zero-padding along width/height.
        with_bias: whether to allocate a per-output-plane bias.
        initW, initB: weight/bias initializers.
            NOTE(review): evaluated once at `def` time; confirm they are
            stateless so sharing across instances is harmless.
    """
    Module.__init__(self)

    # Stash every hyper-parameter on the instance under its own name.
    hyperparams = (
        ('n_input_plane', n_input_plane),
        ('n_output_plane', n_output_plane),
        ('k_w', k_w), ('k_h', k_h),
        ('d_w', d_w), ('d_h', d_h),
        ('pad_w', pad_w), ('pad_h', pad_h),
        ('with_bias', with_bias),
    )
    for attr, value in hyperparams:
        setattr(self, attr, value)

    # Weight tensor laid out as (out, in, kh, kw); fan counts are the
    # number of incoming/outgoing connections per unit.
    shape = (n_output_plane, n_input_plane, k_h, k_w)
    fan_in = n_input_plane * k_w * k_h
    fan_out = n_output_plane * k_w * k_h
    self.weight, self.grad_weight = create_param_and_grad(
        shape, initW, fan=(fan_in, fan_out),
        name='Wconv_{},{}@{}x{}'.format(n_input_plane, n_output_plane, k_w, k_h))

    if self.with_bias:
        self.bias, self.grad_bias = create_param_and_grad(
            n_output_plane, initB,
            name='bconv_{}'.format(n_output_plane))