def __init__(self, hidden_size, stddev=None):
    """Create the cell's trainable parameters.

    :param hidden_size: int, dimensionality of the hidden state; W and U
        are square ``hidden_size x hidden_size`` matrices.
    :param stddev: stored for interface compatibility; not read by the
        initializer calls below.
    """
    self.stddev = stddev
    self.hidden_size = hidden_size
    # Both recurrent weight matrices share the same square shape.
    square_shape = [self.hidden_size, self.hidden_size]
    self.W = tf.Variable(Randomer.random_normal(square_shape), trainable=True)
    self.U = tf.Variable(Randomer.random_normal(square_shape), trainable=True)
    # Scalar-style bias, broadcast by TensorFlow at use sites.
    self.b = tf.Variable(tf.zeros([1]), trainable=True)
def __init__(self, w_shape, stddev=None, params=None):
    """Build (or adopt) a single linear weight matrix.

    :param w_shape: [input_dim, output_dim].
    :param stddev: used for initialization (kept for interface
        compatibility; not read here).
    :param params: externally supplied parameters; when given,
        ``params['w']`` is reused instead of creating a new variable.
    """
    if params is not None:
        self.w = params['w']
    else:
        self.w = tf.Variable(Randomer.random_normal(w_shape), trainable=True)
def __init__(self, edim, class_num, stddev=None, params=None):
    """Set up the linear layer feeding the softmax classifier.

    :param edim: the input embedding dimension.
    :param class_num: number of class types.
    :param stddev: kept for interface compatibility; not read here.
    :param params: optional dict ``{'wline': wline, 'bline': bline}``
        of pre-built variables to reuse instead of creating new ones.
    """
    self.edim = edim
    self.class_num = class_num
    # Either adopt the supplied softmax weights/bias or create fresh ones.
    if params is not None:
        self.wline_softmax = params['wline']
        self.bline_softmax = params['bline']
    else:
        weight_shape = [self.edim, self.class_num]
        self.wline_softmax = tf.Variable(
            Randomer.random_normal(weight_shape), trainable=True)
        self.bline_softmax = tf.Variable(tf.zeros([1, 1]), trainable=True)
def __init__(self, w_shape=None, stddev=None, params=None, active='tanh'):
    """Initialize a linear transform paired with an activation name.

    :param w_shape: shape of the weight ``wline``; required when
        ``params`` is None.
    :param stddev: stddev for the random initializer; required when
        ``params`` is None (stored, not read directly here).
    :param params: optional dict ``{'wline': wline}`` used to assign
        the parameter directly.
    :param active: name of the activation function.
    """
    self.w_shape = w_shape
    self.stddev = stddev
    self.active = active
    if params is not None:
        self.wline = params['wline']
    else:
        self.wline = tf.Variable(
            Randomer.random_normal(self.w_shape), trainable=True)