from collections import OrderedDict

import numpy


def __init__(self, options):
    """Initializes the parameters for an LSTM layer of a recurrent
    neural network.

    :type options: dict
    :param options: a dictionary of training options
    """
    self.options = options

    # Initialize the parameters.
    self.init_params = OrderedDict()
    nin = self.options['dim_word']
    dim = self.options['dim']

    # Input-to-gate weights: one (nin, dim) block per gate, concatenated
    # along the second axis.
    W = numpy.concatenate([normalized_weight(nin, dim),
                           normalized_weight(nin, dim),
                           normalized_weight(nin, dim)],
                          axis=1)
    self.init_params['encoder_W'] = W
    n_gates = 3
    self.init_params['encoder_b'] = \
        numpy.zeros((n_gates * dim,)).astype('float32')

    # Recurrent gate weights: one orthogonal (dim, dim) block per gate.
    U = numpy.concatenate([orthogonal_weight(dim),
                           orthogonal_weight(dim),
                           orthogonal_weight(dim)],
                          axis=1)
    self.init_params['encoder_U'] = U

    # Candidate-state weights and bias.
    Wx = normalized_weight(nin, dim)
    self.init_params['encoder_Wx'] = Wx
    Ux = orthogonal_weight(dim)
    self.init_params['encoder_Ux'] = Ux
    self.init_params['encoder_bx'] = numpy.zeros((dim,)).astype('float32')
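# normalized_weight and orthogonal_weight are not defined in this module.
# Below is a minimal sketch of what they are assumed to do, inferred only
# from how they are called above; the exact implementations may differ.


def orthogonal_weight(ndim):
    # Random orthogonal (ndim, ndim) matrix, obtained from the SVD of a
    # standard Gaussian matrix.
    W = numpy.random.randn(ndim, ndim)
    u, _, _ = numpy.linalg.svd(W)
    return u.astype('float32')


def normalized_weight(nin, nout=None, scale=0.01, ortho=True):
    # Scaled Gaussian (nin, nout) matrix; when the shape is square and
    # ortho=True, an orthogonal matrix is used instead.
    if nout is None:
        nout = nin
    if nout == nin and ortho:
        W = orthogonal_weight(nin)
    else:
        W = scale * numpy.random.randn(nin, nout)
    return W.astype('float32')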
def __init__(self, options):
    """Initializes the parameters for a feed-forward layer of a neural
    network.

    :type options: dict
    :param options: a dictionary of training options
    """
    # Create the parameters.
    self.init_params = OrderedDict()

    # Projection from the recurrent state to the embedding space.
    nin = options['dim']
    nout = options['dim_word']
    self.init_params['ff_logit_lstm_W'] = normalized_weight(
        nin, nout, scale=0.01, ortho=False)
    self.init_params['ff_logit_lstm_b'] = \
        numpy.zeros((nout,)).astype('float32')

    # Projection from the previous word embedding.
    nin = options['dim_word']
    nout = options['dim_word']
    self.init_params['ff_logit_prev_W'] = normalized_weight(
        nin, nout, scale=0.01, ortho=False)
    self.init_params['ff_logit_prev_b'] = \
        numpy.zeros((nout,)).astype('float32')

    # Output projection to the vocabulary.
    nin = options['dim_word']
    nout = options['n_words']
    self.init_params['ff_logit_W'] = normalized_weight(
        nin, nout, scale=0.01, ortho=True)
    self.init_params['ff_logit_b'] = \
        numpy.zeros((nout,)).astype('float32')
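# A sketch of the forward pass these parameters imply: combine the
# recurrent state with the previous word embedding, squash with tanh,
# then project to the vocabulary and normalize. The function and
# variable names (logit_probs, hidden, prev_emb) are assumptions, not
# taken from this file.


def _softmax(x):
    # Numerically stable softmax over the last axis.
    e = numpy.exp(x - x.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)


def logit_probs(params, hidden, prev_emb):
    # hidden: (batch, dim), prev_emb: (batch, dim_word)
    pre = numpy.tanh(
        hidden.dot(params['ff_logit_lstm_W']) + params['ff_logit_lstm_b'] +
        prev_emb.dot(params['ff_logit_prev_W']) + params['ff_logit_prev_b'])
    # (batch, n_words) distribution over the next word.
    return _softmax(pre.dot(params['ff_logit_W']) + params['ff_logit_b'])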
def __init__(self, options):
    """Initializes the parameters for the first layer of a neural network
    language model, which creates the word embeddings.

    :type options: dict
    :param options: a dictionary of training options
    """
    # Initialize the parameters.
    self.init_params = OrderedDict()
    nin = options['n_words']
    nout = options['dim_word']
    self.init_params['Wemb'] = normalized_weight(nin, nout)
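# Usage sketch (hypothetical): embeddings are looked up by indexing rows
# of the (n_words, dim_word) Wemb matrix with integer word IDs.

word_ids = numpy.array([4, 17, 2])      # (sequence_length,)
emb = init_params['Wemb'][word_ids]     # (sequence_length, dim_word)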