Example #1
import numpy as np
from tensorflow.keras.layers import Dense

def getScalingDenseLayer(input_location, input_scale):
    # Frozen Dense layer that standardizes its inputs:
    # y = (x - input_location) / input_scale
    recip_input_scale = np.reciprocal(input_scale)

    # diagonal kernel holds 1/scale; the bias shifts by -location/scale
    waux = np.diag(recip_input_scale)
    baux = -input_location * recip_input_scale

    dL = Dense(input_location.shape[0], activation=None,
               input_shape=input_location.shape)
    dL.build(input_shape=input_location.shape)
    dL.set_weights([waux, baux])
    dL.trainable = False
    return dL
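
A minimal usage sketch (the location and scale values below are made up for illustration): the frozen layer standardizes each feature before the trainable part of a network.

import numpy as np
from tensorflow.keras.models import Sequential

# hypothetical per-feature location (mean) and scale (std) for 3 features
loc = np.array([10.0, 0.5, -2.0])
scale = np.array([2.0, 0.1, 4.0])

model = Sequential([getScalingDenseLayer(loc, scale)])
x = np.array([[12.0, 0.6, 2.0]])
print(model.predict(x))  # ~[[1. 1. 1.]], i.e. (x - loc) / scale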
Example #2
import numpy as np
from tensorflow.keras.layers import Dense

def inputsSelection(inputs_shape, ndex):
    # Frozen Dense layer that selects the input features listed in ndex
    if not hasattr(ndex, 'index'):
        ndex = list(ndex)
    # binary mask with one column per selected feature
    input_mask = np.zeros([inputs_shape[-1], len(ndex)])
    for i in range(inputs_shape[-1]):
        for v in ndex:
            if i == v:
                input_mask[i, ndex.index(v)] = 1

    dL = Dense(len(ndex), activation=None, input_shape=inputs_shape,
               use_bias=False)
    dL.build(input_shape=inputs_shape)
    dL.set_weights([input_mask])
    dL.trainable = False
    return dL
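
A quick sketch of how the selector might be used, assuming five input features of which only features 0 and 3 are kept (all values hypothetical):

import numpy as np

# build a frozen layer that keeps features 0 and 3 out of 5
selector = inputsSelection((5,), [0, 3])

x = np.array([[10., 20., 30., 40., 50.]])
print(selector(x))  # -> [[10. 40.]]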
Example #3
import numpy as np
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

def get_sparse_mlp(ws, bs, ls, reference):
    """
    :param ws: Weights of the MLP (as a list)
    :param bs: Biases of the MLP (as a list)
    :param ls: link functions
    :param reference: baselines (aka reference points)
    :return: (keras) sparse version of the model
    """
    # times[j]: number of copies of layer j the sparse model needs
    # (one copy per downstream path to the output)
    times = np.ones(len(ls)).astype(int)
    for j in range(len(ls) - 2, -1, -1):
        times[j] = np.size(ws[j], 1) * times[j + 1]

    # build sparse model
    sparse_model = Sequential()
    for j in range(len(ls)):
        # input dimension of layer j in the original (dense) network
        n_neurons_realnet = np.size(ws[j], 0)
        # Initialize Weight vector to 0s
        this_w = np.zeros((n_neurons_realnet * times[j], times[j]))
        # and biases
        this_b = np.zeros(times[j])
        # Fill the biases and the weights in the correct place
        col = 0
        for i in range(np.size(this_w, 1)):
            if col == np.size(ws[j], 1): col = 0
            this_w[i * n_neurons_realnet:(i + 1) * n_neurons_realnet,
                   i] = ws[j][:, col]
            this_b[i] = np.asarray(bs[j])[col]
            col += 1
        # Add layer to the network
        this_dense = Dense(units=np.size(this_w, 1),
                           activation=ls[j],
                           input_shape=(np.size(this_w, 0), ))
        sparse_model.add(this_dense)
        this_dense.set_weights([this_w, this_b])
    # compile the new (sparse) model
    opt = keras.optimizers.Adam()
    sparse_model.compile(loss=keras.losses.binary_crossentropy,
                         optimizer=opt,
                         metrics=['accuracy'])

    # Report the prediction at the reference point
    at_reference = sparse_model.predict(np.array(reference).reshape(1, -1))
    print("Prediction at the reference point is: ", at_reference)
    return sparse_model
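
A hedged usage sketch, assuming a tiny hand-written 2-2-1 MLP (all weights below are hypothetical). Note that, given the shape of this_w for the first layer, the sparse model's input, and therefore reference, appears to live in the replicated input space of size n_inputs * times[0], here 2 * 2 = 4:

import numpy as np

# hypothetical weights, biases and link functions of a 2-2-1 MLP
ws = [np.array([[0.5, -0.3],
                [0.2, 0.8]]),   # layer 0: 2 inputs -> 2 units
      np.array([[1.0],
                [-1.0]])]       # layer 1: 2 inputs -> 1 unit
bs = [np.array([0.1, -0.1]), np.array([0.0])]
ls = ['tanh', 'sigmoid']
reference = np.zeros(4)         # baseline in the replicated input space

sparse = get_sparse_mlp(ws, bs, ls, reference)
sparse.summary()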
Example #4
# decoder
# (fr_se_vocab, hsize, en_gru, encoder, model_weights, sent2oh and word2oh
# come from earlier parts of the original script)
from tensorflow.keras.layers import Input, GRU, Dense
from tensorflow.keras.models import Model

de_input = Input(shape=(1, fr_se_vocab))
de_state_in = Input(shape=(hsize, ))  # ---> fed with the result of encoder.predict

# decoder's intermediate layers
de_gru = GRU(hsize, return_state=True)
de_out, de_state_out = de_gru(de_input, initial_state=de_state_in)
de_dense = Dense(fr_se_vocab, activation='softmax')
de_pred = de_dense(de_out)
decoder = Model(inputs=[de_input, de_state_in],
                outputs=[de_pred, de_state_out])

# copy the trained weights from the teacher-forcing model
en_gru.set_weights(model_weights.en_gru_weight())
de_gru.set_weights(model_weights.de_gru_weight())
de_dense.set_weights(model_weights.de_dense_weight())

###################################################


# function to translate English to French
def translate_to_french(english_sentence):
    # one-hot encode the English sentence (in reversed word order)
    en_sent_oh = sent2oh(english_sentence, language='en', reverse=True)
    # one-hot encode the start-of-sequence token 'sos'
    de_w_seq = word2oh('sos', language='fr', se=True)
    de_w_seq = de_w_seq.reshape(1, 1, fr_se_vocab)
    # get the initial decoder state from the encoder
    de_state = encoder.predict(en_sent_oh)

    fr_sent = ''