Example #1
    def __init__(self, hyperParams=None):

        self.params = hyperParams

        # Reuse a previously saved model instead of rebuilding it from scratch.
        if self.checkSavedModel():
            return

        p = self.params["model"]
        model = Sequential()
        
        embed_matrix = self.params["embedding"].getWord2VecMatrix() 

        emb = Embedding(
                embed_matrix.shape[0], 
                embed_matrix.shape[1], 
                # weights=[embed_matrix], 
                mask_zero=True,
                learn=(int(self.params["learn_embedding"]) == 1)
        )
        emb.W.set_value(floatX(embed_matrix))

        model.add(emb)

        print "Initialized Embeddings"
        # Recurrent layer; truncate_gradient bounds backpropagation through time
        # to the configured depth.
        srnn = SimpleRNN(
                input_dim=embed_matrix.shape[1],
                output_dim=int(p.get("hidden_nodes", 100)),
                activation='tanh',
                init='uniform',
                inner_init='uniform',
                # inner_activation='hard_sigmoid',
                return_sequences=False,
                truncate_gradient=int(p.get("depth", 3)),
        )

        model.add(srnn)
        print "Initialized Recurrent Layer"

        # Softmax output layer mapping the final hidden state to class probabilities.
        denseL = Dense(
                input_dim=int(p.get("hidden_nodes", 100)),
                output_dim=int(p.get("output_nodes", 4)),
                activation='softmax',
                init='uniform',
        )

        model.add(denseL)
        print "Initialized Dense Layer"

        self.model = model
        self.compile()
        # Expose only the weight matrix W as the embedding layer's trainable parameters.
        self.model.layers[0].params = [self.model.layers[0].W]
        self.saveModel()
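
For orientation, here is a rough restatement of the same Embedding -> SimpleRNN -> Dense stack against the current tf.keras API. This is a hedged sketch, not the snippet's original Keras 0.x / Theano code: the placeholder embedding matrix, the hidden/output sizes, and the loss/optimizer passed to compile() are all assumptions, since the original compile() settings are not shown.

import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, SimpleRNN, Dense
from tensorflow.keras.initializers import Constant

# Placeholder word2vec matrix standing in for getWord2VecMatrix().
embed_matrix = np.random.rand(10000, 300).astype("float32")
hidden_nodes, output_nodes = 100, 4

model = Sequential([
    # Constant initializer loads the pre-trained vectors into the layer.
    Embedding(embed_matrix.shape[0], embed_matrix.shape[1],
              embeddings_initializer=Constant(embed_matrix),
              mask_zero=True, trainable=True),
    SimpleRNN(hidden_nodes, activation="tanh", return_sequences=False),
    Dense(output_nodes, activation="softmax"),
])
model.compile(loss="categorical_crossentropy", optimizer="adam")  # assumed settings

The legacy init='uniform' / inner_init='uniform' arguments roughly correspond to kernel_initializer / recurrent_initializer in current Keras, and truncate_gradient has no direct counterpart, so all three are left at their defaults in the sketch.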
Example #2
    def setEmbeddingWeights(self, embed_matrix):

        # emb = Embedding(
        #         embed_matrix.shape[0], 
        #         embed_matrix.shape[1], 
        #         weights=[embed_matrix], 
        #         mask_zero=True,
        #         # learn=(self.params["learn_embedding"] == 1)
        # )
        #
        # self.model.layers[0] = emb

        # Overwrite the embedding layer's weight matrix in-place with the new matrix.
        self.model.layers[0].W.set_value(floatX(embed_matrix))
        # self.model.layers[0].set_weights([embed_matrix])
        print "Changed embed layer weights!"