Example no. 1
    def __init__(self, hyperParams=None):

        # fall back to hard-coded defaults when no hyper-parameters are given
        if hyperParams is None:
            hyperParams = {
                'input': 16,
                'l1_output': 4,
                'l2_output': 4,
                'output': 16,
            }

        self.params = hyperParams
        p = hyperParams

        model = Sequential()

        model.add(Dense(p['input'], 5, init='uniform'))
        model.add(Activation('sigmoid'))
        model.add(Dropout(0.5))
        # model.add(Dense(24, 4, init='uniform'))
        # model.add(Activation('tanh'))
        # model.add(Dropout(0.5))
        model.add(Dense(5, p["output"]))
        model.add(Activation('softmax'))
        
        self.model = model
        self.compile()
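
These constructors come from a model-wrapper class and use the old-style Keras 0.x API, in which layers take explicit input and output sizes and compile() is a helper on the wrapper rather than a direct Keras call. Below is a minimal sketch of the imports and the kind of compile() helper the snippets appear to assume; the class name, loss, and optimizer are illustrative guesses, not taken from the source.

# Old-style Keras (0.x) imports assumed by these examples
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Dropout
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import SimpleRNN, SimpleDeepRNN, LSTM

class KerasModelWrapper(object):  # hypothetical name for the wrapper class
    def compile(self):
        # illustrative only: the real helper's loss and optimizer are not shown
        self.model.compile(loss='categorical_crossentropy', optimizer='sgd')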
Example no. 2
    def __init__(self, hyperParams=None):

        self.params = hyperParams
        p = self.params["model"]

        if(self.checkSavedModel()):
            return

        model = Sequential()
        
        embed_matrix = self.params["embedding"].getWord2VecMatrix() 

        emb = Embedding(
                embed_matrix.shape[0], 
                embed_matrix.shape[1], 
                weights=[embed_matrix], 
                mask_zero=True,
                # learn=(self.params["learn_embedding"] == 1)
        )

        model.add(emb)

        srnn = SimpleRNN(
                input_dim=embed_matrix.shape[1],
                output_dim=embed_matrix.shape[0],
                activation='softmax', 
                init='uniform', 
                # inner_activation='hard_sigmoid',
                return_sequences=True,
                truncate_gradient=int(p.get("depth", -1)),  # -1 means no BPTT truncation
        )


        print "Done"

        model.add(srnn)

        # model.add(Activation('softmax'))

        # model.add(LSTM(embed_matrix.shape[1], 128, activation='sigmoid', inner_activation='hard_sigmoid'))
        # model.add(Dropout(0.5))
        # model.add(Dense(128, 1))
        # model.add(Activation('sigmoid'))

        self.model = model

        # import pdb; pdb.set_trace()
        self.compile()

        self.saveModel()
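
The recurrent examples read their configuration from a nested dictionary rather than a flat one. The following is a hypothetical sketch of the hyperParams layout these constructors appear to expect, inferred only from the attribute accesses above; the embedding helper's name is made up, but it must expose getWord2VecMatrix().

hyperParams = {
    "model": {"depth": 3},          # per-model settings; "depth" feeds truncate_gradient / depth
    "embedding": embedding_helper,  # hypothetical object exposing getWord2VecMatrix()
    "learn_embedding": 1,           # read in a later example to toggle embedding training
}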
Example no. 3
    def __init__(self, hyperParams=None):

        self.params = hyperParams

        if(self.checkSavedModel()):
            return
        
        p = self.params["model"]

        model = Sequential()
        
        embed_matrix = self.params["embedding"].getWord2VecMatrix() 

        emb = Embedding(
                embed_matrix.shape[0], 
                embed_matrix.shape[1], 
                weights=[embed_matrix], 
                mask_zero=True,
                # learn=(self.params["learn_embedding"] == 1)
        )

        model.add(emb)

        srnn = SimpleDeepRNN(
                input_dim=embed_matrix.shape[1],
                output_dim=embed_matrix.shape[0],
                activation='softmax', 
                init='uniform', 
                inner_init='uniform', 
                depth=int(p.get("depth", 3)),
                inner_activation='sigmoid',
                return_sequences=True,
        )


        print "Done"

        model.add(srnn)
        self.model = model
        self.compile()

        self.saveModel()
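
Like the previous example, this model emits a softmax over the entire vocabulary at every time step (output_dim equals the vocabulary size and return_sequences=True), i.e. a language-model-style output. A hedged prediction sketch, assuming the standard old-Keras predict() call and a wrapper instance named net (both names here are illustrative):

import numpy as np

# One left-padded index sequence in, one (timesteps, vocab_size) matrix of
# per-step softmax distributions out, because return_sequences=True.
probs = net.model.predict(np.array([[0, 0, 12, 7, 431]]))
next_word_ids = probs.argmax(axis=-1)  # most probable word index at each step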
Example no. 4
    def __init__(self, hyperParams=None):

        self.params = hyperParams

        if(self.checkSavedModel()):
            return

        p = self.params["model"]
        model = Sequential()
        
        embed_matrix = self.params["embedding"].getWord2VecMatrix() 

        emb = Embedding(
                embed_matrix.shape[0], 
                embed_matrix.shape[1], 
                # weights=[embed_matrix], 
                mask_zero=True,
                learn=(int(self.params["learn_embedding"]) == 1)  # non-standard kwarg; presumably a fork flag toggling embedding training
        )
        emb.W.set_value(floatX(embed_matrix))  # floatX presumably keras.utils.theano_utils.floatX, casting to Theano's float dtype

        model.add(emb)

        print "Initialized Embeddings"
        srnn = SimpleRNN(
                input_dim=embed_matrix.shape[1],
                output_dim=int(p.get("hidden_nodes", 100)),
                activation='tanh', 
                init='uniform', 
                inner_init='uniform', 
                # inner_activation='hard_sigmoid',
                return_sequences=False,
                truncate_gradient=int(p.get("depth", 3)),
        )

        model.add(srnn)
        print "Initialized Recurrent Layer"

        denseL = Dense(
                input_dim=int(p.get("hidden_nodes", 100)),
                output_dim=int(p.get("output_nodes", 4)),
                activation='softmax', 
                init='uniform', 
        )

        model.add(denseL)
        print "Initialized Dense Layer"

        self.model = model
        self.compile()
        self.model.layers[0].params = [self.model.layers[0].W]  # expose only the embedding weight matrix W as the layer's (old-Keras) parameter list
        self.saveModel()
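
This is the only recurrent example that ends in a small softmax classifier (hidden_nodes into output_nodes with return_sequences=False), so it can be trained on padded index sequences with one label per sequence. A usage sketch under that assumption, using pad_sequences and the old-Keras fit signature (nb_epoch); the toy data and wrapper name are made up.

import numpy as np
from keras.preprocessing import sequence

# Toy data: integer word indices, left-padded with 0 so the mask_zero Embedding
# ignores the padding; labels are one-hot over the default 4 output_nodes.
X = sequence.pad_sequences([[12, 7, 431], [5, 89]], maxlen=10)
y = np.array([[1, 0, 0, 0], [0, 0, 1, 0]])

net = RecurrentClassifier(hyperParams)          # hypothetical wrapper instance
net.model.fit(X, y, batch_size=32, nb_epoch=5)  # nb_epoch is the old-Keras name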
Example no. 5
    def __init__(self, embed_matrix):
        model = Sequential()
        # mask_zero=True is passed to the Embedding constructor because 0 is used as a left-padding value in the data

        emb = Embedding(embed_matrix.shape[0], embed_matrix.shape[1], weights=[embed_matrix], mask_zero=True)

        model.add(emb)
        model.add(LSTM(embed_matrix.shape[1], 128, activation='sigmoid', inner_activation='hard_sigmoid'))
        model.add(Dropout(0.5))
        model.add(Dense(128, 1))
        model.add(Activation('sigmoid'))

        self.model = model
        self.compile()
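
This example is the classic old-Keras sequence classifier: pretrained embeddings feed an LSTM whose final state passes through dropout and a single sigmoid unit, i.e. binary classification. A hedged sketch of how its compile() helper and a training call might look under the Keras 0.x API; the loss, optimizer, class_mode, and toy data are assumptions, not taken from the source.

import numpy as np

# Binary cross-entropy with a sigmoid output is the natural pairing here.
model.compile(loss='binary_crossentropy', optimizer='adam', class_mode='binary')

X = np.random.randint(1, embed_matrix.shape[0], size=(20, 15))  # toy index sequences
y = np.random.randint(0, 2, size=(20, 1))                       # toy binary labels
model.fit(X, y, batch_size=8, nb_epoch=2)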
Example no. 6
    def __init__(self, hyperParams=None):

        # fall back to hard-coded defaults when no hyper-parameters are given
        if hyperParams is None:
            hyperParams = {
                'l1_input': 100,
                'l1_output': 64,
                'l2_output': 64,
            }

        self.params = hyperParams
        p = hyperParams

        model = Sequential()

        model.add(Dense(p['l1_input'], 64, init='uniform'))
        model.add(Activation('tanh'))
        model.add(Dropout(0.5))
        model.add(Dense(64, 64, init='uniform'))
        model.add(Activation('tanh'))
        model.add(Dropout(0.5))
        model.add(Dense(64, 1))
        model.add(Activation('sigmoid'))
        
        self.model = model
        self.compile()
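
The last example is a plain feed-forward binary classifier (100 into 64 into 64 into 1, tanh hidden layers, sigmoid output). A hypothetical instantiation and training call, assuming the wrapper class is importable under some name and that its compile() pairs the sigmoid output with a binary loss; all names and data below are made up.

import numpy as np

net = FeedForwardModel(hyperParams={'l1_input': 100, 'l1_output': 64, 'l2_output': 64})

X = np.random.rand(200, 100)                # 200 samples of the 100-dimensional input
y = np.random.randint(0, 2, size=(200, 1))  # binary targets for the sigmoid output
net.model.fit(X, y, batch_size=32, nb_epoch=10)  # nb_epoch is the old-Keras name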