Example #1
    # Assumed module-level imports (not shown in the snippet):
    #   import numpy as np
    #   from keras.models import Model
    #   from keras.layers import (Activation, Bidirectional, Concatenate,
    #       CuDNNGRU, Dense, GlobalAveragePooling1D, GlobalMaxPooling1D,
    #       SpatialDropout1D, concatenate)
    #   boostLayer and WEIGHT_FILE are project-specific and assumed to be
    #   importable from the surrounding module.
    def RNNmodel(self):

        size_1 = self.model_setting['size1']
        size_2 = self.model_setting['size2']
        p = self.model_setting['dropout']

        # Two stacked bidirectional GRU layers; merge_mode='ave' averages
        # the forward and backward outputs, so the output width stays equal
        # to the GRU size instead of doubling.
        gru1 = Bidirectional(CuDNNGRU(size_1, return_sequences=True),
                             merge_mode='ave')
        gru2 = Bidirectional(CuDNNGRU(size_2, return_sequences=True),
                             merge_mode='ave')

        # Handles to the GRU layers, returned so the caller can read or
        # save their weights (the load_weight branch below restores them
        # from .npy files).
        layer = {
            'gru1': gru1,
            'gru2': gru2,
        }

        # Spatial dropout drops entire embedding channels at once rather
        # than individual units.
        embedding_layer = SpatialDropout1D(p)(self.embedding_layer)

        x = gru1(embedding_layer)

        x = gru2(x)  # 200 * 80, i.e. timesteps * units

        # Summarize the sequence two ways: max and mean over the time axis.
        x1 = GlobalMaxPooling1D()(x)
        x2 = GlobalAveragePooling1D()(x)

        # Append the pooled features to any tensors already queued for
        # concatenation.
        self.cat_layers += [x1, x2]

        y = Concatenate()(self.cat_layers)
        # y = Dense(90,activation='relu')(y)

        # Six independent sigmoid outputs, one per label.
        fc = Dense(6)(y)
        result_layer = Activation(activation='sigmoid')(fc)

        if self.boost:
            result_layer = concatenate([result_layer, self.boost_layer],
                                       axis=-1)
            result_layer = boostLayer()(result_layer)

        self.result_model = Model(inputs=self.inputs, outputs=result_layer)

        # summary() prints the architecture itself, so wrapping it in
        # print() would only add a trailing 'None'.
        self.result_model.summary()

        # rankLoss is computed on the pre-sigmoid logits; every other loss
        # sees the sigmoid outputs.
        if self.lossname == 'rankLoss':
            loss_layer = fc
        else:
            loss_layer = result_layer
        self.set_loss(loss_layer)

        if self.load_weight:
            name = self.lossname
            # NOTE: the next line overrides the loss name, so pretrained GRU
            # weights are always loaded from the 'focalLoss' run.
            name = 'focalLoss'
            # WEIGHT_FILE appears to be a module-level path prefix for the
            # saved .npy weight arrays.
            gru1_weight = np.load(WEIGHT_FILE + name + 'gru1_weight.npy')
            gru1.set_weights(gru1_weight)
            gru2_weight = np.load(WEIGHT_FILE + name + 'gru2_weight.npy')
            gru2.set_weights(gru2_weight)

        return layer
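
The core pattern above (spatial dropout over embeddings, two averaged bidirectional GRUs, then max- and average-pooling feeding a six-way sigmoid head) can be reproduced standalone. A minimal sketch, assuming Keras 2.x, with the portable GRU substituted for CuDNNGRU; every size below is invented for illustration:

from keras.models import Model
from keras.layers import (Input, Embedding, SpatialDropout1D, Bidirectional,
                          GRU, GlobalMaxPooling1D, GlobalAveragePooling1D,
                          Concatenate, Dense)

# All dimensions are illustrative, not taken from the original snippet.
inp = Input(shape=(200,))
emb = Embedding(input_dim=20000, output_dim=300)(inp)
emb = SpatialDropout1D(0.2)(emb)
# merge_mode='ave' keeps the width at the GRU size instead of doubling it.
x = Bidirectional(GRU(80, return_sequences=True), merge_mode='ave')(emb)
x = Bidirectional(GRU(80, return_sequences=True), merge_mode='ave')(x)
y = Concatenate()([GlobalMaxPooling1D()(x), GlobalAveragePooling1D()(x)])
out = Dense(6, activation='sigmoid')(y)
model = Model(inputs=inp, outputs=out)
model.compile(optimizer='adam', loss='binary_crossentropy')
model.summary()

With merge_mode='ave', each BiGRU step stays 80 wide, so the concatenated max/mean pooling head is 160 wide before the final Dense layer.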
Example #2
# Assumed module-level imports (not shown in the original snippet):
#   from keras.layers import LSTM, RNN, Bidirectional
#   from keras.utils import CustomObjectScope
#   LRP_LSTMCell is a user-defined cell and must be importable as well.
def lrpify_model(model):
    '''
    Take a user-defined Keras Model and replace every LSTM / bidirectional
    LSTM layer with an equivalent RNN layer whose core cell is LRP_LSTMCell.
    '''
    # Config keys copied from the original layer into the replacement cell,
    # the wrapping RNN layer, and the Bidirectional wrapper, respectively.
    cell_config_keys = ['units', 'activation', 'recurrent_activation',
                        'use_bias', 'unit_forget_bias', 'kernel_constraint',
                        'recurrent_constraint', 'bias_constraint']
    rnn_config_keys = ['return_sequences', 'return_state', 'go_backwards',
                       'stateful', 'unroll']
    bidirect_config_keys = ['merge_mode']
		
    for i, layer in enumerate(model.layers):
        if isinstance(layer, Bidirectional):
            weights = layer.get_weights()
            inp_shape = layer.input_shape
            # For a Bidirectional wrapper, the inner layer's config lives
            # under get_config()['layer']['config'].
            cell_config = {key: layer.get_config()['layer']['config'][key]
                           for key in cell_config_keys}
            rnn_config = {key: layer.get_config()['layer']['config'][key]
                          for key in rnn_config_keys}
            bidirect_config = {key: layer.get_config()[key]
                               for key in bidirect_config_keys}

            with CustomObjectScope({'LRP_LSTMCell': LRP_LSTMCell}):
                cell = LRP_LSTMCell(**cell_config, implementation=1)
                bi_lstm = Bidirectional(RNN(cell, **rnn_config),
                                        **bidirect_config)
                # Build on the old input shape and run on the old input
                # tensor so the new layer creates its weights and graph.
                bi_lstm.build(inp_shape)
                bi_lstm.call(layer.input)
                # Re-wire the old layer's graph nodes onto the new layer.
                bi_lstm._inbound_nodes = layer._inbound_nodes
                bi_lstm._outbound_nodes = layer._outbound_nodes
                bi_lstm.set_weights(weights)

            # In-place replacement; this relies on model.layers exposing
            # the underlying layer list (older Keras versions).
            model.layers[i] = bi_lstm
			
        elif isinstance(layer, LSTM):
            weights = layer.get_weights()
            inp_shape = layer.input_shape
            # A plain LSTM keeps its config at the top level.
            cell_config = {key: layer.get_config()[key]
                           for key in cell_config_keys}
            rnn_config = {key: layer.get_config()[key]
                          for key in rnn_config_keys}
            with CustomObjectScope({'LRP_LSTMCell': LRP_LSTMCell}):
                cell = LRP_LSTMCell(**cell_config, implementation=1)

                lstm = RNN(cell, **rnn_config)
                lstm.build(inp_shape)
                lstm.call(layer.input)
                lstm.set_weights(weights)
                lstm._inbound_nodes = layer._inbound_nodes
                lstm._outbound_nodes = layer._outbound_nodes
            model.layers[i] = lstm
    return model
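
A minimal usage sketch, assuming LRP_LSTMCell is importable from the user's own code (the cell itself is not part of this snippet) and an older Keras where assigning into model.layers takes effect:

# Hypothetical usage; LRP_LSTMCell comes from the user's own module.
from keras.models import Sequential
from keras.layers import Embedding, LSTM, Dense

model = Sequential([
    Embedding(input_dim=10000, output_dim=64, input_length=100),
    LSTM(32),
    Dense(2, activation='softmax'),
])
model = lrpify_model(model)  # the LSTM is now an RNN wrapping LRP_LSTMCell

Copying _inbound_nodes and _outbound_nodes is what keeps the surrounding graph pointing at the replacement layer; the direct set_weights transfer assumes LRP_LSTMCell keeps the standard LSTM weight layout.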