# Imports assumed to come from standalone Keras; adjust to tensorflow.keras if needed.
# The mnist dataset object, accuracy_mnist, and the mutation/deletion helpers used
# below are project-specific and imported from the project's own modules (not shown).
from keras.layers import Input, Dense, Activation
from keras.models import Model, load_model
from keras.callbacks import ModelCheckpoint


def model_dnn_3():
    # Fully connected 784-128-64-48-10 MNIST classifier.
    # Observed accuracy: train 0.8596, test 0.9308.
    input_data = Input((28 * 28,))
    temp_data = Dense(128)(input_data)
    temp_data = Activation('relu')(temp_data)
    temp_data = Dense(64)(temp_data)
    temp_data = Activation('relu')(temp_data)
    temp_data = Dense(48)(temp_data)
    temp_data = Activation('relu')(temp_data)
    temp_data = Dense(10)(temp_data)
    output_data = Activation('softmax')(temp_data)

    model = Model(inputs=[input_data], outputs=[output_data])
    modelcheck = ModelCheckpoint('model/model.hdf5', monitor='loss',
                                 verbose=1, save_best_only=True)
    model.compile(loss='categorical_crossentropy', optimizer='adadelta',
                  metrics=['accuracy'])
    model.fit([mnist.train.images], [mnist.train.labels],
              batch_size=256, epochs=1, callbacks=[modelcheck],
              validation_data=(mnist.test.images, mnist.test.labels))
    print('acc:{}'.format(accuracy_mnist(model, mnist)))
def test_bias(model_path):
    # For each mutation extent, repeatedly search for the smallest bias-mutation
    # ratio that drags test accuracy below 0.90, refining the step size from
    # 0.1 down to 0.001. Five independent searches are run per extent.
    model = load_model(model_path)
    extent_lst = [5, 10, 20, 30, 50]
    dic = {extent: [] for extent in extent_lst}
    for extent in extent_lst:
        ratio = 0.0
        step = 0.1
        for i in range(5):
            ratio_temp = 0.0
            change = False
            acc = 0.98
            while change or acc > 0.9:
                ratio = ratio + step
                # Stop once the fine-grained search reaches the ratio at which
                # accuracy first dropped (tolerance instead of fragile float ==).
                if abs(ratio - ratio_temp) < 1e-9:
                    break
                _, _, _, _, model_change = model_mutation_single_neuron(
                    model, cls='bias', random_ratio=ratio, extent=extent)
                acc = accuracy_mnist(model_change, mnist)
                print(acc)
                # Accuracy dropped: back off one coarse step and refine, until
                # the step size reaches 0.001 (threshold avoids float equality).
                if acc <= 0.90 and step > 0.0015:
                    ratio_temp = ratio
                    step = step / 10.0
                    ratio = ratio - step * 10.0
                    change = True
            print(step, ratio)
            dic[extent].append((ratio, acc))
            ratio = 0.0
            step = 0.1
    return dic
def test_del(model_path):
    # Randomly delete a fixed fraction of neurons and record the resulting
    # test accuracy, five trials per deletion ratio.
    model = load_model(model_path)
    ratio_lst = [0.05, 0.1, 0.15, 0.2, 0.3]
    dic = {ratio: [] for ratio in ratio_lst}
    for ratio in ratio_lst:
        for i in range(5):
            model_change = random_del_neuron(model, ratio=ratio)
            acc = accuracy_mnist(model_change, mnist)
            print(acc, ratio)
            dic[ratio].append(acc)
    return dic
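# A minimal sketch (not part of the original script) of how the dictionaries
# returned by test_del or bp_bias might be summarised. summarize_results and
# its output format are hypothetical additions for illustration only.
import numpy as np

def summarize_results(dic):
    # Print mean and standard deviation of the accuracy samples per key
    # (deletion ratio, mutation extent, ...).
    for key, values in sorted(dic.items()):
        flat = np.asarray(values, dtype=float).ravel()
        print('{}: mean acc {:.4f}, std {:.4f}'.format(key, flat.mean(), flat.std()))

# Example usage (commented out, since it needs a trained model on disk):
# summarize_results(test_del('./model/model.hdf5'))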
def bp_bias(model_path):
    # Sweep bias-mutation extent and ratio on the fully connected model,
    # collecting ten accuracy samples per (extent, ratio) pair.
    model = load_model(model_path)
    extent_lst = [0.01, 0.1, 0.5, 1.5, 2, 3, 5, 10]
    statistic = {i: [] for i in extent_lst}
    ratio_lst = [0.01, 0.03, 0.05, 0.1, 0.2]
    for extent in extent_lst:
        for ratio in ratio_lst:
            lst = []
            for i in range(10):
                print(i)
                _, _, _, _, model_change = model_mutation_single_neuron(
                    model, cls='bias', random_ratio=ratio, extent=extent)
                acc = accuracy_mnist(model_change, mnist)
                lst.append(acc)
            statistic[extent].append(lst)
    return statistic
def cnn_kernel(model_path):
    # Same sweep as bp_bias, but mutating convolution kernels of a CNN model.
    model = load_model(model_path)
    extent_lst = [0.01, 0.1, 0.5, 1.5, 2, 3, 5, 10]
    statistic = {i: [] for i in extent_lst}
    ratio_lst = [0.01, 0.03, 0.05, 0.1, 0.2]
    for extent in extent_lst:
        for ratio in ratio_lst:
            lst = []
            for i in range(10):
                print(i)
                _, _, _, _, model_change = model_mutation_single_neuron_cnn(
                    model, cls='kernel', layers='conv',
                    random_ratio=ratio, extent=extent)
                acc = accuracy_mnist(model_change, mnist, cnn=True)
                lst.append(acc)
                print(i, acc)
            statistic[extent].append(lst)
    return statistic
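# A minimal sketch (not in the original script) of how the experiment drivers
# above could be run together and their results persisted. run_and_save, the
# results/ directory, and the JSON file names are hypothetical choices.
import json
import os

def run_and_save(model_path='./model/model.hdf5', out_dir='results'):
    os.makedirs(out_dir, exist_ok=True)
    experiments = {
        'bias_search': test_bias(model_path),
        'neuron_deletion': test_del(model_path),
        'bias_sweep': bp_bias(model_path),
    }
    for name, result in experiments.items():
        with open(os.path.join(out_dir, name + '.json'), 'w') as f:
            # JSON object keys must be strings, so cast the numeric keys.
            json.dump({str(k): v for k, v in result.items()}, f, indent=2)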
if __name__ == '__main__':
    # model_dnn_3()
    model_path = './model/model.hdf5'
    model = load_model(model_path)
    print('acc:{}'.format(accuracy_mnist(model, mnist)))