# --- MNIST dense-network training script ---
# Flatten the 28x28 images into 784-element vectors and scale pixel values
# to [0, 1]. Using -1 lets NumPy infer the sample count instead of
# hard-coding 60000/10000, so the script also works on subsets of the data.
train_images = train_images.reshape((-1, 28 * 28))
train_images = train_images.astype('float32') / 255
test_images = test_images.reshape((-1, 28 * 28))
test_images = test_images.astype('float32') / 255

# One-hot encode the integer class labels (10 digit classes).
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)

# Build the model. Only the first layer needs input_shape; the original
# repeated input_shape=(28*28,) on the later Dense layers, where Keras
# silently ignores it — removed here because it misleads readers.
model = models.Sequential()
model.add(layers.Dense(512, activation='relu', input_shape=(28 * 28,)))
model.add(layers.Dropout(0.6))
model.add(layers.Dense(256, activation='relu'))
model.add(layers.Dropout(0.5))
model.add(layers.Dense(128, activation='relu'))
model.add(layers.Dropout(0.4))
model.add(layers.Dense(10, activation='softmax'))
model.compile(optimizer='rmsprop',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# Train and time the run.
# NOTE(review): validation_data is the test set, so any tuning based on
# these curves leaks test information — confirm whether a separate
# validation split should be used.
time_start = time.time()
history = model.fit(train_images, train_labels,
                    epochs=80, batch_size=128,
                    validation_data=(test_images, test_labels))
print("训练时间:{}s\n".format(int(time.time() - time_start)))

# Final evaluation on the test set and learning-curve plot.
test_loss, test_acc = model.evaluate(test_images, test_labels)
print("test_acc:{}\n".format(test_acc))
history_dict = history.history
plot_utils.plot_history(history_dict)
# --- Reuters (46-class) regularized dense-network training script ---
# NOTE(review): this line was captured starting mid-expression
# ("layers.Dense(64, ..." followed by an unmatched closing paren); the
# enclosing "model.add(" is restored here so the statement parses.
# input_shape=(10000,) matches the usual 10000-word multi-hot encoding of
# the Reuters dataset — TODO confirm against the vectorization code above.
model.add(layers.Dense(64, activation='relu',
                       kernel_regularizer=regularizers.l2(0.001),
                       input_shape=(10000,)))
model.add(layers.Dropout(0.6))
model.add(layers.Dense(32, activation='relu',
                       kernel_regularizer=regularizers.l2(0.001)))
model.add(layers.Dropout(0.4))
model.add(layers.Dense(46, activation='softmax'))
model.compile(optimizer=optimizers.Adam(),
              loss=losses.categorical_crossentropy,
              metrics=['accuracy'])

# Hold out the first 1000 samples as a validation set.
x_val = x_train[:1000]
partial_x_train = x_train[1000:]
y_val = train_labels[:1000]
partial_y_train = train_labels[1000:]

history = model.fit(partial_x_train, partial_y_train,
                    epochs=50, batch_size=512,
                    validation_data=(x_val, y_val))

# NOTE(review): this evaluates on the SAME split used for validation
# monitoring during fit, not an independent test set — confirm whether a
# held-out test set exists and should be evaluated instead.
results = model.evaluate(x_val, y_val)
print("results:{}\n".format(results))
# Show the predicted class-probability vector for one held-out sample.
print("predict:{}\n".format(model.predict(x_val)[20]))
plot_utils.plot_history(history.history)