def create_module():
    train_images, test_images, train_labels, test_labels = keras_load_data(
        keras.datasets.cifar10)
    verify(train_images, train_labels)

    model = models.Sequential()
    # Conv2D(32, (3, 3)): 32 is the number of filters (the output depth);
    # (3, 3) is the kernel size
    # input_shape is 32*32*3: width=32, height=32, color channels=3
    model.add(
        layers.Conv2D(32, (3, 3), activation='relu', input_shape=(32, 32, 3)))
    model.add(layers.MaxPooling2D((2, 2)))
    model.add(layers.Conv2D(64, (3, 3), activation='relu'))
    model.add(layers.MaxPooling2D((2, 2)))
    model.add(layers.Conv2D(64, (3, 3), activation='relu'))
    model.add(layers.Flatten())
    model.add(layers.Dense(64, activation='relu'))
    model.add(layers.Dense(10))
    model.summary()

    model.compile(
        optimizer='adam',
        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
        metrics=['accuracy'])
    history = model.fit(train_images,
                        train_labels,
                        epochs=10,
                        validation_data=(test_images, test_labels))
    evaluete(history, model, test_images, test_labels)
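# NOTE: `verify` and `evaluete` are helpers defined elsewhere in this repo.
# Below is a minimal, hypothetical sketch of what an evaluation helper like
# `evaluete` could look like, assuming it plots the fit history and reports
# test accuracy; the plotting details are an assumption, not the repo's code.
import matplotlib.pyplot as plt


def evaluete(history, model, test_images, test_labels):
    # Plot training vs. validation accuracy over the epochs
    plt.plot(history.history['accuracy'], label='accuracy')
    plt.plot(history.history['val_accuracy'], label='val_accuracy')
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.legend(loc='lower right')
    plt.show()

    # Report the final loss/accuracy on the held-out test set
    test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=2)
    print(test_loss, test_acc)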
def save_fashion_mnist(workdir="/WORK/datasset/fashion_mnist"):
    class_names = [
        'T-shirt_top', 'Trouser', 'Pullover', 'Dress', 'Coat', 'Sandal',
        'Shirt', 'Sneaker', 'Bag', 'Ankle_boot'
    ]
    train_images, test_images, train_labels, test_labels = keras_load_data(
        keras.datasets.fashion_mnist)
    __save_dateset(class_names, train_images, train_labels,
                   workdir + "/train")
    __save_dateset(class_names, test_images, test_labels, workdir + "/val")
def save_cifar_10(workdir="/WORK/datasset/cifar10"):
    train_images, test_images, train_labels, test_labels = keras_load_data(
        keras.datasets.cifar10)
    class_names = [
        'airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog',
        'horse', 'ship', 'truck'
    ]
    # CIFAR-10 labels come back as an (N, 1) array, so flatten them to a plain list
    __save_dateset(class_names, train_images,
                   numpy.array(train_labels).flatten().tolist(),
                   workdir + "/train")
    __save_dateset(class_names, test_images,
                   numpy.array(test_labels).flatten().tolist(),
                   workdir + "/val")
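# NOTE: all of the save_* functions delegate to `__save_dateset`, which is
# defined elsewhere in this repo. Below is a plausible sketch only, assuming it
# writes each image as a PNG into a per-class subdirectory named after
# class_names[label]; the file layout and the use of PIL are assumptions.
import os

from PIL import Image


def __save_dateset(class_names, images, labels, outdir):
    # Write image i to <outdir>/<class name>/<i>.png
    for i, (image, label) in enumerate(zip(images, labels)):
        class_dir = os.path.join(outdir, class_names[int(label)])
        os.makedirs(class_dir, exist_ok=True)
        Image.fromarray(image).save(os.path.join(class_dir, "%d.png" % i))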
# coding:utf-8
# Copyright (C)
# Author: I
# Contact: [email protected]
import tensorflow as tf
from tensorflow import keras

from tools.load_data import keras_load_data
from tools.save_module import load_module

if __name__ == '__main__':
    module = load_module("../tmp/save/keras_mnist")
    print(module)
    train_images, test_images, train_labels, test_labels = keras_load_data(
        keras.datasets.mnist)
    predicted = module.predict(test_images)
    print(test_labels[0], tf.argmax(predicted[0]))
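# NOTE: `load_module` comes from `tools.save_module`, whose source is not in
# this section. A minimal sketch of a save/load pair like this, assuming it
# simply wraps Keras model serialization; the function bodies are assumptions,
# not the repo's actual implementation.
from tensorflow import keras


def save_module(model, path):
    # Persist a trained Keras model to disk
    model.save(path)


def load_module(path):
    # Restore a model previously written by save_module
    return keras.models.load_model(path)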
def save_mnist(workdir="/WORK/datasset/mnist"):
    train_images, test_images, train_labels, test_labels = keras_load_data(
        keras.datasets.mnist)
    class_names = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
    __save_dateset(class_names, train_images, train_labels,
                   workdir + "/train")
    __save_dateset(class_names, test_images, test_labels, workdir + "/val")
def load_mnist():
    train_images, test_images, train_labels, test_labels = keras_load_data(
        keras.datasets.mnist)
    print(train_images.shape)
    return train_images, test_images, train_labels, test_labels
# coding:utf-8
# Copyright (C)
# Author: I
# Contact: [email protected]
import tensorflow as tf

from tools.load_data import keras_load_data

if __name__ == '__main__':
    (train_image, train_label), (test_images, test_label) = keras_load_data(
        tf.keras.datasets.fashion_mnist)
    # Print the training-set shape; the result is (60000, 28, 28), i.e. 60,000 images
    print(train_image.shape)

    # Build the model
    model = tf.keras.Sequential()
    # Flatten each 28*28 image into a vector
    model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
    # Hidden layer
    model.add(tf.keras.layers.Dense(128, activation='relu'))
    # Output layer: 10 values turned into a probability distribution by softmax
    model.add(tf.keras.layers.Dense(10, activation='softmax'))
    # Compile the model: choose the optimizer and loss function; since the labels
    # are integer-encoded, use the sparse_categorical_crossentropy loss
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['acc'])
    # Train the model for 10 epochs
    model.fit(train_image, train_label, epochs=10)
    # Evaluate the model on the test data
    print(model.evaluate(test_images, test_label))
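# The output layer above is a softmax over 10 classes, so per-image class
# predictions can be read off with argmax. A small follow-up example; it reuses
# `model`, `test_images`, and `test_label` from the script above.
import numpy as np

predictions = model.predict(test_images)  # class probabilities, shape (10000, 10)
predicted_classes = np.argmax(predictions, axis=1)  # most likely class per image
print(predicted_classes[:10])
print(test_label[:10])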