def parseTfTodataset():
    """Read the flowers TFRecord files and expose them through a slim data provider.

    Returns:
        A (image, label) tensor pair drawn from the "train" split.
    """
    flower_split = flowers.get_split("train", flowersDataPath)
    data_provider = provider.DatasetDataProvider(
        dataset=flower_split,
        common_queue_capacity=32,
        common_queue_min=1,
    )
    image_tensor, label_tensor = data_provider.get(["image", "label"])
    return image_tensor, label_tensor
def trainModelWithMyClass():
    """Fine-tune a pre-trained Inception-ResNet-v2 on the 5-class flowers set.

    Builds the 1001-way pre-trained network, stacks a new 5-way
    fully-connected head on top of it, restores the backbone weights from
    checkpoint, and trains for 10 steps.
    """
    fintuing = tf.Graph()
    with fintuing.as_default():
        tf.logging.set_verbosity(tf.logging.INFO)
        # Load the "train" split of the flowers dataset.
        dataset = flowers.get_split("train", flowersDatDir)
        images, labels = loadBatch(dataset=dataset)
        # Build the graph; no weights are loaded at this point.
        with slim.arg_scope(V2model.inception_resnet_v2_arg_scope()):
            logits, end_points = V2model.inception_resnet_v2(inputs=images, num_classes=1001)
        # New fully-connected head: map the 1001-way output to 5 classes.
        logits = slim.fully_connected(inputs=logits, num_outputs=5)
        prob = tf.nn.softmax(logits)  # for inspection/inference only
        oneHotLabels = slim.one_hot_encoding(labels=labels, num_classes=5)
        # BUG FIX: feed raw logits to the loss. softmax_cross_entropy applies
        # softmax internally; the original passed `prob` (already softmaxed),
        # which double-applies softmax and flattens the gradients.
        slim.losses.softmax_cross_entropy(logits=logits, onehot_labels=oneHotLabels)
        totalLoss = slim.losses.get_total_loss()
        # Training op.
        optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
        trainOp = slim.learning.create_train_op(total_loss=totalLoss, optimizer=optimizer)
        # Function that restores the pre-trained backbone weights (the new
        # 5-way head is outside the "InceptionResnetV2" scope, so it stays
        # randomly initialized, as intended for fine-tuning).
        modelPath = os.path.join(inceptionV2ModelPath, inceptionV2ModelCkName)
        variables = slim.get_model_variables("InceptionResnetV2")
        init_fn = slim.assign_from_checkpoint_fn(model_path=modelPath, var_list=variables)
        # BUG FIX: slim.learning.train creates its OWN session, so restoring
        # the checkpoint in a separate tf.Session (as the original did) never
        # affected training. Pass init_fn so the training session restores it.
        final_loss = slim.learning.train(
            train_op=trainOp,
            logdir=None,  # no checkpoints/summaries are written
            init_fn=init_fn,
            number_of_steps=10  # train for 10 steps
        )
        print("done!")
def trainModel():
    """Train the custom Slim_cnn network on the 5-class flowers dataset.

    Runs 100 steps of SGD and writes checkpoints/summaries to
    configure.saveModel.
    """
    tf.logging.set_verbosity(tf.logging.INFO)
    dataset = flowers.get_split("train", flowersDataPath)
    images, labels = loadBatch(dataset=dataset)
    model = Slim_cnn(images=images, numClass=5)
    logits = model.net
    prob = tf.nn.softmax(logits)  # for inspection/inference only
    oneHotLabel = slim.one_hot_encoding(labels=labels, num_classes=5)
    # BUG FIX: feed raw logits to the loss. softmax_cross_entropy applies
    # softmax internally; the original passed the already-softmaxed `prob`,
    # which double-applies softmax and degrades the gradients.
    slim.losses.softmax_cross_entropy(logits, onehot_labels=oneHotLabel)
    totalLoss = slim.losses.get_total_loss()
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
    trainOp = slim.learning.create_train_op(total_loss=totalLoss, optimizer=optimizer)
    finalLoss = slim.learning.train(train_op=trainOp, logdir=configure.saveModel, number_of_steps=100)
def showFlowers():
    """Display five sample images (with class name and size) from the flowers training split."""
    with tf.Graph().as_default():
        split = flowers.get_split("train", flowersDataPath)
        data_provider = provider.DatasetDataProvider(
            dataset=split,
            common_queue_capacity=32,
            common_queue_min=1,
        )
        image, label = data_provider.get(["image", "label"])
        with tf.Session() as sess:
            # QueueRunners keeps the input pipeline's queues fed while we run.
            with slim.queues.QueueRunners(sess):
                for _ in range(5):
                    img_np, label_np = sess.run([image, label])
                    height, width, _channels = img_np.shape
                    name = split.labels_to_names[label_np]
                    plt.figure()
                    plt.imshow(img_np)
                    plt.title("%s,%d x %d" % (name, height, width))
                    plt.axis("off")
                    plt.show()
def trainModelWithNew():
    """Train an Inception-ResNet-v2 from scratch (no pre-trained weights) on 5 classes."""
    graph = tf.Graph()
    with graph.as_default():
        tf.logging.set_verbosity(tf.logging.INFO)
        # Load the "train" split of the flowers dataset.
        dataset = flowers.get_split("train", flowersDatDir)
        images, labels = loadBatch(dataset=dataset)
        # Build the network directly with 5 output classes; weights start random.
        with slim.arg_scope(V2model.inception_resnet_v2_arg_scope()):
            logits, end_points = V2model.inception_resnet_v2(inputs=images, num_classes=5)
        prob = tf.nn.softmax(logits)  # for inspection/inference only
        oneHotLabels = slim.one_hot_encoding(labels=labels, num_classes=5)
        print("oneHotLabels:", oneHotLabels.shape)
        print("prob:", prob.shape)
        # BUG FIX: feed raw logits to the loss. softmax_cross_entropy applies
        # softmax internally; the original passed `prob` (already softmaxed),
        # which double-applies softmax and flattens the gradients.
        slim.losses.softmax_cross_entropy(logits=logits, onehot_labels=oneHotLabels)
        totalLoss = slim.losses.get_total_loss()
        # Training op.
        optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
        trainOp = slim.learning.create_train_op(total_loss=totalLoss, optimizer=optimizer)
        final_loss = slim.learning.train(
            train_op=trainOp,
            logdir=None,  # no checkpoints/summaries are written
            number_of_steps=10  # train for 10 steps
        )
        print("trianing done and finished")