Example No. 1
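    # For each filter channel d+1, slice out its activation map and build a
    # separate objective: maximize that filter's response while L2-penalizing
    # the generated image.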
    for d in range(9):
        _, filter_result_d, _ = tf.split(relu_output_x, [d + 1, 1, 16 - d - 2],
                                         -1)
        filter_sum_d = -1 * tf.reduce_sum(filter_result_d) + tf.reduce_sum(
            tf.square(gen_image)) * 2
        gen_ops.append(
            tf.train.AdamOptimizer(0.0001).minimize(filter_sum_d,
                                                    var_list=gen_image))
        gen_loss_op.append(filter_sum_d)

# create saver
saver = util.TrainSaver("ckpt_zoo/mnist_cnn_bn/cnn_bn.ckpt", "main")

#%%
# train !
training_grads = util.get_grads(grad_and_var)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    saver.try_load(sess)
    saver.remove_old_ckpt(sess, global_step=global_step)

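    # Training loop: reshape each MNIST batch to NHWC and run one optimizer step.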
    for i in range(TRAIN_STEPS):
        train_images, train_labels = mnist.train.next_batch(BATCH_SIZE)
        train_images = train_images.reshape(-1, 28, 28, 1)
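        # is_training=True presumably switches the batch-norm layers (cnn_bn) to training mode.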
        _, np_loss = sess.run([train_step, loss],
                              feed_dict={
                                  inputs: train_images,
                                  labels: train_labels,
                                  is_training: True
                              })
Example No. 2
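# Overlay the ONNX and TensorFlow predictions; if the export is faithful, the
# thin curve should lie exactly on top of the thick one.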
plt.plot(onnx_predict_values, label='onnx', linewidth=4)
plt.plot(tf_predict_values, label='tf')
plt.legend()
plt.show()

#%%
# get the parameters and gradients
with tf.Session() as sess:
    if not saver.try_load(sess):
        raise Exception("checkpoint not found - has the model been trained?")

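    # Fetch one batch of inputs and labels from the input pipeline tensors.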
    train_input_data, train_label_data = sess.run([train_input, train_label])

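    # Evaluate each variable and its gradient from grad_and_var on this batch.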
    vars, grads = sess.run(
        [util.get_vars(grad_and_var),
         util.get_grads(grad_and_var)],
        feed_dict={
            input: train_input_data,
            lable: train_label_data
        })

    names = util.get_names(grad_and_var)

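    # Histogram the value distribution of each variable, then of each gradient.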
    for v, name in zip(vars, names):
        plt.title("var:[%s]" % name)
        plt.hist(v.flatten(), bins=50)
        plt.show()
    print("----------------------")
    for g, name in zip(grads, names):
        plt.title("grad:[%s]" % name)
        plt.hist(g.flatten(), bins=50)
        plt.show()
Example No. 3
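    # Decoder half of the autoencoder: two dense layers, ending in a sigmoid so
    # the reconstructed pixels lie in [0, 1].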
    inf_d2_output = act(tf.matmul(inf_d3_output, d2) + db2)
    inf_d1_output = tf.nn.sigmoid(tf.matmul(inf_d2_output, d1) + db1)
    output = tf.identity(inf_d1_output, 'output')

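    # compute_gradients/apply_gradients are kept separate so the raw gradients
    # (grad_and_var) can be fetched and inspected later.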
    with tf.name_scope("main_train"):
        opt = tf.train.AdamOptimizer(0.01)
        grad_and_var = opt.compute_gradients(loss)
        train_step = opt.apply_gradients(grad_and_var, global_step=global_step)

# create saver
saver = util.TrainSaver("ckpt_zoo/mnist_auto_encoder/mnist_auto_encoder.ckpt",
                        "main")

#%%
# train !
training_grads = util.get_grads(grad_and_var)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    saver.try_load(sess)
    saver.remove_old_ckpt(sess, global_step=global_step)

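    # Autoencoder training: the MNIST labels are discarded and only the images
    # are fed to the network.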
    for i in range(TRAIN_STEPS):
        train_images, _ = mnist.train.next_batch(BATCH_SIZE)
        _, np_loss = sess.run([train_step, loss],
                              feed_dict={input: train_images})

        if i % (TRAIN_STEPS // 10) == 0:
            loss_value, step, grad_list = sess.run(
                [loss, global_step, training_grads],
                feed_dict={input: train_images})
            print("After %d training step(s), loss: %g" % (step, loss_value))
Example No. 4
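    # Only the generated image is optimized (var_list=gen_image); the trained
    # network weights stay fixed.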
    gen_image_step = tf.train.AdamOptimizer(0.0001).minimize(filter_sum,
                                                              var_list=gen_image)
    gen_ops.append(gen_image_step)
    gen_loss_op.append(filter_sum)

    for d in range(2):
        _, filter_result_d, _ = tf.split(relu_output_x, [d + 1, 1, 4 - d - 2],
                                         -1)
        filter_sum_d = -1 * tf.reduce_sum(filter_result_d) + tf.reduce_sum(
            tf.square(gen_image)) * 2
        gen_ops.append(
            tf.train.AdamOptimizer(0.0001).minimize(filter_sum_d,
                                                    var_list=gen_image))
        gen_loss_op.append(filter_sum_d)

# create saver
saver = util.TrainSaver("ckpt_zoo/mnist_cnn/cnn.ckpt", "main")

#%%
# train !
training_grads = util.get_grads(grad_and_var)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    saver.try_load(sess)
    saver.remove_old_ckpt(sess, global_step=global_step)

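    # Train, report loss/accuracy every 10% of TRAIN_STEPS, and checkpoint the model.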
    for i in range(TRAIN_STEPS):
        train_images, train_labels = mnist.train.next_batch(BATCH_SIZE)
        train_images = train_images.reshape(-1, 28, 28, 1)
        _, np_loss = sess.run([train_step, loss],
                              feed_dict={
                                  inputs: train_images,
                                  labels: train_labels
                              })

        if i % (TRAIN_STEPS // 10) == 0:
            loss_value, step, acc_value, grad_list = sess.run(
                [loss, global_step, acc, training_grads],
                feed_dict={
                    inputs: train_images,
                    labels: train_labels
                })
            print("After %d training step(s), loss: %g, acc: %f." %
                  (step, loss_value, acc_value))

            saver.save(sess, global_step=global_step)