# --- Forward-network training graph (TensorFlow 1.x static graph) ---

EPOCH = 2000  # number of training epochs
LR = 1e-5     # Adam learning rate

# Root directory of the TFRecord data; NOTE: already ends with '/'.
cwd = '/home/qinlong/PycharmProjects/NEU/w_b_transfer/w_data/'

# Input: 5 structure parameters per sample.
# NOTE(review): assumes BATCH_SIZE is defined earlier in this file — confirm.
x = tf.placeholder(tf.float32, shape=[BATCH_SIZE, 5], name='structure_parameter')

# Train / test input pipelines.
# `cwd` already ends with '/', so the file names carry no leading slash
# (the original produced a doubled '//', harmless on POSIX but untidy).
para_train, id_train, sp_train = tfrecord.read_tfrecord(cwd + 'train_data.tfrecord', BATCH_SIZE)
para_test, id_test, sp_test = tfrecord.read_tfrecord(cwd + 'test_data.tfrecord', BATCH_SIZE)
# FIX: this previously re-read 'train_data.tfrecord'; a dataset named
# `test_dataset` should come from the test split, matching the line above.
test_dataset = tfrecord.read_tfrecord(cwd + 'test_data.tfrecord', BATCH_SIZE)

# Loss: Huber distance between the real spectrum (603 bins) and the
# spectrum predicted by the forward network.
r_spec = tf.placeholder(tf.float32, shape=[BATCH_SIZE, 603], name='r_spec')
p_spec = fn.fw_net(x)
loss = ms.huber_loss(r_spec, p_spec)

# Optimizer: Adam with the library defaults spelled out explicitly.
optimizer = tf.train.AdamOptimizer(learning_rate=LR, beta1=0.9, beta2=0.999, epsilon=1e-8)

# Training op minimizing the Huber loss over all trainable variables.
fw_op = optimizer.minimize(loss=loss)

# Global initializer and TensorBoard scalar summaries (same `loss` tensor,
# logged under two tags so train/test curves plot separately).
init = tf.global_variables_initializer()
train_summary = tf.summary.scalar('Train loss', loss)
test_summary = tf.summary.scalar('Test loss', loss)
t = ms.conv(t, 32, [3, 3]) # x = ms.bn(x, training=training) t = ms.activation(t, relu=True) t = ms.conv(t, 32, [3, 3]) # x = ms.bn(x, training=training) t = ms.activation(t, relu=True) t = tf.reshape(t, [-1, 40 * 40 * 32]) with tf.variable_scope('fc2'): t = ms.fc(t, units=402) # x = ms.bn(x, training=training) t = ms.activation(t, relu=False) new_loss = ms.huber_loss(t, y_) new_optimizer = tf.train.AdamOptimizer(learning_rate=LR, beta1=0.9, beta2=0.999, epsilon=1e-8, name='new_adam') new_op = new_optimizer.minimize(loss=new_loss) var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='trans_part') sess.run(tf.variables_initializer(var_list=var)) sess.run(tf.variables_initializer(new_optimizer.variables())) # summary tensorboard