import os

import tensorflow as tf

# Import path assumed from the later snippets in this section.
from ai.Trainer import Trainer

net_shape = net.get_shape().as_list()
net = tf.reshape(net, [-1, net_shape[1] * net_shape[2] * net_shape[3]])

# Compute logits (1 per class)
logits = tf.layers.dense(net, n_target_classes, activation=None, name='logits')

cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=y_))
train_step = tf.train.AdamOptimizer(1e-5, name='train_step').minimize(cross_entropy)
correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32), name='accuracy')

model_file = os.path.dirname(
    os.path.realpath(__file__)) + '/' + os.path.basename(__file__)

trainer = Trainer(data_path=data_path,
                  model_file=model_file,
                  s3_bucket=s3_bucket,
                  total_epochs=epochs,
                  max_sample_records=100,
                  show_speed=show_speed,
                  s3_sync=s3_sync)

trainer.train(sess=sess,
              x=x,
              y_=y_,
              optimization=accuracy,
              train_step=train_step,
              train_feed_dict={},
              test_feed_dict={})
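# A minimal, self-contained sketch (not part of the script above; values are
# made up for illustration) of why the raw logits, rather than softmax
# outputs, are passed to tf.nn.softmax_cross_entropy_with_logits: the op
# applies softmax internally, so pre-softmaxed inputs would silently compute
# the wrong loss.
import tensorflow as tf

example_logits = tf.constant([[2.0, 0.5, -1.0]])  # unscaled scores, 3 classes
example_labels = tf.constant([[1.0, 0.0, 0.0]])   # one-hot ground truth
example_loss = tf.nn.softmax_cross_entropy_with_logits(
    logits=example_logits, labels=example_labels)

with tf.Session() as sess:
    print(sess.run(example_loss))  # one cross-entropy value per example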
from ai.Trainer import Trainer, parse_args

args = parse_args()
trainer = Trainer(data_path=args["data_path"],
                  postgres_host=args["postgres_host"],
                  port=args['port'],
                  model_base_directory=args['model_base_directory'],
                  total_epochs=args["epochs"],
                  image_scale=args['image_scale'],
                  crop_percent=args['crop_percent'])
trainer.train()
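# A hypothetical sketch of what parse_args might look like; the real
# implementation lives in ai.Trainer, and the flag spellings here are
# assumptions. It only needs to return a plain dict keyed as above. Values
# are left as strings, which is consistent with the explicit int(...) cast
# around batch_size in the last snippet of this section.
import argparse

def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('--data-path', dest='data_path')
    parser.add_argument('--postgres-host', dest='postgres_host')
    parser.add_argument('--port', dest='port')
    parser.add_argument('--model-base-directory', dest='model_base_directory')
    parser.add_argument('--epochs', dest='epochs')
    parser.add_argument('--image-scale', dest='image_scale')
    parser.add_argument('--crop-percent', dest='crop_percent')
    return vars(parser.parse_args())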
import os

import tensorflow as tf

from ai.Trainer import Trainer

'''
Regarding tf.control_dependencies:

    with g.control_dependencies([a, b, c]):
        # `d` and `e` will only run after `a`, `b`, and `c` have executed.
        d = ...
        e = ...
'''
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
    train_step = tf.train.AdamOptimizer(1e-5).minimize(rmse)

model_file = os.path.dirname(
    os.path.realpath(__file__)) + '/' + os.path.basename(__file__)

trainer = Trainer(data_path=data_path,
                  model_file=model_file,
                  s3_bucket=s3_bucket,
                  total_epochs=epochs,
                  max_sample_records=100,
                  show_speed=show_speed,
                  s3_sync=s3_sync,
                  save_to_disk=save_to_disk,
                  image_scale=image_scale)

trainer.train(sess=sess,
              x=x,
              y_=y_,
              optimization=rmse,
              train_step=train_step,
              train_feed_dict={'phase:0': True},
              test_feed_dict={})
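# A minimal, self-contained sketch (layer shapes assumed) of why the
# control_dependencies wrapper above matters: tf.layers.batch_normalization
# registers its moving-mean/variance updates in tf.GraphKeys.UPDATE_OPS, and
# the `phase` placeholder (fed as {'phase:0': True} during training) switches
# it between batch statistics and the stored moving averages.
import tensorflow as tf

phase = tf.placeholder(tf.bool, name='phase')
inputs = tf.placeholder(tf.float32, [None, 8])
hidden = tf.layers.batch_normalization(
    tf.layers.dense(inputs, 4), training=phase)
loss = tf.reduce_mean(tf.square(hidden))

# Without this wrapper the UPDATE_OPS never run, and the moving averages
# used at inference time stay at their initial values.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
    train_op = tf.train.AdamOptimizer(1e-5).minimize(loss)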
from ai.Trainer import Trainer, parse_args

args = parse_args()
trainer = Trainer(overfit=args['overfit'],
                  data_path=args["data_path"],
                  batch_size=int(args['batch_size']),
                  postgres_host=args["postgres_host"],
                  port=args['port'],
                  model_base_directory=args['model_base_directory'],
                  model_id=args['model_id'],
                  total_epochs=args["epochs"],
                  image_scale=args['image_scale'],
                  crop_percent=args['crop_percent'])
trainer.train()