import os
import tensorflow as tf

# OUTPUT_DIR, task, task_type and extract_images are assumed to be defined
# earlier in the script.

tensorboard_logdir = os.path.join(OUTPUT_DIR, 'logs/')
model_weights_dir = os.path.join(OUTPUT_DIR, 'model_weights/')
tf_serving_model_dir = os.path.join(OUTPUT_DIR, 'tf_serving/')
training_history_dict = os.path.join(OUTPUT_DIR, 'training_history')
prediction_dir = os.path.join(OUTPUT_DIR, 'predictions/')
checkpoint_path = os.path.join(OUTPUT_DIR, "checkpoints/cp-{epoch:04d}-ssim-{val_ssim:.4f}.ckpt")
checkpoint_dir = os.path.dirname(checkpoint_path)

# Load the neural network settings and set the batch size.
network_settings = task['network_settings']
batch_size = network_settings['batch_size']

# Load the corresponding data depending on the task type.
if task_type in ['train', 'train_and_predict']:
    x_train = extract_images(task['input_data_path']['x_train'], 'imagesRecon')
    y_train = extract_images(task['input_data_path']['y_train'], 'imagesTrue')
    x_validation = extract_images(task['input_data_path']['x_val'], 'imagesRecon')
    y_validation = extract_images(task['input_data_path']['y_val'], 'imagesTrue')
    # input_data_shape = x_train.shape

    # Create the callback list. Checkpoint and TensorBoard callbacks are added
    # by default; early stopping is added only if enabled in the settings.
    callback_list = []
    cp_callback = tf.keras.callbacks.ModelCheckpoint(checkpoint_path,
                                                     monitor='val_ssim',
                                                     verbose=1,
                                                     save_weights_only=True)
    callback_list.append(cp_callback)
    tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=tensorboard_logdir,
                                                          histogram_freq=2,
                                                          write_graph=True,
                                                          write_grads=True,
                                                          write_images=True,
                                                          batch_size=batch_size)
    callback_list.append(tensorboard_callback)
    if network_settings['early_stopping']['use']:
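        # A minimal sketch of the early-stopping branch, which is truncated in
        # the original. The 'patience' key and monitoring val_ssim with
        # mode='max' (higher SSIM is better) are assumptions, not taken from
        # the original source.
        early_stopping_callback = tf.keras.callbacks.EarlyStopping(
            monitor='val_ssim',
            mode='max',
            patience=network_settings['early_stopping']['patience'],
            verbose=1)
        callback_list.append(early_stopping_callback)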
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import tensorflow as tf

import baxter_writer as bw
import dataset
import vae_assoc
import utils

# Fix the random seeds for reproducibility.
np.random.seed(0)
tf.set_random_seed(0)

print('Loading image data...')
img_data = utils.extract_images(fname='bin/img_data_extend.pkl', only_digits=False)
# img_data = utils.extract_images(fname='bin/img_data.pkl', only_digits=False)
# img_data_sets = dataset.construct_datasets(img_data)

print('Loading joint motion data...')
fa_data, fa_mean, fa_std = utils.extract_jnt_fa_parms(fname='bin/jnt_ik_fa_data_extend.pkl', only_digits=False)
# fa_data, fa_mean, fa_std = utils.extract_jnt_fa_parms(fname='bin/jnt_fa_data.pkl', only_digits=False)

# Normalize the joint-motion function-approximator parameters.
fa_data_normed = (fa_data - fa_mean) / fa_std
# fa_data_sets = dataset.construct_datasets(fa_data_normed)

print('Constructing dataset...')
# Concatenate images and normalized motion parameters into one joint dataset.
aug_data = np.concatenate((img_data, fa_data_normed), axis=1)
data_sets = dataset.construct_datasets(aug_data, validation_ratio=.1, test_ratio=.1)

print('Start training...')
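# After training, any joint-motion parameters predicted in the normalized
# space must be mapped back to joint space by inverting the normalization
# above. A minimal sketch, where fa_pred_normed stands for a hypothetical
# network output (not a variable defined in this script):
#
#   fa_pred = fa_pred_normed * fa_std + fa_mean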