learning_rate=0.001, momentum=0.8) f_eval = theano.function([input_var, input2_var, input3_var], eval_out) f_train = theano.function([input_var, input2_var, input3_var, target_var], [cost], updates=updates) import Evaluation as E with np.load('/home/xvt131/Functions/epoch_31_params.npz') as f: param_values = [f['arr_%d' % i] for i in range(len(f.files))] lasagne.layers.set_all_param_values(output, param_values) Tibial_Score, Femoral_Score = E.Evaluate1("/home/xvt131/Running/train2", DP.image_load, 9, 5, f_eval) #E.Segment("/home/xvt131/Running/evaluation", DP.image_load_eval, 9, 5 ,f_eval) #for param in DP.get_paths('/home/xvt131/Functions/params_101'): # with np.load(param) as f: # param_values = [f['arr_%d' % i] for i in range(len(f.files))] # lasagne.layers.set_all_param_values(output, param_values) # Tibial_Score, Femoral_Score = E.Evaluate1("/home/xvt131/Running/testing", DP.image_load, 9, 5 ,f_eval) # print param # print "Mean Tibia Dice Score:" , np.mean(Tibial_Score) # print "Mean Femur Dice Score:", np.mean(Femoral_Score) print "Mean Tibia Dice Score:", np.mean(Tibial_Score) print "Mean Femur Dice Score:", np.mean(Femoral_Score)
loss += [cur_loss / batch_size] for i in range(num_batches_train): idx = range(i * batch_size, (i + 1) * batch_size) x_batch = X_train[idx] pos_batch = X_pos[idx] Bigger_batch = X_Bigger[idx] targets_batch = Y_train[idx] net_out = f_eval(x_batch, pos_batch, Bigger_batch) preds = np.argmax(net_out, axis=-1) confusion_train.batch_add(targets_batch, preds) for img in Test: Tibia, Femur, Time = E.Evaluate1("/home/xvt131/Running/vtest", DP.image_load, 9, 5, f_eval) print "Mean Tibia Dice Score:", np.mean(Tibia) print "Mean Femur Dice Score:", np.mean(Femur) # [X_test, X_Bigs], X_post, Y_test = DP.voxel_samples(im, [9, 5]) # num_samples_valid = Y_test.shape[0] # num_batches_valid = num_samples_valid // batch_size # for i in range(num_batches_valid): # idx = range(i*batch_size, (i+1)*batch_size) # x_batch = X_test[idx] # post_batch = X_post[idx] # big_batch = X_Bigs[idx] # targets_batch = Y_test[idx] # net_out = f_eval(x_batch, post_batch, big_batch) # preds = np.argmax(net_out, axis=-1)
# --- Continuation of the per-batch validation loop: the `for` header ---
# --- that defines `idx` is above this chunk; these lines run once per ---
# --- validation mini-batch. Indentation reconstructed — confirm depth. ---
    x_batch = X_test[idx]
    post_batch = X_post[idx]
    big_batch = X_Bigs[idx]
    targets_batch = Y_test[idx]
    net_out = f_eval(x_batch, post_batch, big_batch)
    # Predicted class = argmax over the final (class-score) axis.
    preds = np.argmax(net_out, axis=-1)
    confusion_valid.batch_add(targets_batch, preds)

# Per-epoch accuracies from the accumulated confusion matrices.
train_acc_cur = confusion_train.accuracy()
valid_acc_cur = confusion_valid.accuracy()

# Checkpoint every 5 epochs.  NOTE(review): the condition tests `epoch`
# but the filename uses `epoch + 1`, so a save at epoch 30 is written as
# epoch_31_params.npz — consistent with the file loaded elsewhere here.
if (epoch) % 5 == 0:
    np.savez('/home/xvt131/Functions/epoch_%d_params.npz' % (epoch + 1),
             *lasagne.layers.get_all_param_values(output))

print confusion_train
print "Epoch %i : Train Loss %e , Train acc %f, Valid acc %f " % (
    epoch + 1, loss[-1], train_acc_cur, valid_acc_cur)

import Evaluation as E

# Snapshot the final weights, then report Dice scores on the
# validation split (patch sizes 9 and 5).
np.savez('Evaluation_Params.npz', *lasagne.layers.get_all_param_values(output))
X, Y = E.Evaluate1("/home/xvt131/Running/validating", DP.image_load, 9, 5,
                   f_eval)
print "Mean Tibia Dice Score:", np.mean(X)
print "Mean Femur Dice Score:", np.mean(Y)
for img in Test: [X_test, X_Bigs], X_post, Y_test = DP.voxel_samples3(im, [15, 9]) num_samples_valid = Y_test.shape[0] num_batches_valid = num_samples_valid // batch_size for i in range(num_batches_valid): idx = range(i * batch_size, (i + 1) * batch_size) x_batch = X_test[idx] post_batch = X_post[idx] big_batch = X_Bigs[idx] targets_batch = Y_test[idx] net_out = f_eval(x_batch, post_batch, big_batch) preds = np.argmax(net_out, axis=-1) confusion_valid.batch_add(targets_batch, preds) train_acc_cur = confusion_train.accuracy() valid_acc_cur = confusion_valid.accuracy() print confusion_train print "Epoch %i : Train Loss %e , Train acc %f, Valid acc %f " % ( epoch + 1, loss[-1], train_acc_cur, valid_acc_cur) import Evaluation as E X, Y, Z, Pre, Lab = E.Evaluate1("/home/xvt131/Running/train4", DP.image_load, 15, 9, f_eval) print "Mean Tibia Dice Score:", np.mean(X) print "Mean Femur Dice Score:", np.mean(Y)