Example #1
# tail of a commented-out loss-averaging helper (its beginning is truncated in the source)
'''
    loss_new = (loss_old + losses/avg_loss) / 2.
    return loss_new'''

def update_losses(losses, idx, loss):
    # weighted running average: the newest loss counts twice as much as the stored value
    losses[idx] = (losses[idx] + loss*2.) / 3.
    return losses
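
A quick check of what the update does (values invented for illustration; runnable with numpy and the function above):

import numpy as np

losses = np.ones(5)                     # every sample starts with weight 1.0
losses = update_losses(losses, 3, 4.0)  # sample 3 just produced a high loss
print losses[3]                         # (1.0 + 2*4.0) / 3 = 3.0 -> sampled more often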

n_epochs = 30
auc_scores=None
for epoch in range(14, n_epochs):  # training resumes at epoch 14
    data_gen_train = memmapGenerator_allInOne_segmentation_lossSampling(memmap_data, memmap_gt, BATCH_SIZE, validation_patients, mode="train", ignore=[40], losses=losses)
    data_gen_train = seg_channel_selection_generator(data_gen_train, [2])  # keep only segmentation channel 2
    data_gen_train = rotation_generator(data_gen_train)  # random rotation augmentation
    data_gen_train = center_crop_generator(data_gen_train, (PATCH_SIZE, PATCH_SIZE))
    data_gen_train = elastric_transform_generator(data_gen_train, 550., 20.)  # elastic deformation (alpha=550, sigma=20)
    data_gen_train = Multithreaded_Generator(data_gen_train, 12, 100)  # 12 workers, up to 100 cached batches
    data_gen_train._start()
    print "epoch: ", epoch
    train_loss = 0
    train_acc_tmp = 0
    train_loss_tmp = 0
    batch_ctr = 0
    for data, seg, idx in data_gen_train:
        if batch_ctr != 0 and batch_ctr % int(np.floor(n_batches_per_epoch/n_feedbacks_per_epoch)) == 0:
            print "number of batches: ", batch_ctr, "/", n_batches_per_epoch
            print "training_loss since last update: ", train_loss_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch), " train accuracy: ", train_acc_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch)
            all_training_losses.append(train_loss_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch))
            all_training_accuracies.append(train_acc_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch))
            train_loss_tmp = 0
            train_acc_tmp = 0
            printLosses(all_training_losses, all_training_accuracies, all_validation_losses, all_validation_accuracies, os.path.join(results_dir, "%s.png" % EXPERIMENT_NAME), n_feedbacks_per_epoch, auc_scores=auc_scores, auc_labels=["bg", "brain", "edema", "ce_tumor", "necrosis"], ylim_score=(0,0.75))
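
The excerpt stops before the actual optimization step: batch_ctr is never incremented and no loss is computed in what is shown. A minimal sketch of the missing loop body, assuming a compiled Theano function train_fn returning (loss, accuracy); both the name and the signature are assumptions, not recovered from the source:

    for data, seg, idx in data_gen_train:
        loss, acc = train_fn(data, seg.ravel())    # hypothetical compiled Theano function
        losses = update_losses(losses, idx, loss)  # feed the loss back into the sampler
        train_loss_tmp += loss
        train_acc_tmp += acc
        batch_ctr += 1
        # ... feedback/printing block as shown above ...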
Example #2
'''d, s, l = data_gen_train.next()
plt.figure(figsize=(12, 5))
plt.subplot(1, 3, 1)
plt.imshow(d[0,0], cmap="gray")
plt.subplot(1, 3, 2)
d1=elastic_transform_2d(d[0,0], 550., 20.)
plt.imshow(d1, cmap="gray")
plt.subplot(1, 3, 3)
plt.imshow(d[0,0]-d1)
plt.show()
plt.close()'''

data_gen_validation = SegmentationBatchGeneratorBraTS2014(all_patients, BATCH_SIZE, validation_patients, PATCH_SIZE=PATCH_SIZE, mode="test", ignore=[81], losses=None, num_batches=None, seed=None)
data_gen_validation = seg_channel_selection_generator(data_gen_validation, [2])
data_gen_validation = center_crop_seg_generator(data_gen_validation, (180, 164))
data_gen_validation = Multithreaded_Generator(data_gen_validation, 2, 30)
data_gen_validation._start()

net = build_UNet(20, BATCH_SIZE, num_output_classes=6, base_n_filters=16, input_dim=PATCH_SIZE, pad="valid")
output_layer_for_loss = net["output_flattened"]
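# with pad="valid" every convolution shrinks the feature maps, so the network's
# segmentation output is smaller than the input patch (hence the ground-truth crop above)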

n_batches_per_epoch = 300
# n_batches_per_epoch = np.floor(n_training_samples/float(BATCH_SIZE))
n_test_batches = 30
# n_test_batches = np.floor(n_val_samples/float(BATCH_SIZE))

x_sym = T.tensor4()
seg_sym = T.ivector()
w_sym = T.vector()

# add some weight decay
# tail of a commented-out helper (truncated in the source)
'''
    return loss_new'''
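
Only the weight-decay comment and a stray return survive from the loss construction here. With Lasagne, a typical weighted cross-entropy over output_flattened plus an L2 penalty would look like the sketch below; the exact wiring is an assumption, not recovered from the source:

import lasagne
import theano.tensor as T
from lasagne.regularization import regularize_network_params, l2

prediction = lasagne.layers.get_output(output_layer_for_loss, x_sym)
loss_vec = lasagne.objectives.categorical_crossentropy(prediction, seg_sym)
loss = T.mean(loss_vec * w_sym)  # w_sym carries per-pixel weights
loss += 1e-4 * regularize_network_params(output_layer_for_loss, l2)  # illustrative decay factor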

losses = np.ones(len(memmap_gt))  # one sampling weight per training sample, initially uniform
def update_losses(losses, idx, loss):
    losses[idx] = (losses[idx] + loss*2.) / 3.
    return losses

n_epochs = 40
auc_scores=None
for epoch in range(0,n_epochs):
    data_gen_train = memmapGenerator_allInOne_segmentation_lossSampling(memmap_data, memmap_gt, BATCH_SIZE, validation_patients, mode="train", ignore=[40], losses=losses)
    data_gen_train = seg_channel_selection_generator(data_gen_train, [2])
    data_gen_train = rotation_generator(data_gen_train)
    data_gen_train = center_crop_generator(data_gen_train, (PATCH_SIZE, PATCH_SIZE))
    data_gen_train = elastric_transform_generator(data_gen_train, 550., 20.)
    data_gen_train = Multithreaded_Generator(data_gen_train, 12, 100)
    data_gen_train._start()
    print "epoch: ", epoch
    train_loss = 0
    train_acc_tmp = 0
    train_loss_tmp = 0
    batch_ctr = 0
    for data, seg, idx in data_gen_train:
        if batch_ctr != 0 and batch_ctr % int(np.floor(n_batches_per_epoch/n_feedbacks_per_epoch)) == 0:
            print "number of batches: ", batch_ctr, "/", n_batches_per_epoch
            print "training_loss since last update: ", train_loss_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch), " train accuracy: ", train_acc_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch)
            all_training_losses.append(train_loss_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch))
            all_training_accuracies.append(train_acc_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch))
            train_loss_tmp = 0
            train_acc_tmp = 0
            printLosses(all_training_losses, all_training_accuracies, all_validation_losses, all_validation_accuracies, os.path.join(results_dir, "%s.png" % EXPERIMENT_NAME), n_feedbacks_per_epoch, auc_scores=auc_scores, auc_labels=["bg", "brain", "edema", "ce_tumor", "necrosis"], ylim_score=(0,1.5))
'''d, s, l = data_gen_train.next()
plt.figure(figsize=(12, 5))
plt.subplot(1, 3, 1)
plt.imshow(d[0,0], cmap="gray")
plt.subplot(1, 3, 2)
d1=elastic_transform_2d(d[0,0], 550., 20.)
plt.imshow(d1, cmap="gray")
plt.subplot(1, 3, 3)
plt.imshow(d[0,0]-d1)
plt.show()
plt.close()'''

data_gen_validation = SegmentationBatchGeneratorDavid(all_patients, BATCH_SIZE, validation_patients, PATCH_SIZE=INPUT_PATCH_SIZE, mode="test", ignore=[81], losses=None, num_batches=None, seed=None)
data_gen_validation = seg_channel_selection_generator(data_gen_validation, [2])
data_gen_validation = center_crop_seg_generator(data_gen_validation, OUTPUT_PATCH_SIZE)
data_gen_validation = Multithreaded_Generator(data_gen_validation, 2, 10)
data_gen_validation._start()

net = build_UNet(25, BATCH_SIZE, num_output_classes=num_classes, base_n_filters=16, input_dim=INPUT_PATCH_SIZE, pad="valid")
output_layer_for_loss = net["output_flattened"]

n_batches_per_epoch = 300
# n_batches_per_epoch = np.floor(n_training_samples/float(BATCH_SIZE))
n_test_batches = 30
# n_test_batches = np.floor(n_val_samples/float(BATCH_SIZE))

x_sym = T.tensor4()
seg_sym = T.ivector()
w_sym = T.vector()

# add some weight decay
# instantiated once just to read off the number of training samples
tmp = SegmentationBatchGeneratorFromRawData(all_patients, BATCH_SIZE, validation_patients, PATCH_SIZE=PATCH_SIZE, mode="train", ignore=[40], losses=None, num_batches=None, seed=None)

losses = np.ones(tmp.get_losses().shape[0])  # start with uniform per-sample weights
def update_losses(losses, idx, loss):
    losses[idx] = (losses[idx] + loss*2.) / 3.
    return losses

n_epochs = 30
auc_scores=None
for epoch in range(0,n_epochs):
    data_gen_train = SegmentationBatchGeneratorFromRawData(all_patients, BATCH_SIZE, validation_patients, PATCH_SIZE=PATCH_SIZE, mode="train", ignore=[40], losses=losses, num_batches=1500, seed=None)
    data_gen_train = seg_channel_selection_generator(data_gen_train, [2])
    data_gen_train = rotation_generator(data_gen_train)
    # data_gen_train = center_crop_generator(data_gen_train, (PATCH_SIZE, PATCH_SIZE))
    data_gen_train = elastric_transform_generator(data_gen_train, 450., 16.)
    data_gen_train = Multithreaded_Generator(data_gen_train, 12, 100)
    data_gen_train._start()
    print "epoch: ", epoch
    train_loss = 0
    train_acc_tmp = 0
    train_loss_tmp = 0
    batch_ctr = 0
    for data, seg, idx in data_gen_train:
        if batch_ctr != 0 and batch_ctr % int(np.floor(n_batches_per_epoch/n_feedbacks_per_epoch)) == 0:
            print "number of batches: ", batch_ctr, "/", n_batches_per_epoch
            print "training_loss since last update: ", train_loss_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch), " train accuracy: ", train_acc_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch)
            all_training_losses.append(train_loss_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch))
            all_training_accuracies.append(train_acc_tmp/np.floor(n_batches_per_epoch/n_feedbacks_per_epoch))
            train_loss_tmp = 0
            train_acc_tmp = 0
            printLosses(all_training_losses, all_training_accuracies, all_validation_losses, all_validation_accuracies, os.path.join(results_dir, "%s.png" % EXPERIMENT_NAME), n_feedbacks_per_epoch, auc_scores=auc_scores, auc_labels=["bg", "brain", "edema", "ce_tumor", "necrosis"], ylim_score=(0,0.75))
'''d, s, l = data_gen_train.next()
plt.figure(figsize=(12, 5))
plt.subplot(1, 3, 1)
plt.imshow(d[0,0], cmap="gray")
plt.subplot(1, 3, 2)
d1=elastic_transform_2d(d[0,0], 550., 20.)
plt.imshow(d1, cmap="gray")
plt.subplot(1, 3, 3)
plt.imshow(d[0,0]-d1)
plt.show()
plt.close()'''

data_gen_validation = SegmentationBatchGeneratorBraTS2014(all_patients, BATCH_SIZE, validation_patients, PATCH_SIZE=PATCH_SIZE, mode="test", ignore=[81], losses=None, num_batches=None, seed=None)
data_gen_validation = seg_channel_selection_generator(data_gen_validation, [2])
data_gen_validation = center_crop_seg_generator(data_gen_validation, (180, 164))
data_gen_validation = Multithreaded_Generator(data_gen_validation, 2, 30)
data_gen_validation._start()

manually_labeled_patients = np.concatenate((range(4), np.arange(222, 232), np.arange(245, 251)))
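# i.e. patients 0-3, 222-231 and 245-250 (the manually labeled cases)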

net = build_UNet(20, BATCH_SIZE, num_output_classes=6, base_n_filters=16, input_dim=PATCH_SIZE, pad="valid")
output_layer_for_loss = net["output_flattened"]

n_batches_per_epoch = 100
# n_batches_per_epoch = np.floor(n_training_samples/float(BATCH_SIZE))
n_test_batches = 10
# n_test_batches = np.floor(n_val_samples/float(BATCH_SIZE))

x_sym = T.tensor4()
seg_sym = T.ivector()
w_sym = T.vector()
Example #7
memmap_name = "patchSegmentation_allInOne_ws_t1km_flair_adc_cbv_resized"

BATCH_SIZE = 10
PATCH_SIZE = 15

# the properties pickle stores the array shapes needed to map the raw memmap files
with open(dataset_folder + "%s_properties.pkl" % (memmap_name), 'rb') as f:
    my_dict = cPickle.load(f)

data_ctr = my_dict['n_data']
train_shape = my_dict['train_neg_shape']
info_memmap_shape = my_dict['info_shape']
memmap_data = memmap(dataset_folder + "%s.memmap" % (memmap_name), dtype=np.float32, mode="r", shape=train_shape)
memmap_gt = memmap(dataset_folder + "%s_info.memmap" % (memmap_name), dtype=np.float32, mode="r", shape=info_memmap_shape)
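# both arrays are mapped read-only straight from disk: memmap_data holds the image
# patches, the *_info memmap (memmap_gt here) the matching segmentation/info records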


data_gen = memmapGenerator_allInOne_segmentation_lossSampling(memmap_data, memmap_gt, 1, [0, 1], num_batches=10)

for data, seg, ids in data_gen:
    print ids[0]

for _ in range(5):
    data_gen = memmapGenerator_allInOne_segmentation_lossSampling(memmap_data, memmap_gt, 1, [0, 1])
    data_gen_mt = Multithreaded_Generator(data_gen, 8, 30)
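    # presumably: 8 workers filling a queue of up to 30 pre-computed batches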
    ctr = 0
    for data, seg, ids in data_gen_mt:
        print ids[0]
        ctr += 1
        if ctr > 10:
            break
    data_gen_mt._finish()