Example #1
            printLosses(all_training_losses, all_training_accuracies, all_validation_losses, all_validation_accuracies, os.path.join(results_dir, "%s.png" % EXPERIMENT_NAME), n_feedbacks_per_epoch, auc_scores=auc_scores, auc_labels=["brain", "1", "2", "3", "4"], ylim_score=(0,0.08))
        # loss, acc = train_fn(data, convert_seg_map_for_crossentropy(seg, range(4)).astype(np.float32))
        # flatten the segmentation to one label per voxel and look up a
        # per-voxel weight in the class-frequency table
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]
        loss_vec, acc = train_fn(data, seg_flat, w)  # class_weights[seg_flat]
        loss = loss_vec.mean()
        # fold the per-voxel losses into one value per sample and update the
        # running loss table used for loss-based sampling
        loss_per_sample = loss_vec.reshape(BATCH_SIZE, -1).mean(axis=1)
        losses = update_losses(losses, idx, loss_per_sample)
        train_loss += loss
        train_loss_tmp += loss
        train_acc_tmp += acc
        batch_ctr += 1
        if batch_ctr > n_batches_per_epoch:
            break

    data_gen_train._finish()

    train_loss /= n_batches_per_epoch
    print "training loss average on epoch: ", train_loss
    if epoch <= 1:
        losses[:] = 100.

    y_true = []
    y_pred = []
    test_loss = 0
    accuracies = []
    valid_batch_ctr = 0
    for data, seg, labels in data_gen_validation:
        # loss, acc = val_fn(data, convert_seg_map_for_crossentropy(seg, range(4)).astype(np.float32))
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]
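
The window above uses an `update_losses` helper that is not shown. A minimal sketch of what such a helper might look like, assuming it keeps a running loss estimate per training sample for the loss-sampling generator (the moving-average scheme and the `momentum` parameter are illustrative assumptions, not the original implementation):

import numpy as np

def update_losses(losses, idx, loss_per_sample, momentum=0.9):
    # losses:          one running loss estimate per sample in the dataset
    # idx:             dataset indices of the samples in this mini-batch
    # loss_per_sample: freshly computed losses for those samples
    # an exponential moving average keeps the estimates stable, so a
    # loss-sampling generator can preferentially draw hard examples
    losses[idx] = momentum * losses[idx] + (1. - momentum) * loss_per_sample
    return losses
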
Example #2
            printLosses(all_training_losses, all_training_accuracies, all_validation_losses, all_validation_accuracies, os.path.join(results_dir, "%s.png" % EXPERIMENT_NAME), n_feedbacks_per_epoch, auc_scores=auc_scores, auc_labels=["bg", "brain", "edema", "ce_tumor", "necrosis"], ylim_score=(0,0.75))
        # loss, acc = train_fn(data, convert_seg_map_for_crossentropy(seg, range(4)).astype(np.float32))
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]
        loss_vec, acc = train_fn(data, seg_flat, w) #class_weights[seg_flat]
        loss = loss_vec.mean()
        loss_per_sample = loss_vec.reshape(BATCH_SIZE, -1).mean(axis=1)
        losses = update_losses(losses, idx, loss_per_sample)
        train_loss += loss
        train_loss_tmp += loss
        train_acc_tmp += acc
        batch_ctr += 1
        if batch_ctr > n_batches_per_epoch:
            break

    data_gen_train._finish()

    train_loss /= n_batches_per_epoch
    print "training loss average on epoch: ", train_loss
    if epoch <= 2:
        losses[:] = 100.

    y_true = []
    y_pred = []
    test_loss = 0
    accuracies = []
    valid_batch_ctr = 0
    for data, seg, labels in data_gen_validation:
        # loss, acc = val_fn(data, convert_seg_map_for_crossentropy(seg, range(4)).astype(np.float32))
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]
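
Every example indexes `class_frequencies2` with the flattened segmentation to get one weight per voxel. A sketch of how such a lookup table could be built, assuming inverse class-frequency weighting (the normalization is an assumption; the original table may be computed differently):

import numpy as np

def build_class_weight_table(segmentations, num_classes):
    # count how often each label occurs across the training data
    counts = np.bincount(segmentations.flatten().astype(np.int32),
                         minlength=num_classes).astype(np.float64)
    # inverse frequency, normalized so the weights average to 1;
    # np.maximum guards against empty classes
    weights = counts.sum() / (num_classes * np.maximum(counts, 1))
    return weights.astype(np.float32)

As in the snippets, `w = table[seg_flat]` then yields one weight per voxel, so rare classes contribute more to the loss.
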
Example #3
                auc_scores = np.concatenate(auc_all, axis=0).reshape(-1, len(class_frequencies2)-1)
            printLosses(all_training_losses, all_training_accuracies, all_validation_losses, all_validation_accuracies, os.path.join(results_dir, "%s.png" % EXPERIMENT_NAME), n_feedbacks_per_epoch, auc_scores=auc_scores, auc_labels=["brain", "1", "2", "3", "4"], ylim_score=(0,0.08))
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]
        loss_vec, acc = train_fn(data, seg_flat, w) #class_weights[seg_flat]
        loss = loss_vec.mean()
        loss_per_sample = loss_vec.reshape(BATCH_SIZE, -1).mean(axis=1)
        losses = update_losses(losses, idx, loss_per_sample)
        train_loss += loss
        train_loss_tmp += loss
        train_acc_tmp += acc
        batch_ctr += 1
        if batch_ctr > n_batches_per_epoch:
            break

    data_gen_train._finish()

    train_loss /= n_batches_per_epoch
    print "training loss average on epoch: ", train_loss
    if epoch <= 1:
        losses[:] = 100.

    y_true = []
    y_pred = []
    test_loss = 0
    accuracies = []
    valid_batch_ctr = 0
    for data, seg, labels in data_gen_validation:
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]
        loss, acc = val_fn(data, seg_flat, w) #, class_weights[seg_flat]
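
`train_fn` evidently returns a per-voxel loss vector rather than a scalar, which is what makes the `reshape(BATCH_SIZE, -1)` step possible. A NumPy sketch of the weighted cross-entropy such a function implies (assumptions: softmax outputs of shape `(n_voxels, n_classes)`, integer targets, one weight per voxel; the real `train_fn` is a compiled function that also performs the parameter update):

import numpy as np

def weighted_crossentropy(softmax_out, targets, weights, eps=1e-7):
    n = targets.shape[0]
    # probability the model assigns to the correct class of each voxel
    p_correct = softmax_out[np.arange(n), targets]
    # per-voxel weighted negative log-likelihood
    loss_vec = -weights * np.log(p_correct + eps)
    acc = np.mean(np.argmax(softmax_out, axis=1) == targets)
    return loss_vec, acc
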
Example #4
            printLosses(all_training_losses, all_training_accuracies, all_validation_losses, all_validation_accuracies, os.path.join(results_dir, "%s.png" % EXPERIMENT_NAME), n_feedbacks_per_epoch, auc_scores=auc_scores, auc_labels=["bg", "brain", "edema", "ce_tumor", "necrosis"], ylim_score=(0,1.5))
        # loss, acc = train_fn(data, convert_seg_map_for_crossentropy(seg, range(4)).astype(np.float32))
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]
        loss_vec, acc = train_fn(data, seg_flat, w) #class_weights[seg_flat]
        loss = loss_vec.mean()
        loss_per_sample = loss_vec.reshape(BATCH_SIZE, -1).mean(axis=1)
        losses = update_losses(losses, idx, loss_per_sample)
        train_loss += loss
        train_loss_tmp += loss
        train_acc_tmp += acc
        batch_ctr += 1
        if batch_ctr > n_batches_per_epoch:
            break

    data_gen_train._finish()

    train_loss /= n_batches_per_epoch
    print "training loss average on epoch: ", train_loss
    if epoch <= 2:
        losses[:] = train_loss

    y_true = []
    y_pred = []
    test_loss = 0
    accuracies = []
    valid_batch_ctr = 0
    for data, seg, labels in data_gen_validation:
        # loss, acc = val_fn(data, convert_seg_map_for_crossentropy(seg, range(4)).astype(np.float32))
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]
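
All four training windows cut off just after the weight lookup in the validation loop; only Example #3 shows the next line (`loss, acc = val_fn(data, seg_flat, w)`). Given the accumulators initialized above (`test_loss`, `accuracies`, `valid_batch_ctr`), the loop plausibly continues along these lines (a hedged reconstruction, not the verbatim original):

    for data, seg, labels in data_gen_validation:
        seg_flat = seg.flatten().astype(np.int32)
        w = class_frequencies2[seg_flat]
        loss, acc = val_fn(data, seg_flat, w)
        test_loss += loss
        accuracies.append(acc)
        valid_batch_ctr += 1
    test_loss /= valid_batch_ctr
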
Example #5
memmap_name = "patchSegmentation_allInOne_ws_t1km_flair_adc_cbv_resized"

BATCH_SIZE = 10
PATCH_SIZE = 15

# pickle files must be opened in binary mode
with open(dataset_folder + "%s_properties.pkl" % (memmap_name), 'rb') as f:
    my_dict = cPickle.load(f)

data_ctr = my_dict['n_data']
train_shape = my_dict['train_neg_shape']
info_memmap_shape = my_dict['info_shape']
memmap_data = memmap(dataset_folder + "%s.memmap" % (memmap_name), dtype=np.float32, mode="r", shape=train_shape)
memmap_gt = memmap(dataset_folder + "%s_info.memmap" % (memmap_name), dtype=np.float32, mode="r", shape=info_memmap_shape)


data_gen = memmapGenerator_allInOne_segmentation_lossSampling(memmap_data, memmap_gt, 1, [0, 1], num_batches=10)

for data, seg, ids in data_gen:
    print(ids[0])

for _ in range(5):
    data_gen = memmapGenerator_allInOne_segmentation_lossSampling(memmap_data, memmap_gt, 1, [0, 1])
    data_gen_mt = Multithreaded_Generator(data_gen, 8, 30)
    ctr = 0
    for data, seg, ids in data_gen_mt:
        print(ids[0])
        ctr += 1
        if ctr > 10:
            break
    data_gen_mt._finish()
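
Example #5 wraps the generator in `Multithreaded_Generator(data_gen, 8, 30)`, i.e. 8 worker threads and a buffer of 30 batches. A minimal sketch of the prefetching pattern behind such a wrapper (the class name, sentinel scheme, and single background thread are illustrative assumptions; the real class also runs multiple workers and exposes `_finish()` for shutdown):

import threading
try:
    from queue import Queue   # Python 3
except ImportError:
    from Queue import Queue   # Python 2

class ThreadedPrefetcher(object):
    _SENTINEL = object()

    def __init__(self, generator, queue_size=30):
        # a bounded queue decouples batch generation from consumption,
        # so the training loop rarely waits on data loading
        self._queue = Queue(maxsize=queue_size)
        self._thread = threading.Thread(target=self._fill, args=(generator,))
        self._thread.daemon = True
        self._thread.start()

    def _fill(self, generator):
        for item in generator:
            self._queue.put(item)
        self._queue.put(self._SENTINEL)   # signal exhaustion to the consumer

    def __iter__(self):
        return self

    def __next__(self):
        item = self._queue.get()
        if item is self._SENTINEL:
            raise StopIteration
        return item

    next = __next__   # Python 2 compatibility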