Example #1
def init(cfg):

    # Experiment directory: save_path + prefix + zero-padded experiment index.
    save_path = cfg.save_path + cfg.save_dir_prefix + str(
        cfg.experiment_idx).zfill(3)

    mkdir(save_path)

    # Use the next free trial number unless a trial id was given explicitly.
    trial_id = (len([dir for dir in os.listdir(save_path) if 'trial' in dir]) +
                1) if cfg.trial_id is None else cfg.trial_id
    trial_save_path = save_path + '/trial_' + str(trial_id)

    if not os.path.isdir(trial_save_path):
        mkdir(trial_save_path)
        # Archive the current source tree next to the results, skipping data and log files.
        copytree(os.getcwd(),
                 trial_save_path + '/source_code',
                 ignore=ignore_patterns('*.git', '*.txt', '*.tif', '*.pkl',
                                        '*.off', '*.so', '*.json', '*.jsonl',
                                        '*.log', '*.patch', '*.yaml', 'wandb',
                                        'run-*'))

    # Seed all random number generators with the trial id so each trial is reproducible.
    seed = trial_id
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.backends.cudnn.enabled = True  # speeds up the computation

    return trial_save_path, trial_id
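
A minimal, self-contained sketch of the two techniques init relies on: archiving the source tree with copytree/ignore_patterns and seeding the random number generators. The trial directory, ignore patterns, and seed value below are illustrative assumptions, not the project's actual configuration.

import os
import tempfile
import numpy as np
import torch
from shutil import copytree, ignore_patterns

# Hypothetical trial directory, created outside the working tree.
trial_save_path = tempfile.mkdtemp(prefix='trial_1_')

# Snapshot the current source tree next to the results, skipping bulky files.
copytree(os.getcwd(),
         trial_save_path + '/source_code',
         ignore=ignore_patterns('*.git', '*.log', 'wandb'))

# Seed numpy and torch so a given trial id reproduces the same run.
seed = 1
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)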
Example #2
def test(unet, all_data, sampler, save_path):

    # result parent dir
    save_path = save_path + '/IoUs'
    mkdir(save_path)

    all_results = {}
    for tag, d in all_data.items():
        logger.info(tag + ' set evaluation...')
        results = dict()
        loss = compute_loss(d.x, d.seed, d.y, results, sampler,
                            unet.config.num_classes, unet, d.seed_ids)
        all_results[tag] = {'loss': loss, 'predictions': results}

        save_line(list_to_string(loss), save_path + '/' + tag + '_IoU.txt')

    return all_results
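
The helpers mkdir, save_line, and list_to_string are used throughout these examples but never shown. A plausible minimal implementation, purely an assumption about what the project's helpers do, could look like this:

import os

def mkdir(path):
    # Create the directory if it does not already exist.
    if not os.path.isdir(path):
        os.makedirs(path)

def list_to_string(values):
    # Join an iterable of numbers into a single space-separated string.
    return ' '.join(str(v) for v in values)

def save_line(line, file_path):
    # Append one line of text to a results file.
    with open(file_path, 'a') as f:
        f.write(line + '\n')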
Example #3
    def evaluate(self, epoch, writer=None, backup_writer=None):
        # self.net = self.net.eval()
        performences = {}
        predictions = {}

        for split in [DataModes.TESTING]:
            dataloader = DataLoader(self.data[split],
                                    batch_size=1,
                                    shuffle=False)
            performences[split], predictions[split] = self.evaluate_set(
                dataloader)

            write_to_wandb(writer, epoch, split, performences,
                           self.config.num_classes)

        # Save the model and predictions only when performance improves on the current best.
        if self.support.update_checkpoint(best_so_far=self.current_best,
                                          new_value=performences):

            mkdir(self.save_path)
            mkdir(self.save_path + '/mesh')
            mkdir(self.save_path + '/voxels')

            self.save_model(epoch)
            self.save_results(predictions[DataModes.TESTING], epoch,
                              performences[DataModes.TESTING], self.save_path,
                              '/testing_')
            self.current_best = performences
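
evaluate only writes a checkpoint when update_checkpoint judges the new metrics better than the current best. A minimal sketch of that pattern follows, with a made-up metric layout; the real helper may compare a different quantity.

def update_checkpoint(best_so_far, new_value, key='jaccard'):
    # Treat a missing previous best as an automatic improvement.
    if best_so_far is None:
        return True
    return new_value[key] > best_so_far[key]

current_best = None
for metrics in ({'jaccard': 0.61}, {'jaccard': 0.58}, {'jaccard': 0.67}):
    if update_checkpoint(current_best, metrics):
        current_best = metrics  # the model would be saved here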
Example #4
def write_to_disk_em(synapse_list, unet, save_at, mode, results_for_log):
    # Per-class IoU accumulators over all synapses and slices.
    jc_sum_all = np.zeros(unet.config.num_classes)
    jc_count_all = 0

    save_at = save_at + '/' + mode
    for syn in synapse_list:
        syn_path = save_at + '/' + syn

        mkdir(syn_path)
        mkdir(syn_path + '/y_hat')
        mkdir(syn_path + '/y_hat_overlayed')

        jc_sum, count = synapse_list[syn].IoU()

        jc_sum_all += jc_sum
        jc_count_all += count
        save_line(list_to_string(jc_sum/count), syn_path + '/jaccard_average_synapse.txt')

        for slice in synapse_list[syn].slices:
            # Rescale class indices to the 0-255 range before saving as an 8-bit PNG.
            y_hat = 255 * slice.y_hat / unet.config.num_classes
            io.imsave(syn_path + '/y_hat/y_hat_' + str(slice.slice_id) + '.png', np.uint8(y_hat))
            io.imsave(syn_path + '/y_hat_overlayed/y_hat_overlayed_' + str(slice.slice_id) + '.png', slice.overlayed)

            save_line(list_to_string(slice.jaccard_indx), syn_path + '/jaccard_slice.txt')
            save_line(str(slice.slice_id) + ' : ' + list_to_string(slice.center), syn_path + '/centers.txt')

    # Per-class mean IoU over all slices of all synapses.
    jc_avg = jc_sum_all / jc_count_all
    for i in range(unet.config.num_classes):
        results_for_log[mode + '_jaccard_index_' + str(i)] = jc_avg[i]

    save_line(list_to_string(jc_avg), save_at + '/jaccard_average_all.txt')
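
The predictions above are written as 8-bit PNGs by spreading the integer class labels over the 0-255 range. A self-contained sketch of that step with a random label map, assuming io refers to skimage.io as in the example:

import numpy as np
from skimage import io

num_classes = 4
y_hat = np.random.randint(0, num_classes, size=(388, 388))  # fake prediction

# Map class indices 0..num_classes-1 onto 0..255 so classes are distinguishable.
png = np.uint8(255 * y_hat / num_classes)
io.imsave('y_hat_example.png', png)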
Example #5
def save_predictions(iter, all_results, unet, save_path):

    results_for_log = dict()
    # result parent dir
    if unet.config.more_options.synapse_layer_mode == 'synapse_dtf':
        save_path = save_path + '/doc_manual_label_synapses_dtf'
    elif unet.config.more_options.synapse_layer_mode == 'center_dot_dtf':
        save_path = save_path + '/doc_manual_label_center_dot_dtf'
    mkdir(save_path)

    # result dir for this iteration
    save_path = save_path + '/itr_' + str(iter)
    mkdir(save_path)

    for tag, results in all_results.items():
        mkdir(save_path + '/' + tag)
        if unet.config.more_options.dataset_name == 'EM':
            write_to_disk_em(results['predictions'], unet, save_path, tag, results_for_log)
        else:
            write_to_disk(results['predictions'], unet, save_path, tag, results_for_log)

    return results_for_log
Example #6
def write_to_disk_em_old(synapse_list, unet, save_at, mode, results_for_log):
    cmap = plt.get_cmap('jet')

    jc_sum_all = np.zeros(unet.config.num_classes)
    jc_count_all = 0

    save_at = save_at + '/' + mode
    for syn in synapse_list:
        syn_path = save_at + '/' + syn

        mkdir(syn_path)
        mkdir(syn_path + '/x')
        mkdir(syn_path + '/y')
        mkdir(syn_path + '/y_hat_heat_map')
        mkdir(syn_path + '/y_hat')
        mkdir(syn_path + '/y_hat_overlayed')

        jc_sum, count = synapse_list[syn].IoU()

        jc_sum_all += jc_sum
        jc_count_all += count
        save_line(list_to_string(jc_sum/count), syn_path + '/jaccard_average_synapse.txt')

        for slice in synapse_list[syn].slices:
            x = slice.x
            io.imsave(syn_path + '/x/x_0' + str(slice.slice_id) + '.png', np.uint8(255 * x[0]))
            io.imsave(syn_path + '/x/x_1' + str(slice.slice_id) + '.png', np.uint8(255 * x[1]))

            y = 255 * slice.y / unet.config.num_classes
            io.imsave(syn_path + '/y/y_' + str(slice.slice_id) + '.png', np.uint8(y))

            y_hat = 255 * slice.y_hat / unet.config.num_classes
            io.imsave(syn_path + '/y_hat/y_hat_' + str(slice.slice_id) + '.png', np.uint8(y_hat))

            heat_maps = slice.y_hat_heat_map

            # Colour each per-class heat map with the colormap, drop the alpha
            # channel, then tile the maps horizontally with a white gap between them.
            color_heat_maps = [np.delete(cmap(map), 3, 2) for map in heat_maps]
            #color_heat_maps = np.concatenate( (color_heat_maps[0], gap3, color_heat_maps[1], gap3, color_heat_maps[2]), axis=1)

            color_heat_maps = np.concatenate(
                [np.concatenate((map, np.ones((map.shape[0], 20, 3))), axis=1)
                 for map in color_heat_maps], axis=1)
            color_heat_maps = np.uint8(255 * color_heat_maps)
            io.imsave(syn_path + '/y_hat_heat_map/y_hat_heat_map_' + str(slice.slice_id) + '.png', color_heat_maps)

            io.imsave(syn_path + '/y_hat_overlayed/y_hat_overlayed_' + str(slice.slice_id) + '.png', slice.overlayed)

            save_line(list_to_string(slice.jaccard_indx), syn_path + '/jaccard_slice.txt')
            save_line(str(slice.slice_id) + ' : ' + list_to_string(slice.center), syn_path + '/centers.txt')

    jc_avg = jc_sum_all / jc_count_all
    for i in range(unet.config.num_classes):
        results_for_log[mode + '_jaccard_index_' + str(i)] = jc_avg[i]

    save_line(list_to_string(jc_avg), save_at + '/jaccard_average_all.txt')
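
The heat-map images in write_to_disk_em_old come from running each per-class probability map through matplotlib's 'jet' colormap and dropping the alpha channel. A small sketch of that step with a random map (the map shape is an assumption):

import numpy as np
import matplotlib.pyplot as plt

cmap = plt.get_cmap('jet')
heat_map = np.random.rand(388, 388)  # fake per-class probability map

rgba = cmap(heat_map)                # shape (388, 388, 4), floats in [0, 1]
rgb = np.delete(rgba, 3, 2)          # drop the alpha channel
png = np.uint8(255 * rgb)            # 8-bit RGB image, ready for io.imsave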