def neuroproof_lmc(ds_train_str, ds_test_str, seg_id_train, seg_id_test,
                   local_feats_list, lifted_feats_list, mc_params):

    meta.load()

    ds_train = meta.get_dataset(ds_train_str)
    ds_test = meta.get_dataset(ds_test_str)

    # precompute the filter responses needed for the lifted features
    # (presumably input 0 = raw data and input 1 = probability map)
    ds_train.make_filters(0, mc_params.anisotropy_factor)
    ds_test.make_filters(0, mc_params.anisotropy_factor)
    ds_train.make_filters(1, mc_params.anisotropy_factor)
    ds_test.make_filters(1, mc_params.anisotropy_factor)

    mc_node, mc_edges, mc_energy, t_inf = lifted_multicut_workflow(
        ds_train,
        ds_test,
        seg_id_train,
        seg_id_test,
        local_feats_list,
        lifted_feats_list,
        mc_params,
        gamma=2.,
        warmstart=False,
        weight_z_lifted=False)

    mc_seg = ds_test.project_mc_result(seg_id_test, mc_node)

    return mc_seg
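# Hedged usage sketch, not part of the original scripts: the dataset names,
# seg ids and the output path are placeholders; mc_params is assumed to be the
# same parameter object passed to the other workflow calls in this file.
def example_neuroproof_lmc(mc_params):
    local_feats = ['raw', 'prob', 'reg']   # local features, as in run_mc below
    lifted_feats = ['cluster', 'reg']      # lifted features, as in run_lmc below
    seg = neuroproof_lmc('neuroproof_train', 'neuroproof_test', 0, 0,
                         local_feats, lifted_feats, mc_params)
    vigra.writeHDF5(seg, './neuroproof_lmc_seg.h5', 'data')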
def isbi12_multicut(ds_train_str, ds_test_str,
        seg_id_train, seg_id_test,
        local_feats_list, mc_params):

    meta.load()
    ds_train = meta.get_dataset(ds_train_str)
    ds_test = meta.get_dataset(ds_test_str)

    mc_node, mc_edges, mc_energy, t_inf = multicut_workflow(
            ds_train, ds_test,
            seg_id_train, seg_id_test,
            local_feats_list, mc_params)

    if ds_test_str == "isbi2012_train":
        return eval_lazy(mc_edges, ds_test._rag(seg_id_test))

    else:
        assert ds_test_str == "isbi2012_test"
        res_folder = "/home/consti/Work/nature_experiments/results/isbi12"
        mc_seg = ds_test.project_mc_result(seg_id_test, mc_node)

        # save segmentation result
        seg_name = "_".join(["mcresult", str(seg_id_test), "seg"]) + ".h5"
        seg_path = os.path.join(res_folder, seg_name)
        vigra.writeHDF5(mc_seg, seg_path, "data")

        # save binary edges
        edge_name = "_".join(["mcresult", str(seg_id_test), "edges"]) + ".tif"
        edge_path = os.path.join(res_folder, edge_name)
        edge_vol = edges_to_binary(ds_test._rag(seg_id_test), mc_edges)
        vigra.impex.writeVolume(edge_vol, edge_path, '', dtype=np.uint8)
        return 0, 0
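# Hedged sketch, not part of the original script: reload the segmentation
# written above to check it; the path follows the hard-coded res_folder and
# naming scheme used in isbi12_multicut.
def example_check_isbi12_result(seg_id_test):
    res_folder = "/home/consti/Work/nature_experiments/results/isbi12"
    seg_path = os.path.join(
        res_folder, "_".join(["mcresult", str(seg_id_test), "seg"]) + ".h5")
    seg = vigra.readHDF5(seg_path, "data")
    # print the volume shape and the number of segments in the result
    print(seg.shape, np.unique(seg).size)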
def run_mc(ds_train_name, ds_test_name, mc_params, save_path):

    assert os.path.exists(os.path.split(save_path)[0]), "Please choose an existing folder to save your results"

    # if you have added multiple segmentations, choose which one the
    # experiments run on via seg_id
    seg_id = 0

    # these strings select the feature types used for the local edge features
    feature_list = ['raw', 'prob', 'reg']

    meta.load()
    ds_train = meta.get_dataset(ds_train_name)
    ds_test  = meta.get_dataset(ds_test_name)

    # use this for running the mc without defected slices
    mc_nodes, _, _, _ = multicut_workflow(
            ds_train, ds_test,
            seg_id, seg_id,
            feature_list, mc_params)

    # use this for running the mc with defected slices
    #mc_nodes, _, _, _ = multicut_workflow_with_defect_correction(
    #        ds_train, ds_test,
    #        seg_id, seg_id,
    #        feature_list, mc_params)

    segmentation = ds_test.project_mc_result(seg_id, mc_nodes)
    vigra.writeHDF5(segmentation, save_path, 'data', compression='gzip')
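# Hedged usage sketch, not part of the original script: the dataset names and
# save path are placeholders; mc_params is assumed to be constructed the same
# way as for the other workflow calls in this file.
def example_run_mc(mc_params):
    save_path = './mc_segmentation.h5'
    run_mc('my_trainset', 'my_testset', mc_params, save_path)
    # reload the result to verify that it was written as expected
    seg = vigra.readHDF5(save_path, 'data')
    print('multicut segmentation with', np.unique(seg).size, 'segments')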
def view_test(res1, res2):
    ds = meta.get_dataset('snemi3d_test')
    #volumina_n_layer([ds.inp(0), ds.inp(1), pm_new, pm_new1], ['raw','pm_old', 'pm_new1', 'pm_new2'])
    #else:
    volumina_n_layer(
        [ds.inp(0), ds.inp(1), ds.seg(0), res1, res2],
        ['raw', 'pmap', 'ws', 'curr_res', 'best_res'])
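# Hedged usage sketch, not part of the original script: the two result files
# are placeholders and are assumed to hold segmentations stored under the
# 'data' key, as written by run_mc above.
def example_view_test():
    curr_res = vigra.readHDF5('./mc_segmentation.h5', 'data')
    best_res = vigra.readHDF5('./best_segmentation.h5', 'data')
    view_test(curr_res, best_res)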
def snemi3d_mc(ds_train_str, ds_test_str, seg_id_train, seg_id_test,
               local_feats_list, mc_params):

    meta.load()

    ds_train = meta.get_dataset(ds_train_str)
    ds_test = meta.get_dataset(ds_test_str)

    mc_node, mc_edges, mc_energy, t_inf = multicut_workflow(
        ds_train, ds_test, seg_id_train, seg_id_test, local_feats_list,
        mc_params)

    mc_seg = ds_test.project_mc_result(seg_id_test, mc_node)

    # multicut energy and inference runtime
    print(mc_energy, t_inf)

    return mc_seg
def snemi3d_lmc(ds_train_str, ds_test_str, seg_id_train, seg_id_test,
                local_feats_list, lifted_feats_list, mc_params):

    meta.load()

    ds_train = meta.get_dataset(ds_train_str)
    ds_test = meta.get_dataset(ds_test_str)
    # precompute the filter responses needed for the lifted features
    # (presumably input 0 = raw data and input 1 = probability map)
    ds_train.make_filters(0, mc_params.anisotropy_factor)
    ds_test.make_filters(0, mc_params.anisotropy_factor)
    ds_train.make_filters(1, mc_params.anisotropy_factor)
    ds_test.make_filters(1, mc_params.anisotropy_factor)
    mc_node, mc_edges, mc_energy, t_inf = lifted_multicut_workflow(
        ds_train, ds_test, seg_id_train, seg_id_test, local_feats_list,
        lifted_feats_list, mc_params)

    mc_seg = ds_test.project_mc_result(seg_id_test, mc_node)

    return mc_seg
def neuroproof_mc(ds_train_str, ds_test_str,
        seg_id_train, seg_id_test,
        local_feats_list, mc_params):

    meta.load()

    ds_train = meta.get_dataset(ds_train_str)
    ds_test = meta.get_dataset(ds_test_str)

    mc_node, mc_edges, mc_energy, t_inf = multicut_workflow(
            ds_train, ds_test,
            seg_id_train, seg_id_test,
            local_feats_list, mc_params)

    # number of distinct node labels (segments) in the multicut solution
    print(np.unique(mc_node).shape)

    mc_seg = ds_test.project_mc_result(seg_id_test, mc_node)

    return mc_seg
def run_lmc(ds_train_name, ds_test_name, mc_params, save_path):

    assert os.path.exists(os.path.split(save_path)[0]), "Please choose an existing folder to save your results"

    # if you have added multiple segmentations, choose which one the
    # experiments run on via seg_id
    seg_id = 0

    # these strings select the feature types used for the local edge features
    feature_list = ['raw', 'prob', 'reg']

    # these strings select the feature types used for the lifted edges
    feature_list_lifted = ['cluster', 'reg']

    # this factor determines the weighting of lifted vs. local edge costs
    gamma = 2.

    meta.load()
    ds_train = meta.get_dataset(ds_train_name)
    ds_test  = meta.get_dataset(ds_test_name)

    # need to make filters for the trainset beforehand
    ds_train.make_filters(0, mc_params.anisotropy_factor)
    ds_train.make_filters(1, mc_params.anisotropy_factor)

    # use this for running the mc without defected slices
    mc_nodes, _, _, _ = lifted_multicut_workflow(
            ds_train, ds_test,
            seg_id, seg_id,
            feature_list, feature_list_lifted,
            mc_params, gamma=gamma)

    # use this for running the mc with defected slices
    #mc_nodes, _, _, _ = lifted_multicut_workflow_with_defect_correction(
    #        ds_train, ds_test,
    #        seg_id, seg_id,
    #        feature_list, feature_list_lifted,
    #        mc_params, gamma = gamma)

    segmentation = ds_test.project_mc_result(seg_id, mc_nodes)
    vigra.writeHDF5(segmentation, save_path, 'data', compression='gzip')
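# Hedged sketch, not part of the original script: load the segmentation written
# by run_lmc and inspect it next to the raw data and probability map with the
# volumina viewer used by the view_* functions in this file; the dataset name
# and result path are placeholders.
def example_view_lmc_result(ds_test_name, save_path):
    meta.load()
    ds_test = meta.get_dataset(ds_test_name)
    seg = vigra.readHDF5(save_path, 'data')
    volumina_n_layer(
        [ds_test.inp(0), ds_test.inp(1), seg],
        ['raw', 'pmap', 'lmc_result'])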
def view_test_pmaps(new_pmaps):
    ds = meta.get_dataset('snemi3d_test')

    raw = ds.inp(0)
    pm_old = ds.inp(1)
    pm_2d = vigra.readHDF5(
        '/home/constantin/Work/neurodata_hdd/snemi3d_data/probabilities/pmaps_icv2_test.h5',
        'data')
    data = [raw, pm_old, pm_2d]
    data.extend(new_pmaps)
    labels = [
        'raw', '3d_v2', '2d', '3d_v3_i1', '3d_v3_i2', '3d_v3_i3', 'ensemble'
    ]
    volumina_n_layer(data, labels)
def view_train():
    ds = meta.get_dataset('snemi3d_train')
    pmap = vigra.readHDF5('/home/constantin/Downloads/traininf-cst-inv.h5',
                          'data')
    volumina_n_layer([ds.inp(0), ds.inp(1), pmap, ds.seg(0), ds.gt()])
        i += 1

    mean = np.mean(accuracies)
    std = np.std(accuracies)
    logging.info("accuracy mean: %s", mean)
    logging.info("accuracy std: %s", std)

    mean_xy = np.mean(accuracies_xy)
    std_xy = np.std(accuracies_xy)

    mean_z = np.mean(accuracies_z)
    std_z = np.std(accuracies_z)

    return mean, std, mean_xy, std_xy, mean_z, std_z


if __name__ == '__main__':

    meta.load()
    ds = meta.get_dataset("snemi3d_train")

    local_feats_list_xy = ("prob", "curve")
    local_feats_list_z = ("prob", )

    #print("Results single RF")
    #print(cv_singlerf(ds, local_feats_list_z, 1))

    print("Results two RF")
    print(cv_tworf(ds, local_feats_list_xy, local_feats_list_z, 1))
def regfeats():
    meta.load()
    ds = meta.get_dataset("snemi3d_train")