Example #1
import os

import vigra

# NOTE: this import is an assumption; MetaSet, ExperimentSettings and the
# (lifted_)multicut_workflow functions are expected to come with the multicut
# pipeline package. `init` and `process_command_line` are helper functions of
# the example script (a sketch of the latter is given below).
from multicut_src import MetaSet, ExperimentSettings
from multicut_src import multicut_workflow, lifted_multicut_workflow


def main():
    args = process_command_line()

    out_folder = args.output_folder
    assert os.path.exists(out_folder), \
        "Please choose an existing folder for the output"
    cache_folder = os.path.join(out_folder, "cache")

    # init the cache when running experiments the first time
    # if the meta set wasn't saved yet, we need to recreate the cache
    if not os.path.exists(os.path.join(cache_folder, "meta_dict.pkl")):
        init(args.data_folder, cache_folder, args.snemi_mode)

    meta = MetaSet(cache_folder)
    meta.load()

    # fetch the cached training and test datasets registered during init
    ds_train = meta.get_dataset("ds_train")
    ds_test = meta.get_dataset("ds_test")

    # experiment settings
    exp_params = ExperimentSettings()

    exp_params.set_rfcache(os.path.join(cache_folder, "rf_cache"))

    # use extra 2d features
    exp_params.set_use2d(True)

    # parameters for learning
    exp_params.set_fuzzy_learning(True)
    exp_params.set_ntrees(500)

    # parameters for lifted multicut
    exp_params.set_lifted_neighborhood(3)

    # features used
    local_feats_list = ("raw", "prob", "reg", "topo")
    # we don't use the multicut feature here, because it can take too long
    lifted_feats_list = ("cluster", "reg")

    # mode dependent settings: anisotropy, edge weighting scheme, solver and
    # the weight gamma that balances lifted against local edge costs
    if args.snemi_mode:
        exp_params.set_anisotropy(5.)
        exp_params.set_weighting_scheme("all")
        exp_params.set_solver("multicut_exact")
        gamma = 10000.
    else:
        exp_params.set_anisotropy(25.)
        exp_params.set_weighting_scheme("z")
        exp_params.set_solver("multicut_fusionmoves")
        gamma = 2.

    # id of the (over)segmentation to use from the datasets
    seg_id = 0

    if args.use_lifted:
        print "Starting Lifted Multicut Workflow"

        # have to make filters first due to cutouts...
        ds_train.make_filters(0, exp_params.anisotropy_factor)
        ds_train.make_filters(1, exp_params.anisotropy_factor)
        ds_test.make_filters(0, exp_params.anisotropy_factor)
        ds_test.make_filters(1, exp_params.anisotropy_factor)

        mc_node, mc_edges, mc_energy, t_inf = lifted_multicut_workflow(
            ds_train,
            ds_test,
            seg_id,
            seg_id,
            local_feats_list,
            lifted_feats_list,
            exp_params,
            gamma=gamma,
            weight_z_lifted=True)

        save_path = os.path.join(out_folder,
                                 "lifted_multicut_segmentation.tif")

    else:
        print "Starting Multicut Workflow"
        mc_node, mc_edges, mc_energy, t_inf = multicut_workflow(
            ds_train, ds_test, seg_id, seg_id, local_feats_list, exp_params)

        save_path = os.path.join(out_folder, "multicut_segmentation.tif")

    # project the multicut node result back to a volume segmentation
    mc_seg = ds_test.project_mc_result(seg_id, mc_node)

    print "Saving Result to", save_path
    vigra.impex.writeVolume(mc_seg, save_path, '')
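

# A minimal sketch of the command line parsing used above; the original script
# defines process_command_line elsewhere, so the argument names and flags here
# are assumptions derived from how `args` is used in main().
def process_command_line():
    import argparse
    parser = argparse.ArgumentParser(
        description="Run the (lifted) multicut segmentation workflow")
    parser.add_argument("data_folder", type=str,
                        help="folder containing the input data")
    parser.add_argument("output_folder", type=str,
                        help="existing folder for the segmentation output")
    parser.add_argument("--snemi_mode", action="store_true",
                        help="use the settings tuned for SNEMI3D-like data")
    parser.add_argument("--use_lifted", action="store_true",
                        help="run the lifted multicut instead of the multicut")
    return parser.parse_args()


if __name__ == '__main__':
    main()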


Example #2
# NOTE: this second snippet starts mid-script in the original; the wrapper
# function and the ExperimentSettings instantiation below are assumptions
# added so the configuration reads as valid, self-contained code. `run_lmc`
# is assumed to be provided by the pipeline.
def run_lmc_example():
    mc_params = ExperimentSettings()

    # the anisotropy of the data, used in the filter calculation:
    # set to 1. for isotropic data, to the actual degree of anisotropy for
    # mildly anisotropic data, or to > 20. to compute the filters in 2d
    mc_params.set_anisotropy(25.)

    # set to true for segmentations with flat superpixels
    mc_params.set_use2d(True)

    # number of threads used for multithreaded computations
    mc_params.set_nthreads(8)

    # number of trees used in the random forest
    mc_params.set_ntrees(200)

    # solver used for the multicut
    mc_params.set_solver("multicut_fusionmoves")
    # pipeline verbosity
    mc_params.set_verbose(True)

    # weighting scheme for the edge costs in the multicut problem
    # set to 'none' for no weighting,
    # 'z', 'xyz' or 'all' for flat superpixels ('z' usually works best),
    # 'all' for isotropic data with 3d superpixels
    mc_params.set_weighting_scheme("z")

    # range of lifted edges
    mc_params.set_lifted_neighborhood(3)

    # path to save the segmentation result; the folder has to exist already
    save_path = '/path/to/mc_result.h5'
    # run the lifted multicut workflow for the datasets named 'my_train' and 'my_test'
    run_lmc('my_train', 'my_test', mc_params, save_path)
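

# hypothetical entry point so the configuration snippet can be run as a script
if __name__ == '__main__':
    run_lmc_example()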