# NOTE(review): this chunk starts inside an (unseen) pickle-loading loop —
# the `break` below targets a `while`/`for` header outside this view, and
# `f`, `args` and `path_out_file` are bound earlier in the file.
try:
    args.append(pkl.load(f))
except EOFError:
    # End of the pickled input stream: stop collecting job arguments.
    break
# Dataset voxel scaling read from the global configuration.
scaling = global_params.config.entries['Dataset']['scaling']
# Each `cc` is a (networkx) connected-component graph whose nodes are
# supervoxel IDs and whose edges form the region adjacency graph (RAG).
for cc in args:
    svixs = list(cc.nodes())
    # Smallest supervoxel ID is used as the SSV identifier — presumably the
    # project convention for the "gliaremoval" version; verify against callers.
    cc_ix = np.min(svixs)
    sso = SuperSegmentationObject(cc_ix, version="gliaremoval", nb_cpus=2,
                                  working_dir=global_params.config.working_dir,
                                  create=True, scaling=scaling, sv_ids=svixs)
    # Rebuild the RAG with SegmentationObjects ("sv") instead of raw IDs and
    # attach it to the SSO before splitting.
    so_cc = nx.Graph()
    for e in cc.edges():
        so_cc.add_edge(sso.get_seg_obj("sv", e[0]),
                       sso.get_seg_obj("sv", e[1]))
    sso._rag = so_cc
    # Pre-initialize the supervoxel objects and cache them on the SSO.
    sd = sos_dict_fact(svixs)
    sos = init_sos(sd)
    sso._objects["sv"] = sos
    try:
        sso.gliasplit(verbose=False)
    except Exception as e:
        # Best effort: report the failing component and continue with the rest.
        print("\n-------------------------------------\n"
              "Splitting of SSV %d failed with %s."
              "\n-------------------------------------\n" % (cc_ix, e))
# Write a dummy result so the job framework sees the task as finished.
with open(path_out_file, "wb") as f:
    pkl.dump("0", f)
# NOTE(review): this chunk starts mid `try:` block inside an (unseen)
# pickle-loading loop — the matching `try:` header and the loop the `break`
# targets are outside this view; `f`, `args` and `path_out_file` are bound
# earlier in the file.
    args.append(pkl.load(f))
except EOFError:
    # End of the pickled input stream: stop collecting job arguments.
    break
# Dataset voxel scaling read from the global configuration (dict-style access,
# unlike the `.entries[...]` variant elsewhere in this file).
scaling = global_params.config['scaling']
# TODO: This could be chunked by loading `mesh_bb` and glia prob. prediction cache arrays
#  (might have to be created via `dataset_analysis`)
# Each `cc` is a (networkx) connected-component graph whose nodes are
# supervoxel IDs and whose edges form the region adjacency graph (RAG).
for cc in args:
    svixs = list(cc.nodes())
    # Smallest supervoxel ID is used as the SSV identifier — presumably the
    # project convention for the "gliaremoval" version; verify against callers.
    cc_ix = np.min(svixs)
    sso = SuperSegmentationObject(cc_ix, version="gliaremoval", nb_cpus=1,
                                  working_dir=global_params.config.working_dir,
                                  create=True, scaling=scaling, sv_ids=svixs)
    # Rebuild the RAG with SegmentationObjects ("sv") instead of raw IDs and
    # attach it to the SSO before splitting.
    so_cc = nx.Graph()
    for e in cc.edges():
        so_cc.add_edge(sso.get_seg_obj("sv", e[0]),
                       sso.get_seg_obj("sv", e[1]))
    sso._rag = so_cc
    # Pre-initialize the supervoxel objects and cache them on the SSO.
    sd = sos_dict_fact(svixs)
    sos = init_sos(sd)
    sso._objects["sv"] = sos
    # Load cached attributes, then split; `recompute=False` reuses existing
    # results where available. NOTE(review): unlike the other variant in this
    # file, a `gliasplit` failure here is NOT caught and will abort the job.
    sso.load_attr_dict()
    sso.gliasplit(verbose=False, recompute=False)
# Write a dummy result so the job framework sees the task as finished.
with open(path_out_file, "wb") as f:
    pkl.dump("0", f)