# --- first worker script: compute locations per supervoxel chunk ---
so_chunk_paths = args[0]
so_kwargs = args[1]
working_dir = so_kwargs['working_dir']
global_params.wd = working_dir
# TODO: preprocess meshes in case they don't exist and then load the mesh dict
#  next to the attribute dict
for p in so_chunk_paths:
    # get SV IDs stored in this storage
    attr_dc_p = p + "/attr_dict.pkl"
    mesh_dc_p = p + "/mesh.pkl"
    ad = AttributeDict(attr_dc_p, disable_locking=True)
    md = MeshStorage(mesh_dc_p, disable_locking=True)
    svixs = list(ad.keys())
    # replace SO objects by using rep_coord and IDs stored in the attribute dict
    sd = sos_dict_fact(svixs, **so_kwargs)
    sos = init_sos(sd)
    # compute locations and reuse already processed meshes
    loc_dc_p = p + "/locations.pkl"
    loc_dc = CompressedStorage(loc_dc_p, disable_locking=True)
    for so in sos:
        try:
            ix = so.id
            if ix not in md.keys():
                verts = so.mesh[1].reshape(-1, 3)
            else:
                verts = md[ix][1].reshape(-1, 3)
            # fall back to the representative coordinate if the mesh is empty
            if len(verts) == 0:
                coords = np.array([so.rep_coord, ], dtype=np.float32)
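            # --- Hedged continuation sketch: the original fragment is cut off
            # at this point. Plausibly, non-empty meshes get locations sampled
            # from their surface, results are stored per SV ID, and the storage
            # is pushed to disk. `sample_locations_from_verts` is a hypothetical
            # placeholder for the sampling routine the real script calls, and
            # `loc_dc.push()` is an assumed persistence call of the storage
            # class.
            else:
                coords = sample_locations_from_verts(verts).astype(np.float32)  # hypothetical helper
            loc_dc[ix] = [coords]
        except Exception as err:
            print("Location computation failed for SV %d: %s" % (so.id, err))
    loc_dc.push()  # assumed: write accumulated locations back to disk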
# --- second worker script: glia splitting of supervoxel connected components ---
# (this fragment begins inside the argument-loading loop; see the preamble
#  sketch below)
        try:
            args.append(pkl.load(f))
        except EOFError:
            break

scaling = global_params.config.entries['Dataset']['scaling']
for cc in args:
    # each `cc` is a networkx graph of supervoxel IDs forming one connected
    # component; the smallest SV ID serves as the SSV ID
    svixs = list(cc.nodes())
    cc_ix = np.min(svixs)
    sso = SuperSegmentationObject(cc_ix, version="gliaremoval", nb_cpus=2,
                                  working_dir=global_params.config.working_dir,
                                  create=True, scaling=scaling, sv_ids=svixs)
    # rebuild the RAG with SegmentationObjects as nodes
    so_cc = nx.Graph()
    for e in cc.edges():
        so_cc.add_edge(sso.get_seg_obj("sv", e[0]),
                       sso.get_seg_obj("sv", e[1]))
    sso._rag = so_cc
    sd = sos_dict_fact(svixs)
    sos = init_sos(sd)
    sso._objects["sv"] = sos
    try:
        sso.gliasplit(verbose=False)
    except Exception as err:
        print("\n-------------------------------------\n"
              "Splitting of SSV %d failed with %s."
              "\n-------------------------------------\n" % (cc_ix, err))
# signal completion to the batchjob framework
with open(path_out_file, "wb") as f:
    pkl.dump("0", f)
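# ---------------------------------------------------------------------------
# Hedged sketch of the batchjob preamble that both worker fragments above
# assume: each script is handed a pickle file of arguments and an output path
# on the command line, and reads pickled objects until EOF. The SyConn import
# paths below are assumptions and may differ between versions; sos_dict_fact
# and init_sos come from a SyConn helper module whose path also varies.
import sys
import pickle as pkl

import networkx as nx
import numpy as np

from syconn import global_params
from syconn.backend.storage import AttributeDict, CompressedStorage, MeshStorage  # path assumed
from syconn.reps.super_segmentation import SuperSegmentationObject  # path assumed

path_storage_file = sys.argv[1]  # pickled input arguments
path_out_file = sys.argv[2]      # where the "done" flag / result is written

with open(path_storage_file, 'rb') as f:
    args = []
    while True:
        try:
            args.append(pkl.load(f))  # matches the truncated loop at the top
        except EOFError:              # of the second fragment
            break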