示例#1
0
def _run_huge_ssv_render_worker(q, q_out):
    # Worker loop: consume (index, graph, version) jobs from ``q`` until the
    # sentinel ``-1`` arrives, render each SSV, and report completion on
    # ``q_out``.
    while True:
        job = q.get()
        if job == -1:
            break
        kk, g, version = job
        # Build a temporary SSV from the sorted SV IDs of the sub-graph.
        sorted_ids = np.sort(list(g.nodes()))
        ssv = SuperSegmentationObject(
            sorted_ids[0], working_dir=global_params.config.working_dir,
            version=version, create=False, sv_ids=sorted_ids)
        # The RAG attached to the SSV must hold segmentation objects rather
        # than raw SV IDs, so translate every edge of the input graph.
        sv_graph = nx.Graph()
        for src, dst in g.edges():
            sv_graph.add_edge(ssv.get_seg_obj("sv", src),
                              ssv.get_seg_obj("sv", dst))
        ssv._rag = sv_graph
        ssv.render_views(
            add_cellobjects=False, cellobjects_only=False,
            skip_indexviews=True, woglia=False, overwrite=True,
            qsub_co_jobs=global_params.NGPU_TOTAL)
        # Signal that this job finished.
        q_out.put(0)
示例#2
0
def _run_huge_ssv_render_worker(q: Queue, q_out: Queue):
    """
    Helper method of :func:`~run_glia_rendering`.

    Consumes jobs from the input queue until the sentinel ``-1`` is received,
    renders the views of each super-supervoxel and pushes a completion token
    to the output queue after every job.

    Args:
        q: Input queue.
        q_out: Output queue.

    """
    while True:
        job = q.get()
        if job == -1:
            break
        kk, g, version = job
        # Instantiate the SSV from the sorted SV IDs of this sub-graph.
        sorted_ids = np.sort(list(g.nodes()))
        ssv = SuperSegmentationObject(
            sorted_ids[0], working_dir=global_params.config.working_dir,
            version=version, create=False, sv_ids=sorted_ids)
        # The SSV's RAG must contain segmentation objects, not raw SV IDs;
        # translate every edge accordingly.
        sv_graph = nx.Graph()
        for src, dst in g.edges():
            sv_graph.add_edge(ssv.get_seg_obj("sv", src),
                              ssv.get_seg_obj("sv", dst))
        ssv._rag = sv_graph
        ssv.render_views(
            add_cellobjects=False, cellobjects_only=False,
            skip_indexviews=True, woglia=False, overwrite=True,
            qsub_co_jobs=global_params.config.ngpu_total)
        # Signal completion of this job.
        q_out.put(0)
示例#3
0
def run_glia_rendering():
    """Render SSV views required for glia removal.

    Builds the initial RAG from the configured text file, adds single-SV
    connected components that are missing from it, writes a readable
    ``initial_rag.txt``, renders huge SSVs sequentially on the whole cluster
    and the remaining SSVs via batch jobs, then verifies completeness.

    Raises:
        RuntimeError: If SVs contained in the initial RAG are missing views
            after rendering finished.
    """
    log = initialize_logging('glia_view_rendering',
                             global_params.config.working_dir + '/logs/',
                             overwrite=False)
    np.random.seed(0)

    # view rendering prior to glia removal, choose SSD accordingly.
    # Glia removal is based on the initial RAG and does not require
    # explicitly stored SSVs.
    version = "tmp"

    G = nx.Graph()  # TODO: Add factory method for initial RAG
    with open(global_params.config.init_rag_path, 'r') as f:
        for l in f:
            # raw string avoids the invalid '\d' escape sequence
            edges = [int(v) for v in re.findall(r'(\d+)', l)]
            G.add_edge(edges[0], edges[1])

    # NOTE(review): np.uint was removed in numpy 2.0 — consider np.uint64.
    all_sv_ids_in_rag = np.array(list(G.nodes()), dtype=np.uint)
    log.info("Found {} SVs in initial RAG.".format(len(all_sv_ids_in_rag)))

    # add single SV connected components to initial graph
    sd = SegmentationDataset(obj_type='sv',
                             working_dir=global_params.config.working_dir)
    sv_ids = sd.ids
    diff = np.array(list(set(sv_ids).difference(set(all_sv_ids_in_rag))))
    log.info('Found {} single connected component SVs which were missing'
             ' in initial RAG.'.format(len(diff)))

    for ix in diff:
        G.add_node(ix)

    all_sv_ids_in_rag = np.array(list(G.nodes()), dtype=np.uint)
    log.info("Found {} SVs in initial RAG after adding size-one connected "
             "components. Writing kml text file".format(
                 len(all_sv_ids_in_rag)))

    # write out readable format for 'glia_prediction.py'
    # NOTE(review): nx.connected_component_subgraphs was removed in
    # networkx >= 2.4 — verify the pinned networkx version.
    ccs = [[n for n in cc] for cc in nx.connected_component_subgraphs(G)]
    kml = knossos_ml_from_ccs([np.sort(cc)[0] for cc in ccs], ccs)
    with open(global_params.config.working_dir + "initial_rag.txt", 'w') as f:
        f.write(kml)

    # generate parameter for view rendering of individual SSV
    log.info("Starting view rendering.")
    multi_params = []
    for cc in nx.connected_component_subgraphs(G):
        multi_params.append(cc)
    multi_params = np.array(multi_params)

    # identify huge SSVs and process them individually on whole cluster
    nb_svs = np.array([g.number_of_nodes() for g in multi_params])
    big_ssv = multi_params[nb_svs > RENDERING_MAX_NB_SV]

    for kk, g in enumerate(big_ssv[::-1]):
        # Create SSV object
        sv_ixs = np.sort(list(g.nodes()))
        log.info("Processing SSV [{}/{}] with {} SVs on whole cluster.".format(
            kk + 1, len(big_ssv), len(sv_ixs)))
        sso = SuperSegmentationObject(
            sv_ixs[0],
            working_dir=global_params.config.working_dir,
            version=version,
            create=False,
            sv_ids=sv_ixs)
        # nodes of sso._rag need to be SV
        new_G = nx.Graph()
        for e in g.edges():
            new_G.add_edge(sso.get_seg_obj("sv", e[0]),
                           sso.get_seg_obj("sv", e[1]))
        sso._rag = new_G
        sso.render_views(add_cellobjects=False,
                         cellobjects_only=False,
                         skip_indexviews=True,
                         woglia=False,
                         qsub_pe="openmp",
                         overwrite=True,
                         qsub_co_jobs=global_params.NCORE_TOTAL)

    # render small SSV without overhead and single cpus on whole cluster
    multi_params = multi_params[nb_svs <= RENDERING_MAX_NB_SV]
    np.random.shuffle(multi_params)
    multi_params = chunkify(multi_params, 2000)

    # list of SSV IDs and SSD parameters need to be given to a single QSUB job
    multi_params = [(ixs, global_params.config.working_dir, version)
                    for ixs in multi_params]
    path_to_out = qu.QSUB_script(multi_params,
                                 "render_views_glia_removal",
                                 n_max_co_processes=global_params.NCORE_TOTAL,
                                 pe="openmp",
                                 queue=None,
                                 script_folder=None,
                                 suffix="")

    # check completeness
    sd = SegmentationDataset("sv",
                             working_dir=global_params.config.working_dir)
    res = find_missing_sv_views(sd, woglia=False, n_cores=10)
    missing_not_contained_in_rag = []
    missing_contained_in_rag = []
    for el in res:
        if el not in all_sv_ids_in_rag:
            missing_not_contained_in_rag.append(el)
        else:
            missing_contained_in_rag.append(el)
    if len(missing_not_contained_in_rag):
        # Fixed: the original mixed an unfilled '%d' placeholder with
        # str.format (the count was never substituted) and lacked the space
        # between "initial" and "RAG".
        log.info("{} SVs were not rendered but also not part of the initial "
                 "RAG: {}".format(len(missing_not_contained_in_rag),
                                  missing_not_contained_in_rag))
    if len(missing_contained_in_rag) != 0:
        msg = "Not all SSVs were rendered completely! Missing:\n" \
              "{}".format(missing_contained_in_rag)
        log.error(msg)
        raise RuntimeError(msg)
示例#4
0
import sys

try:
    import cPickle as pkl
except ImportError:
    import pickle as pkl
from syconn.reps.super_segmentation import SuperSegmentationObject

path_storage_file = sys.argv[1]
path_out_file = sys.argv[2]

# Drain the pickle stream: every object that was dumped into the storage
# file is appended to ``args`` until EOF.
args = []
with open(path_storage_file, 'rb') as f:
    while True:
        try:
            args.append(pkl.load(f))
        except EOFError:
            break

ssv_ids, working_dir = args[0], args[1]
for ssv_id in ssv_ids:
    ssv = SuperSegmentationObject(ssv_id, working_dir=working_dir,
                                  enable_locking_so=True)
    ssv.load_attr_dict()
    ssv.render_views(add_cellobjects=True, woglia=True, overwrite=True)

# Write a completion token so the job scheduler can detect success.
with open(path_out_file, "wb") as f:
    pkl.dump("0", f)
示例#5
0
# Drain the pickle stream of job arguments until EOF.
args = []
with open(path_storage_file, 'rb') as f:
    while True:
        try:
            args.append(pkl.load(f))
        except EOFError:
            break

graphs, working_dir, version = args[0], args[1], args[2]
for sv_graph in graphs:
    # only generate SSV temporarily but do not write it to FS
    # corresponding SVs are parsed explicitly ('sv_ids=sv_ixs')
    sorted_ids = np.sort(list(sv_graph.nodes()))
    ssv = SuperSegmentationObject(
        sorted_ids[0], working_dir=working_dir, version=version,
        create=False, sv_ids=sorted_ids, enable_locking_so=True)
    ssv.load_attr_dict()
    # The SSV's RAG must contain segmentation objects instead of raw SV IDs.
    obj_graph = nx.Graph()
    for src, dst in sv_graph.edges():
        obj_graph.add_edge(ssv.get_seg_obj("sv", src),
                           ssv.get_seg_obj("sv", dst))
    ssv._rag = obj_graph
    ssv.render_views(add_cellobjects=False, woglia=False, overwrite=True,
                     skip_indexviews=True)

# Completion token for the job scheduler.
with open(path_out_file, "wb") as f:
    pkl.dump("0", f)
示例#6
0
path_out_file = sys.argv[2]

# Drain the pickle stream of job arguments until EOF.
args = []
with open(path_storage_file, 'rb') as f:
    while True:
        try:
            args.append(pkl.load(f))
        except EOFError:
            break

ssv_entries, sso_kwargs = args[0], args[1]
# Backwards compatibility: a bare string means only the working directory
# was provided.
if type(sso_kwargs) is str:
    sso_kwargs = dict(working_dir=sso_kwargs, enable_locking_so=False)
# Optional third argument overrides the default rendering parameters.
render_kwargs = (args[2] if len(args) == 3
                 else dict(add_cellobjects=True, woglia=True, overwrite=True))
for entry in ssv_entries:
    if np.isscalar(entry):
        # SV IDs will be loaded from the attribute dict.
        ssv_id, sv_ids = entry, None
    else:
        ssv_id, sv_ids = entry[0], entry[1]
    ssv = SuperSegmentationObject(ssv_id, sv_ids=sv_ids, **sso_kwargs)
    ssv.load_attr_dict()
    ssv.render_views(**render_kwargs)

# Completion token for the job scheduler.
with open(path_out_file, "wb") as f:
    pkl.dump("0", f)