Example #1
def setup_s3(params, inputs):
    import random
    from os.path import join
    from subscripts.utilities import smart_remove, smart_mkdir, get_edges_from_file
    sdir = params['sdir']
    stdout = params['stdout']
    pbtx_edge_list = params['pbtx_edge_list']
    pbtx_edge_chunk_size = params['pbtx_edge_chunk_size']
    pbtx_random_seed = params['pbtx_random_seed']
    params['subject_random_seed'] = random.randint(
        0, 999999) if pbtx_random_seed is None else pbtx_random_seed
    pbtk_dir = join(sdir, "EDI", "PBTKresults")
    connectome_dir = join(sdir, "EDI", "CNTMresults")
    consensus_dir = join(pbtk_dir, "twoway_consensus_edges")
    edi_maps = join(sdir, "EDI", "EDImaps")
    tmp_dir = join(sdir, "tmp")
    smart_remove(tmp_dir)
    smart_remove(pbtk_dir)
    smart_remove(connectome_dir)
    smart_remove(consensus_dir)
    smart_mkdir(tmp_dir)
    smart_mkdir(pbtk_dir)
    smart_mkdir(connectome_dir)
    smart_mkdir(consensus_dir)

    # Probtrackx
    s3_1_future = s3_1_start(params, inputs=inputs)
    s3_2_futures = []
    edges_chunk = []
    for edge in get_edges_from_file(pbtx_edge_list):
        edges_chunk.append(edge)
        if len(edges_chunk) >= pbtx_edge_chunk_size:
            s3_2_futures.append(
                s3_2_probtrackx(params, edges_chunk, inputs=[s3_1_future]))
            edges_chunk = []
    if edges_chunk:  # run last chunk if it's not empty
        s3_2_futures.append(
            s3_2_probtrackx(params, edges_chunk, inputs=[s3_1_future]))
    s3_3_future = s3_3_combine(params, inputs=s3_2_futures)

    # EDI consensus
    s3_4_futures = []
    edges_chunk = []
    consensus_edges = []
    for edge in get_edges_from_file(pbtx_edge_list):
        a, b = edge
        if [a, b] in consensus_edges or [b, a] in consensus_edges:
            continue
        edges_chunk.append(edge)
        consensus_edges.append(edge)
        if len(edges_chunk) >= pbtx_edge_chunk_size:
            s3_4_futures.append(
                s3_4_edi_consensus(params, edges_chunk, inputs=[s3_3_future]))
            edges_chunk = []
    if edges_chunk:  # run last chunk if it's not empty
        s3_4_futures.append(
            s3_4_edi_consensus(params, edges_chunk, inputs=[s3_3_future]))

    return s3_5_edi_combine(params, consensus_edges, inputs=s3_4_futures)
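
The chunking loops above fan the probtrackx work out in fixed-size batches of edges, each batch becoming one task that depends on s3_1_start. A minimal standalone sketch of the same batching logic on a plain list (chunk_edges is a hypothetical helper, not part of the pipeline):

def chunk_edges(edges, chunk_size):
    # Yield successive batches of at most chunk_size edges.
    chunk = []
    for edge in edges:
        chunk.append(edge)
        if len(chunk) >= chunk_size:
            yield chunk
            chunk = []
    if chunk:  # emit the final, smaller batch, mirroring the "run last chunk" branch above
        yield chunk

# Example: five edges split into batches of at most two
print(list(chunk_edges([('a', 'b'), ('a', 'c'), ('b', 'c'), ('b', 'd'), ('c', 'd')], 2)))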
Example #2
def setup_s3_alt(params, inputs):
    from subscripts.utilities import get_edges_from_file
    sdir = params['sdir']
    pbtx_edge_list = params['pbtx_edge_list']
    volumes = set()
    for edge in get_edges_from_file(pbtx_edge_list):
        volumes.add(edge[0])
        volumes.add(edge[1])
    # run_vtk = join(sdir, 'run_vtk.py')
    # copyfile('subscripts/run_vtk.py', run_vtk)
    s3_1_future = s3_1_start(params, volumes, inputs=inputs)
    s3_2_futures = []
    for vol in volumes:
        s3_2_futures.append(s3_2_probtrackx(params, vol, volumes, inputs=[s3_1_future]))
    return s3_3_combine(params, volumes, inputs=s3_2_futures)
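
Both variants assume pbtx_edge_list is a text file of region pairs that get_edges_from_file turns into (a, b) tuples; setup_s3_alt only needs the distinct endpoints. A small illustrative sketch of that assumption, with made-up file contents and a stand-in parser (the real file format and parser may differ):

# Hypothetical edge-list contents: one "source,target" pair per line
edge_list_text = """lh_precentral,lh_postcentral
lh_precentral,rh_precentral
"""

def parse_edges(text):
    # Illustrative stand-in for get_edges_from_file: yield (a, b) region pairs.
    for line in text.splitlines():
        line = line.strip()
        if line:
            a, b = line.split(',', 1)
            yield a.strip(), b.strip()

volumes = set()
for a, b in parse_edges(edge_list_text):
    volumes.update((a, b))
print(sorted(volumes))  # each region volume appears once, however many edges touch it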
Example #3
def s3_5_edi_combine(params, consensus_edges, inputs=[]):
    import time,tarfile
    from subscripts.utilities import run,smart_remove,smart_mkdir,write,record_apptime,record_finish, \
                                     update_permissions,get_edges_from_file,strip_trailing_slash
    from os.path import join,exists,basename
    from shutil import copyfile
    pbtx_edge_list = params['pbtx_edge_list']
    sdir = params['sdir']
    stdout = params['stdout']
    container = params['container']
    start_time = time.time()
    pbtk_dir = join(sdir,"EDI","PBTKresults")
    connectome_dir = join(sdir,"EDI","CNTMresults")
    compress_pbtx_results = params['compress_pbtx_results']
    consensus_dir = join(pbtk_dir,"twoway_consensus_edges")
    edi_maps = join(sdir,"EDI","EDImaps")
    edge_total = join(edi_maps,"FAtractsumsTwoway.nii.gz")
    tract_total = join(edi_maps,"FAtractsumsRaw.nii.gz")
    smart_remove(edi_maps)
    smart_mkdir(edi_maps)

    # Collect number of probtrackx tracts per voxel
    for edge in get_edges_from_file(pbtx_edge_list):
        a, b = edge
        a_to_b_formatted = "{}_s2fato{}_s2fa.nii.gz".format(a,b)
        a_to_b_file = join(pbtk_dir,a_to_b_formatted)
        if not exists(tract_total):
            copyfile(a_to_b_file, tract_total)
        else:
            run("fslmaths {0} -add {1} {1}".format(a_to_b_file, tract_total), params)

    # Collect number of parcel-to-parcel edges per voxel
    for edge in consensus_edges:
        a, b = edge
        consensus = join(consensus_dir, "{}_to_{}.nii.gz".format(a,b))
        if not exists(consensus):
            write(stdout,"{} has been thresholded. See {} for details".format(edge, join(pbtk_dir, "zerosl.txt")))
            continue
        if not exists(edge_total):
            copyfile(consensus, edge_total)
        else:
            run("fslmaths {0} -add {1} {1}".format(consensus, edge_total), params)
    if not exists(edge_total):
        write(stdout, "Error: Failed to generate {}".format(edge_total))

    if compress_pbtx_results:
        pbtk_archive = strip_trailing_slash(pbtk_dir) + '.tar.gz'
        connectome_archive = strip_trailing_slash(connectome_dir) + '.tar.gz'
        write(stdout,"\nCompressing probtrackx output at {} and {}".format(pbtk_archive, connectome_archive))
        smart_remove(pbtk_archive)
        smart_remove(connectome_archive)
        with tarfile.open(pbtk_archive, mode='w:gz') as archive:
            archive.add(pbtk_dir, recursive=True, arcname=basename(pbtk_dir))
        with tarfile.open(connectome_archive, mode='w:gz') as archive:
            archive.add(connectome_dir, recursive=True, arcname=basename(connectome_dir))
        smart_remove(pbtk_dir)
        smart_remove(connectome_dir)

    update_permissions(params)
    record_apptime(params, start_time, 4)
    record_finish(params)
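
The two accumulation loops above follow the same copy-or-add pattern: the first per-edge image seeds the total via copyfile, and every later image is summed into it voxel-wise with "fslmaths A -add B B". A minimal numpy sketch of that pattern (illustrative only; the pipeline itself shells out to FSL through run()):

import numpy as np

# Hypothetical per-edge voxel maps standing in for the {a}_s2fato{b}_s2fa.nii.gz images
edge_maps = [np.array([0, 1, 1]), np.array([1, 0, 1]), np.array([1, 1, 0])]

tract_total = None
for edge_map in edge_maps:
    if tract_total is None:
        tract_total = edge_map.copy()         # first image: plain copy, like copyfile(...)
    else:
        tract_total = tract_total + edge_map  # later images: voxel-wise add, like fslmaths -add
print(tract_total)  # per-voxel count of edges whose tracts pass through it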
Example #4
def s3_3_combine(params, inputs=[]):
    import numpy as np
    import scipy.io
    import time
    from subscripts.utilities import smart_remove,record_apptime,record_finish,update_permissions,is_float,write,get_edges_from_file
    from os.path import join,exists
    from shutil import copyfile
    sdir = params['sdir']
    stdout = params['stdout']
    pbtx_sample_count = int(params['pbtx_sample_count'])
    pbtx_edge_list = params['pbtx_edge_list']
    connectome_idx_list = params['connectome_idx_list']
    connectome_idx_list_copy = join(sdir, 'connectome_idxs.txt')
    start_time = time.time()
    connectome_dir = join(sdir,"EDI","CNTMresults")
    oneway_list = join(sdir, "connectome_{}samples_oneway.txt".format(pbtx_sample_count))
    twoway_list = join(sdir, "connectome_{}samples_twoway.txt".format(pbtx_sample_count))
    oneway_nof = join(sdir, "connectome_{}samples_oneway_nof.mat".format(pbtx_sample_count)) # nof = number of fibers
    twoway_nof = join(sdir, "connectome_{}samples_twoway_nof.mat".format(pbtx_sample_count))
    oneway_nof_normalized = join(sdir, "connectome_{}samples_oneway_nofn.mat".format(pbtx_sample_count)) # nofn = number of fibers, normalized
    twoway_nof_normalized = join(sdir, "connectome_{}samples_twoway_nofn.mat".format(pbtx_sample_count))
    smart_remove(oneway_list)
    smart_remove(twoway_list)
    smart_remove(oneway_nof_normalized)
    smart_remove(twoway_nof_normalized)
    smart_remove(oneway_nof)
    smart_remove(twoway_nof)
    oneway_edges = {}
    twoway_edges = {}

    copyfile(connectome_idx_list, connectome_idx_list_copy) # give each subject a copy for reference

    vol_idxs = {}
    with open(connectome_idx_list) as f:
        lines = [x.strip() for x in f.readlines() if x]
        max_idx = -1
        for line in lines:
            vol, idx = line.split(',', 1)
            idx = int(idx)
            vol_idxs[vol] = idx
            if idx > max_idx:
                max_idx = idx
        oneway_nof_normalized_matrix = np.zeros((max_idx+1, max_idx+1))
        oneway_nof_matrix = np.zeros((max_idx+1, max_idx+1))
        twoway_nof_normalized_matrix = np.zeros((max_idx+1, max_idx+1))
        twoway_nof_matrix = np.zeros((max_idx+1, max_idx+1))

    for edge in get_edges_from_file(pbtx_edge_list):
        a, b = edge
        edge_file = join(connectome_dir, "{}_to_{}.dot".format(a, b))
        with open(edge_file) as f:
            chunks = [x.strip() for x in f.read().strip().split(' ') if x]
            a_to_b = (chunks[0], chunks[1])
            b_to_a = (chunks[1], chunks[0])
            waytotal_count = float(chunks[2])
            fdt_count = float(chunks[3])
            if b_to_a in twoway_edges:
                twoway_edges[b_to_a][0] += waytotal_count
                twoway_edges[b_to_a][1] += fdt_count
            else:
                twoway_edges[a_to_b] = [waytotal_count, fdt_count]
            oneway_edges[a_to_b] = [waytotal_count, fdt_count]

    for a_to_b in oneway_edges:
        a = a_to_b[0]
        b = a_to_b[1]
        for vol in a_to_b:
            if vol not in vol_idxs:
                write(stdout, 'Error: could not find {} in connectome idxs'.format(vol))
                break
        else:
            write(oneway_list, "{} {} {} {}".format(a, b, oneway_edges[a_to_b][0], oneway_edges[a_to_b][1]))
            oneway_nof_matrix[vol_idxs[a]][vol_idxs[b]] = oneway_edges[a_to_b][0]
            oneway_nof_normalized_matrix[vol_idxs[a]][vol_idxs[b]] = oneway_edges[a_to_b][1]

    for a_to_b in twoway_edges:
        a = a_to_b[0]
        b = a_to_b[1]
        for vol in a_to_b:
            if vol not in vol_idxs:
                write(stdout, 'Error: could not find {} in connectome idxs'.format(vol))
                break
        else:
            write(twoway_list, "{} {} {} {}".format(a, b, twoway_edges[a_to_b][0], twoway_edges[a_to_b][1]))
            twoway_nof_matrix[vol_idxs[a]][vol_idxs[b]] = twoway_edges[a_to_b][0]
            twoway_nof_normalized_matrix[vol_idxs[a]][vol_idxs[b]] = twoway_edges[a_to_b][1]
    scipy.io.savemat(oneway_nof, {'data': oneway_nof_matrix})
    scipy.io.savemat(oneway_nof_normalized, {'data': oneway_nof_normalized_matrix})
    scipy.io.savemat(twoway_nof, {'data': twoway_nof_matrix})
    scipy.io.savemat(twoway_nof_normalized, {'data': twoway_nof_normalized_matrix})

    update_permissions(params)
    record_apptime(params, start_time, 2)
    record_finish(params)
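
s3_3_combine indexes its dense connectivity matrices by the row numbers listed in connectome_idx_list, parsed as "volume,index" per line. A short sketch of that indexing with made-up file contents (the region names and streamline count below are illustrative, not pipeline output):

import numpy as np

# Hypothetical connectome_idx_list contents: "volume_name,matrix_index" per line
idx_text = """lh_precentral,0
lh_postcentral,1
rh_precentral,2
"""

vol_idxs = {}
max_idx = -1
for line in idx_text.splitlines():
    if line.strip():
        vol, idx = line.split(',', 1)
        vol_idxs[vol] = int(idx)
        max_idx = max(max_idx, int(idx))

nof_matrix = np.zeros((max_idx + 1, max_idx + 1))
# A hypothetical edge with 42 streamlines between two regions lands at [row, col]:
nof_matrix[vol_idxs['lh_precentral']][vol_idxs['lh_postcentral']] = 42
print(nof_matrix)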