Example #1
# ---------------------------------------------------------
tall = time()
tract_results = []

if verbose:
    txt = ("Process running with %d max processes available on %d subjects, "
           "with %d subjects in parallel, each using %d processes"
           % (mp.cpu_count(), np.size(l), subject_processes, function_processes))
    print(txt)
    send_mail(txt, subject="Main process start msg")

duration1 = time()

if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

#    tract_results = pool.starmap_async(create_tracts, [(dwipath, outtrkpath, subject, stepsize, function_processes, strproperty,
#                                                            saved_streamlines, savefa, labelslist, bvec_orient, verbose) for subject in
#                                                           l]).get()
    tract_results = pool.starmap_async(
        evaluate_tracts,
        [(dwipath, outtrkpath, subject, stepsize, saved_streamlines,
          labelslist, outpathpickle, figspath, function_processes, allsave,
          display, strproperty, ratio, verbose) for subject in l]).get()
    pool.close()
else:
    for subject in l:
        tract_results.append(
            evaluate_tracts(dwipath, outtrkpath, subject, stepsize, saved_streamlines,
                            labelslist, outpathpickle, figspath, function_processes,
                            allsave, display, strproperty, ratio, verbose))
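MyPool is never defined in these snippets. It is needed because the workers of a
standard multiprocessing.Pool are daemonic and may not spawn children of their own, so
nesting a function-level pool inside a subject-level pool fails. Below is a widely used
recipe for a nestable pool; it is an assumption about how MyPool is implemented, not
code from the source project.

import multiprocessing
import multiprocessing.pool

class NoDaemonProcess(multiprocessing.Process):
    # always report daemon=False so workers may spawn their own pools
    @property
    def daemon(self):
        return False

    @daemon.setter
    def daemon(self, value):
        pass  # ignore attempts to mark the worker daemonic

class NoDaemonContext(type(multiprocessing.get_context())):
    Process = NoDaemonProcess

class MyPool(multiprocessing.pool.Pool):
    # subclass multiprocessing.pool.Pool (the class), not multiprocessing.Pool
    # (a factory function), and force the non-daemonic context
    def __init__(self, *args, **kwargs):
        kwargs['context'] = NoDaemonContext()
        super().__init__(*args, **kwargs)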
Example #2
for subject in l:
    picklepath_connect = figspath + subject + str_identifier + '_connectomes.p'
    excel_path = figspath + subject + str_identifier + "_connectomes.xlsx"
    if os.path.exists(picklepath_connect) and os.path.exists(excel_path):
        print("The writing of pickle and excel of " + str(subject) + " is already done")
        donelist.append(subject)
    else:
        notdonelist.append(subject)

createmask = True

dwi_results = []
vol_b0 = [0,1,2,3]

if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    #dwi_results = pool.starmap_async(diff_preprocessing, [(dwipath, dwipath_preprocessed, subject, bvec_orient, denoise, savefa, function_processes,
    #                                 createmask, vol_b0, verbose) for subject in l]).get()
    tract_results = pool.starmap_async(create_tracts, [(dwipath_preprocessed, outtrkpath, subject, figspath, stepsize, function_processes,
                                                        str_identifier, ratio, classifiertype, labelslist, bvec_orient, doprune,
                                                        overwrite, get_params, verbose) for subject in l]).get()
    tract_results = pool.starmap_async(tract_connectome_analysis, [(dwipath_preprocessed, outtrkpath, str_identifier, figspath,
                                                                    subject, atlas_legends, bvec_orient, brainmask,
                                                                    inclusive, function_processes, forcestart,
                                                                    picklesave, verbose) for subject in l]).get()
    pool.close()
else:
    for subject in l:
        picklepath_connect = figspath + subject + str_identifier + '_connectomes.p'
        excel_path = figspath + subject + str_identifier + "_connectomes.xlsx"
        if os.path.exists(picklepath_connect) and os.path.exists(excel_path):
            print("The writing of pickle and excel of " + str(subject) +
                  " is already done")
            donelist.append(subject)
        else:
            notdonelist.append(subject)

dwi_results = []
tract_results = []

print(f'Overwrite is {overwrite}')

if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)
    if make_tracts:
        tract_results = pool.starmap_async(
            create_tracts,
            [(diff_preprocessed, trkpath, subject, figspath, stepsize,
              function_processes, str_identifier, ratio, brainmask, classifier,
              labelslist, bvec_orient, doprune, overwrite, get_params, denoise,
              verbose) for subject in subjects]).get()
    if make_connectomes:
        tract_results = pool.starmap_async(
            tract_connectome_analysis,
            [(diff_preprocessed, trkpath, str_identifier, figspath, subject,
              atlas_legends, bvec_orient, brainmask, inclusive,
              function_processes, overwrite, picklesave, labeltype, symmetric,
              verbose) for subject in subjects]).get()
    pool.close()
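Every example here follows the same fan-out pattern: build one argument tuple per
subject, pass the list to starmap_async, and call .get() on the result. Since .get()
blocks until all subjects finish, the dispatch is effectively synchronous and
pool.starmap would behave the same. A minimal, self-contained sketch of the pattern
(process_subject and the subject list are placeholders, not names from the source):

import multiprocessing as mp

def process_subject(subject, verbose):
    # stand-in for create_tracts / tract_connectome_analysis / launch_preprocessing
    if verbose:
        print("processing", subject)
    return subject.lower()

if __name__ == '__main__':
    subjects = ["subj1", "subj2", "subj3"]
    with mp.Pool(2) as pool:
        results = pool.starmap_async(
            process_subject, [(subject, True) for subject in subjects]).get()
    print(results)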
Example #4
if verbose:
    txt = ("Process running with %d max processes available on %d subjects, "
           "with %d subjects in parallel, each using %d processes"
           % (mp.cpu_count(), np.size(l), subject_processes, function_processes))
    print(txt)
    send_mail(txt, subject="Main process start msg")

duration1 = time()
txtfile = "/Users/alex/bass/testdata/"

get_params = True
print(bvec_orient_list)

if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    tract_results = pool.starmap_async(
        create_tracts, [(dwipath, outtrkpath, subject, stepsize,
                         function_processes, strproperty, ratio, savefa,
                         labelslist, bvec_orient, get_params, verbose)
                        for subject in l]).get()
    #    tract_results = pool.starmap_async(evaluate_tracts, [(dwipath, outtrkpath, subject, stepsize, saved_streamlines,
    #                                                         figspath, function_processes, doprune, display, verbose)
    #                                                        for subject in l]).get()
    pool.close()
else:
    for subject in l:
        txtfile = dwipath + subject + "/params.txt"
        tract_results.append(create_tracts(
            dwipath, outtrkpath, subject, stepsize, function_processes, strproperty,
            ratio, savefa, labelslist, bvec_orient, get_params, verbose))

if mp.cpu_count() < max_processors:
    max_processors = mp.cpu_count()
subject_processes = np.size(subjects)
if max_processors < subject_processes:
    subject_processes = max_processors
# accepted values are "small" for one in ten streamlines, "all" or "large" for all streamlines,
# "none" or None for neither, and "both" for both of them
nominal_bval = 1000
masking = 'bet'
denoise = 'mpca'
verbose = True
function_processes = int(max_processors / subject_processes)
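The budgeting above splits a fixed CPU allowance across two levels of parallelism:
subjects run concurrently in the outer pool, and each subject's inner pool gets an
equal share of what is left. A worked example with hypothetical numbers (not from the
source):

# max_processors = 20 and 4 subjects
# -> subject_processes = 4 (one outer worker per subject)
# -> function_processes = int(20 / 4) = 5 (inner pool size per subject)
# i.e. 4 subjects in parallel, each using up to 5 processes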
results = []
if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    results = pool.starmap_async(
        launch_preprocessing,
        [(proc_subjn + subject,
          largerfile(glob.glob(os.path.join(diffpath, "*" + subject + "*"))[0]),
          outpath, cleanup,
          nominal_bval, SAMBA_inputs_folder, shortcuts_all_folder, gunniespath,
          function_processes, masking, ref, transpose, overwrite, denoise,
          recenter, verbose) for subject in subjects]).get()
else:
    for subject in subjects:
        max_file = largerfile(glob.glob(os.path.join(diffpath, "*" + subject + "*"))[0])
        results.append(launch_preprocessing(
            proc_subjn + subject, max_file, outpath, cleanup, nominal_bval,
            SAMBA_inputs_folder, shortcuts_all_folder, gunniespath, function_processes,
            masking, ref, transpose, overwrite, denoise, recenter, verbose))

for subject in subjects:
    picklepath_connect = figspath + subject + str_identifier + '_connectomes.p'
    excel_path = figspath + subject + str_identifier + "_connectomes.xlsx"
    if os.path.exists(picklepath_connect) and os.path.exists(excel_path):
        print("The writing of pickle and excel of " + str(subject) + " is already done")
        donelist.append(subject)
    else:
        notdonelist.append(subject)

dwi_results = []
tract_results = []


if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    dwi_results = pool.starmap_async(dwi_preprocessing, [(datapath, dwi_preprocessed, subject, bvec_orient, denoise, savefa, function_processes,
                                     createmask, vol_b0, verbose) for subject in subjects]).get()
    tract_results = pool.starmap_async(create_tracts, [(dwi_preprocessed, trkpath, subject, figspath, stepsize, function_processes,
                                                        str_identifier, ratio, masktype, classifier, labelslist, bvec_orient, doprune,
                                                        overwrite, get_params, verbose) for subject in subjects]).get()
    if make_connectomes:
        tract_results = pool.starmap_async(tract_connectome_analysis, [(dwi_preprocessed, trkpath, str_identifier, figspath,
                                                                       subject, atlas_legends, bvec_orient, inclusive,
                                                                       function_processes, forcestart, picklesave, verbose)
                                                                     for subject in subjects]).get()
    pool.close()
else:
    for subject in subjects:
        dwi_results.append(dwi_preprocessing(
            datapath, dwi_preprocessed, subject, bvec_orient, denoise, savefa,
            function_processes, createmask, vol_b0, verbose))

if masktype == "T1":
    # bet bia6_02491_40006.nii.gz 02491.nii.gz -m -o -f 0.4
    # mv 02491_mask.nii.gz 02491_T1_binary_mask.nii.gz
    mask, affinemask = getmask(outpath, subject, "T1", verbose)

import itertools
bvec_orient1 = (np.array(list(itertools.permutations([1, 2, 3]))))
bvec_orient2 = [elm*[-1, 1, 1] for elm in bvec_orient1]
bvec_orient3 = [elm*[1, -1, 1] for elm in bvec_orient1]
bvec_orient4 = [elm*[1, 1, -1] for elm in bvec_orient1]

bvec_orient_list = np.concatenate((bvec_orient1, bvec_orient2, bvec_orient3, bvec_orient4))
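The sweep built above covers the 6 axis permutations, each under 4 sign patterns (the
identity and the three single-axis flips), i.e. 24 of the 48 possible signed
permutations; double and triple flips are not searched. A quick sanity check on the
list:

assert bvec_orient_list.shape == (24, 3)
print(bvec_orient_list[:3])  # e.g. [[1 2 3], [1 3 2], [2 1 3]]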

if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    tract_results = pool.starmap_async(create_tracts, [(dwipath, outpath, subject, figspath, stepsize, function_processes,
                                                        orient_to_str(bvec_orient), ratio, masktype, labelslist, bvec_orient, doprune,
                                                        overwrite, get_params, verbose) for bvec_orient in bvec_orient_list]).get()
    pool.close()
else:
    txtfile = os.path.join(outpath, subject + "_params.txt")
    for bvec_orient in bvec_orient_list:
        tract_results = []
        print(bvec_orient)
        str_identifier = orient_to_str(bvec_orient)
        str_identifier = strproperty + str_identifier
        tract_results.append(create_tracts(
            dwipath, outpath, subject, figspath, stepsize, function_processes,
            str_identifier, ratio, masktype, labelslist, bvec_orient, doprune,
            overwrite, get_params, verbose))
        if not os.path.exists(new_bvec_file):
            shutil.copyfile(bvec_file, new_bvec_file)

max_processors = 20
if mp.cpu_count() < max_processors:
    max_processors = mp.cpu_count()

# accepted values are "small" for one in ten streamlines, "all" or "large" for all streamlines,
# "none" or None for neither, and "both" for both of them
nominal_bval = 4000
verbose = True
function_processes = int(max_processors / subject_processes)
results = []
if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)
    # build one argument tuple per subject; starmap_async unpacks each tuple
    # into a launch_preprocessing call on a worker process
    results = pool.starmap_async(launch_preprocessing, [
        (proc_subjn + subject, max_file, outpath, cleanup, nominal_bval,
         SAMBA_inputs_folder, shortcuts_all_folder, gunniespath,
         function_processes, masking, ref, transpose, overwrite, denoise,
         recenter, verbose) for subject in subjects
    ]).get()
else:
    for subject in subjects:
        max_size = 0
        subjectpath = glob.glob(os.path.join(diffpath, "diffusion*" + subject + "*"))[0]
Example #9
for subject in l:
    picklepath_connect = figspath + subject + str_identifier + '_connectomes.p'
    excel_path = figspath + subject + str_identifier + "_connectomes.xlsx"
    if os.path.exists(picklepath_connect) and os.path.exists(excel_path):
        print("The writing of pickle and excel of " + str(subject) +
              " is already done")
        donelist.append(subject)
    else:
        notdonelist.append(subject)

str_identifier = '_wholebrain_small_stepsize_2'
createmask = True

dwi_results = []
vol_b0 = [0, 1, 2, 3]

if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    #dwi_results = pool.starmap_async(dwi_preprocessing, [(dwipath, dwipath_preprocessed, subject, bvec_orient, denoise, savefa, function_processes,
    #                                 createmask, vol_b0, verbose) for subject in l]).get()
    #tract_results = pool.starmap_async(create_tracts, [(dwipath_preprocessed, outtrkpath, subject, figspath, stepsize, function_processes,
    #                                                    str_identifier, ratio, classifiertype, labelslist, bvec_orient, doprune,
    #                                                    overwrite, get_params, verbose) for subject in l]).get()
    tract_results = pool.starmap_async(
        tract_connectome_analysis_pickle,
        [(dwipath_preprocessed, outtrkpath, str_identifier, figspath, subject,
          atlas_legends, bvec_orient, brainmask, inclusive, function_processes,
          forcestart, picklesave, verbose) for subject in l]).get()
    pool.close()
else:
    for subject in l:
        tract_results.append(tract_connectome_analysis_pickle(
            dwipath_preprocessed, outtrkpath, str_identifier, figspath, subject,
            atlas_legends, bvec_orient, brainmask, inclusive, function_processes,
            forcestart, picklesave, verbose))

Example #10
if verbose:
    txt = ("Process running with %d max processes available on %d subjects, "
           "with %d subjects in parallel, each using %d processes"
           % (mp.cpu_count(), np.size(l), subject_processes, function_processes))
    print(txt)
    send_mail(txt, subject="Main process start msg")

duration1 = time()
txtfile = "/Users/alex/bass/testdata/"

get_params = True
print(bvec_orient_list)

if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    tract_results = pool.starmap_async(
        create_tracts,
        [(dwipath, outtrkpath, subject, figspath, stepsize, function_processes,
          str_identifier, ratio, brainmask, classifiertype, labelslist,
          bvec_orient, doprune, overwrite, get_params, denoise, verbose)
         for subject in l]).get()
    #    tract_results = pool.starmap_async(evaluate_tracts, [(dwipath, outtrkpath, subject, stepsize, saved_streamlines,
    #                                                         figspath, function_processes, doprune, display, verbose)
    #                                                        for subject in l]).get()
    pool.close()
else:
    for subject in l:
        tract_results.append(create_tracts(
            dwipath, outtrkpath, subject, figspath, stepsize, function_processes,
            str_identifier, ratio, brainmask, classifiertype, labelslist,
            bvec_orient, doprune, overwrite, get_params, denoise, verbose))

max_processors = 50
if mp.cpu_count() < max_processors:
    max_processors = mp.cpu_count()
subject_processes = np.size(subjects)
subject_processes = 1
if max_processors < subject_processes:
    subject_processes = max_processors
# accepted values are "small" for one in ten streamlines, "all" or "large" for all streamlines,
# "none" or None for neither, and "both" for both of them

function_processes = int(max_processors / subject_processes)
results = []
if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    results = pool.starmap_async(launch_preprocessing, [
        (subject,
         largerfile(glob.glob(os.path.join(dwipath, "*" + subject + "*"))[0]),
         outpath)
        for subject in subjects
    ]).get()
else:
    for subject in subjects:
        max_size = 0
        subjectpath = glob.glob(os.path.join(dwipath, "*" + subject + "*"))[0]