# ==== Example 1 (separator from concatenated source snippets) ====
    send_mail(txt, subject="Main process start msg ")

# Timestamp marking the start of the tract-evaluation stage.
duration1 = time()

# Dispatch evaluate_tracts over every subject in `l`: one pool worker per
# subject when subject_processes > 1, otherwise a plain serial loop.
if subject_processes > 1:
    # MyPool presumably allows nested pools so each subject job can itself
    # fan out over function_processes — TODO confirm against its definition.
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    tract_results = pool.starmap_async(
        evaluate_tracts,
        [(dwipath, outtrkpath, subject, stepsize, saved_streamlines,
          labelslist, outpathpickle, figspath, function_processes, allsave,
          display, strproperty, ratio, verbose) for subject in l]).get()
    pool.close()
    pool.join()  # .get() already collected results; this just reaps workers
else:
    # Serial fallback: accumulate per-subject results one at a time.
    for subject in l:
        tract_results.append(
            evaluate_tracts(dwipath, outtrkpath, subject, stepsize,
                            saved_streamlines, labelslist, outpathpickle,
                            figspath, function_processes, allsave, display,
                            strproperty, ratio, verbose))
# ==== Example 2 (separator from concatenated source snippets) ====
createmask = True

dwi_results = []
# Indices of the b0 volumes in the 4D diffusion acquisition — TODO confirm.
vol_b0 = [0,1,2,3]

# Run tractography and then connectome analysis for every subject in `l`,
# one pool worker per subject when subject_processes > 1.
if subject_processes>1:
    # MyPool presumably permits nested worker pools (per-subject fan-out
    # over function_processes) — confirm against its definition.
    if function_processes>1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    #dwi_results = pool.starmap_async(diff_preprocessing, [(dwipath, dwipath_preprocessed, subject, bvec_orient, denoise, savefa, function_processes,
    #                                 createmask, vol_b0, verbose) for subject in l]).get()
    # NOTE(review): the create_tracts results assigned here are immediately
    # overwritten by the connectome call below — the first result set is lost.
    tract_results = pool.starmap_async(create_tracts, [(dwipath_preprocessed, outtrkpath, subject, figspath, stepsize, function_processes,
                                                        str_identifier, ratio, classifiertype, labelslist, bvec_orient, doprune,
                                                        overwrite, get_params, verbose) for subject in l]).get()
    tract_results = pool.starmap_async(tract_connectome_analysis, [(dwipath_preprocessed, outtrkpath, str_identifier, figspath,
                                                                   subject, atlas_legends, bvec_orient, brainmask,
                                                                    inclusive,function_processes, forcestart,
                                                                    picklesave, verbose) for subject in l]).get()
    pool.close()
else:
    for subject in l:
       #dwi_results.append(diff_preprocessing(dwipath, dwipath_preprocessed, subject, bvec_orient, denoise, savefa,
       #                                  function_processes, createmask, vol_b0, verbose))
       tract_results.append(create_tracts(dwipath_preprocessed, outtrkpath, subject, figspath, stepsize, function_processes, str_identifier,
                                              ratio, classifiertype, labelslist, bvec_orient, doprune, overwrite, get_params,
                                           verbose))
       tract_results.append(tract_connectome_analysis(dwipath, outtrkpath, str_identifier, figspath, subject,
                                                     atlas_legends, bvec_orient, brainmask, inclusive, function_processes,
            notdonelist.append(subject)

# Result accumulators for the preprocessing and tractography stages.
dwi_results = []
tract_results = []

print(f'Overwrite is {overwrite}')

# Fan the per-subject work out over a pool when subject_processes > 1;
# each stage is gated by its own flag (make_tracts / make_connectomes).
if subject_processes > 1:
    # MyPool presumably allows nested pools so each worker can itself use
    # function_processes — TODO confirm against its definition.
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)
    if make_tracts:
        tract_results = pool.starmap_async(
            create_tracts,
            [(diff_preprocessed, trkpath, subject, figspath, stepsize,
              function_processes, str_identifier, ratio, brainmask, classifier,
              labelslist, bvec_orient, doprune, overwrite, get_params, denoise,
              verbose) for subject in subjects]).get()
    if make_connectomes:
        # NOTE(review): when both flags are set, this overwrites the
        # create_tracts results gathered just above.
        tract_results = pool.starmap_async(
            tract_connectome_analysis,
            [(diff_preprocessed, trkpath, str_identifier, figspath, subject,
              atlas_legends, bvec_orient, brainmask, inclusive,
              function_processes, overwrite, picklesave, labeltype, symmetric,
              reference_weighting, volume_weighting, verbose)
             for subject in subjects]).get()
    pool.close()
else:
    for subject in subjects:
        if make_tracts:
            tract_results.append(
# ==== Example 4 (separator from concatenated source snippets) ====
# Timestamp marking the start of this stage.
duration1 = time()
# Default output location; looks like it is meant to be overridden per
# subject in the serial branch — TODO confirm.
txtfile = "/Users/alex/bass/testdata/"

get_params = True
print(bvec_orient_list)

# Parallel dispatch of create_tracts, one pool worker per subject in `l`.
if subject_processes > 1:
    # MyPool presumably supports nested pools for per-subject fan-out —
    # confirm against its definition.
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    tract_results = pool.starmap_async(
        create_tracts, [(dwipath, outtrkpath, subject, stepsize,
                         function_processes, strproperty, ratio, savefa,
                         labelslist, bvec_orient, get_params, verbose)
                        for subject in l]).get()
    #    tract_results = pool.starmap_async(evaluate_tracts, [(dwipath, outtrkpath, subject, stepsize, saved_streamlines,
    #                                                         figspath, function_processes, doprune, display, verbose)
    #                                                        for subject in l]).get()
    pool.close()
else:
    for subject in l:
        txtfile = dwipath + subject + "/params.txt"
        for bvec_orient in bvec_orient_list:
            tract_results = []
            print(bvec_orient)
            strproperty = orient_to_str(bvec_orient)
            tract_results.append(
                create_tracts(dwipath, outtrkpath, subject, figspath, stepsize,
masking = 'bet'
denoise = 'mpca'
verbose = True
# np.int was removed in NumPy 1.24; the builtin int truncates the same way.
function_processes = int(max_processors / subject_processes)
results = []
if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    # One launch_preprocessing job per subject. The redundant single-argument
    # os.path.join wrapper around the glob pattern was removed (it was a no-op).
    results = pool.starmap_async(
        launch_preprocessing,
        [(proc_subjn + subject,
          largerfile(
              glob.glob(os.path.join(diffpath, "*" + subject + "*"))[0]),
          outpath, cleanup,
          nominal_bval, SAMBA_inputs_folder, shortcuts_all_folder, gunniespath,
          function_processes, masking, ref, transpose, overwrite, denoise,
          recenter, verbose) for subject in subjects]).get()
    pool.close()  # .get() already collected results; release the workers
    pool.join()
else:
    for subject in subjects:
        max_size = 0
        subjectpath = glob.glob(
            os.path.join(outpath, "diffusion*" + subject + "*"))[0]
        print(subjectpath)
        # NOTE(review): this largerfile() result is immediately overwritten by
        # the fixed nii4D path below — confirm which input is intended.
        max_file = largerfile(subjectpath)
        max_file = os.path.join(subjectpath,
                                "nii4D_" + proc_subjn + subject + ".nii.gz")
        print(max_file)
            print("The writing of pickle and excel of " + str(subject) + " is already done")
            donelist.append(subject)
        else:
            notdonelist.append(subject)

# Result accumulators for the preprocessing and tractography stages.
dwi_results = []
tract_results = []


# Parallel path: preprocessing, tractography, then (optionally) connectome
# analysis, each dispatched over one pool worker per subject.
if subject_processes>1:
    # MyPool presumably permits nested pools for per-subject fan-out over
    # function_processes — confirm against its definition.
    if function_processes>1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    dwi_results = pool.starmap_async(dwi_preprocessing, [(datapath, dwi_preprocessed, subject, bvec_orient, denoise, savefa, function_processes,
                                     createmask, vol_b0, verbose) for subject in subjects]).get()
    # NOTE(review): if make_connectomes is set, this result set is
    # overwritten by the connectome call below.
    tract_results = pool.starmap_async(create_tracts, [(dwi_preprocessed, trkpath, subject, figspath, stepsize, function_processes,
                                                        str_identifier, ratio, masktype, classifier, labelslist, bvec_orient, doprune,
                                                        overwrite, get_params, verbose) for subject in subjects]).get()
    if make_connectomes:
        tract_results = pool.starmap_async(tract_connectome_analysis, [(dwi_preprocessed, trkpath, str_identifier, figspath,
                                                                       subject, atlas_legends, bvec_orient, inclusive,
                                                                       function_processes, forcestart, picklesave, verbose)
                                                                     for subject in subjects]).get()
    pool.close()
else:
    for subject in subjects:
        dwi_results.append(dwi_preprocessing(datapath, dwi_preprocessed, subject, bvec_orient, denoise, savefa,
                                             function_processes, createmask, vol_b0, verbose))
        tract_results.append(
            create_tracts(dwi_preprocessed, trkpath, subject, figspath, stepsize, function_processes, str_identifier,
import itertools

# The six possible orderings of the axes (1, 2, 3), one per row of a
# 6x3 integer array.
bvec_orient1 = np.array(list(itertools.permutations([1, 2, 3])))

# The same orderings with exactly one axis sign-flipped:
# x flipped, y flipped, and z flipped respectively.
bvec_orient2 = [perm * [-1, 1, 1] for perm in bvec_orient1]
bvec_orient3 = [perm * [1, -1, 1] for perm in bvec_orient1]
bvec_orient4 = [perm * [1, 1, -1] for perm in bvec_orient1]

# 24 candidate b-vector orientations: 6 permutations x 4 sign patterns.
bvec_orient_list = np.concatenate(
    (bvec_orient1, bvec_orient2, bvec_orient3, bvec_orient4))

# Sweep every candidate b-vector orientation for a single subject:
# one orientation per pool worker in the parallel branch, a plain loop
# in the serial branch.
if subject_processes>1:
    # MyPool presumably supports nested pools — confirm against its definition.
    if function_processes>1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    tract_results = pool.starmap_async(create_tracts, [(dwipath, outpath, subject, figspath, stepsize, function_processes,
                                                        orient_to_str(bvec_orient), ratio, masktype, labelslist, bvec_orient, doprune,
                                                        overwrite, get_params, verbose) for bvec_orient in bvec_orient_list]).get()
    pool.close()
else:
    # Serial branch: same sweep, logging to a per-subject params file path.
    txtfile = os.path.join(outpath, subject + "_params.txt")
    for bvec_orient in bvec_orient_list:
        # NOTE(review): tract_results is reset on every iteration, so only
        # the final orientation's result survives the loop.
        tract_results = []
        print(bvec_orient)
        str_identifier = orient_to_str(bvec_orient)
        str_identifier = strproperty + str_identifier
        # NOTE(review): this call passes an extra 'FA' argument that the
        # parallel branch above does not — confirm the intended signature.
        tract_results.append(create_tracts(dwipath, outpath, subject, figspath, stepsize, function_processes,
                          str_identifier, ratio, masktype, 'FA', labelslist, bvec_orient, doprune, overwrite, get_params,
                          verbose))
        print(tract_results)

    """
# accepted values are "small" for one in ten streamlines, "all or "large" for all streamlines,
# "none" or None variable for neither and "both" for both of them
nominal_bval = 4000
verbose = True
# np.int was removed in NumPy 1.24; builtin int gives the same truncation.
function_processes = int(max_processors / subject_processes)
results = []
if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)
    # Bug fix: the list comprehension previously CALLED launch_preprocessing
    # in the parent process (serially) and handed its return values to
    # starmap_async, defeating the pool entirely. starmap expects argument
    # tuples, built here instead. The duplicated `recenter` argument was also
    # dropped to match the launch_preprocessing dispatches elsewhere.
    # NOTE(review): max_file is identical for every subject in this variant —
    # confirm that is intended.
    results = pool.starmap_async(launch_preprocessing, [
        (proc_subjn + subject, max_file, outpath, cleanup, nominal_bval,
         SAMBA_inputs_folder, shortcuts_all_folder, gunniespath,
         function_processes, masking, ref, transpose, overwrite, denoise,
         recenter, verbose) for subject in subjects
    ]).get()
    pool.close()  # .get() already waited for all results
else:
    for subject in subjects:
        max_size = 0
        subjectpath = glob.glob(
            os.path.join(os.path.join(diffpath,
                                      "diffusion*" + subject + "*")))[0]
        subject_outpath = os.path.join(
            outpath, 'diffusion_prep_' + proc_subjn + subject)
        max_file = largerfile(subjectpath)
        if os.path.exists(
                os.path.join(
                    subject_outpath,
# ==== Example 9 (separator from concatenated source snippets) ====
# Indices of the b0 volumes in the diffusion acquisition — TODO confirm.
vol_b0 = [0, 1, 2, 3]

# Parallel dispatch: only the pickle-based connectome analysis is active in
# this variant (earlier stages are commented out below).
if subject_processes > 1:
    # MyPool presumably supports nested pools for per-subject fan-out —
    # confirm against its definition.
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    #dwi_results = pool.starmap_async(dwi_preprocessing, [(dwipath, dwipath_preprocessed, subject, bvec_orient, denoise, savefa, function_processes,
    #                                 createmask, vol_b0, verbose) for subject in l]).get()
    #tract_results = pool.starmap_async(create_tracts, [(dwipath_preprocessed, outtrkpath, subject, figspath, stepsize, function_processes,
    #                                                    str_identifier, ratio, classifiertype, labelslist, bvec_orient, doprune,
    #                                                    overwrite, get_params, verbose) for subject in l]).get()
    tract_results = pool.starmap_async(
        tract_connectome_analysis_pickle,
        [(dwipath_preprocessed, outtrkpath, str_identifier, figspath, subject,
          atlas_legends, bvec_orient, brainmask, inclusive, function_processes,
          forcestart, picklesave, verbose) for subject in l]).get()
    pool.close()
else:
    for subject in l:
        #dwi_results.append(dwi_preprocessing(dwipath, dwipath_preprocessed, subject, bvec_orient, denoise, savefa,
        #                                  function_processes, createmask, vol_b0, verbose))
        #tract_results.append(create_tracts(dwipath_preprocessed, outtrkpath, subject, figspath, stepsize, function_processes, str_identifier,
        #                                       ratio, classifiertype, labelslist, bvec_orient, doprune, overwrite, get_params,
        #                                    verbose))
        tract_results.append(
            tract_connectome_analysis_pickle(dwipath, outtrkpath,
                                             str_identifier, figspath, subject,
                                             atlas_legends, bvec_orient,
                                             brainmask, inclusive,
# ==== Example 10 (separator from concatenated source snippets) ====
# Timestamp marking the start of this stage.
duration1 = time()
# Default location; replaced per-subject in the serial branch below.
txtfile = "/Users/alex/bass/testdata/"

get_params = True
print(bvec_orient_list)

# Parallel branch: one create_tracts job per subject in `l`.
if subject_processes > 1:
    # MyPool presumably supports nested pools — confirm against its definition.
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    tract_results = pool.starmap_async(
        create_tracts,
        [(dwipath, outtrkpath, subject, figspath, stepsize, function_processes,
          str_identifier, ratio, brainmask, classifiertype, labelslist,
          bvec_orient, doprune, overwrite, get_params, denoise, verbose)
         for subject in l]).get()
    #    tract_results = pool.starmap_async(evaluate_tracts, [(dwipath, outtrkpath, subject, stepsize, saved_streamlines,
    #                                                         figspath, function_processes, doprune, display, verbose)
    #                                                        for subject in l]).get()
    pool.close()
else:
    # Serial branch: sweep every candidate b-vector orientation per subject,
    # appending a parameters header to a per-subject text file first.
    for subject in l:
        txtfile = os.path.join(txtpath, subject + "_" + "params.txt")
        with open(txtfile, 'a') as fi:
            fi.write("Parameters for subject %s \n" % subject)
        # NOTE(review): tract_results is reset on every orientation, so only
        # the last orientation's result would survive this loop.
        for bvec_orient in bvec_orient_list:
            tract_results = []
            print(bvec_orient)
            strproperty = orient_to_str(bvec_orient)
    subject_processes = max_processors
# accepted values are "small" for one in ten streamlines, "all or "large" for all streamlines,
# "none" or None variable for neither and "both" for both of them

# np.int was removed in NumPy 1.24; the builtin int truncates identically here.
function_processes = int(max_processors / subject_processes)
results = []
# Parallel dispatch of launch_preprocessing, one worker per subject.
if subject_processes > 1:
    if function_processes > 1:
        pool = MyPool(subject_processes)
    else:
        pool = mp.Pool(subject_processes)

    # One job per subject: the largest file in the subject's folder under
    # dwipath is taken as the input. The redundant single-argument
    # os.path.join wrapper around the glob pattern was removed (it was a no-op).
    results = pool.starmap_async(launch_preprocessing, [
        (subject,
         largerfile(
             glob.glob(os.path.join(dwipath, "*" + subject + "*"))[0]),
         outpath)
        for subject in subjects
    ]).get()
    pool.close()  # .get() already waited for all results; release workers
    pool.join()
else:
    for subject in subjects:
        max_size = 0
        subjectpath = glob.glob(
            os.path.join(os.path.join(dwipath, "*" + subject + "*")))[0]
        max_file = largerfile(subjectpath)
        #command = gunniespath + "mouse_diffusion_preprocessing.bash"+ f" {subject} {max_file} {outpath}"
        launch_preprocessing(subject,
                             max_file,
                             outpath,
                             nominal_bval=800,
                             shortcutpath=shortcutpath,