Example #1
0
    # NOTE(review): tail of a loop whose header is above this excerpt —
    # `fname`, `group_struct`, `subj_struct` and `dist_snap` are bound there.
    # Plot the subject's QC measures against the group distribution as
    # violin plots and save the snapshot for the final report.
    fig = plot_measures(
        group_struct, ncols=4, subject_measures=subj_struct,
        title="QC measures: " + fname.split("_")[0] + " " + args.subjectid,
        figsize=(8.27, 11.69), display_type="violin")
    fig.savefig(dist_snap, dpi=300)


"""
Movement quantity and spike detection
"""
snap_mvt, displacement_file, mvt_scores = time_serie_mq(
    args.func, args.transformations, "SPM", subjectdir, time_axis=-1,
    slice_axis=-2, mvt_thr=50, rot_thr=50, volumes_to_ignore=args.crop)
figures.append(snap_mvt)
# spike
snap_spike, spikes_file = spike_detector(args.func, subjectdir)
figures.append(snap_spike)


"""
Create a report
"""
report_snap = os.path.join(subjectdir, "report_" + args.subjectid + ".pdf")
concat_pdf(figures, out_file=report_snap)

scores.update(mvt_scores)
scores.update({"scanner": args.scan_id})

scores_json = os.path.join(subjectdir, "extra_scores.json")
with open(scores_json, "w") as open_file:
    json.dump(scores, open_file, indent=4)
Example #2
0
def run_worker(inputs_queue, outputs_queue, index):
    """ Consume fMRI QA jobs from a queue until the sentinel is received.

    Each job is a 4-item sequence ``(subj_id, fmri_file, rp_file,
    root_output)``. For every job the worker computes a battery of
    functional-MRI quality scores (movement, EFC, FBER, FWHM, outlier and
    quality timepoints, ghosting ratios, spikes) and writes them to
    ``<root_output>/<subj_id>/outputs/qa_scores.json``. A one-line status
    message (``'<subj_id> - Success'`` or ``'<subj_id> - FAIL:'``) is pushed
    to ``outputs_queue`` for each processed job.

    Parameters
    ----------
    inputs_queue: multiprocessing.Queue
        queue of jobs; the ``FLAG_ALL_DONE`` sentinel stops the worker.
    outputs_queue: multiprocessing.Queue
        queue receiving per-job status messages; the worker posts
        ``FLAG_WORKER_FINISHED_PROCESSING`` just before exiting.
    index: int
        worker identifier (currently unused, kept for API compatibility).
    """
    # get something from the pile
    while True:
        inputs = inputs_queue.get()
        # stop condition
        if inputs == FLAG_ALL_DONE:
            outputs_queue.put(FLAG_WORKER_FINISHED_PROCESSING)
            break

        # unpack the job description
        subj_id, fmri_file, rp_file, root_output = inputs[:4]

        # define a fresh working directory for this subject: any previous
        # run's outputs are wiped so results cannot be stale
        working_directory = os.path.join(root_output, subj_id, "outputs")
        if os.path.isdir(working_directory):
            shutil.rmtree(working_directory)
        os.makedirs(working_directory)

        try:

            # get data array
            fmri_file_data = nibabel.load(fmri_file).get_data()

            # step 1: get movement snap and parameters
            snap_mvt, displacement_file = time_serie_mq(
                fmri_file, rp_file, "SPM", working_directory, time_axis=-1,
                slice_axis=-2, mvt_thr=1.5, rot_thr=0.5)

            # step 2: get efc score (entropy focus criterion)
            r_efc = efc(fmri_file_data)

            # step 3: get a brain mask from afni (external '3dAutomask' tool)
            mask_file = os.path.join(working_directory, "mask.nii")
            cmd = ["3dAutomask", "-prefix", mask_file, fmri_file]
            subprocess.check_call(cmd)
            mask_data = nibabel.load(mask_file).get_data()

            # step 4: get fber score (foreground to background energy ratio)
            r_fber = fber(fmri_file_data, mask_data)

            # step 5: get smoothness of voxels score
            r_fwhm = fwhm(fmri_file, mask_file)

            # step 6: detect outlier timepoints in each volume
            outliers = outlier_timepoints(fmri_file, mask_file)
            mean_outliers = numpy.mean(outliers)

            # step 7: ghost scores
            gsrs = ghost_all(fmri_file_data, mask_data)

            # step 8: quality timepoints
            qt = quality_timepoints(fmri_file, automask=True)
            mean_qt = numpy.mean(qt)

            # step 9: spike detection
            snap_spikes, spikes_file = spike_detector(
                fmri_file, working_directory)
            with open(spikes_file) as _file:
                spikes_dict = json.load(_file)

            # final step: save scores (stringified for JSON) in a dict
            scores = {
                "efc": "{0}".format(r_efc),
                "fber": "{0}".format(r_fber),
                "fwhm": "{0}".format(r_fwhm),
                "outliers": "{0}".format(outliers),
                "mean_outliers": "{0}".format(mean_outliers),
                "x_gsr": "{0}".format(gsrs[0]),
                "y_gsr": "{0}".format(gsrs[1]),
                "quality": "{0}".format(qt),
                "mean_quality": "{0}".format(mean_qt)
            }
            scores.update(spikes_dict)

            scores_file = os.path.join(working_directory, "qa_scores.json")
            with open(scores_file, "w") as _file:
                json.dump(scores, _file, indent=4)

            outputs_queue.put("{0} - Success".format(subj_id))
        # was a bare 'except:' which also swallowed KeyboardInterrupt and
        # SystemExit; narrowed to Exception so the worker stays interruptible
        # while still reporting per-subject processing failures
        except Exception:
            outputs_queue.put("{0} - FAIL:".format(subj_id))
            traceback.print_exc()
Example #3
0
                            ("corr_normative_measures.pdf", corr_struct)]:
    dist_snap = os.path.join(subjectdir, fname)
    figures.append(dist_snap)
    fig = plot_measures(group_struct,
                        ncols=4,
                        subject_measures=subj_struct,
                        title="QC measures: " + fname.split("_")[0] + " " +
                        args.subjectid,
                        figsize=(8.27, 11.69),
                        display_type="violin")
    fig.savefig(dist_snap, dpi=300)
"""
Movement quantity (add figures) and spike detection
"""
figures.append(snap_mvt)
# spike
snap_spike, spikes_file = spike_detector(args.func, subjectdir, zalph=2.5)
figures.append(snap_spike)
"""
Create a report
"""
report_snap = os.path.join(subjectdir, "report_" + args.subjectid + ".pdf")
concat_pdf(figures, out_file=report_snap)

scores.update(mvt_scores)
scores.update({"scanner": args.scan_id})

scores_json = os.path.join(subjectdir, "extra_scores.json")
with open(scores_json, "w") as open_file:
    json.dump(scores, open_file, indent=4)
def run_worker(inputs_queue, outputs_queue, index):
    """ Worker loop computing fMRI quality-assurance scores per subject.

    Pops 4-item jobs ``(subj_id, fmri_file, rp_file, root_output)`` from
    ``inputs_queue`` until the ``FLAG_ALL_DONE`` sentinel arrives. For each
    job a series of QA metrics (movement, EFC, FBER, FWHM, outlier/quality
    timepoints, ghosting, spikes) is computed and dumped to
    ``<root_output>/<subj_id>/outputs/qa_scores.json``; a status string is
    pushed to ``outputs_queue`` for every job.

    Parameters
    ----------
    inputs_queue: multiprocessing.Queue
        queue of jobs; ``FLAG_ALL_DONE`` terminates the loop.
    outputs_queue: multiprocessing.Queue
        queue receiving per-job status messages; the worker posts
        ``FLAG_WORKER_FINISHED_PROCESSING`` before exiting.
    index: int
        worker identifier (currently unused, kept for API compatibility).
    """
    # get something from the pile
    while True:
        inputs = inputs_queue.get()
        # stop condition
        if inputs == FLAG_ALL_DONE:
            outputs_queue.put(FLAG_WORKER_FINISHED_PROCESSING)
            break

        # unpack the job description
        subj_id, fmri_file, rp_file, root_output = inputs[:4]

        # define a fresh working directory (previous outputs are wiped)
        working_directory = os.path.join(root_output, subj_id, "outputs")
        if os.path.isdir(working_directory):
            # note: was over-indented in the original; body of the 'if'
            shutil.rmtree(working_directory)
        os.makedirs(working_directory)

        try:

            # get data array
            fmri_file_data = nibabel.load(fmri_file).get_data()

            # step 1: get movement snap and parameters
            snap_mvt, displacement_file = time_serie_mq(
                fmri_file, rp_file, "SPM", working_directory, time_axis=-1,
                slice_axis=-2, mvt_thr=1.5, rot_thr=0.5)

            # step 2: get efc score (entropy focus criterion)
            r_efc = efc(fmri_file_data)

            # step 3: get a brain mask from afni (external '3dAutomask' tool)
            mask_file = os.path.join(working_directory, "mask.nii")
            cmd = ["3dAutomask", "-prefix", mask_file, fmri_file]
            subprocess.check_call(cmd)
            mask_data = nibabel.load(mask_file).get_data()

            # step 4: get fber score (foreground to background energy ratio)
            r_fber = fber(fmri_file_data, mask_data)

            # step 5: get smoothness of voxels score
            r_fwhm = fwhm(fmri_file, mask_file)

            # step 6: detect outlier timepoints in each volume
            outliers = outlier_timepoints(fmri_file, mask_file)
            mean_outliers = numpy.mean(outliers)

            # step 7: ghost scores
            gsrs = ghost_all(fmri_file_data, mask_data)

            # step 8: quality timepoints
            qt = quality_timepoints(fmri_file, automask=True)
            mean_qt = numpy.mean(qt)

            # step 9: spike detection
            snap_spikes, spikes_file = spike_detector(
                fmri_file, working_directory)
            with open(spikes_file) as _file:
                spikes_dict = json.load(_file)

            # final step: save scores (stringified for JSON) in a dict
            scores = {"efc": "{0}".format(r_efc),
                      "fber": "{0}".format(r_fber),
                      "fwhm": "{0}".format(r_fwhm),
                      "outliers": "{0}".format(outliers),
                      "mean_outliers": "{0}".format(mean_outliers),
                      "x_gsr": "{0}".format(gsrs[0]),
                      "y_gsr": "{0}".format(gsrs[1]),
                      "quality": "{0}".format(qt),
                      "mean_quality": "{0}".format(mean_qt)}
            scores.update(spikes_dict)

            scores_file = os.path.join(working_directory, "qa_scores.json")
            with open(scores_file, "w") as _file:
                json.dump(scores, _file, indent=4)

            outputs_queue.put("{0} - Success".format(subj_id))
        # was a bare 'except:' which also swallowed KeyboardInterrupt and
        # SystemExit; narrowed to Exception so the worker stays interruptible
        # while still reporting per-subject processing failures
        except Exception:
            outputs_queue.put("{0} - FAIL:".format(subj_id))
            traceback.print_exc()