def _run_interface(self, runtime):
    # Get the mean EPI data and get it ready
    epinii = nb.load(self.inputs.in_epi)
    epidata = np.nan_to_num(epinii.get_data())
    epidata = epidata.astype(np.float32)
    epidata[epidata < 0] = 0

    # Get EPI data (with mc done) and get it ready
    hmcnii = nb.load(self.inputs.in_hmc)
    hmcdata = np.nan_to_num(hmcnii.get_data())
    hmcdata = hmcdata.astype(np.float32)
    hmcdata[hmcdata < 0] = 0

    # Get brain mask data
    msknii = nb.load(self.inputs.in_mask)
    mskdata = np.asanyarray(msknii.dataobj) > 0
    mskdata = mskdata.astype(np.uint8)
    if np.sum(mskdata) < 100:
        raise RuntimeError(
            "Detected less than 100 voxels belonging to the brain mask. "
            "MRIQC failed to process this dataset.")

    # Summary stats
    stats = summary_stats(epidata, mskdata, erode=True)
    self._results["summary"] = stats

    # SNR
    self._results["snr"] = snr(stats["fg"]["median"], stats["fg"]["stdv"],
                               stats["fg"]["n"])
    # FBER
    self._results["fber"] = fber(epidata, mskdata)
    # EFC
    self._results["efc"] = efc(epidata)

    # GSR
    self._results["gsr"] = {}
    if self.inputs.direction == "all":
        epidir = ["x", "y"]
    else:
        epidir = [self.inputs.direction]

    for axis in epidir:
        self._results["gsr"][axis] = gsr(epidata, mskdata, direction=axis)

    # DVARS
    dvars_avg = np.loadtxt(self.inputs.in_dvars, skiprows=1,
                           usecols=list(range(3))).mean(axis=0)
    dvars_col = ["std", "nstd", "vstd"]
    self._results["dvars"] = {
        key: float(val) for key, val in zip(dvars_col, dvars_avg)
    }

    # tSNR
    tsnr_data = nb.load(self.inputs.in_tsnr).get_data()
    self._results["tsnr"] = float(np.median(tsnr_data[mskdata > 0]))

    # FD
    fd_data = np.loadtxt(self.inputs.in_fd, skiprows=1)
    num_fd = float((fd_data > self.inputs.fd_thres).sum())
    self._results["fd"] = {
        "mean": float(fd_data.mean()),
        "num": int(num_fd),
        "perc": float(num_fd * 100 / (len(fd_data) + 1)),
    }

    # FWHM
    fwhm = np.array(self.inputs.in_fwhm[:3]) / np.array(
        hmcnii.header.get_zooms()[:3])
    self._results["fwhm"] = {
        "x": float(fwhm[0]),
        "y": float(fwhm[1]),
        "z": float(fwhm[2]),
        "avg": float(np.average(fwhm)),
    }

    # Image specs
    self._results["size"] = {
        "x": int(hmcdata.shape[0]),
        "y": int(hmcdata.shape[1]),
        "z": int(hmcdata.shape[2]),
    }
    self._results["spacing"] = {
        i: float(v)
        for i, v in zip(["x", "y", "z"], hmcnii.header.get_zooms()[:3])
    }

    try:
        self._results["size"]["t"] = int(hmcdata.shape[3])
    except IndexError:
        pass

    try:
        self._results["spacing"]["tr"] = float(hmcnii.header.get_zooms()[3])
    except IndexError:
        pass

    self._results["out_qc"] = _flatten_dict(self._results)
    return runtime
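
# --- Illustrative sketch (not part of MRIQC) ---------------------------------
# The DVARS block above averages the first three columns of a whitespace-
# delimited text file (one header row, one row per timepoint). The header
# names and values below are made up, purely to show how the column-wise mean
# maps onto the "std"/"nstd"/"vstd" keys.
import io

import numpy as np

fake_dvars = io.StringIO(
    "col1 col2 col3\n"   # header row, skipped by skiprows=1
    "1.10 1.20 1.30\n"
    "0.90 1.00 1.10\n"
)
dvars_avg = np.loadtxt(fake_dvars, skiprows=1,
                       usecols=list(range(3))).mean(axis=0)
print({key: float(val) for key, val in zip(["std", "nstd", "vstd"], dvars_avg)})
# -> {'std': 1.0, 'nstd': 1.1, 'vstd': 1.2}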
def _run_interface(self, runtime):  # pylint: disable=R0914
    imnii = nb.load(self.inputs.in_file)
    imdata = np.nan_to_num(imnii.get_data())
    erode = np.all(
        np.array(imnii.get_header().get_zooms()[:3], dtype=np.float32) < 1.2)

    # Cast to float32
    imdata = imdata.astype(np.float32)

    # Remove negative values
    imdata[imdata < 0] = 0

    # Load image corrected for INU
    inudata = np.nan_to_num(nb.load(self.inputs.in_noinu).get_data())
    inudata[inudata < 0] = 0

    segnii = nb.load(self.inputs.in_segm)
    segdata = segnii.get_data().astype(np.uint8)

    airdata = nb.load(self.inputs.air_msk).get_data().astype(np.uint8)
    artdata = nb.load(self.inputs.artifact_msk).get_data().astype(np.uint8)
    headdata = nb.load(self.inputs.head_msk).get_data().astype(np.uint8)

    # SNR
    snrvals = []
    self._results['snr'] = {}
    for tlabel in ['csf', 'wm', 'gm']:
        snrvals.append(snr(inudata, segdata, fglabel=tlabel, erode=erode))
        self._results['snr'][tlabel] = snrvals[-1]
    self._results['snr']['total'] = float(np.mean(snrvals))

    snrvals = []
    self._results['snrd'] = {
        tlabel: snr_dietrich(inudata, segdata, airdata, fglabel=tlabel,
                             erode=erode)
        for tlabel in ['csf', 'wm', 'gm']
    }
    self._results['snrd']['total'] = float(
        np.mean([val for _, val in list(self._results['snrd'].items())]))

    # CNR
    self._results['cnr'] = cnr(inudata, segdata)

    # FBER
    self._results['fber'] = fber(inudata, headdata)

    # EFC
    self._results['efc'] = efc(inudata)

    # M2WM
    self._results['wm2max'] = wm2max(imdata, segdata)

    # Artifacts
    self._results['qi_1'] = art_qi1(airdata, artdata)

    # CJV
    self._results['cjv'] = cjv(inudata, seg=segdata)

    pvmdata = []
    for fname in self.inputs.in_pvms:
        pvmdata.append(nb.load(fname).get_data().astype(np.float32))

    # FWHM
    fwhm = np.array(self.inputs.in_fwhm[:3]) / np.array(
        imnii.get_header().get_zooms()[:3])
    self._results['fwhm'] = {
        'x': float(fwhm[0]),
        'y': float(fwhm[1]),
        'z': float(fwhm[2]),
        'avg': float(np.average(fwhm))
    }

    # ICVs
    self._results['icvs'] = volume_fraction(pvmdata)

    # RPVE
    self._results['rpve'] = rpve(pvmdata, segdata)

    # Summary stats
    self._results['summary'] = summary_stats(imdata, pvmdata, airdata)

    # Image specs
    self._results['size'] = {
        'x': int(imdata.shape[0]),
        'y': int(imdata.shape[1]),
        'z': int(imdata.shape[2])
    }
    self._results['spacing'] = {
        i: float(v)
        for i, v in zip(['x', 'y', 'z'], imnii.get_header().get_zooms()[:3])
    }

    try:
        self._results['size']['t'] = int(imdata.shape[3])
    except IndexError:
        pass

    try:
        self._results['spacing']['tr'] = float(
            imnii.get_header().get_zooms()[3])
    except IndexError:
        pass

    # Bias
    bias = nb.load(self.inputs.in_bias).get_data()[segdata > 0]
    self._results['inu'] = {
        'range': float(np.abs(np.percentile(bias, 95.) -
                              np.percentile(bias, 5.))),
        'med': float(np.median(bias))
    }  # pylint: disable=E1101

    mni_tpms = [nb.load(tpm).get_data() for tpm in self.inputs.mni_tpms]
    in_tpms = [nb.load(tpm).get_data() for tpm in self.inputs.in_pvms]
    overlap = fuzzy_jaccard(in_tpms, mni_tpms)
    self._results['tpm_overlap'] = {
        'csf': overlap[0],
        'gm': overlap[1],
        'wm': overlap[2]
    }

    # Flatten the dictionary
    self._results['out_qc'] = _flatten_dict(self._results)

    return runtime
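
# --- Illustrative sketch (not part of MRIQC) ---------------------------------
# The FWHM block above divides the smoothness estimates passed in via
# ``in_fwhm`` (assumed to be in mm) by the voxel zooms from the NIfTI header,
# so the reported IQM is expressed in voxel units. The numbers below are made
# up for illustration only.
import numpy as np

in_fwhm = [2.8, 2.9, 3.6]             # hypothetical FWHM estimates, mm
zooms = np.array([1.0, 1.0, 1.2])     # hypothetical voxel sizes, mm
fwhm = np.array(in_fwhm[:3]) / zooms
print({'x': float(fwhm[0]), 'y': float(fwhm[1]), 'z': float(fwhm[2]),
       'avg': float(np.average(fwhm))})
# -> {'x': 2.8, 'y': 2.9, 'z': 3.0, 'avg': 2.9}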
def _run_interface(self, runtime):
    # Get the mean EPI data and get it ready
    epinii = nb.load(self.inputs.in_epi)
    epidata = np.nan_to_num(epinii.get_data())
    epidata = epidata.astype(np.float32)
    epidata[epidata < 0] = 0

    # Get EPI data (with mc done) and get it ready
    hmcnii = nb.load(self.inputs.in_hmc)
    hmcdata = np.nan_to_num(hmcnii.get_data())
    hmcdata = hmcdata.astype(np.float32)
    hmcdata[hmcdata < 0] = 0

    # Get brain mask data and get it ready
    msknii = nb.load(self.inputs.in_mask)
    mskdata = np.nan_to_num(msknii.get_data())
    mskdata = mskdata.astype(np.uint8)
    mskdata[mskdata < 0] = 0
    mskdata[mskdata > 0] = 1

    # SNR
    self._results['snr'] = float(snr(epidata, mskdata, fglabel=1))
    # FBER
    self._results['fber'] = fber(epidata, mskdata)
    # EFC
    self._results['efc'] = efc(epidata)

    # GSR
    self._results['gsr'] = {}
    if self.inputs.direction == 'all':
        epidir = ['x', 'y']
    else:
        epidir = [self.inputs.direction]

    for axis in epidir:
        self._results['gsr'][axis] = gsr(epidata, mskdata, direction=axis)

    # Summary stats
    self._results['summary'] = summary_stats(epidata, mskdata)

    # DVARS
    dvars_avg = np.loadtxt(self.inputs.in_dvars, skiprows=1,
                           usecols=list(range(3))).mean(axis=0)
    dvars_col = ['std', 'nstd', 'vstd']
    self._results['dvars'] = {
        key: float(val) for key, val in zip(dvars_col, dvars_avg)
    }

    # tSNR
    tsnr_data = nb.load(self.inputs.in_tsnr).get_data()
    self._results['tsnr'] = float(np.median(tsnr_data[mskdata > 0]))

    # GCOR
    self._results['gcor'] = gcor(hmcdata, mskdata)

    # FD
    fd_data = np.loadtxt(self.inputs.in_fd, skiprows=1)
    num_fd = float((fd_data > self.inputs.fd_thres).sum())
    self._results['fd'] = {
        'mean': float(fd_data.mean()),
        'num': int(num_fd),
        'perc': float(num_fd * 100 / (len(fd_data) + 1))
    }

    # FWHM
    fwhm = np.array(self.inputs.in_fwhm[:3]) / np.array(
        hmcnii.get_header().get_zooms()[:3])
    self._results['fwhm'] = {
        'x': float(fwhm[0]),
        'y': float(fwhm[1]),
        'z': float(fwhm[2]),
        'avg': float(np.average(fwhm))
    }

    # Image specs
    self._results['size'] = {
        'x': int(hmcdata.shape[0]),
        'y': int(hmcdata.shape[1]),
        'z': int(hmcdata.shape[2])
    }
    self._results['spacing'] = {
        i: float(v)
        for i, v in zip(['x', 'y', 'z'], hmcnii.get_header().get_zooms()[:3])
    }

    try:
        self._results['size']['t'] = int(hmcdata.shape[3])
    except IndexError:
        pass

    try:
        self._results['spacing']['tr'] = float(
            hmcnii.get_header().get_zooms()[3])
    except IndexError:
        pass

    self._results['out_qc'] = _flatten_dict(self._results)
    return runtime
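
# --- Illustrative sketch (not part of MRIQC) ---------------------------------
# The FD block above summarises a framewise-displacement trace: mean FD, the
# number of frames exceeding ``fd_thres``, and that count as a percentage
# (note the denominator is len(fd_data) + 1, matching the code above). The
# trace and threshold below are synthetic.
import numpy as np

fd_data = np.array([0.05, 0.31, 0.12, 0.45, 0.08, 0.02, 0.27])
fd_thres = 0.2                        # hypothetical threshold, mm
num_fd = float((fd_data > fd_thres).sum())
print({'mean': float(fd_data.mean()),
       'num': int(num_fd),
       'perc': float(num_fd * 100 / (len(fd_data) + 1))})
# -> {'mean': 0.186, 'num': 3, 'perc': 37.5}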
def _run_interface(self, runtime):  # pylint: disable=R0914,E1101
    imnii = nb.load(self.inputs.in_noinu)
    erode = np.all(
        np.array(imnii.header.get_zooms()[:3], dtype=np.float32) < 1.9)

    # Load image corrected for INU
    inudata = np.nan_to_num(imnii.get_data())
    inudata[inudata < 0] = 0

    # Load binary segmentation from FSL FAST
    segnii = nb.load(self.inputs.in_segm)
    segdata = segnii.get_data().astype(np.uint8)

    # Load air, artifacts and head masks
    airdata = nb.load(self.inputs.air_msk).get_data().astype(np.uint8)
    artdata = nb.load(self.inputs.artifact_msk).get_data().astype(np.uint8)
    headdata = nb.load(self.inputs.head_msk).get_data().astype(np.uint8)
    rotdata = nb.load(self.inputs.rot_msk).get_data().astype(np.uint8)

    # Load Partial Volume Maps (pvms) from FSL FAST
    pvmdata = []
    for fname in self.inputs.in_pvms:
        pvmdata.append(nb.load(fname).get_data().astype(np.float32))

    # Summary stats
    stats = summary_stats(inudata, pvmdata, airdata, erode=erode)
    self._results["summary"] = stats

    # SNR
    snrvals = []
    self._results["snr"] = {}
    for tlabel in ["csf", "wm", "gm"]:
        snrvals.append(
            snr(
                stats[tlabel]["median"],
                stats[tlabel]["stdv"],
                stats[tlabel]["n"],
            ))
        self._results["snr"][tlabel] = snrvals[-1]
    self._results["snr"]["total"] = float(np.mean(snrvals))

    snrvals = []
    self._results["snrd"] = {
        tlabel: snr_dietrich(stats[tlabel]["median"], stats["bg"]["mad"])
        for tlabel in ["csf", "wm", "gm"]
    }
    self._results["snrd"]["total"] = float(
        np.mean([val for _, val in list(self._results["snrd"].items())]))

    # CNR
    self._results["cnr"] = cnr(
        stats["wm"]["median"],
        stats["gm"]["median"],
        sqrt(sum(stats[k]["stdv"]**2 for k in ["bg", "gm", "wm"])),
    )

    # FBER
    self._results["fber"] = fber(inudata, headdata, rotdata)

    # EFC
    self._results["efc"] = efc(inudata, rotdata)

    # M2WM
    self._results["wm2max"] = wm2max(inudata, stats["wm"]["median"])

    # Artifacts
    self._results["qi_1"] = art_qi1(airdata, artdata)

    # CJV
    self._results["cjv"] = cjv(
        # mu_wm, mu_gm, sigma_wm, sigma_gm
        stats["wm"]["median"],
        stats["gm"]["median"],
        stats["wm"]["mad"],
        stats["gm"]["mad"],
    )

    # FWHM
    fwhm = np.array(self.inputs.in_fwhm[:3]) / np.array(
        imnii.header.get_zooms()[:3])
    self._results["fwhm"] = {
        "x": float(fwhm[0]),
        "y": float(fwhm[1]),
        "z": float(fwhm[2]),
        "avg": float(np.average(fwhm)),
    }

    # ICVs
    self._results["icvs"] = volume_fraction(pvmdata)

    # RPVE
    self._results["rpve"] = rpve(pvmdata, segdata)

    # Image specs
    self._results["size"] = {
        "x": int(inudata.shape[0]),
        "y": int(inudata.shape[1]),
        "z": int(inudata.shape[2]),
    }
    self._results["spacing"] = {
        i: float(v)
        for i, v in zip(["x", "y", "z"], imnii.header.get_zooms()[:3])
    }

    try:
        self._results["size"]["t"] = int(inudata.shape[3])
    except IndexError:
        pass

    try:
        self._results["spacing"]["tr"] = float(imnii.header.get_zooms()[3])
    except IndexError:
        pass

    # Bias
    bias = nb.load(self.inputs.in_bias).get_data()[segdata > 0]
    self._results["inu"] = {
        "range": float(
            np.abs(np.percentile(bias, 95.0) - np.percentile(bias, 5.0))),
        "med": float(np.median(bias)),
    }  # pylint: disable=E1101

    mni_tpms = [nb.load(tpm).get_data() for tpm in self.inputs.mni_tpms]
    in_tpms = [nb.load(tpm).get_data() for tpm in self.inputs.in_pvms]
    overlap = fuzzy_jaccard(in_tpms, mni_tpms)
    self._results["tpm_overlap"] = {
        "csf": overlap[0],
        "gm": overlap[1],
        "wm": overlap[2],
    }

    # Flatten the dictionary
    self._results["out_qc"] = _flatten_dict(self._results)

    return runtime
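
# --- Illustrative sketch (not part of MRIQC) ---------------------------------
# Both interfaces end by flattening the nested ``_results`` dictionary with
# ``_flatten_dict``. That helper is defined elsewhere in MRIQC; the stand-in
# below only illustrates the idea of collapsing nested IQM groups into flat
# "group_key" entries.
def _flatten_dict_sketch(indict, prefix=""):
    out = {}
    for key, value in indict.items():
        name = "_".join((prefix, key)) if prefix else key
        if isinstance(value, dict):
            out.update(_flatten_dict_sketch(value, prefix=name))
        else:
            out[name] = value
    return out

print(_flatten_dict_sketch({"efc": 0.45, "snr": {"wm": 12.3, "gm": 9.8}}))
# -> {'efc': 0.45, 'snr_wm': 12.3, 'snr_gm': 9.8}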