def niftidecomp_workflow(
    decompaxis,
    datafile,
    outputroot,
    datamaskname=None,
    decomptype="pca",
    pcacomponents=0.5,
    icacomponents=None,
    varnorm=True,
    demean=True,
    sigma=0.0,
):
    """Perform a PCA, sparse PCA, or ICA decomposition of a 4D NIFTI file.

    Parameters
    ----------
    decompaxis : str
        "temporal" decomposes along the time axis; any other value decomposes
        along the spatial axis (the data matrix is transposed before fitting).
    datafile : str
        Name of the 4D NIFTI file to decompose.
    outputroot : str
        Root name for all output files.
    datamaskname : str, optional
        Name of a NIFTI mask file - either 3D, or 4D matching datafile in time.
        If None, voxels with nonzero intensity range are processed.
    decomptype : {"pca", "sparse", "ica"}
        Which decomposition to perform ("sparse" selects SparsePCA).
    pcacomponents : float or int
        If < 1.0, the fraction of variance to retain; otherwise the number of
        components to return.
    icacomponents : int, optional
        Number of ICA components; None returns all significant components.
    varnorm : bool
        Variance normalize along the decomposition axis before fitting.
    demean : bool
        Remove the mean along the decomposition axis before fitting.
    sigma : float
        Width in mm of an optional spatial smoothing kernel (0.0 disables).
    """
    print(f"Will perform {decomptype} analysis along the {decompaxis} axis")

    if decompaxis == "temporal":
        decompaxisnum = 1
        # FIX: the original lambda returned None, so in temporal mode every
        # fit/transform call operated on None.  Temporal mode needs no
        # transposition, so use the identity.
        transposeifspatial = lambda x: x
    else:
        decompaxisnum = 0
        transposeifspatial = np.transpose

    # save the command line (FIX: the original saved it twice)
    tide_io.writevec([" ".join(sys.argv)], outputroot + "_commandline.txt")

    # read in data
    print("reading in data arrays")
    (
        datafile_img,
        datafile_data,
        datafile_hdr,
        datafiledims,
        datafilesizes,
    ) = tide_io.readfromnifti(datafile)
    if datamaskname is not None:
        (
            datamask_img,
            datamask_data,
            datamask_hdr,
            datamaskdims,
            datamasksizes,
        ) = tide_io.readfromnifti(datamaskname)

    xsize, ysize, numslices, timepoints = tide_io.parseniftidims(datafiledims)
    xdim, ydim, slicethickness, tr = tide_io.parseniftisizes(datafilesizes)

    # check dimensions
    if datamaskname is not None:
        print("checking mask dimensions")
        if not tide_io.checkspacedimmatch(datafiledims, datamaskdims):
            print("input mask spatial dimensions do not match image")
            exit()
        # a 3D mask (time dimension 1) is also acceptable
        if not (tide_io.checktimematch(datafiledims, datamaskdims) or datamaskdims[4] == 1):
            print("input mask time dimension does not match image")
            exit()

    # smooth the data, one volume at a time
    if sigma > 0.0:
        print("smoothing data")
        for i in range(timepoints):
            datafile_data[:, :, :, i] = tide_filt.ssmooth(
                xdim, ydim, slicethickness, sigma, datafile_data[:, :, :, i]
            )

    # allocating arrays
    print("reshaping arrays")
    numspatiallocs = int(xsize) * int(ysize) * int(numslices)
    rs_datafile = datafile_data.reshape((numspatiallocs, timepoints))

    print("masking arrays")
    if datamaskname is not None:
        if datamaskdims[4] == 1:
            proclocs = np.where(datamask_data.reshape(numspatiallocs) > 0.5)
        else:
            # 4D mask: keep voxels that are in the mask at least half the time
            proclocs = np.where(
                np.mean(datamask_data.reshape((numspatiallocs, timepoints)), axis=1) > 0.5
            )
            rs_mask = datamask_data.reshape((numspatiallocs, timepoints))[proclocs, :]
            rs_mask = np.where(rs_mask > 0.5, 1.0, 0.0)[0]
    else:
        # no mask given - process voxels that are not constant over time
        datamaskdims = [1, xsize, ysize, numslices, 1]
        themaxes = np.max(rs_datafile, axis=1)
        themins = np.min(rs_datafile, axis=1)
        thediffs = (themaxes - themins).reshape(numspatiallocs)
        proclocs = np.where(thediffs > 0.0)
    procdata = rs_datafile[proclocs, :][0]
    print(rs_datafile.shape, procdata.shape)

    # normalize the individual images
    if demean:
        print("demeaning array")
        themean = np.mean(procdata, axis=decompaxisnum)
        print("shape of mean", themean.shape)
        for i in range(procdata.shape[1 - decompaxisnum]):
            if decompaxisnum == 1:
                procdata[i, :] -= themean[i]
            else:
                procdata[:, i] -= themean[i]
    else:
        themean = np.ones(procdata.shape[1 - decompaxisnum])

    if varnorm:
        print("variance normalizing array")
        thevar = np.var(procdata, axis=decompaxisnum)
        print("shape of var", thevar.shape)
        for i in range(procdata.shape[1 - decompaxisnum]):
            if decompaxisnum == 1:
                procdata[i, :] /= thevar[i]
            else:
                procdata[:, i] /= thevar[i]
        # division by a zero variance produces nan/inf - zero those out
        procdata = np.nan_to_num(procdata)
    else:
        thevar = np.ones(procdata.shape[1 - decompaxisnum])

    # applying mask (only meaningful for a 4D mask)
    if datamaskdims[4] > 1:
        procdata *= rs_mask

    # now perform the decomposition
    if decomptype == "ica":
        print("performing ica decomposition")
        if icacomponents is None:
            print("will return all significant components")
        else:
            print("will return", icacomponents, "components")
        thefit = FastICA(n_components=icacomponents).fit(transposeifspatial(procdata))
        # FIX: the original never defined thetransform/theinvtrans on the ICA
        # path, so the coefficient/fit outputs below crashed with a NameError.
        thetransform = thefit.transform(transposeifspatial(procdata))
        theinvtrans = transposeifspatial(thefit.inverse_transform(thetransform))

        # Reconstruct signals
        if icacomponents is None:
            thecomponents = transposeifspatial(thefit.components_[:])
            print(thecomponents.shape[1], "components found")
        else:
            thecomponents = transposeifspatial(thefit.components_[0:icacomponents])
            print("returning first", thecomponents.shape[1], "components found")
    else:
        print("performing pca decomposition")
        if pcacomponents < 1.0:
            print(
                "will return the components accounting for",
                pcacomponents * 100.0,
                "% of the variance",
            )
        else:
            print("will return", pcacomponents, "components")
        if decomptype == "pca":
            thepca = PCA(n_components=pcacomponents)
        else:
            thepca = SparsePCA(n_components=pcacomponents)
        thefit = thepca.fit(transposeifspatial(procdata))
        thetransform = thepca.transform(transposeifspatial(procdata))
        theinvtrans = transposeifspatial(thepca.inverse_transform(thetransform))

        if pcacomponents < 1.0:
            thecomponents = transposeifspatial(thefit.components_[:])
            print("returning", thecomponents.shape[1], "components")
        else:
            # FIX: slice indices must be ints; pcacomponents may arrive as float
            thecomponents = transposeifspatial(thefit.components_[0 : int(pcacomponents)])

    # save the eigenvalues when the estimator provides them (FastICA and
    # SparsePCA have no explained_variance_ratio_, so guard the access -
    # the original crashed here with an AttributeError for those estimators)
    explainedvariance = getattr(thefit, "explained_variance_ratio_", None)
    if explainedvariance is not None:
        print("variance explained by component:", 100.0 * explainedvariance)
        tide_io.writenpvecs(
            100.0 * explainedvariance,
            outputroot + "_explained_variance_pct.txt",
        )

    if decompaxis == "temporal":
        # save the components
        print("writing component timecourses")
        tide_io.writenpvecs(thecomponents, outputroot + "_components.txt")

        # save the singular values (FIX: the original referenced an undefined
        # name 'thesingvals'; only PCA exposes singular_values_)
        thesingvals = getattr(thefit, "singular_values_", None)
        if thesingvals is not None:
            print("writing singular values")
            tide_io.writenpvecs(np.transpose(thesingvals), outputroot + "_singvals.txt")

        # save the coefficients
        print("writing out the coefficients")
        coefficients = thetransform
        print("coefficients shape:", coefficients.shape)
        # FIX: copy the header rather than mutating the shared input header
        theheader = datafile_hdr.copy()
        theheader["dim"][4] = coefficients.shape[1]
        tempout = np.zeros((numspatiallocs, coefficients.shape[1]), dtype="float")
        tempout[proclocs, :] = coefficients[:, :]
        tide_io.savetonifti(
            tempout.reshape((xsize, ysize, numslices, coefficients.shape[1])),
            theheader,
            outputroot + "_coefficients",
        )
        # unnormalize the dimensionality reduced data
        # FIX: theinvtrans only has rows for the masked voxels, so iterate over
        # its actual row count, not numspatiallocs (which overran the array
        # whenever the mask excluded voxels)
        for i in range(theinvtrans.shape[0]):
            theinvtrans[i, :] = thevar[i] * theinvtrans[i, :] + themean[i]

    else:
        # save the component images
        print("writing component images")
        theheader = datafile_hdr.copy()
        theheader["dim"][4] = thecomponents.shape[1]
        tempout = np.zeros((numspatiallocs, thecomponents.shape[1]), dtype="float")
        tempout[proclocs, :] = thecomponents[:, :]
        tide_io.savetonifti(
            tempout.reshape((xsize, ysize, numslices, thecomponents.shape[1])),
            theheader,
            outputroot + "_components",
        )

        # save the coefficients
        print("writing out the coefficients")
        coefficients = np.transpose(thetransform)
        tide_io.writenpvecs(coefficients, outputroot + "_coefficients.txt")

        # unnormalize the dimensionality reduced data (spatial mode: mean/var
        # were computed per-timepoint)
        for i in range(timepoints):
            theinvtrans[:, i] = thevar[i] * theinvtrans[:, i] + themean[i]

    print("writing fit data")
    theheader = datafile_hdr.copy()
    theheader["dim"][4] = theinvtrans.shape[1]
    tempout = np.zeros((numspatiallocs, theinvtrans.shape[1]), dtype="float")
    tempout[proclocs, :] = theinvtrans[:, :]
    tide_io.savetonifti(
        tempout.reshape((xsize, ysize, numslices, theinvtrans.shape[1])),
        theheader,
        outputroot + "_fit",
    )
def glmfilt(inputfile, numskip, outputroot, evfilename):
    """Fit a set of explanatory variables to a 4D NIFTI file with a GLM and
    remove the fitted signal.

    Parameters
    ----------
    inputfile : str
        Name of the 4D NIFTI file to filter.
    numskip : int
        Number of initial timepoints to discard from the input data.
    outputroot : str
        Root name for all output files (mean, fit, R, totaltoremove,
        trimmed, filtered).
    evfilename : list of str
        EV specifications: each entry is a NIFTI file (voxel-specific
        regressor), an FSL .par motion file (expands to 6 regressors), or a
        text file (one global regressor).
    """
    # initialize some variables
    evdata = []         # one entry per regressor: 4D array or 1D timecourse
    evisnifti = []      # True where the matching evdata entry is voxel-specific
    thedims_in = []
    thedims_ev = []
    thesizes_ev = []

    # read the datafile and the evfiles
    nim_input, nim_data, nim_header, thedims_in, thesizes_in = tide_io.readfromnifti(inputfile)
    xdim, ydim, slicedim, tr = tide_io.parseniftisizes(thesizes_in)
    print(xdim, ydim, slicedim, tr)
    xsize, ysize, numslices, timepoints = tide_io.parseniftidims(thedims_in)
    print(xsize, ysize, numslices, timepoints)

    numregressors = 0
    for i in range(0, len(evfilename)):
        print("file ", i, " has name ", evfilename[i])
        # check to see if file is nifti or text
        fileisnifti = tide_io.checkifnifti(evfilename[i])
        fileisparfile = tide_io.checkifparfile(evfilename[i])
        if fileisnifti:
            # if file is nifti
            print("reading voxel specific regressor from ", evfilename[i])
            (
                nim_evinput,
                ev_data,
                ev_header,
                thedims_evinput,
                thesizes_evinput,
            ) = tide_io.readfromnifti(evfilename[i])
            evisnifti.append(True)
            evdata.append(1.0 * ev_data)
            thedims_ev.append(thedims_evinput)
            thesizes_ev.append(thesizes_evinput)
            numregressors += 1
        elif fileisparfile:
            # check to see if file a par file
            print("reading 6 global regressors from an FSL parfile")
            evtimeseries = tide_io.readvecs(evfilename[i])
            print("timeseries length = ", len(evtimeseries[0, :]))
            for j in range(0, 6):
                evisnifti.append(False)
                evdata.append(1.0 * evtimeseries[j, :])
                thedims_evinput = 1.0 * thedims_in
                thesizes_evinput = 1.0 * thesizes_in
                thedims_ev.append(thedims_evinput)
                thesizes_ev.append(thesizes_evinput)
                numregressors += 1
        else:
            # if file is text
            print("reading global regressor from ", evfilename[i])
            evtimeseries = tide_io.readvec(evfilename[i])
            print("timeseries length = ", len(evtimeseries))
            evisnifti.append(False)
            evdata.append(1.0 * evtimeseries)
            thedims_evinput = 1.0 * thedims_in
            thesizes_evinput = 1.0 * thesizes_in
            thedims_ev.append(thedims_evinput)
            thesizes_ev.append(thesizes_evinput)
            numregressors += 1

    # dimension checks
    # NOTE(review): for text/par regressors thedims_ev is copied from the
    # input file, so its time dimension equals `timepoints` and the check
    # below only passes when numskip == 0 - confirm intended semantics.
    for j in range(0, numregressors):
        for i in range(0, 4):
            if thedims_in[i] != thedims_ev[j][i]:
                print("Input file and ev file ", j, " dimensions do not match")
                print("dimension ", i, ":", thedims_in[i], " != ", thedims_ev[j][i])
                exit()
        if timepoints - numskip != thedims_ev[j][4]:
            print("Input file and ev file ", j, " dimensions do not match")
            print("dimension ", 4, ":", timepoints, "!= ", thedims_ev[j][4], "+", numskip)
            exit()

    print("will perform GLM with ", numregressors, " regressors")
    meandata = np.zeros((xsize, ysize, numslices), dtype="float")
    fitdata = np.zeros((xsize, ysize, numslices, numregressors), dtype="float")
    Rdata = np.zeros((xsize, ysize, numslices), dtype="float")
    trimmeddata = 1.0 * nim_data[:, :, :, numskip:]

    # first pass: fit the regressors in every voxel with signal
    for z in range(0, numslices):
        print("processing slice ", z)
        for y in range(0, ysize):
            for x in range(0, xsize):
                regressorvec = []
                for j in range(0, numregressors):
                    if evisnifti[j]:
                        regressorvec.append(evdata[j][x, y, z, :])
                    else:
                        regressorvec.append(evdata[j])
                if np.max(trimmeddata[x, y, z, :]) - np.min(trimmeddata[x, y, z, :]) > 0.0:
                    thefit, R = tide_fit.mlregress(regressorvec, trimmeddata[x, y, z, :])
                    meandata[x, y, z] = thefit[0, 0]
                    Rdata[x, y, z] = R
                    for j in range(0, numregressors):
                        fitdata[x, y, z, j] = thefit[0, j + 1]
                else:
                    meandata[x, y, z] = 0.0
                    Rdata[x, y, z] = 0.0
                    for j in range(0, numregressors):
                        fitdata[x, y, z, j] = 0.0

    # first save the things with a small numbers of timepoints
    print("fitting complete: about to save the fit data")
    # FIX: copy the header rather than mutating the shared input header
    theheader = nim_header.copy()
    theheader["dim"][4] = 1
    tide_io.savetonifti(meandata, theheader, outputroot + "_mean")
    for j in range(0, numregressors):
        tide_io.savetonifti(fitdata[:, :, :, j], theheader, outputroot + "_fit" + str(j))
    tide_io.savetonifti(Rdata, theheader, outputroot + "_R")
    Rdata = None

    print()
    print("Now constructing the array of data to remove")
    totaltoremove = np.zeros((xsize, ysize, numslices, timepoints - numskip), dtype="float")
    for z in range(0, numslices):
        print("processing slice ", z)
        for y in range(0, ysize):
            for x in range(0, xsize):
                if np.max(trimmeddata[x, y, z, :]) - np.min(trimmeddata[x, y, z, :]) > 0.0:
                    # FIX: rebuild the regressor set for THIS voxel.  The
                    # original reused the regressorvec left over from the last
                    # voxel of the fitting pass, which silently used the wrong
                    # timecourses for voxel-specific (NIFTI) regressors.
                    regressorvec = []
                    for j in range(0, numregressors):
                        if evisnifti[j]:
                            regressorvec.append(evdata[j][x, y, z, :])
                        else:
                            regressorvec.append(evdata[j])
                    for j in range(0, numregressors):
                        totaltoremove[x, y, z, :] += fitdata[x, y, z, j] * regressorvec[j]
                else:
                    totaltoremove[x, y, z, :] = 0.0
    print("Array construction done.  Saving files")

    # now save the things with full timecourses
    theheader = nim_header.copy()
    theheader["dim"][4] = timepoints - numskip
    tide_io.savetonifti(totaltoremove, theheader, outputroot + "_totaltoremove")
    filtereddata = trimmeddata - totaltoremove
    totaltoremove = None
    tide_io.savetonifti(trimmeddata, theheader, outputroot + "_trimmed")
    trimmeddata = None
    tide_io.savetonifti(filtereddata, theheader, outputroot + "_filtered")
def roisummarize(args):
    """Summarize a 3D or 4D NIFTI file by the labelled regions of a template.

    For 4D input, each in-template voxel timecourse is filtered, then mean
    timecourses are extracted per label and written to a text file.  For 3D
    input, per-region mean values and statistics are written instead.

    Parameters
    ----------
    args : namespace
        Parsed command line arguments.
        NOTE(review): the body immediately re-parses sys.argv via
        _get_parser(), discarding the passed-in namespace - confirm this is
        the intended dispatch pattern before relying on the parameter.
    """
    # grab the command line arguments then pass them off.
    try:
        args = _get_parser().parse_args()
    except SystemExit:
        _get_parser().print_help()
        raise

    # set the sample rate
    # FIX: the original else-branch assigned args.samplerate to an unused
    # local variable, which had no effect; a non-"auto" value is kept as-is.
    if args.samplerate == "auto":
        args.samplerate = 1.0

    args, thefilter = pf.postprocessfilteropts(args, debug=args.debug)

    print("loading fmri data")
    input_img, input_data, input_hdr, thedims, thesizes = tide_io.readfromnifti(
        args.inputfilename)
    print("loading template data")
    template_img, template_data, template_hdr, templatedims, templatesizes = tide_io.readfromnifti(
        args.templatefile)

    print("checking dimensions")
    if not tide_io.checkspacematch(input_hdr, template_hdr):
        print("template file does not match spatial coverage of input fmri file")
        sys.exit()

    print("reshaping")
    xsize = thedims[1]
    ysize = thedims[2]
    numslices = thedims[3]
    numtimepoints = thedims[4]
    numvoxels = int(xsize) * int(ysize) * int(numslices)
    templatevoxels = np.reshape(template_data, numvoxels).astype(int)

    if numtimepoints > 1:
        # 4D input: filter each in-template voxel, then summarize by label
        inputvoxels = np.reshape(input_data, (numvoxels, numtimepoints))[:, args.numskip:]
        print("filtering")
        for thevoxel in range(numvoxels):
            if templatevoxels[thevoxel] > 0:
                inputvoxels[thevoxel, :] = thefilter.apply(
                    args.samplerate, inputvoxels[thevoxel, :])

        print("summarizing")
        timecourses = summarize4Dbylabel(
            inputvoxels, templatevoxels, normmethod=args.normmethod, debug=args.debug)

        print("writing data")
        tide_io.writenpvecs(timecourses, args.outputfile + "_timecourses")
    else:
        # 3D input: one mean value and stats per labelled region
        inputvoxels = np.reshape(input_data, (numvoxels))
        numregions = np.max(templatevoxels)
        template_hdr["dim"][4] = numregions
        outputvoxels, regionstats = summarize3Dbylabel(
            inputvoxels, templatevoxels, debug=args.debug)
        tide_io.savetonifti(
            outputvoxels.reshape((xsize, ysize, numslices)),
            template_hdr,
            args.outputfile + "_meanvals",
        )
        tide_io.writenpvecs(np.array(regionstats), args.outputfile + "_regionstats.txt")
def polyfitim(
    datafile,
    datamask,
    templatefile,
    templatemask,
    outputroot,
    regionatlas=None,
    order=1,
):
    """Fit a spatial template (optionally per atlas region) to every timepoint
    of a 4D NIFTI file and write the fit, residuals, and fit coefficients.

    Parameters
    ----------
    datafile : str
        Name of the 4D NIFTI file to fit.
    datamask : str
        Name of a NIFTI mask (3D, or 4D matching datafile) selecting the
        voxels used for fitting.
    templatefile : str
        Name of the 3D NIFTI template to fit to the data.
    templatemask : str
        Name of a 3D NIFTI mask for the template.
    outputroot : str
        Root name for all output files.
    regionatlas : str, optional
        Name of a 3D NIFTI integer atlas; when given, the fit is performed
        separately within each region.
    order : int
        Polynomial order of the fit (1 = linear, 2 adds a squared term).
    """
    # read in data
    print("reading in data arrays")
    (
        datafile_img,
        datafile_data,
        datafile_hdr,
        datafiledims,
        datafilesizes,
    ) = tide_io.readfromnifti(datafile)
    (
        datamask_img,
        datamask_data,
        datamask_hdr,
        datamaskdims,
        datamasksizes,
    ) = tide_io.readfromnifti(datamask)
    (
        templatefile_img,
        templatefile_data,
        templatefile_hdr,
        templatefiledims,
        templatefilesizes,
    ) = tide_io.readfromnifti(templatefile)
    (
        templatemask_img,
        templatemask_data,
        templatemask_hdr,
        templatemaskdims,
        templatemasksizes,
    ) = tide_io.readfromnifti(templatemask)
    if regionatlas is not None:
        (
            regionatlas_img,
            regionatlas_data,
            regionatlas_hdr,
            regionatlasdims,
            regionatlassizes,
        ) = tide_io.readfromnifti(regionatlas)

    xsize = datafiledims[1]
    ysize = datafiledims[2]
    numslices = datafiledims[3]
    timepoints = datafiledims[4]

    # check dimensions
    print("checking dimensions")
    if not tide_io.checkspacedimmatch(datafiledims, datamaskdims):
        print("input mask spatial dimensions do not match image")
        exit()
    if datamaskdims[4] == 1:
        print("using 3d data mask")
        datamask3d = True
    else:
        datamask3d = False
        # time match is only required for a 4D mask
        if not tide_io.checktimematch(datafiledims, datamaskdims):
            print("input mask time dimension does not match image")
            exit()
    if not tide_io.checkspacedimmatch(datafiledims, templatefiledims):
        print(templatefiledims, "template file spatial dimensions do not match image")
        exit()
    if not templatefiledims[4] == 1:
        print("template file time dimension is not equal to 1")
        exit()
    if not tide_io.checkspacedimmatch(datafiledims, templatemaskdims):
        print("template mask spatial dimensions do not match image")
        exit()
    if not templatemaskdims[4] == 1:
        print("template mask time dimension is not equal to 1")
        exit()
    if regionatlas is not None:
        if not tide_io.checkspacedimmatch(datafiledims, regionatlasdims):
            print("template mask spatial dimensions do not match image")
            exit()
        if not regionatlasdims[4] == 1:
            print("regionatlas time dimension is not equal to 1")
            exit()

    # allocating arrays
    print("allocating arrays")
    numspatiallocs = int(xsize) * int(ysize) * int(numslices)
    rs_datafile = datafile_data.reshape((numspatiallocs, timepoints))
    if datamask3d:
        rs_datamask = datamask_data.reshape(numspatiallocs)
    else:
        rs_datamask = datamask_data.reshape((numspatiallocs, timepoints))
    rs_datamask_bin = np.where(rs_datamask > 0.9, 1.0, 0.0)
    rs_templatefile = templatefile_data.reshape(numspatiallocs)
    rs_templatemask = templatemask_data.reshape(numspatiallocs)
    rs_templatemask_bin = np.where(rs_templatemask > 0.1, 1.0, 0.0)
    if regionatlas is not None:
        rs_regionatlas = regionatlas_data.reshape(numspatiallocs)
        numregions = int(np.max(rs_regionatlas))

    fitdata = np.zeros((numspatiallocs, timepoints), dtype="float")

    # per-timepoint (and optionally per-region) fit coefficients
    if regionatlas is not None:
        lincoffs = np.zeros((numregions, timepoints), dtype="float")
        sqrcoffs = np.zeros((numregions, timepoints), dtype="float")
        offsets = np.zeros((numregions, timepoints), dtype="float")
        rvals = np.zeros((numregions, timepoints), dtype="float")
    else:
        lincoffs = np.zeros(timepoints, dtype="float")
        sqrcoffs = np.zeros(timepoints, dtype="float")
        offsets = np.zeros(timepoints, dtype="float")
        rvals = np.zeros(timepoints, dtype="float")

    if regionatlas is not None:
        print("making region masks")
        regionvoxels = np.zeros((numspatiallocs, numregions), dtype="float")
        for region in range(0, numregions):
            thisregion = np.where((rs_regionatlas * rs_templatemask_bin) == (region + 1))
            regionvoxels[thisregion, region] = 1.0

    # mask everything
    print("masking template")
    maskedtemplate = rs_templatefile * rs_templatemask_bin

    # cycle over all images
    print("now cycling over all images")
    for thetime in range(0, timepoints):
        print("fitting timepoint", thetime)

        # get the appropriate mask
        # FIX: the original wrapped the 3D-mask assignment in a pointless
        # 'for i in range(timepoints)' loop that assigned the same value
        # timepoints times per outer iteration.
        if datamask3d:
            thisdatamask = rs_datamask_bin
        else:
            thisdatamask = rs_datamask_bin[:, thetime]

        if regionatlas is not None:
            for region in range(0, numregions):
                voxelstofit = np.where(regionvoxels[:, region] * thisdatamask > 0.5)
                voxelstoreconstruct = np.where(regionvoxels[:, region] > 0.5)
                if order == 2:
                    thefit, R = tide_fit.mlregress(
                        [
                            rs_templatefile[voxelstofit],
                            np.square(rs_templatefile[voxelstofit]),
                        ],
                        rs_datafile[voxelstofit, thetime][0],
                    )
                else:
                    thefit, R = tide_fit.mlregress(
                        rs_templatefile[voxelstofit],
                        rs_datafile[voxelstofit, thetime][0],
                    )
                lincoffs[region, thetime] = thefit[0, 1]
                offsets[region, thetime] = thefit[0, 0]
                rvals[region, thetime] = R
                if order == 2:
                    sqrcoffs[region, thetime] = thefit[0, 2]
                    fitdata[voxelstoreconstruct, thetime] += (
                        sqrcoffs[region, thetime]
                        * np.square(rs_templatefile[voxelstoreconstruct])
                        + lincoffs[region, thetime] * rs_templatefile[voxelstoreconstruct]
                        + offsets[region, thetime])
                else:
                    fitdata[voxelstoreconstruct, thetime] += (
                        lincoffs[region, thetime] * rs_templatefile[voxelstoreconstruct]
                        + offsets[region, thetime])
        else:
            voxelstofit = np.where(thisdatamask > 0.5)
            voxelstoreconstruct = np.where(rs_templatemask > 0.5)
            thefit, R = tide_fit.mlregress(
                rs_templatefile[voxelstofit], rs_datafile[voxelstofit, thetime][0])
            lincoffs[thetime] = thefit[0, 1]
            offsets[thetime] = thefit[0, 0]
            rvals[thetime] = R
            fitdata[voxelstoreconstruct, thetime] = (
                lincoffs[thetime] * rs_templatefile[voxelstoreconstruct]
                + offsets[thetime])

    residuals = rs_datafile - fitdata

    # write out the data files
    print("writing time series")
    if order == 2:
        tide_io.writenpvecs(sqrcoffs, outputroot + "_sqrcoffs.txt")
    tide_io.writenpvecs(lincoffs, outputroot + "_lincoffs.txt")
    tide_io.writenpvecs(offsets, outputroot + "_offsets.txt")
    tide_io.writenpvecs(rvals, outputroot + "_rvals.txt")
    if regionatlas is not None:
        for region in range(0, numregions):
            # FIX: the coefficient arrays are (numregions, timepoints), so a
            # region's values are row [region, :]; the original indexed
            # column [:, region], mixing values across regions.
            print(
                "region",
                region + 1,
                "slope mean, std:",
                np.mean(lincoffs[region, :]),
                np.std(lincoffs[region, :]),
            )
            print(
                "region",
                region + 1,
                "offset mean, std:",
                np.mean(offsets[region, :]),
                np.std(offsets[region, :]),
            )
    else:
        print("slope mean, std:", np.mean(lincoffs), np.std(lincoffs))
        print("offset mean, std:", np.mean(offsets), np.std(offsets))

    print("writing nifti series")
    tide_io.savetonifti(
        fitdata.reshape((xsize, ysize, numslices, timepoints)),
        datafile_hdr,
        outputroot + "_fit",
    )
    tide_io.savetonifti(
        residuals.reshape((xsize, ysize, numslices, timepoints)),
        datafile_hdr,
        outputroot + "_residuals",
    )
def test_io(debug=True, display=False):
    """Exercise the basic tide_io helpers: filename checks, NIFTI read/write
    round-trip, and header/dimension comparison utilities."""
    # create outputdir if it doesn't exist
    create_dir(get_test_temp_path())

    # test checkifnifti (FIX: compare truthiness directly, not '== True')
    assert tide_io.checkifnifti("test.nii")
    assert tide_io.checkifnifti("test.nii.gz")
    assert not tide_io.checkifnifti("test.txt")

    # test checkiftext
    assert not tide_io.checkiftext("test.nii")
    assert not tide_io.checkiftext("test.nii.gz")
    assert tide_io.checkiftext("test.txt")

    # test getniftiroot
    assert tide_io.getniftiroot("test.nii") == "test"
    assert tide_io.getniftiroot("test.nii.gz") == "test"
    assert tide_io.getniftiroot("test.txt") == "test.txt"

    # test fmritimeinfo
    fmritimeinfothresh = 1e-2
    tr, timepoints = tide_io.fmritimeinfo(
        os.path.join(get_examples_path(), "sub-HAPPYTEST.nii.gz"))
    assert np.fabs(tr - 1.16) < fmritimeinfothresh
    assert timepoints == 110
    tr, timepoints = tide_io.fmritimeinfo(
        os.path.join(get_examples_path(), "sub-RAPIDTIDETEST.nii.gz"))
    assert np.fabs(tr - 1.5) < fmritimeinfothresh
    assert timepoints == 260

    # test niftifile reading
    sizethresh = 1e-3
    happy_img, happy_data, happy_hdr, happydims, happysizes = tide_io.readfromnifti(
        os.path.join(get_examples_path(), "sub-HAPPYTEST.nii.gz"))
    fmri_img, fmri_data, fmri_hdr, fmridims, fmrisizes = tide_io.readfromnifti(
        os.path.join(get_examples_path(), "sub-RAPIDTIDETEST.nii.gz"))
    targetdims = [4, 65, 89, 64, 110, 1, 1, 1]
    targetsizes = [-1.00, 2.39583, 2.395830, 2.4, 1.16, 0.00, 0.00, 0.00]
    if debug:
        print("happydims:", happydims)
        print("targetdims:", targetdims)
        print("happysizes:", happysizes)
        print("targetsizes:", targetsizes)
    for i in range(len(targetdims)):
        assert targetdims[i] == happydims[i]
    assert mse(np.array(targetsizes), np.array(happysizes)) < sizethresh

    # test file writing
    datathresh = 2e-3  # relaxed threshold because sub-RAPIDTIDETEST has been converted to INT16
    tide_io.savetonifti(
        fmri_data, fmri_hdr,
        os.path.join(get_test_temp_path(), "sub-RAPIDTIDETEST_copy.nii.gz"))
    (
        fmricopy_img,
        fmricopy_data,
        fmricopy_hdr,
        fmricopydims,
        fmricopysizes,
    ) = tide_io.readfromnifti(
        os.path.join(get_test_temp_path(), "sub-RAPIDTIDETEST_copy.nii.gz"))
    assert tide_io.checkspacematch(fmri_hdr, fmricopy_hdr)
    # FIX: the original compared fmridims against itself, which is vacuously
    # true; the round-trip check must compare against the copy's dims.
    assert tide_io.checktimematch(fmridims, fmricopydims)
    assert mse(fmri_data, fmricopy_data) < datathresh

    # test file header comparisons
    assert tide_io.checkspacematch(happy_hdr, happy_hdr)
    assert not tide_io.checkspacematch(happy_hdr, fmri_hdr)
    assert tide_io.checktimematch(happydims, happydims)
    assert not tide_io.checktimematch(happydims, fmridims)