Example #1
#Get timepoints for SUVR window
startTime = timePred[np.where(petPredD > 10)[0][0]]
endTime = startTime + args.window[0]
suvrTime = np.linspace(startTime, endTime, 100)

#Get new basis
suvrBasis = nagini.rSplineBasis(suvrTime, petKnots)

#Get fits at each voxel
voxX, _, _, _ = np.linalg.lstsq(petBasis, petMasked.T)

#Get predictions at each voxel
voxPred = np.dot(suvrBasis, voxX)

#Get WB suvr image
voxSum = np.sum(voxPred, axis=0)
voxSum = voxSum / np.mean(voxSum)

#Write out image
nagini.writeMaskedImage(voxSum, mask.shape, maskData, pet.affine, pet.header,
                        '%s_suvrSpline' % (args.out[0]))

#For comparison calculate non-smoothed SUVR
startFrame = np.where(petTime >= startTime)[0][0]
endFrame = np.where(petTime <= endTime)[0][-1]
petSum = np.sum(petMasked[:, startFrame:(endFrame + 1)], axis=1)
petSum = petSum / np.mean(petSum)
nagini.writeMaskedImage(petSum, mask.shape, maskData, pet.affine, pet.header,
                        '%s_suvr' % (args.out[0]))
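
For context, the spline step above is just an ordinary least-squares fit of each voxel's time-activity curve against a temporal basis, followed by re-evaluation of the fitted curve on a denser grid inside the SUVR window. The sketch below reproduces that pattern with a plain polynomial basis from NumPy instead of nagini.rSplineBasis; every variable and number in it is illustrative, not taken from the script.

import numpy as np

#Hypothetical frame mid-times and two noisy voxel TACs (nVox x nFrame)
frameTime = np.linspace(0, 600, 25)
tacs = np.vstack((np.exp(-frameTime / 300.0), np.exp(-frameTime / 150.0)))
tacs = tacs + np.random.normal(0, 0.01, tacs.shape)

#Design matrix evaluated at the acquired frame times (cubic polynomial basis)
fitBasis = np.vander(frameTime, 4)

#Least-squares coefficients for every voxel at once
coefs, _, _, _ = np.linalg.lstsq(fitBasis, tacs.T, rcond=None)

#Re-evaluate the fitted curves on a dense grid inside the SUVR window
denseTime = np.linspace(200, 500, 100)
densePred = np.dot(np.vander(denseTime, 4), coefs)

#Sum over the window and normalize by the mean, as for the SUVR image above
suvr = np.sum(densePred, axis=0)
suvr = suvr / np.mean(suvr)
print(suvr)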
Example #2
                                 pet.affine,
                                 header=pet.header)

        #Then do the writing
        try:
            outImg.to_filename('%s.nii.gz' % (pName))
        except (IOError):
            print('ERROR: Cannot save image at %s.' % (pName))
            sys.exit()

#Write out root mean square images
nib.Nifti1Image(roiParams[:, -1], affine=np.identity(4)).to_filename(
    '%s_%s.nii.gz' % (args.out[0], paramNames[-1]))
if args.noVox != 1:
    nagini.writeMaskedImage(voxParams[:, -1], maskData.shape, maskData,
                            pet.affine, pet.header,
                            '%s_%s' % (args.out[0], paramNames[-1]))

#Write out chosen arguments
nagini.writeArgs(args, args.out[0])

#Convergence output
try:
    #Open file
    cOut = open('%s_convergence.txt' % (args.out[0]), "w")

    #Write out ROI data
    cOut.write('%i of %i\n' % (roiC, nRoi))

    #Write out voxel data if necessary
    if args.noVox != 1:
Example #3
File: gluAif.py Project: tblazey/nagini
        #Calculate residual with delay
        fitResid = voxTac - voxFitted

        #Calculate normalized root mean square deviation
        fitRmsd = np.sqrt(np.sum(np.power(fitResid, 2)) /
                          voxTac.shape[0]) / np.mean(voxTac)

        #Save residual
        fitParams[voxIdx, -1] = fitRmsd

    except (RuntimeError, ValueError):
        noC += 1

#Warn user about lack of convergence
if noC > 0:
    print('Warning: %i of %i voxels did not converge.' % (noC, nVox))

#############
###Output!###
#############
print('Writing out results...')

#Set names for model images
paramNames = ['gef', 'kOne', 'kTwo', 'kThree', 'kFour', 'cmrGlu', 'alphaOne',
              'alphaTwo', 'betaOne', 'betaTwo', 'netEx', 'influx', 'DV',
              'conc', 'delay', 'nRmsd']

#Do the writing. For now, doesn't write variance images.
for iIdx in range(fitParams.shape[1] - 1):
    nagini.writeMaskedImage(fitParams[:, iIdx], maskData.shape, maskData,
                            pet.affine, pet.header,
                            '%s_%s' % (args.out[0], paramNames[iIdx]))
nagini.writeMaskedImage(fitParams[:, -1], maskData.shape, maskData,
                        pet.affine, pet.header,
                        '%s_%s' % (args.out[0], paramNames[-1]))
nagini.writeArgs(args, args.out[0])
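
The last column written above holds the normalized root mean square deviation of the fit, i.e. the RMS of the residual divided by the mean of the measured TAC. A minimal stand-alone version of that calculation, with made-up inputs:

import numpy as np

def normRmsd(measured, fitted):
    #Root mean square deviation of the fit, normalized by the mean signal
    resid = measured - fitted
    rmsd = np.sqrt(np.sum(np.power(resid, 2)) / measured.shape[0])
    return rmsd / np.mean(measured)

#Hypothetical decaying TAC and a slightly biased fit
tac = np.exp(-np.linspace(0, 5, 50))
print(normRmsd(tac, tac * 0.98))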
Example #4
File: cbfAif.py Project: tblazey/nagini
#Set names for model images
if args.poly is True:
    paramNames = ['CBF']
    fitParams = voxCbf[:, np.newaxis]
else:
    paramNames = wbLabels
    if args.fModel != 1:
        paramNames.remove('Dispersion')
        paramNames.remove('Delay')
    paramNames.append('nRmsd')

#Output images
for iIdx in range(fitParams.shape[1]):
    nagini.writeMaskedImage(fitParams[:, iIdx], brainData.shape, brainData,
                            pet.affine, pet.header,
                            '%s_%s' % (args.out[0], paramNames[iIdx]))

#Compute extraction if necessary
if args.ps is not None:
    if args.poly is True:
        voxEx = voxCbf / cbfMasked
    else:
        voxEx = 1.0 - np.exp(fitParams[:, 0] / -cbfMasked)
    nagini.writeMaskedImage(voxEx, brainData.shape, brainData, pet.affine,
                            pet.header, '%s_extraction' % (args.out[0]))

    #Compute PSw if we used polynomial regression method
    if args.poly is True:
        voxPSw = -cbfMasked * np.log(1.0 - voxEx)
        nagini.writeMaskedImage(voxPSw, brainData.shape, brainData, pet.affine,
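
The extraction and PSw images rely on the standard Renkin-Crone relationship E = 1 - exp(-PS/F) and its inversion PS = -F * ln(1 - E). A short stand-alone illustration with fabricated flow and permeability values (the variable names below are hypothetical and not the script's):

import numpy as np

#Hypothetical flow and permeability-surface area product values
flow = np.array([0.4, 0.6, 0.8])
ps = np.array([1.0, 1.2, 1.5])

#Renkin-Crone extraction fraction: E = 1 - exp(-PS / F)
extraction = 1.0 - np.exp(-ps / flow)

#Inverting the relationship recovers PS from E and F, as for the PSw image
psBack = -flow * np.log(1.0 - extraction)
print(extraction, psBack)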
Example #5
File: gluIdaif.py Project: tblazey/nagini
            fitParams[voxIdx, 4] = ((voxFit[0][0] * cbfMasked[voxIdx] *
                                     voxFit[0][2]) /
                                    (voxFit[0][1] + voxFit[0][2])) * gluScale

            #Save parameter variances. Use delta method to get cmrGlu and k1 variance
            fitParams[voxIdx, 5:8] = np.diag(voxFit[1])
            fitParams[voxIdx, 8] = voxFit[1][0][0] * np.power(cbfMasked[voxIdx], 2)
            gluGrad = np.array([
                (gluScale * cbfMasked[voxIdx] * voxFit[0][2]) /
                (voxFit[0][1] + voxFit[0][2]),
                (-1 * voxFit[0][0] * voxFit[0][2] * gluScale *
                 cbfMasked[voxIdx]) / np.power(voxFit[0][1] + voxFit[0][2], 2),
                (voxFit[0][0] * voxFit[0][1] * gluScale * cbfMasked[voxIdx]) /
                np.power(voxFit[0][1] + voxFit[0][2], 2)
            ])
            fitParams[voxIdx, 9] = np.dot(np.dot(gluGrad.T, voxFit[1]),
                                          gluGrad)

            #Get normalized root mean square deviation
            fitResid = voxTac - voxFunc(fitX, voxFit[0][0], voxFit[0][1],
                                        voxFit[0][2])
            fitRmsd = np.sqrt(np.sum(np.power(fitResid, 2)) / voxTac.shape[0])
            fitParams[voxIdx, 10] = fitRmsd / np.mean(voxTac)

    except (RuntimeError):
        noC += 1

#Warn user about lack of convergence
if noC > 0:
    print('Warning: %i of %i voxels did not converge.' % (noC, nVox))

#############
###Output!###
#############
print('Writing out results...')

#Write out two parameter model images
for iIdx in range(len(imgNames)):
    nagini.writeMaskedImage(fitParams[:, iIdx], brain.shape, brainData,
                            pet.affine, pet.header,
                            '%s_%s' % (args.out[0], imgNames[iIdx]))
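
The cmrGlu variance above comes from the first-order delta method: for a derived quantity g(theta) of fitted parameters with covariance matrix Sigma, Var[g] is approximately grad(g)' Sigma grad(g), which is exactly the gradient-covariance-gradient product stored in fitParams[voxIdx, 9]. A minimal numeric sketch of that propagation, with an invented two-parameter function:

import numpy as np

#Hypothetical fitted parameters and their covariance matrix (e.g. from curve_fit)
theta = np.array([0.5, 0.1])
cov = np.array([[4e-4, 1e-5],
                [1e-5, 9e-6]])

#Derived quantity g(theta) = theta[0] * theta[1] and its gradient
g = theta[0] * theta[1]
grad = np.array([theta[1], theta[0]])

#First-order (delta method) variance of the derived quantity
gVar = np.dot(np.dot(grad.T, cov), grad)
print(g, gVar)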
Example #6
                 cbfMasked[voxIdx]) / np.power(voxFit[0][1] + voxFit[0][2], 2),
                (voxFit[0][0] * voxFit[0][1] * gluScale * cbfMasked[voxIdx]) /
                np.power(voxFit[0][1] + voxFit[0][2], 2)
            ])
            fitParams[voxIdx, 9] = np.dot(np.dot(gluGrad.T, voxFit[1]),
                                          gluGrad)

            #Get normalized root mean square deviation
            fitResid = voxTac - voxFunc(fitX, voxFit[0][0], voxFit[0][1],
                                        voxFit[0][2])
            fitRmsd = np.sqrt(np.sum(np.power(fitResid, 2)) / voxTac.shape[0])
            fitParams[voxIdx, 10] = fitRmsd / np.mean(voxTac)

    except (RuntimeError):
        noC += 1

#Warn user about lack of convergence
if noC > 0:
    print('Warning: %i of %i voxels did not converge.' % (noC, nVox))

#############
###Output!###
#############
print('Writing out results...')

#Write out two parameter model images
for iIdx in range(len(imgNames)):
    nagini.writeMaskedImage(fitParams[:, iIdx], brain.shape, brainData,
                            pet.affine, pet.header,
                            '%s_%s' % (args.out[0], imgNames[iIdx]))
Example #7
endTime = startTime + args.window[0]
suvrTime = np.linspace(startTime, endTime, 100)

#Get new basis
suvrBasis = nagini.rSplineBasis(suvrTime, petKnots)

#Get fits at each voxel
voxX, _, _, _ = np.linalg.lstsq(petBasis, petMasked.T)

#Get predictions at each voxel
voxPred = np.dot(suvrBasis, voxX)

#Get WB suvr image
voxSum = np.sum(voxPred, axis=0)
voxSum = voxSum / np.mean(voxSum)

#Write out image
nagini.writeMaskedImage(voxSum, mask.shape, maskData, pet.affine, pet.header,
                        '%s_suvrSpline' % (args.out[0]))

#For comparison calculate non-smoothed SUVR
startFrame = np.where(petTime >= startTime)[0][0]
endFrame = np.where(petTime <= endTime)[0][-1]
petSum = np.sum(petMasked[:, startFrame:(endFrame + 1)], axis=1)
petSum = petSum / np.mean(petSum)
nagini.writeMaskedImage(petSum, mask.shape, maskData, pet.affine, pet.header,
                        '%s_suvr' % (args.out[0]))
Example #8
    #Write out polynomial matrix
    np.savetxt('%s_polyMat.txt' % (args.out[0]),
               np.hstack((polyX, cbfPred[:, np.newaxis])))

    #Polynomial regression coefficients
    coefOut = open('%s_polyCoef.txt' % (args.out[0]), "w")
    coefOut.write(coefString)
    coefOut.close()

except (IOError):
    print('ERROR: Cannot write in output directory. Exiting...')
    sys.exit()

#Don't do voxelwise estimation if user says not to
if args.wbOnly == 1:
    sys.exit()

#Integrate all the pet data
petInt = np.trapz(petMasked, petTime, axis=1)

#Get matrix for flow predictions
petMat = np.stack((petInt, np.power(petInt, 2)), axis=1)

#Get flow predictions at each voxel
petFlow = petMat.dot(polyCoef)

#Write out flow image
nagini.writeMaskedImage(petFlow, brain.shape, brainData, pet.affine,
                        pet.header, '%s_flow' % (args.out[0]))
Example #9
#Don't do voxelwise estimation if user says not to
if args.wbOnly == 1:
    sys.exit()

#Integrate pet tacs
voxInt = np.trapz(petMasked, petTime, axis=1)

#Create arrays for results
voxOef = np.zeros_like(cbfMasked)
voxOxy = np.zeros_like(cbfMasked)
voxResid = np.zeros_like(cbfMasked)

#Loop through voxels
for voxIdx in tqdm(range(cbfMasked.shape[0])):

    #Make regressors for voxel
    voxX = ((1 - (0.835 * cbfMasked[voxIdx] / lmbda)) * cbfMasked[voxIdx] *
            aifOxyInt) - (0.835 * cbvMasked[voxIdx] * aifOxyInterp)
    voxY = petMasked[voxIdx, :] + \
           (cbfMasked[voxIdx] / lmbda * voxInt[voxIdx]) - \
           (cbvMasked[voxIdx] * aifOxyInterp) - \
           (cbvMasked[voxIdx] * cbvMasked[voxIdx] / lmbda * aifOxyInt) - \
           (cbfMasked[voxIdx] * aifWaterInt)

    #Run voxelwise regression
    voxOef[voxIdx], voxResid[voxIdx] = opt.nnls(voxX[:, np.newaxis], voxY)

    #Get CMRO2
    voxOxy[voxIdx] = voxOef[voxIdx] * cbfMasked[voxIdx] * oxyScale

#Write out images
nagini.writeMaskedImage(voxOef, brain.shape, brainData, pet.affine, pet.header,
                        '%s_oefnn' % (args.out[0]))
nagini.writeMaskedImage(voxOxy, brain.shape, brainData, pet.affine, pet.header,
                        '%s_cmrOxynn' % (args.out[0]))
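
Each voxel above is solved with non-negative least squares, which keeps the estimated OEF from going negative. A minimal stand-alone call to scipy.optimize.nnls with a single-column design matrix, mirroring the voxX[:, np.newaxis] shape used above (all data are fabricated):

import numpy as np
import scipy.optimize as opt

#Hypothetical single regressor and noisy response for one voxel
x = np.linspace(1.0, 10.0, 20)
y = 0.4 * x + np.random.normal(0, 0.05, 20)

#nnls requires a 2-D design matrix, hence the np.newaxis above
coef, resid = opt.nnls(x[:, np.newaxis], y)
print(coef[0], resid)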
Example #10
File: cbvAif.py Project: tblazey/nagini
#Create aif figure
try:
    plt.clf()
    fig = plt.figure(1)
    plt.scatter(aifTime, aifC, s=40, c="black")
    plt.plot(aifTime,
             np.dot(aifBasis, aifCoefs),
             linewidth=5,
             label='Spline Fit')
    plt.xlabel('Time (seconds)')
    plt.ylabel('Counts')
    plt.title('Arterial Sampled Input function')
    plt.legend(loc='upper right')
    plt.suptitle(args.out[0])
    plt.savefig('%s_aifPlot.jpeg' % (args.out[0]), bbox_inches='tight')
except (RuntimeError, IOError):
    print('ERROR: Could not save figure. Moving on...')

#Don't do voxelwise estimation if user says not to
if args.wbOnly == 1:
    nagini.writeArgs(args, args.out[0])
    sys.exit()

#Calculate voxelwise CBV
cbvData = petMasked * cbvScale

#Write out CBV image
nagini.writeMaskedImage(cbvData, brainData.shape, brainData, pet.affine,
                        pet.header, '%s_cbv' % (args.out[0]))
nagini.writeArgs(args, args.out[0])
Example #11
    #Write out polynomial matrix
    np.savetxt('%s_polyMat.txt' % (args.out[0]),
               np.hstack((polyX, cbfPred[:, np.newaxis])))

    #Polynomial regression coefficients
    coefOut = open('%s_polyCoef.txt' % (args.out[0]), "w")
    coefOut.write(coefString)
    coefOut.close()

except (IOError):
    print('ERROR: Cannot write in output directory. Exiting...')
    sys.exit()

#Don't do voxelwise estimation if user says not to
if args.wbOnly == 1:
    sys.exit()

#Integrate all the pet data
petInt = np.trapz(petMasked, petTime, axis=1)

#Get matrix for flow predictions
petMat = np.stack((petInt, np.power(petInt, 2)), axis=1)

#Get flow predictions at each voxel
petFlow = petMat.dot(polyCoef)

#Write out flow image
nagini.writeMaskedImage(petFlow, brain.shape, brainData, pet.affine,
                        pet.header, '%s_flow' % (args.out[0]))
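
The voxelwise flow prediction above is a direct application of previously estimated polynomial coefficients: build a design matrix from the integrated PET signal and its square, then take the dot product with the coefficients. The sketch below shows both the calibration fit and the voxelwise prediction with fabricated numbers; in it, polyCoef is estimated with numpy.linalg.lstsq rather than taken from the script.

import numpy as np

#Hypothetical whole-brain calibration data: integrated counts vs. known flow
wbInt = np.array([100.0, 150.0, 200.0, 250.0])
wbFlow = np.array([0.30, 0.45, 0.58, 0.70])

#Second-order polynomial design matrix without an intercept, as in the script
wbMat = np.stack((wbInt, np.power(wbInt, 2)), axis=1)
polyCoef, _, _, _ = np.linalg.lstsq(wbMat, wbFlow, rcond=None)

#Apply the coefficients to voxelwise integrated PET data
petInt = np.array([120.0, 180.0, 230.0])
petMat = np.stack((petInt, np.power(petInt, 2)), axis=1)
petFlow = petMat.dot(polyCoef)
print(petFlow)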
Example #12
File: cbvIdaif.py Project: tblazey/nagini
#Load image headers
pet = nagini.loadHeader(args.pet[0])
brain = nagini.loadHeader(args.brain[0])

#Check to make sure image dimensions match
if pet.shape[0:3] != brain.shape[0:3]:
    print('ERROR: Image dimensions do not match. Please check data...')
    sys.exit()

#Get the image data
petData = pet.get_data()
brainData = brain.get_data()

#Load in the idaif.
idaif = nagini.loadIdaif(args.idaif[0])

#Flatten the PET images and then mask
petMasked = petData.flatten()[brainData.flatten() > 0]

############
###Output###
############

#Calculate CBV in mL/hg
cbvData = (petMasked * 100.0) / (args.r * args.d * idaif)

#Write out CBV image
nagini.writeMaskedImage(cbvData, brain.shape, brainData, pet.affine,
                        pet.header, '%s_cbv' % (args.out[0]))
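
The flatten-and-mask step is the core pattern of all these scripts: voxels inside the brain mask are pulled out as a 1-D vector, processed, and then scattered back into a full-size volume for writing, which, judging from its call signature, is the round trip nagini.writeMaskedImage handles. A plain nibabel/NumPy sketch of that round trip, with hypothetical file names:

import numpy as np
import nibabel as nib

#Hypothetical inputs
pet = nib.load('pet_sum.nii.gz')
brain = nib.load('brain_mask.nii.gz')
petData = np.asarray(pet.dataobj)
brainData = np.asarray(brain.dataobj)

#Keep only voxels inside the mask as a flat vector
maskFlat = brainData.flatten() > 0
petMasked = petData.flatten()[maskFlat]

#Voxelwise computation on the masked vector goes here (illustrative scaling)
cbvData = petMasked * 2.0

#Scatter the masked vector back into a full-size volume and save
outFlat = np.zeros(brainData.size)
outFlat[maskFlat] = cbvData
outImg = nib.Nifti1Image(outFlat.reshape(brainData.shape), pet.affine,
                         header=pet.header)
outImg.to_filename('out_cbv.nii.gz')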
Example #13
    sys.exit()

#Integrate pet tacs
voxInt = np.trapz(petMasked, petTime, axis=1)

#Create arrays for results
voxOef = np.zeros_like(cbfMasked)
voxOxy = np.zeros_like(cbfMasked)
voxResid = np.zeros_like(cbfMasked)

#Loop through voxels
for voxIdx in tqdm(range(cbfMasked.shape[0])):

    #Make regressors for voxel
    voxX = ((1 - (0.835 * cbfMasked[voxIdx] / lmbda)) * cbfMasked[voxIdx] *
            aifOxyInt) - (0.835 * cbvMasked[voxIdx] * aifOxyInterp)
    voxY = petMasked[voxIdx, :] + \
           (cbfMasked[voxIdx] / lmbda * voxInt[voxIdx]) - \
           (cbvMasked[voxIdx] * aifOxyInterp) - \
           (cbvMasked[voxIdx] * cbvMasked[voxIdx] / lmbda * aifOxyInt) - \
           (cbfMasked[voxIdx] * aifWaterInt)

    #Run voxelwise regression
    voxOef[voxIdx], voxResid[voxIdx] = opt.nnls(voxX[:, np.newaxis], voxY)

    #Get CMRO2
    voxOxy[voxIdx] = voxOef[voxIdx] * cbfMasked[voxIdx] * oxyScale

#Write out images
nagini.writeMaskedImage(voxOef, brain.shape, brainData, pet.affine, pet.header,
                        '%s_oefnn' % (args.out[0]))
nagini.writeMaskedImage(voxOxy, brain.shape, brainData, pet.affine, pet.header,
                        '%s_cmrOxynn' % (args.out[0]))