def __init__(self, inputArray, targetFH, nlinOutputDir, avgPrefix, nlin_protocol, resolution=None):
    """Initialize a non-linear (NLIN) registration module.

    inputArray    -- array of fileHandlers with lastBasevol in lsq12 space
    targetFH      -- file handler for the lsq12 average
    nlinOutputDir -- output directory (should be _nlin)
    avgPrefix     -- prefix to pre-pend to averages at each generation
    nlin_protocol -- optional registration protocol; takes precedence over resolution
    resolution    -- blurring resolution for the registrations

    Exits if neither nlin_protocol nor resolution is provided.
    """
    self.p = Pipeline()
    self.inputs = inputArray
    self.target = targetFH
    self.nlinDir = nlinOutputDir
    self.avgPrefix = avgPrefix
    # Filled with the average created at each generation.
    self.nlinAverages = []
    # Blurring resolution comes from the file resolution (may be None).
    self.fileRes = resolution
    if nlin_protocol is None and resolution is None:
        # print() form works on both Python 2 and 3 (was a py2 print statement)
        print("\nError: NLIN module was initialized without a protocol, and without a resolution for the registrations to be run at. Please specify one of the two. Exiting\n")
        sys.exit()
    if nlin_protocol and resolution:
        # we should have the nlin_protocol be able to overwrite the given resolution:
        self.fileRes = None
    # Create new nlin group for each input prior to registration
    for i in range(len(self.inputs)):
        self.inputs[i].newGroup(groupName="nlin")
def __init__(self, inputFH, targetFH, lsq12_protocol=None, nlin_protocol=None, includeLinear=True, subject_matter=None, defaultDir="tmp"):
    """Set up a combined linear + non-linear registration from inputFH to targetFH."""
    self.p = Pipeline()
    self.inputFH = inputFH
    self.targetFH = targetFH
    self.lsq12_protocol = lsq12_protocol
    self.nlin_protocol = nlin_protocol
    self.includeLinear = includeLinear
    self.subject_matter = subject_matter
    self.defaultDir = defaultDir
    # When either stage lacks a protocol (and no subject matter is given for
    # lsq12), the registration resolution is based on the target volume.
    missingLsq12Info = self.lsq12_protocol is None and self.subject_matter is None
    if missingLsq12Info or self.nlin_protocol is None:
        self.fileRes = rf.returnFinestResolution(self.targetFH)
    else:
        self.fileRes = None
    self.buildPipeline()
def __init__(self, inputFH, targetFH,
             blurs=None, step=None, gradient=None, simplex=None,
             w_translations=None, defaultDir="tmp"):
    """LSQ12 registration from inputFH to targetFH.

    blurs, step, gradient, simplex and w_translations are per-generation
    parameter arrays; blurs, step and simplex must have the same length.
    Defaults are applied when None is passed (avoids shared mutable
    default-argument objects).
    """
    blurs = [0.3, 0.2, 0.15] if blurs is None else blurs
    step = [1, 0.5, 0.333333333333333] if step is None else step
    gradient = [False, True, False] if gradient is None else gradient
    simplex = [3, 1.5, 1] if simplex is None else simplex
    w_translations = [0.4, 0.4, 0.4] if w_translations is None else w_translations
    # TO DO: Might want to take this out and pass in # of generations, since
    # checking happens there.
    if not (len(blurs) == len(step) == len(simplex)):
        logger.error("The same number of entries are required for blurs, step, and simplex in LSQ12")
        sys.exit()
    self.p = Pipeline()
    self.inputFH = inputFH
    self.targetFH = targetFH
    self.blurs = blurs  # (original assigned this twice; once is enough)
    self.step = step
    self.gradient = gradient
    self.simplex = simplex
    self.w_translations = w_translations
    self.defaultDir = defaultDir
    self.blurFiles()
    self.buildPipeline()
def __init__(self, inputPipeFH, templatePipeFH, name="initial", createMask=False):
    # Resample labels (or masks) and the template file into input space.
    # inputPipeFH    -- file handler receiving the resampled outputs
    # templatePipeFH -- file handler providing the labels/mask to resample
    # name           -- "initial" routes resampled labels into the inputLabels array
    # createMask     -- when True, resample masks instead of labels, into "tmp"
    self.p = Pipeline()
    self.name = name
    if createMask:
        resampleDefault = "tmp"
        labelsDefault = "tmp"
    else:
        resampleDefault = "resampled"
        labelsDefault = "labels"
    # Resample all inputLabels
    inputLabelArray = templatePipeFH.returnLabels(True)
    if len(inputLabelArray) > 0:
        """ for the initial registration, resulting labels should be added
            to inputLabels array for subsequent pairwise registration
            otherwise labels should be added to labels array for voting """
        if self.name == "initial":
            addOutputToInputLabels = True
        else:
            addOutputToInputLabels = False
        for i in range(len(inputLabelArray)):
            """Note: templatePipeFH and inputPipeFH have the reverse order
               from how they are passed into this function. This is intentional
               because the mincresample classes use the first argument as the
               one from which to get the file to be resampled. Here, either
               the mask or labels to be resampled come from the template."""
            if createMask:
                resampleStage = ma.mincresampleMask(templatePipeFH,
                                                    inputPipeFH,
                                                    defaultDir=labelsDefault,
                                                    likeFile=inputPipeFH,
                                                    argArray=["-invert"],
                                                    outputLocation=inputPipeFH,
                                                    labelIndex=i,
                                                    setInputLabels=addOutputToInputLabels)
            else:
                resampleStage = ma.mincresampleLabels(templatePipeFH,
                                                      inputPipeFH,
                                                      defaultDir=labelsDefault,
                                                      likeFile=inputPipeFH,
                                                      argArray=["-invert"],
                                                      outputLocation=inputPipeFH,
                                                      labelIndex=i,
                                                      setInputLabels=addOutputToInputLabels)
            self.p.addStage(resampleStage)
    # resample files
    resampleStage = ma.mincresample(templatePipeFH,
                                    inputPipeFH,
                                    defaultDir=resampleDefault,
                                    likeFile=inputPipeFH,
                                    argArray=["-invert"],
                                    outputLocation=inputPipeFH)
    self.p.addStage(resampleStage)
def __init__(self,
             targetOutputDir,    # output directory for initial-target files (often _lsq12)
             inputFiles,
             nlinDir,
             avgPrefix,          # prefix for nlin-1.mnc, ... nlin-k.mnc
             createAvg=True,     # True=call mincAvg, False=targetAvg already exists
             targetAvg=None,     # optional path to initial target (existence not guaranteed)
             targetMask=None,    # optional path to mask for initial target
             nlin_protocol=None,
             reg_method=None):
    """Drive the full non-linear registration: build/locate the initial
    target, initialize the NLIN module, iterate it, and collect results."""
    self.p = Pipeline()
    self.targetOutputDir = targetOutputDir
    self.inputFiles = inputFiles
    self.nlinDir = nlinDir
    self.avgPrefix = avgPrefix
    self.createAvg = createAvg
    self.targetAvg = targetAvg
    self.targetMask = targetMask
    self.nlin_protocol = nlin_protocol
    self.reg_method = reg_method
    # Set up the initial target (if needed) and the non-linear module.
    self.setupTarget()
    self.initNlinModule()
    # Iterate the non-linear registration and gather averages/params.
    self.nlinModule.iterate()
    self.p.addPipeline(self.nlinModule.p)
    self.nlinAverages = self.nlinModule.nlinAverages
    self.nlinParams = self.nlinModule.nlinParams
def maskFiles(FH, isAtlas, numAtlases=1):
    """
    Assume that if there is more than one atlas, multiple masks were
    generated and we need to perform a voxel_vote. Otherwise, assume we are
    using inputLabels from crossing with only one atlas.
    """
    #MF TODO: Make this more general to handle pairwise option.
    p = Pipeline()
    if isAtlas:
        mincMathInput = FH.getMask()
    else:
        if numAtlases > 1:
            voxel = voxelVote(FH, False, True)
            p.addStage(voxel)
            mincMathInput = voxel.outputFiles[0]
        else:
            mincMathInput = FH.returnLabels(True)[0]
        FH.setMask(mincMathInput)
    mincMathOutput = fh.createBaseName(FH.resampledDir, FH.basename) + "_masked.mnc"
    logFile = fh.logFromFile(FH.logDir, mincMathOutput)
    # Multiply the original volume by the mask to produce the _masked file.
    cmd = ["mincmath", "-clobber", "-mult",
           InputFile(mincMathInput),
           InputFile(FH.getLastBasevol()),
           OutputFile(mincMathOutput)]
    mincMath = CmdStage(cmd)
    mincMath.setLogFile(LogFile(logFile))
    p.addStage(mincMath)
    FH.setLastBasevol(mincMathOutput)
    return p
class SetResolution:
    def __init__(self, filesToResample, resolution):
        """During initialization make sure all files are resampled
        at resolution we'd like to use for each pipeline stage
        """
        self.p = Pipeline()
        for FH in filesToResample:
            dirForOutput = self.getOutputDirectory(FH)
            currentRes = volumeFromFile(FH.getLastBasevol()).separations
            alreadyAtResolution = abs(abs(currentRes[0]) - abs(resolution)) < 0.01
            if not alreadyAtResolution:
                self.p.addStage(ma.autocrop(resolution, FH, defaultDir=dirForOutput))
                if FH.getMask():
                    # Need to resample the mask as well.
                    self.p.addStage(ma.mincresampleMask(FH, FH,
                                                        outputLocation=FH,
                                                        likeFile=FH))

    def getOutputDirectory(self, FH):
        """Sets output directory based on whether or not we have a full
        RegistrationPipeFH class or we are just using RegistrationFHBase"""
        if isinstance(FH, rfh.RegistrationPipeFH):
            return "resampled"
        return FH.basedir
class SetResolution:
    def __init__(self, filesToResample, resolution):
        """During initialization make sure all files are resampled
        at resolution we'd like to use for each pipeline stage
        """
        self.p = Pipeline()
        tolerance = 0.01
        for handler in filesToResample:
            outDir = self.getOutputDirectory(handler)
            separations = volumeFromFile(handler.getLastBasevol()).separations
            if not abs(abs(separations[0]) - abs(resolution)) < tolerance:
                crop = ma.autocrop(resolution, handler, defaultDir=outDir)
                self.p.addStage(crop)
                mask = handler.getMask()
                if mask:
                    # Need to resample the mask as well.
                    cropMask = ma.mincresampleMask(handler, handler,
                                                   outputLocation=handler,
                                                   likeFile=handler)
                    self.p.addStage(cropMask)

    def getOutputDirectory(self, FH):
        """Sets output directory based on whether or not we have a full
        RegistrationPipeFH class or we are just using RegistrationFHBase"""
        return "resampled" if isinstance(FH, rfh.RegistrationPipeFH) else FH.basedir
def resampleToCommon(xfm, FH, statsGroup, statsKernels, nlinFH):
    """Resample a subject's jacobian stats files into the common (nlin) space.

    xfm          -- transform from subject into common space
    FH           -- subject file handler (provides statsDir / logDir)
    statsGroup   -- holds relativeJacobians/absoluteJacobians keyed by blur
    statsKernels -- list of blur kernels, or comma-separated string of floats
    nlinFH       -- file handler of the common-space (nlin) average

    Returns a Pipeline of mincresample stages; exits on a bad statsKernels type.
    """
    if isinstance(statsKernels, list):
        blurs = statsKernels
    elif isinstance(statsKernels, str):
        blurs = [float(k) for k in statsKernels.split(",")]
    else:
        # print() form works on both Python 2 and 3 (was a py2 print statement)
        print("Improper type of blurring kernels specified for stats calculation: " + str(statsKernels))
        sys.exit()
    pipeline = Pipeline()
    outputDirectory = FH.statsDir
    filesToResample = []
    for b in blurs:
        filesToResample.append(statsGroup.relativeJacobians[b])
        if statsGroup.absoluteJacobians:
            filesToResample.append(statsGroup.absoluteJacobians[b])
    for f in filesToResample:
        outputBase = fh.removeBaseAndExtension(f).split(".mnc")[0]
        outputFile = fh.createBaseName(outputDirectory, outputBase + "_common" + ".mnc")
        logFile = fh.logFromFile(FH.logDir, outputFile)
        targetAndLike = nlinFH.getLastBasevol()
        res = ma.mincresample(f, targetAndLike,
                              likeFile=targetAndLike,
                              transform=xfm,
                              output=outputFile,
                              logFile=logFile,
                              argArray=["-sinc"])
        pipeline.addStage(res)
    return pipeline
def resampleToCommon(xfm, FH, statsGroup, statsKernels, nlinFH):
    """Resample a subject's jacobian stats files into the common (nlin) space.

    xfm          -- transform from subject into common space
    FH           -- subject file handler (provides statsDir / logDir)
    statsGroup   -- holds relativeJacobians/absoluteJacobians keyed by blur
    statsKernels -- list of blur kernels, or comma-separated string of floats
    nlinFH       -- file handler of the common-space (nlin) average

    Returns a Pipeline of mincresample stages; exits on a bad statsKernels type.
    """
    if isinstance(statsKernels, list):
        blurs = statsKernels
    elif isinstance(statsKernels, str):
        blurs = [float(k) for k in statsKernels.split(",")]
    else:
        print("Improper type of blurring kernels specified for stats calculation: " + str(statsKernels))
        sys.exit()
    pipeline = Pipeline()
    outputDirectory = FH.statsDir
    filesToResample = []
    for b in blurs:
        filesToResample.append(statsGroup.relativeJacobians[b])
        if statsGroup.absoluteJacobians:
            filesToResample.append(statsGroup.absoluteJacobians[b])
    for f in filesToResample:
        # Use the fh.-qualified helpers for consistency with fh.logFromFile
        # below (the sibling variant of this function does the same).
        outputBase = fh.removeBaseAndExtension(f).split(".mnc")[0]
        outputFile = fh.createBaseName(outputDirectory, outputBase + "_common" + ".mnc")
        logFile = fh.logFromFile(FH.logDir, outputFile)
        targetAndLike = nlinFH.getLastBasevol()
        res = ma.mincresample(f, targetAndLike,
                              likeFile=targetAndLike,
                              transform=xfm,
                              output=outputFile,
                              logFile=logFile,
                              argArray=["-sinc"])
        pipeline.addStage(res)
    return pipeline
class LabelAndFileResampling:
    # Resamples labels (or masks) and the template file into input space.
    def __init__(self, inputPipeFH, templatePipeFH, name="initial", createMask=False):
        # inputPipeFH    -- file handler receiving the resampled outputs
        # templatePipeFH -- file handler providing the labels/mask to resample
        # name           -- "initial" routes resampled labels into the inputLabels array
        # createMask     -- when True, resample masks instead of labels, into "tmp"
        self.p = Pipeline()
        self.name = name
        if createMask:
            resampleDefault = "tmp"
            labelsDefault = "tmp"
        else:
            resampleDefault = "resampled"
            labelsDefault = "labels"
        # Resample all inputLabels
        inputLabelArray = templatePipeFH.returnLabels(True)
        if len(inputLabelArray) > 0:
            """ for the initial registration, resulting labels should be added
                to inputLabels array for subsequent pairwise registration
                otherwise labels should be added to labels array for voting """
            if self.name == "initial":
                addOutputToInputLabels = True
            else:
                addOutputToInputLabels = False
            for i in range(len(inputLabelArray)):
                """Note: templatePipeFH and inputPipeFH have the reverse order
                   from how they are passed into this function. This is
                   intentional because the mincresample classes use the first
                   argument as the one from which to get the file to be
                   resampled. Here, either the mask or labels to be resampled
                   come from the template."""
                if createMask:
                    resampleStage = ma.mincresampleMask(templatePipeFH,
                                                        inputPipeFH,
                                                        defaultDir=labelsDefault,
                                                        likeFile=inputPipeFH,
                                                        argArray=["-invert"],
                                                        outputLocation=inputPipeFH,
                                                        labelIndex=i,
                                                        setInputLabels=addOutputToInputLabels)
                else:
                    resampleStage = ma.mincresampleLabels(templatePipeFH,
                                                          inputPipeFH,
                                                          defaultDir=labelsDefault,
                                                          likeFile=inputPipeFH,
                                                          argArray=["-invert"],
                                                          outputLocation=inputPipeFH,
                                                          labelIndex=i,
                                                          setInputLabels=addOutputToInputLabels)
                self.p.addStage(resampleStage)
        # resample files
        resampleStage = ma.mincresample(templatePipeFH,
                                        inputPipeFH,
                                        defaultDir=resampleDefault,
                                        likeFile=inputPipeFH,
                                        argArray=["-invert"],
                                        outputLocation=inputPipeFH)
        self.p.addStage(resampleStage)
def __init__(self, inputs, dirs, options, avgPrefix=None, initModel=None):
    """Store the run configuration and immediately build the pipeline."""
    self.inputs = inputs
    self.dirs = dirs
    self.options = options
    self.avgPrefix = avgPrefix
    self.initModel = initModel
    self.nlinFH = None
    self.p = Pipeline()
    self.buildPipeline()
def __init__(self, subjects, timePoint, nlinFH, statsKernels, commonName):
    """Stats for all subjects at a given time point, in common (nlin) space."""
    self.subjects = subjects
    self.timePoint = timePoint
    self.nlinFH = nlinFH
    self.blurs = []
    # Populate self.blurs from the kernels specification.
    self.setupBlurs(statsKernels)
    self.commonName = commonName
    self.p = Pipeline()
    self.buildPipeline()
def __init__(self, inputArray, outputDir, likeFile=None, maxPairs=None,
             lsq12_protocol=None, subject_matter=None, resolution=None):
    """Full pairwise LSQ12 registration module.

    inputArray     -- array of fileHandlers with lastBasevol in lsq12 space
    outputDir      -- output directory
    likeFile       -- likeFile for resampling
    maxPairs       -- maximum number of pairs to calculate
    lsq12_protocol -- optional registration protocol csv
    subject_matter -- subject matter; takes precedence over resolution
    resolution     -- registration resolution

    Exits if neither subject_matter nor resolution is provided.
    """
    self.p = Pipeline()
    self.inputs = inputArray
    self.lsq12Dir = outputDir
    self.likeFile = likeFile
    self.maxPairs = maxPairs
    # Final lsq12 average and its associated file handler.
    self.lsq12Avg = None
    self.lsq12AvgFH = None
    # One final average-lsq12 transform per input: key is the input file
    # handler, value is the path to its final average lsq12 transform.
    # These xfms may be used subsequently for statistics calculations.
    self.lsq12AvgXfms = {}
    self.fileRes = None
    # Determine the blurring resolution.
    if subject_matter is None and resolution is None:
        # print() form works on both Python 2 and 3 (was a py2 print statement)
        print("\nError: the FullLSQ12 module was called without specifying the resolution that it should be run at, and without specifying a subject matter. Please indicate one of the two. Exiting...\n")
        sys.exit()
    elif subject_matter and resolution:
        # subject matter has precedence over resolution
        self.fileRes = None
    elif resolution:
        self.fileRes = resolution
    # Set up parameter array.
    self.lsq12Params = mp.setLSQ12MinctraccParams(self.fileRes,
                                                  subject_matter=subject_matter,
                                                  reg_protocol=lsq12_protocol)
    self.blurs = self.lsq12Params.blurs
    self.stepSize = self.lsq12Params.stepSize
    self.useGradient = self.lsq12Params.useGradient
    self.simplex = self.lsq12Params.simplex
    self.w_translations = self.lsq12Params.w_translations
    self.generations = self.lsq12Params.generations
    # Create new lsq12 group for each input prior to registration.
    # (Renamed loop variable: "input" shadowed the builtin.)
    for inputFH in self.inputs:
        inputFH.newGroup(groupName="lsq12")
def MAGeTRegister(inputFH, templateFH, regMethod,
                  name="initial", createMask=False,
                  lsq12_protocol=None, nlin_protocol=None):
    """Register inputFH to templateFH with the chosen method, then resample
    labels/files back. Returns the assembled Pipeline."""
    p = Pipeline()
    defaultDir = "tmp" if createMask else "transforms"
    if regMethod == "minctracc":
        stage = HierarchicalMinctracc(inputFH, templateFH,
                                      lsq12_protocol=lsq12_protocol,
                                      nlin_protocol=nlin_protocol,
                                      defaultDir=defaultDir)
        p.addPipeline(stage.p)
    elif regMethod == "mincANTS":
        stage = LSQ12ANTSNlin(inputFH, templateFH,
                              lsq12_protocol=lsq12_protocol,
                              nlin_protocol=nlin_protocol,
                              defaultDir=defaultDir)
        p.addPipeline(stage.p)
    resampling = LabelAndFileResampling(inputFH, templateFH,
                                        name=name, createMask=createMask)
    p.addPipeline(resampling.p)
    return p
def getXfms(nlinFH, subjects, space, mbmDir, time=None):
    """For each file in the build-model registration (associated with the
    specified time point), do the following:

    1. Find the to-native.xfm for that file.
    2. Find the matching subject at the specified time point
    3. Set this xfm to be the last xfm from nlin average to subject from step #2.
    4. Find the -from-native.xfm file.
    5. Set this xfm to be the last xfm from subject to nlin.

    Note: assume that the names in processedDir match beginning file names for
    each subject. We are also assuming subjects is either a dictionary or a list.
    """
    # First handle subjects if dictionary or list.
    if isinstance(subjects, list):
        inputs = subjects
    elif isinstance(subjects, dict):
        inputs = []
        for s in subjects:
            inputs.append(subjects[s][time])
    else:
        logger.error("getXfms only takes a dictionary or list of subjects. Incorrect type has been passed. Exiting...")
        sys.exit()
    pipeline = Pipeline()
    # next(...) works on both Python 2 and 3; generator.next() is py2-only.
    baseNames = next(walk(mbmDir))[1]
    for b in baseNames:
        if space == "lsq6":
            xfmToNative = abspath(mbmDir + "/" + b + "/transforms/" + b + "-final-to_lsq6.xfm")
        elif space == "lsq12":
            # NOTE(review): for lsq12 the "to native" transform points at the
            # -final-nlin.xfm file — confirm this naming is intentional.
            xfmToNative = abspath(mbmDir + "/" + b + "/transforms/" + b + "-final-nlin.xfm")
            xfmFromNative = abspath(mbmDir + "/" + b + "/transforms/" + b + "_inv_nonlinear.xfm")
        elif space == "native":
            xfmToNative = abspath(mbmDir + "/" + b + "/transforms/" + b + "-to-native.xfm")
            xfmFromNative = abspath(mbmDir + "/" + b + "/transforms/" + b + "-from-native.xfm")
        else:
            logger.error("getXfms can only retrieve transforms to and from native, lsq6 or lsq12 space. Invalid parameter has been passed.")
            sys.exit()
        for inputFH in inputs:
            if fnmatch.fnmatch(inputFH.getLastBasevol(), "*" + b + "*"):
                if space == "lsq6":
                    # No stored inverse for lsq6; invert the forward transform.
                    ix = ma.xfmInvert(xfmToNative, inputFH)
                    pipeline.addStage(ix)
                    xfmFromNative = ix.outputFiles[0]
                nlinFH.setLastXfm(inputFH, xfmToNative)
                inputFH.setLastXfm(nlinFH, xfmFromNative)
    return pipeline
def __init__(self, inputFH, targetFH,
             blurs=None, step=None, gradient=None, simplex=None,
             defaultDir="tmp"):
    """LSQ12 registration from inputFH to targetFH.

    blurs, step, gradient and simplex are per-generation parameter arrays;
    blurs, step and simplex must have the same length. Defaults are applied
    when None is passed (avoids shared mutable default-argument objects).
    """
    blurs = [0.3, 0.2, 0.15] if blurs is None else blurs
    step = [1, 0.5, 0.333333333333333] if step is None else step
    gradient = [False, True, False] if gradient is None else gradient
    simplex = [3, 1.5, 1] if simplex is None else simplex
    # TO DO: Might want to take this out and pass in # of generations, since
    # checking happens there.
    if not (len(blurs) == len(step) == len(simplex)):
        logger.error("The same number of entries are required for blurs, step, and simplex in LSQ12")
        sys.exit()
    self.p = Pipeline()
    self.inputFH = inputFH
    self.targetFH = targetFH
    self.blurs = blurs  # (original assigned this twice; once is enough)
    self.step = step
    self.gradient = gradient
    self.simplex = simplex
    self.defaultDir = defaultDir
    self.blurFiles()
    self.buildPipeline()
def __init__(self, inputFH, targetFH, lsq12_protocol=None, nlin_protocol=None,
             includeLinear=True, subject_matter=None, defaultDir="tmp"):
    """Combined linear + non-linear registration from inputFH to targetFH."""
    self.p = Pipeline()
    self.inputFH = inputFH
    self.targetFH = targetFH
    self.lsq12_protocol = lsq12_protocol
    self.nlin_protocol = nlin_protocol
    self.includeLinear = includeLinear
    self.subject_matter = subject_matter
    self.defaultDir = defaultDir
    # If any protocol information is missing, derive the registration
    # resolution from the target; otherwise the protocols define it.
    protocolsCoverEverything = (self.lsq12_protocol is not None
                                or self.subject_matter is not None) \
                               and self.nlin_protocol is not None
    if protocolsCoverEverything:
        self.fileRes = None
    else:
        self.fileRes = rf.returnFinestResolution(self.targetFH)
    self.buildPipeline()
def __init__(self, inputArray, targetFH, nlinOutputDir, avgPrefix, nlin_protocol, resolution=None):
    """Initialize a non-linear (NLIN) registration module.

    Exits if neither nlin_protocol nor resolution is given; when both are
    given, the protocol takes precedence.
    """
    self.p = Pipeline()
    # Inputs: fileHandlers with lastBasevol in lsq12 space.
    self.inputs = inputArray
    # Target: file handler for the lsq12 average.
    self.target = targetFH
    # Output directory (should be _nlin).
    self.nlinDir = nlinOutputDir
    # Prefix to pre-pend to averages at each generation.
    self.avgPrefix = avgPrefix
    # Averages are appended here as each generation completes.
    self.nlinAverages = []
    # Blurring resolution derived from the file resolution.
    self.fileRes = resolution
    # hack:
    self.generations = 0
    if nlin_protocol is None and resolution is None:
        print("\nError: NLIN module was initialized without a protocol, and without a resolution for the registrations to be run at. Please specify one of the two. Exiting\n")
        sys.exit()
    if nlin_protocol and resolution:
        # the nlin_protocol should be able to overwrite the given resolution
        self.fileRes = None
    # Create new nlin group for each input prior to registration.
    for inputFH in self.inputs:
        inputFH.newGroup(groupName="nlin")
def __init__(self, targetOutputDir, #Output directory for files related to initial target (often _lsq12) inputFiles, nlinDir, avgPrefix, #Prefix for nlin-1.mnc, ... nlin-k.mnc createAvg=True, #True=call mincAvg, False=targetAvg already exists targetAvg=None, #Optional path to initial target - passing name does not guarantee existence targetMask=None, #Optional path to mask for initial target nlin_protocol=None, reg_method=None): self.p = Pipeline() self.targetOutputDir = targetOutputDir self.inputFiles = inputFiles self.nlinDir = nlinDir self.avgPrefix = avgPrefix self.createAvg = createAvg self.targetAvg = targetAvg self.targetMask = targetMask self.nlin_protocol = nlin_protocol self.reg_method = reg_method # setup initialTarget (if needed) and initialize non-linear module self.setupTarget() self.initNlinModule() #iterate through non-linear registration and setup averages self.nlinModule.iterate() self.p.addPipeline(self.nlinModule.p) self.nlinAverages = self.nlinModule.nlinAverages self.nlinParams = self.nlinModule.nlinParams
def MAGeTMask(atlases, inputs, numAtlases, regMethod, lsq12_protocol=None, nlin_protocol=None):
    """ Masking algorithm is as follows:
        1. Run HierarchicalMinctracc or mincANTS with mask=True,
           using masks instead of labels.
        2. Do voxel voting to find the best mask. (Or, if single atlas,
           use that transform)
        3. mincMath to multiply original input by mask to get _masked.mnc file
           (This is done for both atlases and inputs, though for atlases, voxel
           voting is not required.)
        4. Replace lastBasevol with masked version, since once we have created
           mask, we no longer care about unmasked version.
        5. Clear out labels arrays, which were used to keep track of masks,
           as we want to re-set them for actual labels.

        Note: All data will be placed in a newly created masking directory
        to keep it separate from data generated during actual MAGeT.
    """
    p = Pipeline()
    # Switch everything into the masking directory structure first.
    for atlasFH in atlases:
        maskDirectoryStructure(atlasFH, masking=True)
    for inputFH in inputs:
        maskDirectoryStructure(inputFH, masking=True)
        # Register every atlas to this input, producing candidate masks.
        # NOTE(review): nesting reconstructed from collapsed source — the
        # atlas loop is taken to be inside the input loop; confirm.
        for atlasFH in atlases:
            sp = MAGeTRegister(inputFH, atlasFH, regMethod,
                               name="initial", createMask=True,
                               lsq12_protocol=lsq12_protocol,
                               nlin_protocol=nlin_protocol)
            p.addPipeline(sp)
    """ Prior to final masking, set log and tmp directories as they were."""
    for atlasFH in atlases:
        """Retrieve labels for use in new group. Assume only one"""
        labels = atlasFH.returnLabels(True)
        maskDirectoryStructure(atlasFH, masking=False)
        mp = maskFiles(atlasFH, True)
        p.addPipeline(mp)
        atlasFH.newGroup()
        atlasFH.addLabels(labels[0], inputLabel=True)
    for inputFH in inputs:
        maskDirectoryStructure(inputFH, masking=False)
        mp = maskFiles(inputFH, False, numAtlases)
        p.addPipeline(mp)
        # this will remove the "inputLabels"; labels that
        # come directly from the atlas library
        inputFH.clearLabels(True)
        # this will remove the "labels"; second generation
        # labels. I.e. labels from labels from the atlas library
        inputFH.clearLabels(False)
        inputFH.newGroup()
    return (p)
def __init__(self, inputFH, targetFH, statsKernels, additionalXfm=None):
    """Compute deformation statistics between inputFH and targetFH.

    additionalXfm is an optional transform that may be specified. If it is,
    it is concatenated with the lastXfm from input to target. This additional
    transform must also be in the same direction as the lastXfm (e.g. input
    to target). Example usage: if the lastXfm from input to target goes from
    lsq12 to nlin space and you would like to calculate the absolute
    jacobians to lsq6 space, the additional transform specified may be the
    lsq6 to lsq12 transform from input to target.
    """
    self.p = Pipeline()
    self.inputFH = inputFH
    self.targetFH = targetFH
    self.blurs = []
    self.setupBlurs(statsKernels)
    self.statsGroup = StatsGroup()
    self.setupXfms()
    self.additionalXfm = additionalXfm
    self.fullStatsCalc()
def __init__(self, inFile, targetFile, nameForStage=None, **kwargs):
    """Resample a MINC file and, when inFile is a file handler that carries
    a mask, resample the mask alongside it with interpolation flags removed.

    inFile       -- source file (path or file handler)
    targetFile   -- resampling target
    nameForStage -- optional stage name ("--mask--" appended for the mask stage)
    kwargs       -- forwarded to mincresample / mincresampleMask
    """
    self.p = Pipeline()
    self.outputFiles = []      # outputs of the mincresample of the main MINC file
    self.outputFilesMask = []  # outputs of the mincresample of the associated mask
    # the first step is to simply run the mincresample command:
    fileRS = mincresample(inFile, targetFile, **kwargs)
    if nameForStage:
        fileRS.name = nameForStage
    self.p.addStage(fileRS)
    self.outputFiles = fileRS.outputFiles
    # initialize the array of outputs for the mask in case there is none to be resampled
    self.outputFilesMask = [None] * len(self.outputFiles)
    # next up, is this a file handler, and if so is there a mask that needs to be resampled?
    if isFileHandler(inFile) and inFile.getMask():
        # there is a mask associated with this file; it should be updated too.
        # Interpolation arguments such as "-sinc"/"-trilinear"/"-tricubic"
        # must not be applied when resampling a mask, so strip them from
        # the copied argArray.
        maskArgs = copy.deepcopy(kwargs)
        if "argArray" in maskArgs:  # dict.has_key() is Python-2-only
            # Rebuild the list instead of deleting while iterating a fixed
            # range, which skipped elements and could raise IndexError.
            maskArgs["argArray"] = [a for a in maskArgs["argArray"]
                                    if not (re.match("-sinc", a)
                                            or re.match("-trilinear", a)
                                            or re.match("-tricubic", a))]
        # if the output file for the mincresample command was already
        # specified, add "_mask.mnc" to it
        if "output" in maskArgs:
            maskArgs["output"] = re.sub(".mnc", "_mask.mnc", maskArgs["output"])
        maskRS = mincresampleMask(inFile, targetFile, **maskArgs)
        if nameForStage:
            maskRS.name = nameForStage + "--mask--"
        self.p.addStage(maskRS)
        self.outputFilesMask = maskRS.outputFiles
def __init__(self, inputArray, outputDir, likeFile=None, maxPairs=None,
             lsq12_protocol=None, subject_matter=None):
    """Pairwise LSQ12 registration module.

    The registration resolution is derived from the first input unless a
    protocol or subject matter is supplied.
    """
    self.p = Pipeline()
    # Inputs: fileHandlers with lastBasevol in lsq12 space.
    self.inputs = inputArray
    self.lsq12Dir = outputDir
    # likeFile for resampling.
    self.likeFile = likeFile
    # Maximum number of pairs to calculate.
    self.maxPairs = maxPairs
    # Final lsq12 average and its associated file handler.
    self.lsq12Avg = None
    self.lsq12AvgFH = None
    # One final average-lsq12 transform per input: key is the input file
    # handler, value is the path to its final average lsq12 transform.
    # These xfms may be used subsequently for statistics calculations.
    self.lsq12AvgXfms = {}
    # Derive the blurring resolution from the first input's file resolution
    # unless a protocol/subject matter defines the parameters instead.
    if subject_matter is None and lsq12_protocol is None:
        self.fileRes = rf.returnFinestResolution(self.inputs[0])
    else:
        self.fileRes = None
    # Set up parameter array.
    self.lsq12Params = mp.setLSQ12MinctraccParams(self.fileRes,
                                                  subject_matter=subject_matter,
                                                  reg_protocol=lsq12_protocol)
    self.blurs = self.lsq12Params.blurs
    self.stepSize = self.lsq12Params.stepSize
    self.useGradient = self.lsq12Params.useGradient
    self.simplex = self.lsq12Params.simplex
    self.w_translations = self.lsq12Params.w_translations
    self.generations = self.lsq12Params.generations
    # Create new lsq12 group for each input prior to registration.
    for i in range(len(self.inputs)):
        self.inputs[i].newGroup(groupName="lsq12")
def MAGeTRegister(inputFH, templateFH, regMethod,
                  name="initial", createMask=False,
                  lsq12_protocol=None, nlin_protocol=None):
    """Register inputFH to templateFH with regMethod ("minctracc" or
    "mincANTS"), then resample labels and files. Returns the Pipeline."""
    p = Pipeline()
    if createMask:
        defaultDir = "tmp"
    else:
        defaultDir = "transforms"
    if regMethod == "minctracc":
        hm = HierarchicalMinctracc(inputFH, templateFH,
                                   lsq12_protocol=lsq12_protocol,
                                   nlin_protocol=nlin_protocol,
                                   defaultDir=defaultDir)
        p.addPipeline(hm.p)
    elif regMethod == "mincANTS":
        ants = LSQ12ANTSNlin(inputFH, templateFH,
                             lsq12_protocol=lsq12_protocol,
                             nlin_protocol=nlin_protocol,
                             defaultDir=defaultDir)
        p.addPipeline(ants.p)
    resampler = LabelAndFileResampling(inputFH, templateFH,
                                       name=name, createMask=createMask)
    p.addPipeline(resampler.p)
    return p
def __init__(self, inputPipeFH, templatePipeFH, blurs=[1, 0.5, 0.3]):
    """Store the handler pair and queue blur stages for each kernel."""
    self.p = Pipeline()
    self.inputPipeFH = inputPipeFH
    self.templatePipeFH = templatePipeFH
    self.blurFiles(blurs)
def __init__(self, filesToResample, resolution):
    """During initialization make sure all files are resampled
    at resolution we'd like to use for each pipeline stage
    """
    self.p = Pipeline()
    for handler in filesToResample:
        outDir = self.getOutputDirectory(handler)
        currentRes = volumeFromFile(handler.getLastBasevol()).separations
        # Skip files already within 0.01 of the requested resolution.
        if not abs(abs(currentRes[0]) - abs(resolution)) < 0.01:
            self.p.addStage(ma.autocrop(resolution, handler, defaultDir=outDir))
            if handler.getMask():
                # Need to resample the mask as well.
                self.p.addStage(ma.mincresampleMask(handler, handler,
                                                    outputLocation=handler,
                                                    likeFile=handler))
class LinearHierarchicalMinctracc:
    """Default LinearHierarchicalMinctracc class
       Assumes lsq6 registration using the identity transform"""

    def __init__(self, inputPipeFH, templatePipeFH, blurs=[1, 0.5, 0.3]):
        self.p = Pipeline()
        self.inputPipeFH = inputPipeFH
        self.templatePipeFH = templatePipeFH
        self.blurFiles(blurs)

    def blurFiles(self, blurs):
        """Queue gradient blurs of both volumes; -1 means 'skip this kernel'."""
        for kernel in blurs:
            if kernel == -1:
                continue
            self.p.addStage(ma.blur(self.templatePipeFH, kernel, gradient=True))
            self.p.addStage(ma.blur(self.inputPipeFH, kernel, gradient=True))
def __init__(self, subjects, timePoint, nlinFH, statsKernels, commonName):
    """Compute stats for every subject at timePoint in common (nlin) space."""
    self.subjects = subjects
    self.timePoint = timePoint
    self.nlinFH = nlinFH
    # setupBlurs fills self.blurs from the statsKernels specification.
    self.blurs = []
    self.setupBlurs(statsKernels)
    self.commonName = commonName
    self.p = Pipeline()
    self.buildPipeline()
def maskFiles(FH, isAtlas, numAtlases=1):
    """
    Assume that if there is more than one atlas, multiple masks were
    generated and we need to perform a voxel_vote. Otherwise, assume we are
    using inputLabels from crossing with only one atlas.
    """
    #MF TODO: Make this more general to handle pairwise option.
    p = Pipeline()
    if not isAtlas:
        if numAtlases > 1:
            vote = voxelVote(FH, False, True)
            p.addStage(vote)
            maskVolume = vote.outputFiles[0]
        else:
            maskVolume = FH.returnLabels(True)[0]
        FH.setMask(maskVolume)
    else:
        maskVolume = FH.getMask()
    maskedOutput = fh.createBaseName(FH.resampledDir, FH.basename) + "_masked.mnc"
    logFile = fh.logFromFile(FH.logDir, maskedOutput)
    # mincmath multiplies the original volume by the mask.
    mincMath = CmdStage(["mincmath", "-clobber", "-mult",
                         InputFile(maskVolume),
                         InputFile(FH.getLastBasevol()),
                         OutputFile(maskedOutput)])
    mincMath.setLogFile(LogFile(logFile))
    p.addStage(mincMath)
    FH.setLastBasevol(maskedOutput)
    return (p)
def __init__(self, inFile, targetFile, nameForStage=None, **kwargs):
    """Resample a MINC file and, when inFile is a file handler that carries
    a mask, resample the mask alongside it with interpolation flags removed.

    inFile       -- source file (path or file handler)
    targetFile   -- resampling target
    nameForStage -- optional stage name ("--mask--" appended for the mask stage)
    kwargs       -- forwarded to mincresample / mincresampleMask
    """
    self.p = Pipeline()
    self.outputFiles = []      # outputs of the mincresample of the main MINC file
    self.outputFilesMask = []  # outputs of the mincresample of the associated mask
    # the first step is to simply run the mincresample command:
    fileRS = mincresample(inFile, targetFile, **kwargs)
    if nameForStage:
        fileRS.name = nameForStage
    self.p.addStage(fileRS)
    self.outputFiles = fileRS.outputFiles
    # initialize the array of outputs for the mask in case there is none to be resampled
    self.outputFilesMask = [None] * len(self.outputFiles)
    # next up, is this a file handler, and if so is there a mask that needs to be resampled?
    if isFileHandler(inFile) and inFile.getMask():
        # there is a mask associated with this file; it should be updated too.
        # Interpolation arguments such as "-sinc"/"-trilinear"/"-tricubic"
        # must not be applied when resampling a mask, so strip them from
        # the copied argArray.
        maskArgs = copy.deepcopy(kwargs)
        # Use membership tests: the original indexed maskArgs["argArray"] /
        # maskArgs["output"] directly, raising KeyError when absent.
        if "argArray" in maskArgs:
            # Rebuild the list instead of deleting while iterating a fixed
            # range, which skipped elements and could raise IndexError.
            maskArgs["argArray"] = [a for a in maskArgs["argArray"]
                                    if not (re.match("-sinc", a)
                                            or re.match("-trilinear", a)
                                            or re.match("-tricubic", a))]
        # if the output file for the mincresample command was already
        # specified, add "_mask.mnc" to it
        if "output" in maskArgs:
            maskArgs["output"] = re.sub(".mnc", "_mask.mnc", maskArgs["output"])
        maskRS = mincresampleMask(inFile, targetFile, **maskArgs)
        if nameForStage:
            maskRS.name = nameForStage + "--mask--"
        self.p.addStage(maskRS)
        self.outputFilesMask = maskRS.outputFiles
def __init__(self, inputFH, targetFH, lsq12_protocol=None, nlin_protocol=None,
             subject_matter=None, defaultDir="tmp"):
    """Registration from inputFH to targetFH; resolution is taken from the
    input when any protocol information is missing."""
    self.p = Pipeline()
    self.inputFH = inputFH
    self.targetFH = targetFH
    self.lsq12_protocol = lsq12_protocol
    self.nlin_protocol = nlin_protocol
    self.subject_matter = subject_matter
    self.defaultDir = defaultDir
    lsq12Unspecified = self.lsq12_protocol is None and self.subject_matter is None
    if lsq12Unspecified or self.nlin_protocol is None:
        self.fileRes = rf.returnFinestResolution(self.inputFH)
    else:
        self.fileRes = None
    self.buildPipeline()
def __init__(self, inputArray, targetFH, nlinOutputDir, avgPrefix, nlin_protocol):
    """Set up state for iterative non-linear registration toward a common average.

    inputArray    -- file handlers whose lastBasevol is in lsq12 space
    targetFH      -- file handler for the lsq12 average (initial target)
    nlinOutputDir -- the _nlin output directory
    avgPrefix     -- prefix prepended to the average built at each generation
    nlin_protocol -- optional protocol; when given it supplies its own
                     resolutions, so none is derived from the data here
    """
    self.p = Pipeline()
    self.inputs = inputArray
    self.target = targetFH
    self.nlinDir = nlinOutputDir
    self.avgPrefix = avgPrefix
    # averages are appended here as each generation completes
    self.nlinAverages = []
    # blurring resolution comes from the first input unless a protocol overrides it
    if nlin_protocol is None:
        self.fileRes = rf.returnFinestResolution(self.inputs[0])
    else:
        self.fileRes = None
    # start every input in a fresh "nlin" group before registration
    for inputFH in self.inputs:
        inputFH.newGroup(groupName="nlin")
def __init__(self, inputArray, outputDir, likeFile=None, maxPairs=None, lsq12_protocol=None, subject_matter=None):
    """Pairwise 12-parameter registration and averaging over all inputs.

    inputArray     -- file handlers with lastBasevol in lsq12 space
    outputDir      -- directory that will hold the final lsq12 average
    likeFile       -- optional like-file for resampling
    maxPairs       -- optional cap on the number of pairs registered
    lsq12_protocol -- optional SEMI-COLON delimited csv overriding defaults
    subject_matter -- what sort of subject matter we deal with
    """
    self.p = Pipeline()
    self.inputs = inputArray
    self.lsq12Dir = outputDir
    self.likeFile = likeFile
    self.maxPairs = maxPairs
    # final lsq12 average volume and its file handler, filled in later
    self.lsq12Avg = None
    self.lsq12AvgFH = None
    # one average lsq12 transform per input, keyed by file handler; these
    # xfms may be used subsequently for statistics calculations
    self.lsq12AvgXfms = {}
    self.subject_matter = subject_matter
    # Registration resolution: the first input's volume may not exist yet at
    # pipeline-construction time, so fall back to its original input file.
    try:
        self.fileRes = rf.getFinestResolution(self.inputs[0])
    except:
        self.fileRes = rf.getFinestResolution(self.inputs[0].inputFileName)
    # Defaults first; an optional protocol then overrides them. The number
    # of generations follows from the length of the parameter arrays.
    self.defaultParams()
    if lsq12_protocol:
        self.setParams(lsq12_protocol)
    self.generations = self.getGenerations()
    # fresh "lsq12" group for every input prior to registration
    for inputFH in self.inputs:
        inputFH.newGroup(groupName="lsq12")
def __init__(self, inputArray, targetFH, nlinOutputDir, nlin_protocol=None):
    """Base setup for iterative non-linear registration.

    inputArray    -- file handlers with lastBasevol in lsq12 space
    targetFH      -- file handler for the lsq12 average (initial target)
    nlinOutputDir -- the _nlin output directory
    nlin_protocol -- optional SEMI-COLON delimited csv overriding the default
                     minctracc/mincANTS parameters. A protocol may specify
                     only a subset of the allowed parameters (e.g. blurs
                     only) but may not rename parameters or add new ones
                     without subclassing. The number of generations is set
                     from the length of the parameter arrays.
    """
    self.p = Pipeline()
    self.inputs = inputArray
    self.target = targetFH
    self.nlinDir = nlinOutputDir
    # averages are appended here as generations complete
    self.nlinAverages = []
    # The target average may not exist yet when the pipeline is constructed;
    # in that case read the resolution from one of the original input files,
    # which should exist. (May be overridden via the non-linear protocol.)
    try:
        self.fileRes = rf.getFinestResolution(self.target)
    except:
        self.fileRes = rf.getFinestResolution(self.inputs[0].inputFileName)
    # Set defaults first, then apply any protocol on top: a protocol may
    # cover only some parameters, but all must be set for registration.
    self.defaultParams()
    if nlin_protocol:
        self.setParams(nlin_protocol)
    self.generations = self.getGenerations()
    # fresh "nlin" group for each input prior to registration
    for inputFH in self.inputs:
        inputFH.newGroup(groupName="nlin")
def MAGeTMask(atlases, inputs, numAtlases, regMethod, lsq12_protocol=None, nlin_protocol=None):
    """
    Masking algorithm is as follows:
    1. Run HierarchicalMinctracc or mincANTS with mask=True,
       using masks instead of labels.
    2. Do voxel voting to find the best mask. (Or, if single atlas,
       use that transform)
    3. mincMath to multiply original input by mask to get _masked.mnc file
       (This is done for both atlases and inputs, though for atlases,
       voxel voting is not required.)
    4. Replace lastBasevol with masked version, since once we have
       created mask, we no longer care about unmasked version.
    5. Clear out labels arrays, which were used to keep track of masks,
       as we want to re-set them for actual labels.

    Note: All data will be placed in a newly created masking directory
    to keep it separate from data generated during actual MAGeT.

    Returns the assembled Pipeline.
    """
    p = Pipeline()
    # switch all atlases into the masking directory layout
    for atlasFH in atlases:
        maskDirectoryStructure(atlasFH, masking=True)
    # same for inputs; then register every atlas to each input with
    # createMask=True so masks (not labels) are propagated
    for inputFH in inputs:
        maskDirectoryStructure(inputFH, masking=True)
        for atlasFH in atlases:
            sp = MAGeTRegister(inputFH,
                               atlasFH,
                               regMethod,
                               name="initial",
                               createMask=True,
                               lsq12_protocol=lsq12_protocol,
                               nlin_protocol=nlin_protocol)
            p.addPipeline(sp)
    """ Prior to final masking, set log and tmp directories as they were."""
    for atlasFH in atlases:
        """Retrieve labels for use in new group. Assume only one"""
        labels = atlasFH.returnLabels(True)
        maskDirectoryStructure(atlasFH, masking=False)
        # atlases use their own mask directly (isAtlas=True)
        mp = maskFiles(atlasFH, True)
        p.addPipeline(mp)
        atlasFH.newGroup()
        atlasFH.addLabels(labels[0], inputLabel=True)
    for inputFH in inputs:
        maskDirectoryStructure(inputFH, masking=False)
        # inputs derive their mask by voting over numAtlases propagated masks
        mp = maskFiles(inputFH, False, numAtlases)
        p.addPipeline(mp)
        # this will remove the "inputLabels"; labels that
        # come directly from the atlas library
        inputFH.clearLabels(True)
        # this will remove the "labels"; second generation
        # labels. I.e. labels from labels from the atlas library
        inputFH.clearLabels(False)
        inputFH.newGroup()
    return(p)
class FullIterativeLSQ12Nlin:
    """Does a full iterative LSQ12 and NLIN.
       Basically iterative model building starting from LSQ6
       and without stats at the end.
       Designed to be called as part of a larger application.
       Specifying an initModel is optional, all other arguments
       are mandatory."""
    def __init__(self, inputs, dirs, options, avgPrefix=None, initModel=None):
        self.inputs = inputs
        self.dirs = dirs
        self.options = options
        self.avgPrefix = avgPrefix
        self.initModel = initModel
        # set to the final non-linear average file handler by buildPipeline()
        self.nlinFH = None
        self.p = Pipeline()
        self.buildPipeline()

    def buildPipeline(self):
        # Pick a like-file for LSQ12: an initial model takes precedence over
        # a user-specified lsq12 like-file; may remain None.
        lsq12LikeFH = None
        if self.initModel:
            lsq12LikeFH = self.initModel[0]
        elif self.options.lsq12_likeFile:
            lsq12LikeFH = self.options.lsq12_likeFile
        lsq12module = lsq12.FullLSQ12(self.inputs,
                                      self.dirs.lsq12Dir,
                                      likeFile=lsq12LikeFH,
                                      maxPairs=self.options.lsq12_max_pairs,
                                      lsq12_protocol=self.options.lsq12_protocol,
                                      subject_matter=self.options.lsq12_subject_matter)
        lsq12module.iterate()
        self.p.addPipeline(lsq12module.p)
        self.lsq12Params = lsq12module.lsq12Params
        # if the lsq12 average has no mask, borrow the initial model's
        if lsq12module.lsq12AvgFH.getMask() == None:
            if self.initModel:
                lsq12module.lsq12AvgFH.setMask(self.initModel[0].getMask())
        if not self.avgPrefix:
            self.avgPrefix = self.options.pipeline_name
        # NLIN starts from the already-computed lsq12 average, so no new
        # average is created (createAvg=False)
        nlinModule = nlin.initializeAndRunNLIN(self.dirs.lsq12Dir,
                                               self.inputs,
                                               self.dirs.nlinDir,
                                               avgPrefix=self.avgPrefix,
                                               createAvg=False,
                                               targetAvg=lsq12module.lsq12AvgFH,
                                               nlin_protocol=self.options.nlin_protocol,
                                               reg_method=self.options.reg_method)
        self.p.addPipeline(nlinModule.p)
        self.nlinFH = nlinModule.nlinAverages[-1]
        self.nlinParams = nlinModule.nlinParams
        self.initialTarget = nlinModule.initialTarget
        # Now we need the full transform to go back to LSQ6 space:
        # concatenate each subject's average lsq12 xfm with its final
        # non-linear xfm and register the result as the xfm to use.
        for i in self.inputs:
            linXfm = lsq12module.lsq12AvgXfms[i]
            nlinXfm = i.getLastXfm(self.nlinFH)
            outXfm = st.createOutputFileName(i, nlinXfm, "transforms", "_with_additional.xfm")
            xc = ma.xfmConcat([linXfm, nlinXfm], outXfm, fh.logFromFile(i.logDir, outXfm))
            self.p.addStage(xc)
            i.addAndSetXfmToUse(self.nlinFH, outXfm)
def maskFiles(FH, isAtlas, numAtlases=1):
    """Mask FH's last base volume in place using mincmath -mult.

    Assume that if there is more than one atlas, multiple masks were
    generated and we need to perform a voxel_vote. Otherwise, assume we are
    using inputLabels from crossing with only one atlas.
    """
    #MF TODO: Make this more general to handle pairwise option.
    p = Pipeline()
    if isAtlas:
        # atlases already carry their own mask
        mincMathInput = FH.getMask()
    else:
        # inputs derive a mask from propagated labels
        if numAtlases > 1:
            voxel = voxelVote(FH, False, True)
            p.addStage(voxel)
            mincMathInput = voxel.outputFiles[0]
        else:
            mincMathInput = FH.returnLabels(True)[0]
        FH.setMask(mincMathInput)
    mincMathOutput = fh.createBaseName(FH.resampledDir, FH.basename) + "_masked.mnc"
    logFile = fh.logFromFile(FH.logDir, mincMathOutput)
    # In response to issue #135: the order of the input files to mincmath
    # matters. By default the first input file is used as a "like file" for
    # the output, so the image volume (not the 0/1-ranged mask) must come
    # first, otherwise the masked output would inherit the mask's image range.
    cmd = ["mincmath", "-clobber", "-mult",
           InputFile(FH.getLastBasevol()),
           InputFile(mincMathInput),
           OutputFile(mincMathOutput)]
    mincMath = CmdStage(cmd)
    mincMath.setLogFile(LogFile(logFile))
    p.addStage(mincMath)
    # from here on the masked volume replaces the unmasked one
    FH.setLastBasevol(mincMathOutput)
    return p
def __init__(self, inputFH, targetFH, statsKernels, additionalXfm=None):
    """Run the full statistics calculation from inputFH to targetFH.

    statsKernels  -- blurring kernels for the statistics
    additionalXfm -- optional transform concatenated with the lastXfm from
                     input to target. It must point in the same direction as
                     the lastXfm (input to target). Example: if the lastXfm
                     goes from lsq12 to nlin space and absolute jacobians to
                     lsq6 space are wanted, pass the lsq6-to-lsq12 transform
                     from input to target here.
    """
    self.p = Pipeline()
    self.inputFH = inputFH
    self.targetFH = targetFH
    self.blurs = []
    self.setupBlurs(statsKernels)
    self.statsGroup = StatsGroup()
    self.setupXfms()
    self.additionalXfm = additionalXfm
    self.fullStatsCalc()
def __init__(self, inputPipeFH, templatePipeFH,
             steps=[1, 0.5, 0.5, 0.2, 0.2, 0.1],
             blurs=[0.25, 0.25, 0.25, 0.25, 0.25, -1],
             gradients=[False, False, True, False, True, False],
             iterations=[60, 60, 60, 10, 10, 4],
             simplexes=[3, 3, 3, 1.5, 1.5, 1],
             w_translations=0.2,
             linearparams={'type': "lsq12", 'simplex': 1, 'step': 1},
             defaultDir="tmp"):
    """Hierarchical minctracc: blur stages, an LSQ12 alignment, then a
    series of progressively finer non-linear minctracc stages."""
    self.p = Pipeline()
    # Pre-compute the blurred volumes needed by the non-linear stages.
    # A blur of -1 means "use the unblurred volume", so no stage is added.
    #MF TODO: -1 case is also handled in blur. Need here for addStage.
    #Fix this redundancy and/or better design?
    for kernel in blurs:
        if kernel == -1:
            continue
        self.p.addStage(ma.blur(templatePipeFH, kernel, gradient=True))
        self.p.addStage(ma.blur(inputPipeFH, kernel, gradient=True))
    # Do standard LSQ12 alignment prior to non-linear stages
    lsq12reg = lsq12.LSQ12(inputPipeFH, templatePipeFH, defaultDir=defaultDir)
    self.p.addPipeline(lsq12reg.p)
    # Create the non-linear registrations; the final stage writes its
    # output into the "transforms" directory.
    for stage, stepSize in enumerate(steps):
        if stage == len(steps) - 1:
            defaultDir = "transforms"
        self.p.addStage(ma.minctracc(inputPipeFH,
                                     templatePipeFH,
                                     defaultDir=defaultDir,
                                     blur=blurs[stage],
                                     gradient=gradients[stage],
                                     iterations=iterations[stage],
                                     step=stepSize,
                                     similarity=0.8,
                                     w_translations=w_translations,
                                     simplex=simplexes[stage]))
def __init__(self, filesToResample, resolution):
    """During initialization make sure all files are resampled
       at resolution we'd like to use for each pipeline stage
    """
    self.p = Pipeline()
    for FH in filesToResample:
        dirForOutput = self.getOutputDirectory(FH)
        # NOTE(review): the volume handle returned by volumeFromFile is not
        # explicitly closed — confirm pyminc releases it on GC.
        currentRes = volumeFromFile(FH.getLastBasevol()).separations
        # Only the first separation is compared against the desired
        # resolution (tolerance 0.01) — assumes isotropic voxels; TODO confirm.
        if not abs(abs(currentRes[0]) - abs(resolution)) < 0.01:
            crop = ma.autocrop(resolution, FH, defaultDir=dirForOutput)
            self.p.addStage(crop)
            mask = FH.getMask()
            if mask:
                #Need to resample the mask as well.
                cropMask = ma.mincresampleMask(FH, FH,
                                               outputLocation=FH,
                                               likeFile=FH)
                self.p.addStage(cropMask)
def __init__(self, inputArray, targetFH, nlinOutputDir, avgPrefix, nlin_protocol):
    """Initialize the non-linear registration module.

    inputArray    -- file handlers (lastBasevol in lsq12 space) to register
    targetFH      -- lsq12 average acting as the first registration target
    nlinOutputDir -- _nlin output directory
    avgPrefix     -- prefix for the per-generation averages
    nlin_protocol -- optional protocol carrying its own resolutions
    """
    self.p = Pipeline()
    self.inputs = inputArray
    self.target = targetFH
    self.nlinDir = nlinOutputDir
    self.avgPrefix = avgPrefix
    # filled with one average per generation
    self.nlinAverages = []
    # a protocol supplies its own resolutions; otherwise derive one from the data
    self.fileRes = (rf.returnFinestResolution(self.inputs[0])
                    if nlin_protocol is None else None)
    # each input gets a new "nlin" group prior to registration
    for idx in range(len(self.inputs)):
        self.inputs[idx].newGroup(groupName="nlin")
def __init__(self, inputFH, targetFH, blurs, inputArray=None, scalingFactor=None):
    """Set up a statistics calculation from inputFH to targetFH.

    inputArray    -- optional; when given, an average displacement is
                     computed and used for recentering
    scalingFactor -- optional xfm used when calculating the scaled
                     jacobians; it is concatenated with self.linearXfm, the
                     linear portion of the final non-linear transform from
                     input to target
    """
    self.p = Pipeline()
    self.inputFH = inputFH
    self.targetFH = targetFH
    self.blurs = blurs
    self.statsGroup = StatsGroup()
    self.setupXfms()
    # average-displacement bookkeeping only applies when inputArray is given
    self.dispToAvg = [] if inputArray else None
    if inputArray:
        self.setupDispArray(inputArray)
    self.scalingFactor = scalingFactor
def maskFiles(FH, isAtlas, numAtlases=1):
    """Multiply FH's last base volume by its mask, producing *_masked.mnc.

    Assume that if there is more than one atlas, multiple masks were
    generated and we need to perform a voxel_vote. Otherwise, assume we are
    using inputLabels from crossing with only one atlas.
    """
    #MF TODO: Make this more general to handle pairwise option.
    p = Pipeline()
    if not isAtlas:
        # derive the input's mask from propagated labels
        if numAtlases > 1:
            voxel = voxelVote(FH, False, True)
            p.addStage(voxel)
            mincMathInput = voxel.outputFiles[0]
        else:
            mincMathInput = FH.returnLabels(True)[0]
        FH.setMask(mincMathInput)
    else:
        # an atlas already has a mask of its own
        mincMathInput = FH.getMask()
    mincMathOutput = fh.createBaseName(FH.resampledDir, FH.basename) + "_masked.mnc"
    logFile = fh.logFromFile(FH.logDir, mincMathOutput)
    # Issue #135: mincmath uses its first input as the like-file for the
    # output, so the image volume must be listed before the 0/1-ranged mask
    # to keep the masked output's image range correct.
    cmd = (["mincmath", "-clobber", "-mult"]
           + [InputFile(FH.getLastBasevol()), InputFile(mincMathInput)]
           + [OutputFile(mincMathOutput)])
    mincMath = CmdStage(cmd)
    mincMath.setLogFile(LogFile(logFile))
    p.addStage(mincMath)
    # the masked volume now replaces the unmasked one
    FH.setLastBasevol(mincMathOutput)
    return p
class AbstractApplication(object):
    """Framework class for writing applications for PydPiper.

       This class defines the default behaviour for accepting common
       command-line options, and executing the application under various
       queueing systems.

       Subclasses should extend the following methods:
           setup_appName()
           setup_logger() [optional, default method is defined here]
           setup_options()
           run()

       Usage:
          class MyApplication(AbstractApplication):
                ...
          if __name__ == "__main__":
             application = MyApplication()
             application.start()
    """
    def __init__(self):
        # use an environment variable to look for a default config file
        # Alternately, we could use a default location for the file
        # (say `files = ['/etc/pydpiper.cfg', '~/pydpiper.cfg', './pydpiper.cfg']`)
        default_config_file = os.getenv("PYDPIPER_CONFIG_FILE")
        if default_config_file is not None:
            files = [default_config_file]
        else:
            files = []
        self.parser = MyParser(default_config_files=files)
        self.__version__ = get_distribution("pydpiper").version  # pylint: disable=E1101

    def _setup_options(self):
        # PydPiper options
        addExecutorArgumentGroup(self.parser)
        addApplicationArgumentGroup(self.parser)

    def _print_version(self):
        # print the version and exit when --version was requested
        if self.options.show_version:
            print(self.__version__)
            sys.exit()

    def _setup_pipeline(self, options):
        self.pipeline = Pipeline(options)

    # FIXME check that only one server is running with a given output directory
    def _setup_directories(self):
        """Output and backup directories setup here."""
        if not self.options.output_directory:
            self.outputDir = os.getcwd()
        else:
            self.outputDir = makedirsIgnoreExisting(self.options.output_directory)
        self.pipeline.setBackupFileLocation(self.outputDir)

    def reconstructCommand(self):
        """Log the invoking command line and write it to a small shell script."""
        reconstruct = ' '.join(sys.argv)
        logger.info("Command is: " + reconstruct)
        logger.info("Command version : " + self.__version__)
        # also, because this is probably a better file for it (also has similar
        # naming conventions as the pipeline-stages.txt file:
        # BUG FIX: the time portion previously used "%m" (month) instead of
        # "%M" (minutes), producing e.g. "...-at-14-07-33" with the month
        # where the minutes belong.
        fileForCommandAndVersion = (self.options.pipeline_name
                                    + "-command-and-version-"
                                    + time.strftime("%d-%m-%Y-at-%H-%M-%S")
                                    + ".sh")
        # use a context manager so the file is closed even if a write fails
        with open(fileForCommandAndVersion, "w") as pf:
            pf.write("#!/usr/bin/env bash\n")
            pf.write("# Command version is: " + self.__version__ + "\n")
            pf.write("# Command was: \n")
            pf.write(reconstruct + '\n')
            pf.write("# options were: \n# %s" % self.options)

    def start(self):
        logger.info("Calling `start`")
        self._setup_options()
        self.setup_options()
        self.options = self.parser.parse_args()
        self.args = self.options.files
        self._print_version()
        # Check to make sure some executors have been specified if we are
        # actually going to run:
        if self.options.execute:
            noExecSpecified(self.options.num_exec)
        self._setup_pipeline(self.options)
        self._setup_directories()
        self.appName = self.setup_appName()
        self.setup_logger()
        # TODO this doesn't capture environment variables
        # or contents of any config file so isn't really complete
        self.reconstructCommand()
        pbs_submit = (self.options.queue_type == "pbs"
                      and not self.options.local)
        # --create-graph causes the pipeline to be constructed
        # both at PBS submit time and on the grid; this may be an extremely
        # expensive duplication
        if (self.options.execute and not pbs_submit) or self.options.create_graph:
            logger.debug("Calling `run`")
            self.run()
            logger.debug("Calling `initialize`")
            self.pipeline.initialize()
            self.pipeline.printStages(self.options.pipeline_name)
        if self.options.create_graph:
            logger.debug("Writing dot file...")
            nx.write_dot(self.pipeline.G, str(self.options.pipeline_name) + "_labeled-tree.dot")
            logger.debug("Done.")
        if not self.options.execute:
            print("Not executing the command (--no-execute is specified).\nDone.")
            return
        if pbs_submit:
            roq = runOnQueueingSystem(self.options, sys.argv)
            roq.createAndSubmitPbsScripts()
            logger.info("Finished submitting PBS job scripts...quitting")
            return
        #pipelineDaemon runs pipeline, launches Pyro client/server and executors (if specified)
        # if use_ns is specified, Pyro NameServer must be started.
        logger.info("Starting pipeline daemon...")
        pipelineDaemon(self.pipeline, self.options, sys.argv[0])
        logger.info("Server has stopped. Quitting...")

    def setup_appName(self):
        """sets the name of the application"""
        pass

    def setup_logger(self):
        """sets logging info specific to application"""
        FORMAT = '%(asctime)-15s %(name)s %(levelname)s %(process)d/%(threadName)s: %(message)s'
        now = datetime.now().strftime("%Y-%m-%d-at-%H:%M:%S")
        FILENAME = str(self.appName) + "-" + now + '-pid-' + str(os.getpid()) + ".log"
        logging.basicConfig(filename=FILENAME, format=FORMAT, level=logging.DEBUG)

    def setup_options(self):
        """Set up the self.options option parser with options this application needs."""
        pass

    def run(self):
        """Run this application."""
        pass
def _setup_pipeline(self, options):
    # Construct the Pipeline that stages will be added to, configured from
    # the parsed application options.
    self.pipeline = Pipeline(options)
class LSQ12ANTSNlin:
    """Class that runs a basic LSQ12 registration, followed by a single
       mincANTS call. Currently used in MAGeT, registration_chain and
       pairwise_nlin."""
    def __init__(self, inputFH, targetFH, lsq12_protocol=None, nlin_protocol=None, subject_matter=None, defaultDir="tmp"):
        self.p = Pipeline()
        self.inputFH = inputFH
        self.targetFH = targetFH
        self.lsq12_protocol = lsq12_protocol
        self.nlin_protocol = nlin_protocol
        self.subject_matter = subject_matter
        self.defaultDir = defaultDir
        # A registration resolution is only needed when some stage falls back
        # on default parameters; explicit protocols carry their own values.
        if ((self.lsq12_protocol == None and self.subject_matter==None) or self.nlin_protocol == None):
            self.fileRes = rf.returnFinestResolution(self.inputFH)
        else:
            self.fileRes = None
        self.buildPipeline()

    def buildPipeline(self):
        # Run lsq12 registration prior to non-linear
        self.lsq12Params = mp.setLSQ12MinctraccParams(self.fileRes,
                                                      subject_matter=self.subject_matter,
                                                      reg_protocol=self.lsq12_protocol)
        lsq12reg = lsq12.LSQ12(self.inputFH,
                               self.targetFH,
                               blurs=self.lsq12Params.blurs,
                               step=self.lsq12Params.stepSize,
                               gradient=self.lsq12Params.useGradient,
                               simplex=self.lsq12Params.simplex,
                               w_translations=self.lsq12Params.w_translations,
                               defaultDir=self.defaultDir)
        self.p.addPipeline(lsq12reg.p)
        #Resample using final LSQ12 transform and reset last base volume.
        res = ma.mincresample(self.inputFH, self.targetFH, likeFile=self.targetFH, argArray=["-sinc"])
        self.p.addStage(res)
        self.inputFH.setLastBasevol(res.outputFiles[0])
        lsq12xfm = self.inputFH.getLastXfm(self.targetFH)
        #Get registration parameters from nlin protocol, blur and register
        #Assume a SINGLE generation here.
        self.nlinParams = mp.setOneGenMincANTSParams(self.fileRes, reg_protocol=self.nlin_protocol)
        for b in self.nlinParams.blurs:
            for j in b:
                #Note that blurs for ANTS params in an array of arrays.
                # -1 means "use the unblurred volume", so no blur stage needed
                if j != -1:
                    self.p.addStage(ma.blur(self.targetFH, j, gradient=True))
                    self.p.addStage(ma.blur(self.inputFH, j, gradient=True))
        sp = ma.mincANTS(self.inputFH,
                         self.targetFH,
                         defaultDir=self.defaultDir,
                         blur=self.nlinParams.blurs[0],
                         gradient=self.nlinParams.gradient[0],
                         similarity_metric=self.nlinParams.similarityMetric[0],
                         weight=self.nlinParams.weight[0],
                         iterations=self.nlinParams.iterations[0],
                         radius_or_histo=self.nlinParams.radiusHisto[0],
                         transformation_model=self.nlinParams.transformationModel[0],
                         regularization=self.nlinParams.regularization[0],
                         useMask=self.nlinParams.useMask[0])
        self.p.addStage(sp)
        nlinXfm = sp.outputFiles[0]
        #Reset last base volume to original input for future registrations.
        self.inputFH.setLastBasevol(setToOriginalInput=True)
        #Concatenate transforms to get final lsq12 + nlin. Register volume handles naming and setting of lastXfm
        output = self.inputFH.registerVolume(self.targetFH, "transforms")
        xc = ma.xfmConcat([lsq12xfm, nlinXfm], output, fh.logFromFile(self.inputFH.logDir, output))
        self.p.addStage(xc)
class LSQ12(object):
    """Basic LSQ12 class.

    This class takes an input FileHandler and a targetFileHandler as
    required inputs. A series of minctracc calls will then produce the
    12-parameter alignment. The number of minctracc calls and their
    parameters are controlled by four further arguments to the constructor:

    blurs:    an array of floats containing the FWHM of the blurring kernel
              to be used for each call
    gradient: an array of booleans stating whether we should use the blur
              (False) or gradient (True) of each blur
    step:     an array of floats containing the step used by minctracc in
              each call
    simplex:  an array of floats containing the simplex used by minctracc
              in each call.

    The number of entries in those three (blurs, step, simplex) input
    arguments determines the number of minctracc calls executed in this
    module. For example, the following call:

    LSQ12(inputFH, targetFH,
          blurs=[10,5,2], gradient=[False,True,True],
          step=[4,4,4], simplex=[20,20,20])

    will result in three successive minctracc calls, each initialized with
    the output transform of the previous call.
    """
    def __init__(self, inputFH, targetFH,
                 blurs=[0.3, 0.2, 0.15],
                 step=[1, 0.5, 0.333333333333333],
                 gradient=[False, True, False],
                 simplex=[3, 1.5, 1],
                 w_translations=[0.4, 0.4, 0.4],
                 defaultDir="tmp"):
        # TO DO: Might want to take this out and pass in # of generations,
        # since checking happens there.
        if not (len(blurs) == len(step) == len(simplex)):
            logger.error("The same number of entries are required for blurs, step, and simplex in LSQ12")
            sys.exit()
        self.p = Pipeline()
        self.inputFH = inputFH
        self.targetFH = targetFH
        # NOTE: the original assigned self.blurs twice; the duplicate
        # assignment has been removed.
        self.blurs = blurs
        self.step = step
        self.gradient = gradient
        self.simplex = simplex
        self.w_translations = w_translations
        self.defaultDir = defaultDir
        self.blurFiles()
        self.buildPipeline()

    def blurFiles(self):
        """Add a blur stage per kernel; -1 means "use the unblurred volume"."""
        for b in self.blurs:
            if b != -1:
                tblur = ma.blur(self.targetFH, b, gradient=True)
                iblur = ma.blur(self.inputFH, b, gradient=True)
                self.p.addStage(tblur)
                self.p.addStage(iblur)

    def buildPipeline(self):
        """One lsq12 minctracc stage per generation, each initialized with
        the output transform of the previous call."""
        for i in range(len(self.blurs)):
            linearStage = ma.minctracc(self.inputFH,
                                       self.targetFH,
                                       blur=self.blurs[i],
                                       defaultDir=self.defaultDir,
                                       gradient=self.gradient[i],
                                       linearparam="lsq12",
                                       step=self.step[i],
                                       w_translations=self.w_translations[i],
                                       simplex=self.simplex[i])
            self.p.addStage(linearStage)
class initializeAndRunNLIN(object): """Class to setup target average (if needed), instantiate correct version of NLIN class, and run NLIN registration.""" def __init__( self, targetOutputDir, #Output directory for files related to initial target (often _lsq12) inputFiles, nlinDir, avgPrefix, #Prefix for nlin-1.mnc, ... nlin-k.mnc createAvg=True, #True=call mincAvg, False=targetAvg already exists targetAvg=None, #Optional path to initial target - passing name does not guarantee existence targetMask=None, #Optional path to mask for initial target nlin_protocol=None, reg_method=None): self.p = Pipeline() self.targetOutputDir = targetOutputDir self.inputFiles = inputFiles self.nlinDir = nlinDir self.avgPrefix = avgPrefix self.createAvg = createAvg self.targetAvg = targetAvg self.targetMask = targetMask self.nlin_protocol = nlin_protocol self.reg_method = reg_method # setup initialTarget (if needed) and initialize non-linear module self.setupTarget() self.initNlinModule() #iterate through non-linear registration and setup averages self.nlinModule.iterate() self.p.addPipeline(self.nlinModule.p) self.nlinAverages = self.nlinModule.nlinAverages self.nlinParams = self.nlinModule.nlinParams def setupTarget(self): if self.targetAvg: if isinstance(self.targetAvg, str): self.initialTarget = RegistrationPipeFH( self.targetAvg, mask=self.targetMask, basedir=self.targetOutputDir) self.outputAvg = self.targetAvg elif isinstance(self.targetAvg, RegistrationPipeFH): self.initialTarget = self.targetAvg self.outputAvg = self.targetAvg.getLastBasevol() if not self.initialTarget.getMask(): if self.targetMask: self.initialTarget.setMask(self.targetMask) else: print "You have passed a target average that is neither a string nor a file handler: " + str( self.targetAvg) print "Exiting..." 
else: self.targetAvg = abspath( self.targetOutputDir) + "/" + "initial-target.mnc" self.initialTarget = RegistrationPipeFH( self.targetAvg, mask=self.targetMask, basedir=self.targetOutputDir) self.outputAvg = self.targetAvg if self.createAvg: avg = mincAverage(self.inputFiles, self.initialTarget, output=self.outputAvg, defaultDir=self.targetOutputDir) self.p.addStage(avg) def initNlinModule(self): if self.reg_method == "mincANTS": self.nlinModule = NLINANTS(self.inputFiles, self.initialTarget, self.nlinDir, self.avgPrefix, self.nlin_protocol) elif self.reg_method == "minctracc": self.nlinModule = NLINminctracc(self.inputFiles, self.initialTarget, self.nlinDir, self.avgPrefix, self.nlin_protocol) else: logger.error("Incorrect registration method specified: " + self.reg_method) sys.exit()
class FullLSQ12(object):
    """
       This class takes an array of input file handlers along with an
       optionally specified protocol and does 12-parameter alignment and
       averaging of all of the pairs.

       Required arguments:
       inputArray = array of file handlers to be registered
       outputDir = an output directory to place the final average from this
           registration

       Optional arguments include:
       --likeFile = a file handler that can be used as a likeFile for
           resampling each input into the final lsq12 space. If none is
           specified, the input will be used
       --maxPairs = maximum number of pairs to register. If this option is
           specified, then each subject will only be registered to a subset
           of the other subjects.
       --lsq12_protocol = an optional csv file to specify a protocol that
           overrides the defaults.
       --subject_matter = currently supports "mousebrain". If this is
           specified, the parameter for the minctracc registrations are set
           based on defaults for mouse brains instead of the file resolution.
    """
    def __init__(self, inputArray, outputDir, likeFile=None, maxPairs=None, lsq12_protocol=None, subject_matter=None):
        self.p = Pipeline()
        """Initial inputs should be an array of fileHandlers with lastBasevol in lsq12 space"""
        self.inputs = inputArray
        """Output directory should be _nlin """
        self.lsq12Dir = outputDir
        """likeFile for resampling"""
        self.likeFile = likeFile
        """Maximum number of pairs to calculate"""
        self.maxPairs = maxPairs
        """Final lsq12 average"""
        self.lsq12Avg = None
        """Final lsq12 average file handler (e.g. the file handler associated with lsq12Avg)"""
        self.lsq12AvgFH = None
        """ Dictionary of lsq12 average transforms, which will include one per input.
            Key is input file handler and value is string pointing to final average
            lsq12 transform for that particular subject.
            These xfms may be used subsequently for statistics calculations. """
        self.lsq12AvgXfms = {}
        """Create the blurring resolution from the file resolution"""
        # an explicit protocol or subject matter provides its own parameters,
        # in which case no resolution needs to be derived from the data
        if (subject_matter == None and lsq12_protocol == None):
            self.fileRes = rf.returnFinestResolution(self.inputs[0])
        else:
            self.fileRes = None
        """"Set up parameter array"""
        self.lsq12Params = mp.setLSQ12MinctraccParams(self.fileRes,
                                                      subject_matter=subject_matter,
                                                      reg_protocol=lsq12_protocol)
        self.blurs = self.lsq12Params.blurs
        self.stepSize = self.lsq12Params.stepSize
        self.useGradient = self.lsq12Params.useGradient
        self.simplex = self.lsq12Params.simplex
        self.w_translations = self.lsq12Params.w_translations
        self.generations = self.lsq12Params.generations
        # Create new lsq12 group for each input prior to registration
        for i in range(len(self.inputs)):
            self.inputs[i].newGroup(groupName="lsq12")

    def iterate(self):
        if not self.maxPairs:
            xfmsToAvg = {}
            lsq12ResampledFiles = {}
            # register every input to every other input (full pairwise)
            for inputFH in self.inputs:
                """Create an array of xfms, to compute an average lsq12 xfm for each input"""
                xfmsToAvg[inputFH] = []
                for targetFH in self.inputs:
                    if inputFH != targetFH:
                        lsq12 = LSQ12(inputFH,
                                      targetFH,
                                      blurs=self.blurs,
                                      step=self.stepSize,
                                      gradient=self.useGradient,
                                      simplex=self.simplex,
                                      w_translations=self.w_translations)
                        self.p.addPipeline(lsq12.p)
                        xfmsToAvg[inputFH].append(inputFH.getLastXfm(targetFH))
                """Create average xfm for inputFH using xfmsToAvg array"""
                cmd = ["xfmavg"]
                for i in range(len(xfmsToAvg[inputFH])):
                    cmd.append(InputFile(xfmsToAvg[inputFH][i]))
                avgXfmOutput = createBaseName(inputFH.transformsDir, inputFH.basename + "-avg-lsq12.xfm")
                cmd.append(OutputFile(avgXfmOutput))
                xfmavg = CmdStage(cmd)
                xfmavg.setLogFile(LogFile(logFromFile(inputFH.logDir, avgXfmOutput)))
                self.p.addStage(xfmavg)
                self.lsq12AvgXfms[inputFH] = avgXfmOutput
                """ resample brain and add to array for mincAveraging"""
                if not self.likeFile:
                    likeFile = inputFH
                else:
                    likeFile = self.likeFile
                rslOutput = createBaseName(inputFH.resampledDir, inputFH.basename + "-resampled-lsq12.mnc")
                res = ma.mincresample(inputFH,
                                      inputFH,
                                      transform=avgXfmOutput,
                                      likeFile=likeFile,
                                      output=rslOutput,
                                      argArray=["-sinc"])
                self.p.addStage(res)
                lsq12ResampledFiles[inputFH] = rslOutput
            """ After all registrations complete, setLastBasevol for each
                subject to be resampled file in lsq12 space. We can then call
                mincAverage on fileHandlers, as it will use the lastBasevol
                for each by default."""
            for inputFH in self.inputs:
                inputFH.setLastBasevol(lsq12ResampledFiles[inputFH])
            """ mincAverage all resampled brains and put in lsq12Directory"""
            self.lsq12Avg = abspath(self.lsq12Dir) + "/" + basename(self.lsq12Dir) + "-pairs.mnc"
            self.lsq12AvgFH = RegistrationPipeFH(self.lsq12Avg, basedir=self.lsq12Dir)
            avg = ma.mincAverage(self.inputs,
                                 self.lsq12AvgFH,
                                 output=self.lsq12Avg,
                                 defaultDir=self.lsq12Dir)
            self.p.addStage(avg)
        else:
            print "Registration using a specified number of max pairs not yet working. Check back soon!"
            sys.exit()
def _setup_pipeline(self):
    """Instantiate the empty Pipeline that stages will later be added to."""
    # NOTE(review): this def appears free-standing here and duplicates
    # AbstractApplication._setup_pipeline below — confirm it is intentional.
    self.pipeline = Pipeline()
class AbstractApplication(object):
    """Framework class for writing applications for PydPiper.

    This class defines the default behaviour for accepting common command-line
    options, and executing the application under various queueing systems.

    Subclasses should extend the following methods:
        setup_appName()
        setup_logger() [optional, default method is defined here]
        setup_options()
        run()

    Usage:
        class MyApplication(AbstractApplication):
            ...

        if __name__ == "__main__":
            application = MyApplication()
            application.start()
    """

    def __init__(self):
        # Allow Pyro to ship code objects between the server and executors.
        Pyro.config.PYRO_MOBILE_CODE = 1
        self.parser = MyParser()
        self.__version__ = get_distribution("pydpiper").version

    def _setup_options(self):
        # Option groups common to every PydPiper application.
        addExecutorOptionGroup(self.parser)
        addApplicationOptionGroup(self.parser)

    def _print_version(self):
        """Print the pydpiper version and exit if --version was requested."""
        if self.options.show_version:
            print(self.__version__)
            sys.exit()

    def _setup_pipeline(self):
        """Instantiate the empty Pipeline that run() will populate with stages."""
        self.pipeline = Pipeline()

    def _setup_directories(self):
        """Output and backup directories setup here."""
        if not self.options.output_directory:
            self.outputDir = os.getcwd()
        else:
            self.outputDir = makedirsIgnoreExisting(self.options.output_directory)
        self.pipeline.setBackupFileLocation(self.outputDir)

    def reconstructCommand(self):
        """Log the full command line used to invoke this application."""
        # " ".join avoids the quadratic string concatenation of the old
        # index loop (and the stray trailing space it appended).
        reconstruct = " ".join(sys.argv)
        logger.info("Command is: " + reconstruct)

    def start(self):
        """Parse options, build the pipeline, and execute or submit it."""
        self._setup_options()
        self.setup_options()

        self.options, self.args = self.parser.parse_args()
        self._print_version()

        # Check to make sure some executors have been specified.
        noExecSpecified(self.options.num_exec)

        self._setup_pipeline()
        self._setup_directories()

        self.appName = self.setup_appName()
        self.setup_logger()

        # On a PBS queue we only generate submission scripts, then stop.
        if self.options.queue == "pbs":
            roq = runOnQueueingSystem(self.options, sys.argv)
            roq.createPbsScripts()
            return

        if self.options.restart:
            logger.info("Restarting pipeline from pickled files.")
            self.pipeline.restart()
            self.pipeline.initialize()
            self.pipeline.printStages(self.appName)
        else:
            self.reconstructCommand()
            self.run()
            self.pipeline.initialize()
            self.pipeline.printStages(self.appName)

        if self.options.create_graph:
            logger.debug("Writing dot file...")
            nx.write_dot(self.pipeline.G, "labeled-tree.dot")
            logger.debug("Done.")

        if not self.options.execute:
            print("Not executing the command (--no-execute is specified).\nDone.")
            return

        # pipelineDaemon runs pipeline, launches Pyro client/server and
        # executors (if specified).
        # if use_ns is specified, Pyro NameServer must be started.
        logger.info("Starting pipeline daemon...")
        pipelineDaemon(self.pipeline, self.options, sys.argv[0])
        logger.info("Server has stopped. Quitting...")

    def setup_appName(self):
        """sets the name of the application"""
        pass

    def setup_logger(self):
        """sets logging info specific to application"""
        FORMAT = '%(asctime)-15s %(name)s %(levelname)s: %(message)s'
        now = datetime.now()
        FILENAME = str(self.appName) + "-" + now.strftime("%Y%m%d-%H%M%S%f") + ".log"
        logging.basicConfig(filename=FILENAME, format=FORMAT, level=logging.DEBUG)

    def setup_options(self):
        """Set up the self.options option parser with options this application needs."""
        pass

    def run(self):
        """Run this application."""
        pass
class NLINBase(object):
    """
    This is the parent class for any iterative non-linear registration.

    Subclasses should extend the following methods:
        addBlurStage()
        regAndResample()

    Subclasses are also expected to set self.generations (the number of
    registration iterations) before iterate() is called.
    """
    def __init__(self, inputArray, targetFH, nlinOutputDir, avgPrefix, nlin_protocol):
        self.p = Pipeline()
        """Initial inputs should be an array of fileHandlers with lastBasevol in lsq12 space"""
        self.inputs = inputArray
        """Initial target should be the file handler for the lsq12 average"""
        self.target = targetFH
        """Output directory should be _nlin """
        self.nlinDir = nlinOutputDir
        """Prefix to pre-pend to averages at each generation"""
        self.avgPrefix = avgPrefix
        """Empty array that we will fill with averages as we create them"""
        self.nlinAverages = []
        """Create the blurring resolution from the file resolution"""
        if nlin_protocol == None:
            # No protocol given: derive the registration resolution from the
            # finest resolution of the first input volume.
            self.fileRes = rf.returnFinestResolution(self.inputs[0])
        else:
            # A protocol carries its own parameters, so no resolution is needed.
            self.fileRes = None

        # Create new nlin group for each input prior to registration
        for i in range(len(self.inputs)):
            self.inputs[i].newGroup(groupName="nlin")

    def addBlurStage(self):
        """
        Add blurs to pipeline.

        Because blurs are handled differently by parameter arrays in minctracc
        and mincANTS subclasses, they are added to the pipeline via function call.
        """
        pass

    def regAndResample(self):
        """Registration and resampling calls"""
        pass

    def iterate(self):
        """Run self.generations rounds of blur/register/resample/average.

        Each generation registers every input to the current target, averages
        the resampled results, and makes that average the next target.
        """
        for i in range(self.generations):
            outputName = "nlin-%g.mnc" % (i+1)
            if self.avgPrefix:
                outputName = str(self.avgPrefix) + "-" + outputName
            nlinOutput = abspath(self.nlinDir) + "/" + outputName
            nlinFH = RegistrationPipeFH(nlinOutput, mask=self.target.getMask(), basedir=self.nlinDir)
            self.addBlurStage(self.target, i)
            filesToAvg = []
            for inputFH in self.inputs:
                self.addBlurStage(inputFH, i)
                # Subclass hook appends each resampled file to filesToAvg.
                self.regAndResample(inputFH, i, filesToAvg, nlinFH)

            """Because we don't reset lastBasevol on each inputFH, call mincAverage with files only.
               We create fileHandler first though, so we have log directory.
               This solution seems a bit hackish--may want to modify?
               Additionally, we are currently using the full RegistrationPipeFH class, but ultimately
               we'll want to create a third class that is somewhere between a full and base class.
            """
            logBase = removeBaseAndExtension(nlinOutput)
            avgLog = createLogFile(nlinFH.logDir, logBase)
            avg = mincAverage(filesToAvg, nlinOutput, logFile=avgLog)
            self.p.addStage(avg)
            """Reset target for next iteration and add to array"""
            self.target = nlinFH
            self.nlinAverages.append(nlinFH)
            """Create a final nlin group to add to the inputFH.
               lastBasevol = by default, will grab the lastBasevol used in these calculations (e.g. lsq12)
               setLastXfm between final nlin average and inputFH will be set for stats calculations.
            """
            if i == (self.generations - 1):
                for inputFH in self.inputs:
                    """NOTE: The last xfm being set below is NOT the result of a registration between
                       inputFH and nlinFH, but rather is the output transform from the previous generation's
                       average."""
                    # NOTE(review): assumes self.generations >= 2, otherwise the
                    # index below is negative — confirm with callers.
                    finalXfm = inputFH.getLastXfm(self.nlinAverages[self.generations-2])
                    inputFH.newGroup(groupName="final")
                    inputFH.setLastXfm(nlinFH, finalXfm)
class mincresampleFileAndMask(object):
    """
    If the input file to mincresample(CmdStage) is a file handler, and there is
    a mask associated with the file, the most intuitive thing to do is to resample
    both the file and the mask. However, a true atom/command stage can only
    create a single stage, and as such mincresample(CmdStage) can not resample both.

    When using a file handler, the mask file associated with it is used behind
    the scenes without the user explicitly specifying this behaviour. That's why
    it is important that the mask always remains current/up-to-date. The best way
    to do that is to automatically resample the associated mask when the main file
    is being resampled. And that is where this class comes in. It serves as a
    wrapper around mincresample(CmdStage) and mincresampleMask(CmdStage). It will
    check whether the input file is a file handler, and if so, will resample the
    mask that is associated with it (if it exists).

    This class is not truly an atom/command stage, so technically should not live
    in the minc_atoms module. It is still kept here because in essence it serves
    as a single indivisible stage (and because the user is more likely to
    call/find it when looking for the mincresample stage).
    """
    def __init__(self, inFile, targetFile, nameForStage=None, **kwargs):
        self.p = Pipeline()
        # outputFiles from the mincresample of the main MINC file
        self.outputFiles = []
        # outputFiles from the mincresample of the mask belonging to the main file
        self.outputFilesMask = []

        # The first step is to simply run the mincresample command:
        fileRS = mincresample(inFile, targetFile, **kwargs)
        if nameForStage:
            fileRS.name = nameForStage
        self.p.addStage(fileRS)
        self.outputFiles = fileRS.outputFiles

        # Initialize the array of outputs for the mask in case there is none
        # to be resampled.
        self.outputFilesMask = [None] * len(self.outputFiles)

        # Next up: is this a file handler, and if so, is there a mask that
        # needs to be resampled as well?
        if isFileHandler(inFile):
            if inFile.getMask():
                # There is a mask associated with this file; it should be
                # updated. We have to watch out in terms of interpolation
                # arguments: if the original resample command asked for
                # "-sinc" or "-tricubic" for instance, we should remove that
                # argument for the mask resampling. These options would
                # reside in the argArray.
                maskArgs = copy.deepcopy(kwargs)
                if "argArray" in maskArgs:
                    # Build a filtered copy instead of the previous
                    # del-while-indexing loop, which skipped elements and
                    # could raise IndexError once an item was removed.
                    maskArgs["argArray"] = [
                        arg for arg in maskArgs["argArray"]
                        if not (re.match("-sinc", arg) or
                                re.match("-trilinear", arg) or
                                re.match("-tricubic", arg))
                    ]
                # If the output file for the mincresample command was already
                # specified, add "_mask.mnc" to it. The dot is escaped so only
                # a literal ".mnc" is rewritten.
                if "output" in maskArgs:
                    maskArgs["output"] = re.sub(r"\.mnc", "_mask.mnc",
                                                maskArgs["output"])
                maskRS = mincresampleMask(inFile, targetFile, **maskArgs)
                if nameForStage:
                    maskRS.name = nameForStage + "--mask--"
                self.p.addStage(maskRS)
                self.outputFilesMask = maskRS.outputFiles
class LSQ12ANTSNlin(object):
    """Class that runs a basic LSQ12 registration, followed by a single mincANTS call.
       Currently used in MAGeT, registration_chain and pairwise_nlin.

       Note: now a new-style class (inherits object), consistent with the
       other classes in this module.
    """
    def __init__(self, inputFH, targetFH, lsq12_protocol=None, nlin_protocol=None,
                 subject_matter=None, defaultDir="tmp"):
        self.p = Pipeline()
        self.inputFH = inputFH
        self.targetFH = targetFH
        self.lsq12_protocol = lsq12_protocol
        self.nlin_protocol = nlin_protocol
        self.subject_matter = subject_matter
        self.defaultDir = defaultDir
        if ((self.lsq12_protocol == None and self.subject_matter == None)
                or self.nlin_protocol == None):
            # always base the resolution to be used on the target for the registrations
            self.fileRes = rf.returnFinestResolution(self.targetFH)
        else:
            self.fileRes = None
        self.buildPipeline()

    def buildPipeline(self):
        """Build the LSQ12-then-mincANTS stage sequence for this input/target pair."""
        # Run lsq12 registration prior to non-linear
        self.lsq12Params = mp.setLSQ12MinctraccParams(self.fileRes,
                                                      subject_matter=self.subject_matter,
                                                      reg_protocol=self.lsq12_protocol)
        lsq12reg = lsq12.LSQ12(self.inputFH,
                               self.targetFH,
                               blurs=self.lsq12Params.blurs,
                               step=self.lsq12Params.stepSize,
                               gradient=self.lsq12Params.useGradient,
                               simplex=self.lsq12Params.simplex,
                               w_translations=self.lsq12Params.w_translations,
                               defaultDir=self.defaultDir)
        self.p.addPipeline(lsq12reg.p)
        # Resample using final LSQ12 transform and reset last base volume.
        res = ma.mincresample(self.inputFH, self.targetFH,
                              likeFile=self.targetFH, argArray=["-sinc"])
        self.p.addStage(res)
        self.inputFH.setLastBasevol(res.outputFiles[0])
        lsq12xfm = self.inputFH.getLastXfm(self.targetFH)
        # Get registration parameters from nlin protocol, blur and register.
        # Assume a SINGLE generation here.
        self.nlinParams = mp.setOneGenMincANTSParams(self.fileRes,
                                                     reg_protocol=self.nlin_protocol)
        for b in self.nlinParams.blurs:
            # Note that blurs for ANTS params are in an array of arrays.
            for j in b:
                if j != -1:
                    self.p.addStage(ma.blur(self.targetFH, j, gradient=True))
                    self.p.addStage(ma.blur(self.inputFH, j, gradient=True))
        sp = ma.mincANTS(self.inputFH,
                         self.targetFH,
                         defaultDir=self.defaultDir,
                         blur=self.nlinParams.blurs[0],
                         gradient=self.nlinParams.gradient[0],
                         similarity_metric=self.nlinParams.similarityMetric[0],
                         weight=self.nlinParams.weight[0],
                         iterations=self.nlinParams.iterations[0],
                         radius_or_histo=self.nlinParams.radiusHisto[0],
                         transformation_model=self.nlinParams.transformationModel[0],
                         regularization=self.nlinParams.regularization[0],
                         useMask=self.nlinParams.useMask[0])
        self.p.addStage(sp)
        nlinXfm = sp.outputFiles[0]
        # Reset last base volume to original input for future registrations.
        self.inputFH.setLastBasevol(setToOriginalInput=True)
        # Concatenate transforms to get final lsq12 + nlin.
        # registerVolume handles naming and setting of lastXfm.
        output = self.inputFH.registerVolume(self.targetFH, "transforms")
        xc = ma.xfmConcat([lsq12xfm, nlinXfm], output,
                          fh.logFromFile(self.inputFH.logDir, output))
        self.p.addStage(xc)
class CalcStats(object):
    """Statistics calculation between an input and target.

       This class calculates multiple displacement fields, relative and absolute jacobians.
       General functionality as follows:
       1. Class instantiated with input, target and statsKernels. Note that here, the statsKernels
          specified are blurs used to smooth the displacement fields prior to additional
          calculations. They may be a string of comma separated values or an array of doubles.
       2. An additional transform may also be included to calculate absolute
          jacobians to a different space, as is described in the __init__ function,
          documentation and elsewhere in the code.
       3. If needed, invert transform between input and target in setupXfms(). This is necessary
          as this class assumes that the target is the reference space, from which all stats
          are calculated.
       4. Call fullStatsCalc. This calculates linear and pure nonlinear displacement before
          calculating jacobians.
       5. Ability to recenter displacements using an average may be re-added in the future.
    """
    def __init__(self, inputFH, targetFH, statsKernels, additionalXfm=None):
        self.p = Pipeline()
        self.inputFH = inputFH
        self.targetFH = targetFH
        self.blurs = []
        self.setupBlurs(statsKernels)
        self.statsGroup = StatsGroup()
        self.setupXfms()
        """ additionalXfm is an optional transform that may be specified. If it is, it is
            concatenated with the lastXfm from input to target. This additional transform must
            also be in the same direction as the lastXfm (e.g. input to target)
            Example usage: if the lastXfm from input to target goes from lsq12 to nlin space
            and you would like to calculate the absolute jacobians to lsq6 space, the additional
            transform specified may be the lsq6 to lsq12 transform from input to target.
        """
        self.additionalXfm = additionalXfm
        self.fullStatsCalc()

    def setupBlurs(self, statsKernels):
        """Normalize statsKernels (list or comma-separated string) into self.blurs."""
        if isinstance(statsKernels, list):
            self.blurs = statsKernels
        elif isinstance(statsKernels, str):
            for i in statsKernels.split(","):
                self.blurs.append(float(i))
        else:
            print("Improper type of blurring kernels specified for stats calculation: " + str(statsKernels))
            sys.exit()

    def setupXfms(self):
        """Fetch the input->target transform and ensure its inverse exists."""
        self.xfm = self.inputFH.getLastXfm(self.targetFH)
        if not self.xfm:
            print("Cannot calculate statistics. No transform between input and target specified.")
            print("Input: " + self.inputFH.getLastBasevol())
            print("Target: " + self.targetFH.getLastBasevol())
            sys.exit()
        else:
            self.invXfm = self.targetFH.getLastXfm(self.inputFH)
            if not self.invXfm:
                xi = xfmInvert(self.xfm, FH=self.inputFH)
                self.p.addStage(xi)
                self.invXfm = xi.outputFiles[0]

    def fullStatsCalc(self):
        """Compute displacements, then relative and absolute jacobians."""
        self.linAndNlinDisplacement()
        self.calcDetAndLogDet(useFullDisp=False)  # Calculate relative jacobians
        self.calcDetAndLogDet(useFullDisp=True)   # Calculate absolute jacobians

    def calcFullDisplacement(self):
        """Calculate full displacement from target to input. If an
           additionalXfm is specified, it is concatenated to self.xfm here."""
        if self.additionalXfm:
            outXfm = createOutputFileName(self.inputFH, self.xfm, "transforms", "_with_additional.xfm")
            xc = xfmConcat([self.additionalXfm, self.xfm], outXfm, fh.logFromFile(self.inputFH.logDir, outXfm))
            self.p.addStage(xc)
            xi = xfmInvert(xc.outputFiles[0], FH=self.inputFH)
            self.p.addStage(xi)
            fullDisp = mincDisplacement(self.targetFH, self.inputFH, transform=xi.outputFiles[0])
        else:
            fullDisp = mincDisplacement(self.targetFH, self.inputFH, transform=self.invXfm)
        self.p.addStage(fullDisp)
        self.fullDisp = fullDisp.outputFiles[0]

    def calcNlinDisplacement(self):
        """Calculate pure non-linear displacement from target to input:
           1. Concatenate self.invXfm (target to input xfm) and self.linearPartOfNlinXfm
           2. Compute mincDisplacement on this transform.
        """
        pureNlinXfm = createOutputFileName(self.inputFH, self.invXfm, "transforms", "_pure_nlin.xfm")
        xc = xfmConcat([self.invXfm, self.linearPartOfNlinXfm],
                       pureNlinXfm, fh.logFromFile(self.inputFH.logDir, pureNlinXfm))
        self.p.addStage(xc)
        nlinDisp = mincDisplacement(self.targetFH, self.inputFH, transform=pureNlinXfm)
        self.p.addStage(nlinDisp)
        self.nlinDisp = nlinDisp.outputFiles[0]

    def linAndNlinDisplacement(self):
        """Calculation of full and pure non-linear displacements.
           The former is used to calculate absolute jacobians,
           the latter to calculate relative. The direction of the
           transforms and displacements is defined in each subclass.
        """
        # 1. Calculate linear part of non-linear xfm from input to target.
        #    This is necessary prior to calculating the pure nonlinear displacement.
        lpnl = linearPartofNlin(self.inputFH, self.targetFH)
        self.p.addStage(lpnl)
        self.linearPartOfNlinXfm = lpnl.outputFiles[0]
        # 2. Calculate the pure non-linear displacement
        self.calcNlinDisplacement()
        # 3. Calculate the full displacement
        self.calcFullDisplacement()

    def calcDetAndLogDet(self, useFullDisp=False):
        """Compute (smoothed) determinants and log-determinants of the chosen
           displacement field for every blur kernel, plus the unblurred case."""
        if useFullDisp:
            dispToUse = self.fullDisp   # absolute jacobians
        else:
            dispToUse = self.nlinDisp   # relative jacobians
        # Prepend -1 to include the calculation of unblurred jacobians.
        # Use a local list rather than mutating self.blurs: this method is
        # called twice (relative and absolute jacobians), and the previous
        # in-place self.blurs.insert(0, -1) added another -1 on every call,
        # duplicating the unblurred stages on the second pass.
        blursToUse = [-1] + self.blurs
        for b in blursToUse:
            # Create base name for determinant calculation.
            outputBase = fh.removeBaseAndExtension(dispToUse).split("_displacement")[0]
            # Calculate smoothed deformation field for all blurs other than -1.
            if b != -1:
                fwhm = "--fwhm=" + str(b)
                outSmooth = fh.createBaseName(self.inputFH.tmpDir,
                                              outputBase + "_smooth_displacement_fwhm" + str(b) + ".mnc")
                cmd = ["smooth_vector", "--clobber", "--filter", fwhm,
                       InputFile(dispToUse), OutputFile(outSmooth)]
                smoothVec = CmdStage(cmd)
                smoothVec.setLogFile(LogFile(fh.logFromFile(self.inputFH.logDir, outSmooth)))
                self.p.addStage(smoothVec)
                # Set input for determinant calculation.
                inputDet = outSmooth
                nameAddendum = "_fwhm" + str(b)
            else:
                inputDet = dispToUse
                nameAddendum = ""
            outputDet = fh.createBaseName(self.inputFH.tmpDir,
                                          outputBase + "_determinant" + nameAddendum + ".mnc")
            outDetShift = fh.createBaseName(self.inputFH.tmpDir,
                                            outputBase + "_det_plus1" + nameAddendum + ".mnc")
            if useFullDisp:
                # absolute jacobians
                outLogDet = fh.createBaseName(self.inputFH.statsDir,
                                              outputBase + "_absolute_log_determinant" + nameAddendum + ".mnc")
            else:
                # relative jacobians
                outLogDet = fh.createBaseName(self.inputFH.statsDir,
                                              outputBase + "_relative_log_determinant" + nameAddendum + ".mnc")
            # Calculate the determinant, then add 1 (per mincblob weirdness).
            cmd = ["mincblob", "-clobber", "-determinant", InputFile(inputDet), OutputFile(outputDet)]
            det = CmdStage(cmd)
            det.setLogFile(LogFile(fh.logFromFile(self.inputFH.logDir, outputDet)))
            self.p.addStage(det)

            cmd = ["mincmath", "-clobber", "-2", "-const", str(1), "-add",
                   InputFile(outputDet), OutputFile(outDetShift)]
            det = CmdStage(cmd)
            det.setLogFile(LogFile(fh.logFromFile(self.inputFH.logDir, outDetShift)))
            self.p.addStage(det)

            # Calculate log determinant (jacobian) and add to statsGroup.
            cmd = ["mincmath", "-clobber", "-2", "-log",
                   InputFile(outDetShift), OutputFile(outLogDet)]
            det = CmdStage(cmd)
            det.setLogFile(LogFile(fh.logFromFile(self.inputFH.logDir, outLogDet)))
            self.p.addStage(det)
            if useFullDisp:
                self.statsGroup.absoluteJacobians[b] = outLogDet
            else:
                self.statsGroup.relativeJacobians[b] = outLogDet
class CalcStats(object):
    """Statistics calculation between an input and target.

       NOTE(review): this class duplicates a CalcStats defined earlier in the
       file (this later definition wins at import time) — confirm whether one
       copy should be removed.

       This class calculates multiple displacement fields, relative and absolute jacobians.
       General functionality as follows:
       1. Class instantiated with input, target and statsKernels. Note that here, the statsKernels
          specified are blurs used to smooth the displacement fields prior to additional
          calculations. They may be a string of comma separated values or an array of doubles.
       2. An additional transform may also be included to calculate absolute
          jacobians to a different space, as is described in the __init__ function,
          documentation and elsewhere in the code.
       3. If needed, invert transform between input and target in setupXfms(). This is necessary
          as this class assumes that the target is the reference space, from which all stats
          are calculated.
       4. Call fullStatsCalc. This calculates linear and pure nonlinear displacement before
          calculating jacobians.
       5. Ability to recenter displacements using an average may be re-added in the future.
    """
    def __init__(self, inputFH, targetFH, statsKernels, additionalXfm=None):
        self.p = Pipeline()
        self.inputFH = inputFH
        self.targetFH = targetFH
        self.blurs = []
        self.setupBlurs(statsKernels)
        self.statsGroup = StatsGroup()
        self.setupXfms()
        """ additionalXfm is an optional transform that may be specified. If it is, it is
            concatenated with the lastXfm from input to target. This additional transform must
            also be in the same direction as the lastXfm (e.g. input to target)
            Example usage: if the lastXfm from input to target goes from lsq12 to nlin space
            and you would like to calculate the absolute jacobians to lsq6 space, the additional
            transform specified may be the lsq6 to lsq12 transform from input to target.
        """
        self.additionalXfm = additionalXfm
        self.fullStatsCalc()

    def setupBlurs(self, statsKernels):
        """Normalize statsKernels (list or comma-separated string) into self.blurs."""
        if isinstance(statsKernels, list):
            self.blurs = statsKernels
        elif isinstance(statsKernels, str):
            for i in statsKernels.split(","):
                self.blurs.append(float(i))
        else:
            # print(...) with a single argument behaves identically under
            # Python 2 and 3; this matches the other CalcStats copy.
            print("Improper type of blurring kernels specified for stats calculation: " + str(statsKernels))
            sys.exit()

    def setupXfms(self):
        """Fetch the input->target transform and ensure its inverse exists."""
        self.xfm = self.inputFH.getLastXfm(self.targetFH)
        if not self.xfm:
            print("Cannot calculate statistics. No transform between input and target specified.")
            print("Input: " + self.inputFH.getLastBasevol())
            print("Target: " + self.targetFH.getLastBasevol())
            sys.exit()
        else:
            self.invXfm = self.targetFH.getLastXfm(self.inputFH)
            if not self.invXfm:
                xi = xfmInvert(self.xfm, FH=self.inputFH)
                self.p.addStage(xi)
                self.invXfm = xi.outputFiles[0]

    def fullStatsCalc(self):
        """Compute displacements, then relative and absolute jacobians."""
        self.linAndNlinDisplacement()
        self.calcDetAndLogDet(useFullDisp=False)  # Calculate relative jacobians
        self.calcDetAndLogDet(useFullDisp=True)   # Calculate absolute jacobians

    def calcFullDisplacement(self):
        """Calculate full displacement from target to input. If an
           additionalXfm is specified, it is concatenated to self.xfm here."""
        if self.additionalXfm:
            outXfm = createOutputFileName(self.inputFH, self.xfm, "transforms", "_with_additional.xfm")
            xc = xfmConcat([self.additionalXfm, self.xfm], outXfm, fh.logFromFile(self.inputFH.logDir, outXfm))
            self.p.addStage(xc)
            xi = xfmInvert(xc.outputFiles[0], FH=self.inputFH)
            self.p.addStage(xi)
            fullDisp = mincDisplacement(self.targetFH, self.inputFH, transform=xi.outputFiles[0])
        else:
            fullDisp = mincDisplacement(self.targetFH, self.inputFH, transform=self.invXfm)
        self.p.addStage(fullDisp)
        self.fullDisp = fullDisp.outputFiles[0]

    def calcNlinDisplacement(self):
        """Calculate pure non-linear displacement from target to input:
           1. Concatenate self.invXfm (target to input xfm) and self.linearPartOfNlinXfm
           2. Compute mincDisplacement on this transform.
        """
        pureNlinXfm = createOutputFileName(self.inputFH, self.invXfm, "transforms", "_pure_nlin.xfm")
        xc = xfmConcat([self.invXfm, self.linearPartOfNlinXfm],
                       pureNlinXfm, fh.logFromFile(self.inputFH.logDir, pureNlinXfm))
        self.p.addStage(xc)
        nlinDisp = mincDisplacement(self.targetFH, self.inputFH, transform=pureNlinXfm)
        self.p.addStage(nlinDisp)
        self.nlinDisp = nlinDisp.outputFiles[0]

    def linAndNlinDisplacement(self):
        """Calculation of full and pure non-linear displacements.
           The former is used to calculate absolute jacobians,
           the latter to calculate relative. The direction of the
           transforms and displacements is defined in each subclass.
        """
        # 1. Calculate linear part of non-linear xfm from input to target.
        #    This is necessary prior to calculating the pure nonlinear displacement.
        lpnl = linearPartofNlin(self.inputFH, self.targetFH)
        self.p.addStage(lpnl)
        self.linearPartOfNlinXfm = lpnl.outputFiles[0]
        # 2. Calculate the pure non-linear displacement
        self.calcNlinDisplacement()
        # 3. Calculate the full displacement
        self.calcFullDisplacement()

    def calcDetAndLogDet(self, useFullDisp=False):
        """Compute (smoothed) determinants and log-determinants of the chosen
           displacement field for every blur kernel, plus the unblurred case."""
        if useFullDisp:
            dispToUse = self.fullDisp   # absolute jacobians
        else:
            dispToUse = self.nlinDisp   # relative jacobians
        # Prepend -1 to include the calculation of unblurred jacobians.
        # Use a local list rather than mutating self.blurs: this method is
        # called twice (relative and absolute jacobians), and the previous
        # in-place self.blurs.insert(0, -1) added another -1 on every call,
        # duplicating the unblurred stages on the second pass.
        blursToUse = [-1] + self.blurs
        for b in blursToUse:
            # Create base name for determinant calculation.
            outputBase = fh.removeBaseAndExtension(dispToUse).split("_displacement")[0]
            # Calculate smoothed deformation field for all blurs other than -1.
            if b != -1:
                fwhm = "--fwhm=" + str(b)
                outSmooth = fh.createBaseName(self.inputFH.tmpDir,
                                              outputBase + "_smooth_displacement_fwhm" + str(b) + ".mnc")
                cmd = ["smooth_vector", "--clobber", "--filter", fwhm,
                       InputFile(dispToUse), OutputFile(outSmooth)]
                smoothVec = CmdStage(cmd)
                smoothVec.setLogFile(LogFile(fh.logFromFile(self.inputFH.logDir, outSmooth)))
                self.p.addStage(smoothVec)
                # Set input for determinant calculation.
                inputDet = outSmooth
                nameAddendum = "_fwhm" + str(b)
            else:
                inputDet = dispToUse
                nameAddendum = ""
            outputDet = fh.createBaseName(self.inputFH.tmpDir,
                                          outputBase + "_determinant" + nameAddendum + ".mnc")
            outDetShift = fh.createBaseName(self.inputFH.tmpDir,
                                            outputBase + "_det_plus1" + nameAddendum + ".mnc")
            if useFullDisp:
                # absolute jacobians
                outLogDet = fh.createBaseName(self.inputFH.statsDir,
                                              outputBase + "_absolute_log_determinant" + nameAddendum + ".mnc")
            else:
                # relative jacobians
                outLogDet = fh.createBaseName(self.inputFH.statsDir,
                                              outputBase + "_relative_log_determinant" + nameAddendum + ".mnc")
            # Calculate the determinant, then add 1 (per mincblob weirdness).
            cmd = ["mincblob", "-clobber", "-determinant", InputFile(inputDet), OutputFile(outputDet)]
            det = CmdStage(cmd)
            det.setLogFile(LogFile(fh.logFromFile(self.inputFH.logDir, outputDet)))
            self.p.addStage(det)

            cmd = ["mincmath", "-clobber", "-2", "-const", str(1), "-add",
                   InputFile(outputDet), OutputFile(outDetShift)]
            det = CmdStage(cmd)
            det.setLogFile(LogFile(fh.logFromFile(self.inputFH.logDir, outDetShift)))
            self.p.addStage(det)

            # Calculate log determinant (jacobian) and add to statsGroup.
            cmd = ["mincmath", "-clobber", "-2", "-log",
                   InputFile(outDetShift), OutputFile(outLogDet)]
            det = CmdStage(cmd)
            det.setLogFile(LogFile(fh.logFromFile(self.inputFH.logDir, outLogDet)))
            self.p.addStage(det)
            if useFullDisp:
                self.statsGroup.absoluteJacobians[b] = outLogDet
            else:
                self.statsGroup.relativeJacobians[b] = outLogDet