def addArgs(p):
    # Register the dpLabelMesher command-line arguments on ArgumentParser p.
    dpWriteh5.addArgs(p)
    # mesh file input/output
    p.add_argument('--mesh-outfile', nargs=1, type=str, default='',
                   help='Output label mesh file')
    p.add_argument('--mesh-infiles', nargs='*', type=str, default='',
                   help='Input label mesh file (calculate stats / show plots only)')
    # mesh decimation controls (decimate pro / quadric clustering)
    p.add_argument('--reduce-frac', nargs=1, type=float, default=[0.2], metavar=('PERC'),
                   help='Reduce fraction for reducing meshes (decimate pro)')
    p.add_argument('--reduce-nbins', nargs=1, type=int, default=[10], metavar=('NBINS'),
                   help='Number of bins to use for mesh decimation (quadric clustering)')
    p.add_argument('--min-faces', nargs=1, type=int, default=[6], metavar=('NFACES'),
                   help='Minimum number of faces for each mesh (decimate pro, <= 0 to use update method)')
    # isocontour extraction
    p.add_argument('--smooth', nargs=3, type=int, default=[3,3,3], metavar=('X', 'Y', 'Z'),
                   help='Size of smoothing kernel (zeros for none)')
    p.add_argument('--contour-lvl', nargs=1, type=float, default=[0.25], metavar=('LVL'),
                   help='Level [0,1] to use to create mesh isocontours')
    # mesh post-processing flags
    p.add_argument('--center-origin', action='store_true', dest='center_origin',
                   help='Do the weird "origin centering" transformation')
    p.add_argument('--flip-faces', action='store_true', dest='flip_faces',
                   help='Change the face order coming from vtk')
    p.add_argument('--seed-range', nargs=2, type=int, default=[-1,-1], metavar=('BEG', 'END'),
                   help='Subset of seeds to process (< 0 for beg/end)')
    # decimate pro is the default mesher; this flag switches to quadric clustering
    p.add_argument('--no-decimatePro', action='store_false', dest='decimatePro',
                   help='Do not use decimate pro from vtk for meshing (use quadric clustering instead)')
    p.add_argument('--set-voxel-scale', action='store_true', dest='set_voxel_scale',
                   help='Use the voxel scale to set the data spacing to vtk (vertices in nm)')
    # debugging
    p.add_argument('--doplots', action='store_true',
                   help='Debugging plotting enabled for each supervoxel')
    p.add_argument('--dpLabelMesher-verbose', action='store_true',
                   help='Debugging output for dpLabelMesher')
def addArgs(p):
    # Register the dpCleanLabels command-line arguments on ArgumentParser p.
    # Possible actions; suggest running one at a time since there is no easy
    # way to specify the order in which they are applied.
    dpWriteh5.addArgs(p)
    # 3d smoothing of labels (done per label)
    p.add_argument('--smooth', action='store_true',
                   help='Perform 3d smoothing on labels')
    # remove components smaller than size (using voxel counts only)
    p.add_argument('--minsize', nargs=1, type=int, default=[-1], metavar=('size'),
                   help='Minimum label size in voxels to keep')
    p.add_argument('--minsize_fill', action='store_true',
                   help='Whether to nearest neighbor fill labels scrubbed with minsize')
    # remove adjacencies
    p.add_argument('--remove_adjacencies', action='store_true',
                   help='Perform 3d adjacency removal using fg-connectivity')
    # remove cavities
    p.add_argument('--cavity-fill', action='store_true',
                   help='Remove all BG not connected to cube faces')
    # rerun labeling (connected components)
    p.add_argument('--relabel', action='store_true',
                   help='Re-label components (run connected components)')
    # recompute voxel type based on majority winner for each supervoxel
    p.add_argument('--get-svox-type', action='store_true',
                   help='Recompute supervoxel type using majority method')
    p.add_argument('--write-voxel-type', action='store_true',
                   help='Perform get-svox-type and also write out voxel-type based on supervoxels')
    # other options
    p.add_argument('--fg-connectivity', nargs=1, type=int, default=[1], choices=[1,2,3],
                   help='Connectivity for foreground (where applicable)')
    p.add_argument('--bg-connectivity', nargs=1, type=int, default=[1], choices=[1,2,3],
                   help='Connectivity for background (where applicable)')
    p.add_argument('--ECS-label', nargs=1, type=int, default=[1], metavar=('size'),
                   help='Specify which label is ECS (== 0 means none, < 0 means max label)')
    p.add_argument('--dpCleanLabels-verbose', action='store_true',
                   help='Debugging output for dpCleanLabels')
def addArgs(p):
    # Register the dpResample command-line arguments on ArgumentParser p.
    dpWriteh5.addArgs(p)
    dpCubeIter.addArgs(p)
    p.add_argument('--upsample', action='store_true',
                   help='Upsample mode (default downsampling)')
    p.add_argument('--downsample-op', nargs=1, type=str, default=['none'], metavar='OP',
                   choices=['none', 'labels', 'mean', 'median'],
                   help='Specify which operation to use for downsampling method')
    p.add_argument('--factor', nargs=1, type=int, default=[2], metavar=('F'),
                   help='Integer factor to resample, must divide size of resampled dims')
    p.add_argument('--resample-dims', nargs=3, type=int, default=[1, 1, 1], metavar=('X', 'Y', 'Z'),
                   help='Boolean specifying which dimensions to resample')
    p.add_argument('--dpResample-verbose', action='store_true',
                   help='Debugging output for dpResample')
def writeLabels(cls, outfile, chunk, offset, size, datasize, chunksize, fillvalue=None, data=None, inraw='',
                strbits='32', outraw='', attrs=None, subgroups=None, verbose=False):
    """Write a label cube to an hdf5 file by synthesizing a dpWriteh5 command line.

    Either `data` (in-memory cube) or `inraw` (path to a raw input file) must be
    supplied. The labels are written as an unsigned integer type of `strbits` bits.
    `attrs` are attached to the writer as data_attrs; `subgroups` select the hdf5
    subgroup path. Returns the writer instance.

    Note: attrs/subgroups default to None rather than {}/[] to avoid the shared
    mutable-default-argument pitfall; None is treated as empty.
    """
    if attrs is None: attrs = {}
    assert( data is not None or inraw )  # need an in-memory cube or a raw file
    parser = argparse.ArgumentParser(description='class:emProbabilities',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    dpWriteh5.addArgs(parser)
    # build the equivalent command-line string for the dpWriteh5 arg interface
    arg_str = ''
    arg_str += ' --srcfile ' + outfile
    arg_str += ' --chunk %d %d %d ' % tuple(chunk)
    arg_str += ' --offset %d %d %d ' % tuple(offset)
    arg_str += ' --size %d %d %d ' % tuple(size)
    arg_str += ' --chunksize %d %d %d' % tuple(chunksize)
    arg_str += ' --datasize %d %d %d' % tuple(datasize)
    arg_str += ' --data-type %s ' % ('uint' + strbits)
    if subgroups: arg_str += ' --subgroups ' + ' '.join(subgroups)
    # compare against None so a legitimate fillvalue of 0 is not silently dropped
    if fillvalue is not None: arg_str += ' --fillvalue ' + str(fillvalue)
    if inraw: arg_str += ' --inraw ' + inraw
    if outraw: arg_str += ' --outraw ' + outraw
    if verbose: arg_str += ' --dpWriteh5-verbose '
    if verbose: print(arg_str)
    args = parser.parse_args(arg_str.split())
    writeh5 = cls(args); writeh5.data_attrs = attrs
    if inraw:
        writeh5.writeFromRaw()
    else:
        writeh5.writeCube(data)
    return writeh5
def addArgs(p):
    # Register the dpCubeStitcher command-line arguments on ArgumentParser p.
    dpWriteh5.addArgs(p)
    dpCubeIter.addArgs(p)
    p.add_argument('--concatenate_only', action='store_true',
                   help='Just concatenate volumes, no stitching')
    # two-pass stitching with optional persistence of the first pass
    p.add_argument('--two_pass', action='store_true',
                   help='Use two pass method')
    p.add_argument('--two_pass_load', nargs=1, type=str, default='',
                   help='Raw file to load first pass')
    p.add_argument('--two_pass_save', nargs=1, type=str, default='',
                   help='Raw file to export first pass')
    p.add_argument('--dpCubeStitcher-verbose', action='store_true',
                   help='Debugging output for dpCubeStitcher')
def readVoxType(cls, srcfile, chunk, offset, size, verbose=False):
    """Instantiate cls and read a voxel-type cube from srcfile into its buffers.

    Synthesizes a dpWriteh5 command line from the given chunk/offset/size and
    returns the loaded instance.
    """
    parser = argparse.ArgumentParser(description='class:emVoxelType',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    dpWriteh5.addArgs(parser)
    # synthesize the equivalent command line and hand it to argparse
    arg_str = ' --srcfile ' + srcfile
    arg_str += ' --chunk %d %d %d ' % tuple(chunk)
    arg_str += ' --offset %d %d %d ' % tuple(offset)
    arg_str += ' --size %d %d %d ' % tuple(size)
    if verbose:
        arg_str += ' --dpLoadh5-verbose '
        print(arg_str)
    args = parser.parse_args(arg_str.split())
    loadh5 = cls(args)
    loadh5.readCubeToBuffers()
    return loadh5
def readProbs(cls, srcfile, probName, chunk, offset, size, verbose=False):
    """Instantiate cls and read a probability cube for probName into its buffers.

    The hdf5 dataset name is formed from emProbabilities.PROBS_DATASET plus
    probName. Returns the loaded instance.
    """
    parser = argparse.ArgumentParser(description='class:emProbabilities',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    dpWriteh5.addArgs(parser)
    # synthesize the equivalent command line and hand it to argparse
    arg_str = ' --srcfile ' + srcfile
    arg_str += ' --chunk %d %d %d ' % tuple(chunk)
    arg_str += ' --offset %d %d %d ' % tuple(offset)
    arg_str += ' --size %d %d %d ' % tuple(size)
    arg_str += ' --dataset ' + emProbabilities.PROBS_DATASET + str(probName)
    if verbose:
        arg_str += ' --dpLoadh5-verbose '
        print(arg_str)
    args = parser.parse_args(arg_str.split())
    loadh5 = cls(args)
    loadh5.readCubeToBuffers()
    return loadh5
def addArgs(p):
    # Register the dpLabelMerger command-line arguments on ArgumentParser p.
    dpWriteh5.addArgs(p)
    dpCubeIter.addArgs(p)
    # inputs: knossos annotations and superchunked supervoxel labels
    p.add_argument('--annotation-file-glob', nargs=1, type=str, default='',
                   help='Glob for a list of input annotation files from knossos')
    p.add_argument('--labels-path', nargs=1, type=str, default='',
                   help='Input path for superchunked supervoxel label files (to merge)')
    p.add_argument('--dsfactor', nargs=1, type=int, default=[1], metavar=('F'),
                   help='Downsample factor, mode to write out a single label file')
    p.add_argument('--segmentation-values', nargs='+', type=str, default=[],
                   help='Mapping from segmentation levels to parameter value in hdf5')
    # smoothing / binarization of the merged labels
    p.add_argument('--smooth', nargs=3, type=int, default=[7, 7, 7], metavar=('X', 'Y', 'Z'),
                   help='Size of smoothing kernel (zeros for none)')
    p.add_argument('--contour-lvl', nargs=1, type=float, default=[0.2], metavar=('LVL'),
                   help='Level [0,1] to use to binarize after smoothing applied')
    p.add_argument('--dpLabelMerger-verbose', action='store_true',
                   help='Debugging output for dpLabelMerger')
def readLabels(cls, srcfile, chunk, offset, size, data_type=None, subgroups=None, verbose=False):
    """Instantiate cls and read a label cube from srcfile into its buffers.

    data_type falls back to cls.LBLS_STR_DTYPE when not given; subgroups select
    the hdf5 subgroup path. Returns the loaded instance.

    Note: subgroups defaults to None rather than [] to avoid the shared
    mutable-default-argument pitfall; None is treated as empty.
    """
    if not data_type: data_type = cls.LBLS_STR_DTYPE
    parser = argparse.ArgumentParser(description='class:emLabels',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    dpWriteh5.addArgs(parser)
    # synthesize the equivalent command line and hand it to argparse
    arg_str = ''
    arg_str += ' --srcfile ' + srcfile
    arg_str += ' --chunk %d %d %d ' % tuple(chunk)
    arg_str += ' --offset %d %d %d ' % tuple(offset)
    arg_str += ' --size %d %d %d ' % tuple(size)
    # guard kept in case cls.LBLS_STR_DTYPE is itself empty/falsy
    if data_type: arg_str += ' --data-type ' + data_type
    if subgroups: arg_str += ' --subgroups ' + ' '.join(subgroups)
    if verbose: arg_str += ' --dpLoadh5-verbose '
    if verbose: print(arg_str)
    args = parser.parse_args(arg_str.split())
    loadh5 = cls(args); loadh5.readCubeToBuffers()
    return loadh5
def addArgs(p):
    # Register the dpAggProbs command-line arguments on ArgumentParser p.
    dpWriteh5.addArgs(p)
    p.add_argument('--inrawpath', nargs=1, type=str, default='', metavar='PATH',
                   help='Raw inputs path')
    # pertaining to voxel types
    p.add_argument('--types', nargs='+', type=str, default=['ICS','ECS','MEM'], metavar='TYPE',
                   help='Names of the voxel types (prefix for raw file)')
    p.add_argument('--agg-ops-types', nargs='*', type=str, default=[], metavar='OP',
                   help='Specify which operations to be done for each type, 0 in list delimits types')
    # pertaining to probs to be merged
    p.add_argument('--nmerge', nargs=1, type=int, default=[1], metavar='N',
                   help='Number of network probability output rawfiles to merge')
    p.add_argument('--weightings', nargs='*', type=float, default=[], metavar='W',
                   help='Weightings for probabilities specified in each rawfile (un-reordered), default to equal weightings')
    p.add_argument('--dim-orderings', nargs='*', type=str, default=[], choices=('xyz','xzy','zyx'), metavar='ORD',
                   help='Specify the reslice ordering of the rawfile inputs (default all xyz)')
    p.add_argument('--dpAggProbs-verbose', action='store_true',
                   help='Debugging output for dpAggProbs')
def writeVoxType(cls, outfile, chunk, offset, size, datasize, chunksize, fillvalue=None, data=None, inraw='',
                 outraw='', attrs=None, verbose=False):
    """Write a voxel-type cube to an hdf5 file by synthesizing a dpWriteh5 command line.

    Either `data` (in-memory cube) or `inraw` (path to a raw input file) must be
    supplied. `attrs` are attached to the writer as data_attrs. Returns the
    writer instance.

    Note: attrs defaults to None rather than {} to avoid the shared
    mutable-default-argument pitfall; None is treated as empty.
    """
    if attrs is None: attrs = {}
    assert( data is not None or inraw )  # need an in-memory cube or a raw file
    parser = argparse.ArgumentParser(description='class:emVoxelType',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    dpWriteh5.addArgs(parser)
    # build the equivalent command-line string for the dpWriteh5 arg interface
    arg_str = ''
    arg_str += ' --srcfile ' + outfile
    arg_str += ' --chunk %d %d %d ' % tuple(chunk)
    arg_str += ' --offset %d %d %d ' % tuple(offset)
    arg_str += ' --size %d %d %d ' % tuple(size)
    arg_str += ' --chunksize %d %d %d' % tuple(chunksize)
    arg_str += ' --datasize %d %d %d' % tuple(datasize)
    # compare against None so a legitimate fillvalue of 0 is not silently dropped
    if fillvalue is not None: arg_str += ' --fillvalue ' + str(fillvalue)
    if inraw: arg_str += ' --inraw ' + inraw
    if outraw: arg_str += ' --outraw ' + outraw
    if verbose: arg_str += ' --dpWriteh5-verbose '
    if verbose: print(arg_str)
    args = parser.parse_args(arg_str.split())
    writeh5 = cls(args); writeh5.data_attrs = attrs
    if inraw:
        writeh5.writeFromRaw()
    else:
        writeh5.writeCube(data)
    return writeh5
def addArgs(p):
    # Register the dpWarp command-line arguments on ArgumentParser p.
    dpWriteh5.addArgs(p)
    # NOTE(review): defaults here are bare strings while nargs=1 yields a
    #   one-element list when the flag is supplied — presumably downstream
    #   handles both forms; verify against the consumer.
    p.add_argument('--rawfile', nargs=1, type=str, default='raw.h5',
                   help='Path/name of hdf5 raw EM (input) file')
    p.add_argument('--raw-dataset', nargs=1, type=str, default='data',
                   help='Name of the raw EM dataset to read')
    p.add_argument('--dpWarp-verbose', action='store_true',
                   help='Debugging output for dpWarp')
def addArgs(p):
    # Register the dpVolumeXcorr command-line arguments on ArgumentParser p.
    dpWriteh5.addArgs(p)
    # probability / voxel-type inputs
    p.add_argument('--probfile', nargs=1, type=str, default='',
                   help='Path/name of hdf5 probability (input) file')
    p.add_argument('--prob_types', nargs='+', type=str, default=['MEM'], metavar='TYPE',
                   help='Dataset names of the voxel types to use from the probabilities')
    p.add_argument('--typefile', nargs=1, type=str, default='',
                   help='Path/name of hdf5 with voxel types')
    # training region selection
    p.add_argument('--train-chunks', nargs='+', type=int, default=[0, 0, 0], metavar='X_Y_Z',
                   help='Training chunks (specify x0 y0 z0 x1 y1 ...) ')
    p.add_argument('--train-offsets', nargs='*', type=int, default=[], metavar='X_Y_Z',
                   help='Training offsets (specify x0 y0 z0 x1 y1 ..., default all zero) ')
    p.add_argument('--train-size', nargs=3, type=int, default=[0, 0, 0], metavar=('X', 'Y', 'Z'),
                   help='Size of training chunks (default chunksize)')
    p.add_argument('--test-size', nargs=2, type=int, default=[64, 64], metavar='S',
                   help='Size of the sliding correlation windows')
    # save / load of run outputs
    p.add_argument('--savefile', nargs=1, type=str, default='',
                   help='Path/name npz file to save outputs to')
    p.add_argument('--loadfile', nargs=1, type=str, default='',
                   help='Load previous run saved in npz for plotting')
    p.add_argument('--nthreads', nargs=1, type=int, default=[8],
                   help='Number of threads to use in fftw')
    # arguments for concatenate mode (concatenate runs over savefiles that were
    # created for independent volumes and reduces down to specified size)
    p.add_argument('--loadfiles-path', nargs=1, type=str, default='',
                   help='Path to saved runs to concatenate')
    p.add_argument('--concat-chunk', nargs=3, type=int, default=[0, 0, 0], metavar=('X', 'Y', 'Z'),
                   help='Starting chunk alignment for concatenate')
    p.add_argument('--concat-nchunks', nargs=3, type=int, default=[0, 0, 0], metavar=('X', 'Y', 'Z'),
                   help='Total area for concatenation (chunks)')
    p.add_argument('--reduce-size', nargs=3, type=int, default=[0, 0, 0], metavar=('X', 'Y', 'Z'),
                   help='Block size to reduce down to after concantenation (voxels)')
    p.add_argument('--concat-loadfile', nargs=1, type=str, default='',
                   help='Load previous concat saved in npz for analysis/plotting')
    p.add_argument('--dpVolumeXcorr-verbose', action='store_true',
                   help='Debugging output for dpVolumeXcorr')
def addArgs(p):
    # Register the dpCleanLabels command-line arguments on ArgumentParser p.
    # Possible actions; suggest running one at a time since there is no easy
    # way to specify the order in which they are applied.
    dpWriteh5.addArgs(p)
    # 3d smoothing of labels (done per label)
    p.add_argument('--smooth', action='store_true',
                   help='Perform 3d smoothing on labels')
    p.add_argument('--smooth-size', nargs=3, type=int, default=[3, 3, 3], metavar=('X', 'Y', 'Z'),
                   help='Size of smoothing kernel')
    p.add_argument('--contour-lvl', nargs='*', type=float, default=[0.25], metavar=('LVL'),
                   help='Level [0,1] to use to create mesh isocontours, specify range for "auto" mode')
    # remove components smaller than size (using voxel counts only)
    p.add_argument('--minsize', nargs=1, type=int, default=[-1], metavar=('size'),
                   help='Minimum label size in voxels to keep')
    p.add_argument('--minsize_fill', action='store_true',
                   help='Whether to nearest neighbor fill labels scrubbed with minsize')
    # remove adjacencies
    p.add_argument('--remove_adjacencies', nargs=1, type=int, default=[0], metavar=('NBHD'),
                   help='Perform 3d adjacency removal using specified neighborhood size')
    # remove cavities
    p.add_argument('--cavity-fill', action='store_true',
                   help='Remove all BG not connected to cube faces')
    # remove any labels less than specified size that are within cavities
    p.add_argument('--cavity-fill-minsize', nargs=1, type=int, default=[1], metavar=('SIZE'),
                   help='Minimum label size to replace labels in cavities (force cavity fill)')
    # rerun labeling (connected components)
    p.add_argument('--relabel', action='store_true',
                   help='Re-label components (run connected components)')
    # write background (membrane mask) using the voxel type
    p.add_argument('--apply-bg-mask', action='store_true',
                   help='Write voxel-type background (membrane) mask to supervoxels')
    # recompute voxel type based on majority winner for each supervoxel
    p.add_argument('--get-svox-type', action='store_true',
                   help='Recompute supervoxel type using majority method')
    p.add_argument('--write-voxel-type', action='store_true',
                   help='Perform get-svox-type and also write out voxel-type based on supervoxels')
    p.add_argument('--replace-ECS', action='store_true',
                   help='Replace all ECS supervoxels with ECS-label')
    # make overlay that traces minpath between particular label value
    p.add_argument('--minpath', nargs=1, type=int, default=[-1], metavar=('label'),
                   help='Calculate min foreground path between specified label (default off)')
    p.add_argument('--minpath-perc', nargs=1, type=float, default=[1.01], metavar=('perc'),
                   help='Percentage away from bwmin to label in minpath overlay')
    p.add_argument('--minpath-skel', action='store_true',
                   help='Whether to skeletonize min foreground path (for use with minpath)')
    # used to set min label value for both relabel and minsize
    p.add_argument('--min-label', nargs=1, type=int, default=[1], metavar=('min'),
                   help='First label after relabel if relabeling (also minsize)')
    # xxx - new switch intended to have actions performed per label instead of the
    #   whole volume at once; intended for medium-large volumes in situations where
    #   superchunking is not desireable. currently supported for: cavity_fill
    p.add_argument('--labelwise', action='store_true',
                   help='Perform operations on each label sequentially')
    # other options
    p.add_argument('--fg-connectivity', nargs=1, type=int, default=[1], choices=[1, 2, 3],
                   help='Connectivity for foreground (where applicable)')
    p.add_argument('--bg-connectivity', nargs=1, type=int, default=[1], choices=[1, 2, 3],
                   help='Connectivity for background (where applicable)')
    p.add_argument('--ECS-label', nargs=1, type=int, default=[1], metavar=('size'),
                   help='Specify which label is ECS (== 0 means none, < 0 means max label)')
    p.add_argument('--dpCleanLabels-verbose', action='store_true',
                   help='Debugging output for dpCleanLabels')
def addArgs(p):
    # Register the dpAggProbs command-line arguments on ArgumentParser p.
    dpWriteh5.addArgs(p)
    p.add_argument('--inrawpath', nargs=1, type=str, default='', metavar='PATH',
                   help='Raw inputs path')
    # pertaining to voxel types
    p.add_argument('--types', nargs='+', type=str, default=['ICS', 'ECS', 'MEM'], metavar='TYPE',
                   help='Names of the voxel types (prefix for raw file)')
    p.add_argument('--agg-ops-types', nargs='*', type=str, default=[], metavar='OP',
                   help='Specify which operations to be done for each type, 0 in list delimits types')
    # pertaining to probs to be merged
    p.add_argument('--nmerge', nargs=1, type=int, default=[1], metavar='N',
                   help='Number of network probability output rawfiles to merge')
    p.add_argument('--weightings', nargs='*', type=float, default=[], metavar='W',
                   help='Weightings for probabilities specified in each rawfile (un-reordered), default to equal weightings')
    p.add_argument('--dim-orderings', nargs='*', type=str, default=[], choices=('xyz', 'xzy', 'zyx'), metavar='ORD',
                   help='Specify the reslice ordering of the rawfile inputs (default all xyz)')
    # added this feature to optimize overlap support
    p.add_argument('--overlap', nargs=3, type=int, default=[0, 0, 0], metavar=('X', 'Y', 'Z'),
                   help='Select out portion of raw cube to support overlaps')
    p.add_argument('--dpAggProbs-verbose', action='store_true',
                   help='Debugging output for dpAggProbs')