Example #1
  def filtering( self, inputs, outputs, length, cortex_only ):
    '''
    Filter the mapped fibers.
    '''

    # check if we have all required input data
    # we need at least: 
    #  - outputs['fibers_mapped'] == Track file in T1 space with mapped scalars
    if not os.path.exists( outputs['fibers_mapped'] ):
      c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['fibers_mapped'] + Colors.RED + ' but we really need it to start with stage 4!!' + Colors._CLEAR )
      sys.exit( 2 )

    # find the order of the mapped scalars
    header = io.loadTrkHeaderOnly( outputs['fibers_mapped'] )
    scalars = list( header['scalar_name'] )

    # split the length range
    length = length.split( ' ' )
    min_length = int( length[0] )
    max_length = int( length[1] )

    # length filtering

    c.info( Colors.YELLOW + '  Filtering ' + Colors.PURPLE + 'fiber length' + Colors.YELLOW + ' to be ' + Colors.PURPLE + '>' + str( min_length ) + ' and <' + str( max_length ) + Colors.YELLOW + ' for ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped'] )[1] + Colors.YELLOW + ' and store as ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )
    fyborg.fyborg( outputs['fibers_mapped'], outputs['fibers_mapped_length_filtered'], [fyborg.FyFilterLengthAction( scalars.index( 'length' ), min_length, max_length )] )

    header = io.loadTrkHeaderOnly( outputs['fibers_mapped_length_filtered'] )
    new_count = header['n_count']

    c.info( Colors.YELLOW + '  Number of tracks after ' + Colors.PURPLE + 'length filtering' + Colors.YELLOW + ': ' + str( new_count ) + Colors.YELLOW + Colors._CLEAR )

    if cortex_only:

      # special cortex filtering

      c.info( Colors.YELLOW + '  Filtering for ' + Colors.PURPLE + 'valid cortex structures' + Colors.YELLOW + ' in ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered'] )[1] + Colors.YELLOW + ' and store as ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered_cortex_only'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )
      c.info( Colors.PURPLE + '    Conditions for valid fibers:' + Colors._CLEAR )
      c.info( Colors.PURPLE + '    1.' + Colors.YELLOW + ' The fiber track has to pass through the cerebral white matter. (Label values: ' + Colors.PURPLE + '[2, 41]' + Colors.YELLOW + ')' + Colors._CLEAR )
      c.info( Colors.PURPLE + '    2.' + Colors.YELLOW + ' The fiber track shall only touch sub-cortical structures not more than ' + Colors.PURPLE + '5 times' + Colors.YELLOW + '. (Label values: ' + Colors.PURPLE + '[10, 49, 16, 28, 60, 4, 43]' + Colors.YELLOW + ')' + Colors._CLEAR )
      c.info( Colors.PURPLE + '    3.' + Colors.YELLOW + ' The track shall not pass through the corpus callosum (Labels: ' + Colors.PURPLE + '[251, 255]' + Colors.YELLOW + ') and end in the same hemisphere (Labels: ' + Colors.PURPLE + '[1000-1035]' + Colors.YELLOW + ' for left, ' + Colors.PURPLE + '[2000-2035]' + Colors.YELLOW + ' for right).' + Colors._CLEAR )

      fyborg.fyborg( outputs['fibers_mapped_length_filtered'], outputs['fibers_mapped_length_filtered_cortex_only'], [fyborg.FyFilterCortexAction( scalars.index( 'segmentation' ) )] )

      header = io.loadTrkHeaderOnly( outputs['fibers_mapped_length_filtered_cortex_only'] )
      new_count = header['n_count']

      c.info( Colors.YELLOW + '  Number of tracks after ' + Colors.PURPLE + 'cortex filtering' + Colors.YELLOW + ': ' + str( new_count ) + Colors.YELLOW + Colors._CLEAR )

      c.info( Colors.YELLOW + '  Copied filtered tracks from ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered_cortex_only'] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers_final'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )
      shutil.copyfile( outputs['fibers_mapped_length_filtered_cortex_only'], outputs['fibers_final'] )

    else:

      c.info( Colors.YELLOW + '  Info: ' + Colors.PURPLE + 'Cortical _and_ sub-cortical structures ' + Colors.YELLOW + 'will be included..' + Colors._CLEAR )

      c.info( Colors.YELLOW + '  Copied filtered tracks from ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered'] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers_final'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )
      shutil.copyfile( outputs['fibers_mapped_length_filtered'], outputs['fibers_final'] )
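
The actual length check above happens inside fyborg.FyFilterLengthAction, whose implementation is not part of this file. As a rough standalone sketch of the same predicate (assuming, as in the track loops further below, that each track is a tuple whose second element is a 2D per-point scalar array and that the mapped 'length' value repeats on every point):

import numpy as np

def filter_by_length( tracks, length_index, min_length, max_length ):
  '''
  Keep only the tracks whose mapped 'length' scalar lies in (min_length, max_length).
  '''
  kept = []
  for t in tracks:
    scalars = t[1]  # per-point scalar rows
    track_length = scalars[0, length_index]  # the mapped length repeats on every point
    if min_length < track_length < max_length:
      kept.append( t )
  return kept

# three fake single-scalar tracks with lengths 5, 60 and 400
fake = [( None, np.array( [[l], [l]] ) ) for l in ( 5.0, 60.0, 400.0 )]
print( len( filter_by_length( fake, 0, 20, 200 ) ) )  # -> 1
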
Example #2
  def mapping( self, inputs, outputs, radius ):
    '''
    Map all detected scalar volumes to each fiber.
    '''

    # check if we have all required input data
    # we need at least: 
    #  - outputs['fibers'] == Track file in T1 space
    #  - outputs['segmentation'] == Label Map
    if not os.path.exists( outputs['fibers'] ):
      c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['fibers'] + Colors.RED + ' but we really need it to start with stage 3!!' + Colors._CLEAR )
      sys.exit( 2 )
    if not os.path.exists( outputs['segmentation'] ):
      c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['segmentation'] + Colors.RED + ' but we really need it to start with stage 3!!' + Colors._CLEAR )
      sys.exit( 2 )

    actions = []

    for i in inputs:

      if i in ( 'fibers', 'segmentation', 'T1', 'b0' ):
        # we do not map these
        continue

      if not os.path.exists( outputs[i + '_T1_space'] ):
        # we can't map this since we didn't find the file
        continue

      # for normal scalars: append it to the actions
      actions.append( fyborg.FyMapAction( i, outputs[i + '_T1_space'] ) )

      c.info( Colors.YELLOW + '  Configuring mapping of ' + Colors.PURPLE + os.path.split( outputs[i + '_T1_space'] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )

    # now the segmentation with the lookaround radius
    actions.append( fyborg.FyRadiusMapAction( 'segmentation', outputs['segmentation'], radius ) )
    c.info( Colors.YELLOW + '  Configuring mapping of ' + Colors.PURPLE + os.path.split( outputs['segmentation'] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )

    # and also the fiber length
    actions.append( fyborg.FyLengthAction() )
    c.info( Colors.YELLOW + '  Configuring mapping of ' + Colors.PURPLE + 'fiber length' + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )

    # run, forest, run!!
    c.info( Colors.YELLOW + '  Performing configured mapping for ' + Colors.PURPLE + os.path.split( outputs['fibers'] )[1] + Colors.YELLOW + ' and storing as ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped'] )[1] + Colors.YELLOW + ' (~ 30 minutes)!' + Colors._CLEAR )
    if self.__debug:
      fyborg.fyborg( outputs['fibers'], outputs['fibers_mapped'], actions, 'debug' )
    else:
      fyborg.fyborg( outputs['fibers'], outputs['fibers_mapped'], actions )
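
fyborg.FyMapAction is only configured above; its internals are not shown. A purely illustrative sketch of mapping a scalar volume onto fiber points could look like the following, assuming the points are already expressed in the volume's world (RAS) space (the real .trk coordinate convention may differ) and that the volume is a plain 3D scalar image; the function and variable names here are made up:

import nibabel as nib
import numpy as np

def map_volume_to_points( volume_path, points_ras ):
  '''
  Return one scalar per point by sampling the volume at the nearest voxel.
  '''
  img = nib.load( volume_path )
  data = img.get_fdata()
  # world (RAS) coordinates -> voxel indices via the inverse affine
  ras_to_vox = np.linalg.inv( img.affine )
  homogeneous = np.c_[points_ras, np.ones( len( points_ras ) )]
  voxels = np.rint( homogeneous.dot( ras_to_vox.T )[:, :3] ).astype( int )
  voxels = np.clip( voxels, 0, np.array( data.shape[:3] ) - 1 )
  return data[voxels[:, 0], voxels[:, 1], voxels[:, 2]]
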
Example #3
    def roi_extract(self, inputs, outputs):
        '''
    Extract per-label fiber ROIs and matching label volumes.
    '''
        # check if we have all required input data
        # we need at least:
        #  - outputs['fibers_mapped'] == Track file in T1 space with mapped scalars
        if not os.path.exists(outputs['fibers_final']):
            c.error(Colors.RED + 'Could not find ' + Colors.YELLOW +
                    outputs['fibers_final'] + Colors.RED +
                    ' but we really need it to start with stage 6!!' +
                    Colors._CLEAR)
            sys.exit(2)

        s = io.loadTrk(outputs['fibers_final'])
        tracks = s[0]
        header = s[1]

        scalarNames = header['scalar_name'].tolist()
        labels = {}

        # check if the segmentation is mapped
        try:
            seg_index = scalarNames.index('segmentation')
        except ValueError:
            c.error(Colors.RED + 'Could not find ' + Colors.YELLOW +
                    'segmentation' + Colors.RED +
                    ' as a mapped scalar but we really need it!')
            sys.exit(2)

        # create the roi subfolder
        if not os.path.exists(outputs['roi']):
            os.mkdir(outputs['roi'])

        # parse the color table
        lut = fyborg.colortable.freesurfer.split('\n')
        colors = {}
        for color in lut:
            if not color or color[0] == '#':
                continue

            splitted_line = color.split(' ')
            splitted_line = list(filter(None, splitted_line))
            colors[splitted_line[0]] = splitted_line[1]

        # loop through tracks
        for i, t in enumerate(tracks):

            tCoordinates = t[0]
            tScalars = t[1]

            # grab the scalars for each point
            for scalar in tScalars:

                # but only the label value
                label_value = str(int(scalar[seg_index]))

                if label_value not in labels:

                    labels[label_value] = []

                if i not in labels[label_value]:
                    # store the unique fiber id for this label
                    labels[label_value].append(i)

        # now loop through all detected labels
        for l in labels:

            new_tracks = []

            for t_id in labels[l]:
                # grab the fiber + scalars
                current_fiber = tracks[t_id]

                new_tracks.append(current_fiber)

            # now store the trk file
            trk_outputfile = l + '_' + colors[l] + '.trk'
            nii_outputfile = l + '_' + colors[l] + '.nii.gz'
            c.info(Colors.YELLOW + '  Creating fiber ROI ' + Colors.PURPLE +
                   trk_outputfile + Colors.YELLOW + '!' + Colors._CLEAR)
            io.saveTrk(os.path.join(outputs['roi'], trk_outputfile),
                       new_tracks, header, None, True)

            # also create a roi label volume for this label value
            c.info(Colors.YELLOW + '  Creating NII ROI ' + Colors.PURPLE +
                   nii_outputfile + Colors.YELLOW + '!' + Colors._CLEAR)
            cmd = 'ss;'
            cmd += 'chb-fsstable;'
            cmd += 'mri_binarize --i ' + outputs[
                'segmentation'] + ' --o ' + os.path.join(
                    outputs['roi'],
                    nii_outputfile) + ' --match ' + l + ' --binval ' + l + ';'
            self.__logger.debug(cmd)
            sp = subprocess.Popen(["/bin/bash", "-i", "-c", cmd],
                                  stdout=sys.stdout)
            sp.communicate()
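
For reference, the per-label bookkeeping of the loop above can be restated as a small standalone helper (illustration only, not part of fyborg); collecting the ids in a set avoids re-scanning the list for every point:

def group_tracks_by_label(tracks, seg_index):
    '''
    Map str(label value) -> sorted list of track indices touching that label.
    '''
    labels = {}
    for i, t in enumerate(tracks):
        for scalar_row in t[1]:
            label_value = str(int(scalar_row[seg_index]))
            labels.setdefault(label_value, set()).add(i)
    return {label: sorted(ids) for label, ids in labels.items()}
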
Example #4
    def connectivity(self, inputs, outputs, cortex_only):
        '''
    Generate connectivity matrices using mapped values.
    '''
        # check if we have all required input data
        # we need at least:
        #  - outputs['fibers_mapped'] == Track file in T1 space with mapped scalars
        if not os.path.exists(outputs['fibers_final']):
            c.error(Colors.RED + 'Could not find ' + Colors.YELLOW +
                    outputs['fibers_final'] + Colors.RED +
                    ' but we really need it to start with stage 5!!' +
                    Colors._CLEAR)
            sys.exit(2)

        s = io.loadTrk(outputs['fibers_final'])
        tracks = s[0]
        header = s[1]

        scalarNames = header['scalar_name'].tolist()
        matrix = {}
        indices = {}

        # check if the segmentation is mapped
        try:
            indices['segmentation'] = scalarNames.index('segmentation')
        except ValueError:
            c.error(Colors.RED + 'Could not find ' + Colors.YELLOW +
                    'segmentation' + Colors.RED +
                    ' as a mapped scalar but we really need it!')
            sys.exit(2)

        if cortex_only:
            labels = [
                2012, 2019, 2032, 2014, 2020, 2018, 2027, 2028, 2003, 2024,
                2017, 2026, 2002, 2023, 2010, 2022, 2031, 2029, 2008, 2025,
                2005, 2021, 2011, 2013, 2007, 2016, 2006, 2033, 2009, 2015,
                2001, 2030, 2034, 2035, 1012, 1019, 1032, 1014, 1020, 1018,
                1027, 1028, 1003, 1024, 1017, 1026, 1002, 1023, 1010, 1022,
                1031, 1029, 1008, 1025, 1005, 1021, 1011, 1013, 1007, 1016,
                1006, 1033, 1009, 1015, 1001, 1030, 1034, 1035
            ]
        else:
            labels = [
                2012, 2019, 2032, 2014, 2020, 2018, 2027, 2028, 2003, 2024,
                2017, 2026, 2002, 2023, 2010, 2022, 2031, 2029, 2008, 2025,
                2005, 2021, 2011, 2013, 2007, 2016, 2006, 2033, 2009, 2015,
                2001, 2030, 2034, 2035, 49, 50, 51, 52, 58, 53, 54, 1012, 1019,
                1032, 1014, 1020, 1018, 1027, 1028, 1003, 1024, 1017, 1026,
                1002, 1023, 1010, 1022, 1031, 1029, 1008, 1025, 1005, 1021,
                1011, 1013, 1007, 1016, 1006, 1033, 1009, 1015, 1001, 1030,
                1034, 1035, 10, 11, 12, 13, 26, 17, 18, 16
            ]

        c.info(
            Colors.YELLOW +
            '  Getting ready to create connectivity matrices for the following labels: '
            + Colors.PURPLE + str(labels) + Colors._CLEAR)
        c.info(
            Colors.YELLOW +
            '  Note: Mapped scalar values along the points will be averaged for each fiber track.'
            + Colors._CLEAR)

        # create matrices for the attached scalars
        for i, s in enumerate(scalarNames):

            if i >= header['n_scalars']:
                break

            if not s or s == 'segmentation':
                continue

            # this is a scalar value for which a matrix will be created
            matrix[s] = np.zeros([len(labels), len(labels)])
            indices[s] = scalarNames.index(s)
            c.info(Colors.YELLOW + '  Preparing matrix (' + Colors.PURPLE +
                   '[' + str(len(labels)) + 'x' + str(len(labels)) + ']' +
                   Colors.YELLOW + ') for ' + Colors.PURPLE + s +
                   Colors.YELLOW + ' values!' + Colors._CLEAR)

            if s == 'adc':
                s = 'inv_adc'
                matrix[s] = np.zeros([len(labels), len(labels)])
                indices[s] = scalarNames.index('adc')
                c.info(Colors.YELLOW + '  Preparing matrix (' + Colors.PURPLE +
                       '[' + str(len(labels)) + 'x' + str(len(labels)) + ']' +
                       Colors.YELLOW + ') for ' + Colors.PURPLE + s +
                       Colors.YELLOW + ' values!' + Colors._CLEAR)

        # always create one for the fiber counts
        matrix['fibercount'] = np.zeros([len(labels), len(labels)])
        indices['fibercount'] = 0
        c.info(Colors.YELLOW + '  Preparing matrix (' + Colors.PURPLE + '[' +
               str(len(labels)) + 'x' + str(len(labels)) + ']' +
               Colors.YELLOW + ') for ' + Colors.PURPLE + 'fibercount' +
               Colors.YELLOW + ' values!' + Colors._CLEAR)

        c.info(Colors.YELLOW + '  Analyzing fibers of ' + Colors.PURPLE +
               os.path.split(outputs['fibers_final'])[1] + Colors.YELLOW +
               '..' + Colors._CLEAR)
        for tCounter, t in enumerate(tracks):

            tCoordinates = t[0]
            tScalars = t[1]

            # find the segmentation labels for the start and end points
            start_label = tScalars[0, indices['segmentation']]
            end_label = tScalars[-1, indices['segmentation']]

            try:
                # now grab the index of the labels in our label list
                start_index = labels.index(start_label)
                end_index = labels.index(end_label)
            except ValueError:
                # this label is not monitored, so ignore this track
                continue

            # loop through all different scalars
            for m in matrix:

                # calculate the mean for each track
                value = np.mean(tScalars[:, indices[m]])

                if m == 'inv_adc':
                    # invert the mean ADC so the stored value is 1/ADC (inverse ADC)
                    value = 1 / value
                elif m == 'fibercount':
                    # in the case of fibercount, add 1
                    value = 1

                # store value in the matrix
                matrix[m][start_index, end_index] += value
                if start_index != end_index:
                    matrix[m][end_index, start_index] += value

        # fiber loop is done, all values are stored
        # now normalize the matrices
        np.seterr(all='ignore')  # avoid div by 0 warnings
        cbar = None
        for m in matrix:
            if m != 'fibercount':
                # normalize it
                matrix[m][:] /= matrix['fibercount']
                matrix[m] = np.nan_to_num(matrix[m])

            # store the matrix
            c.info(Colors.YELLOW + '  Storing ' + Colors.PURPLE + m +
                   Colors.YELLOW + ' connectivity matrix as ' + Colors.PURPLE +
                   os.path.split(outputs['matrix_' + m])[1] + Colors.YELLOW +
                   '!' + Colors._CLEAR)
            np.savetxt(outputs['matrix_' + m], matrix[m], delimiter='\t')

            # store a picture
            picture_path = os.path.splitext(
                os.path.split(outputs['matrix_' + m])[1])[0] + '.png'
            c.info(Colors.YELLOW + '  Generating ' + Colors.PURPLE + m +
                   ' image' + Colors.YELLOW + ' as ' + Colors.PURPLE +
                   picture_path + Colors.YELLOW + '!' + Colors._CLEAR)
            img = plot.imshow(matrix[m], interpolation='nearest')
            img.set_cmap('jet')
            img.set_norm(LogNorm())
            img.axes.get_xaxis().set_visible(False)
            img.axes.get_yaxis().set_visible(False)
            if not cbar:
                cbar = plot.colorbar()
            cbar.set_label(m)
            cbar.set_ticks([])
            plot.savefig(
                os.path.join(
                    os.path.split(outputs['matrix_' + m])[0], picture_path))

        np.seterr(all='warn')  # reactivate div by 0 warnings

        # now store the matlab version as well
        c.info(Colors.YELLOW + '  Storing ' + Colors.PURPLE +
               'matlab data bundle' + Colors.YELLOW + ' containing ' +
               Colors.PURPLE + 'all matrices' + Colors.YELLOW + ' as ' +
               Colors.PURPLE + os.path.split(outputs['matrix_all'])[1] +
               Colors.YELLOW + '!' + Colors._CLEAR)
        scipy.io.savemat(outputs['matrix_all'], matrix, oned_as='row')
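
As a tiny synthetic illustration of the accumulation scheme above (hypothetical labels and made-up per-track means, not pipeline code): each track adds its mean scalar to the cell addressed by its start and end labels, symmetrically, and the scalar matrix is later divided by the fibercount matrix to turn sums into per-connection means.

import numpy as np

labels = [1001, 1002, 2001]
fibercount = np.zeros((len(labels), len(labels)))
fa_sum = np.zeros((len(labels), len(labels)))

# (start label, end label, mean FA) for three fake tracks
fake_tracks = [(1001, 2001, 0.4), (1001, 2001, 0.6), (1002, 1002, 0.3)]

for start_label, end_label, mean_fa in fake_tracks:
    i, j = labels.index(start_label), labels.index(end_label)
    fibercount[i, j] += 1
    fa_sum[i, j] += mean_fa
    if i != j:
        fibercount[j, i] += 1
        fa_sum[j, i] += mean_fa

with np.errstate(divide='ignore', invalid='ignore'):
    fa_mean = np.nan_to_num(fa_sum / fibercount)
print(fa_mean[0, 2])  # -> 0.5
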
Example #5
    def filtering(self, inputs, outputs, length, cortex_only):
        '''
    Filter the mapped fibers.
    '''

        # check if we have all required input data
        # we need at least:
        #  - outputs['fibers_mapped'] == Track file in T1 space with mapped scalars
        if not os.path.exists(outputs['fibers_mapped']):
            c.error(Colors.RED + 'Could not find ' + Colors.YELLOW +
                    outputs['fibers_mapped'] + Colors.RED +
                    ' but we really need it to start with stage 4!!' +
                    Colors._CLEAR)
            sys.exit(2)

        # find the order of the mapped scalars
        header = io.loadTrkHeaderOnly(outputs['fibers_mapped'])
        scalars = list(header['scalar_name'])

        # split the length range
        length = length.split(' ')
        min_length = int(length[0])
        max_length = int(length[1])

        # length filtering

        c.info(Colors.YELLOW + '  Filtering ' + Colors.PURPLE +
               'fiber length' + Colors.YELLOW + ' to be ' + Colors.PURPLE +
               '>' + str(min_length) + ' and <' + str(max_length) +
               Colors.YELLOW + ' for ' + Colors.PURPLE +
               os.path.split(outputs['fibers_mapped'])[1] + Colors.YELLOW +
               ' and store as ' + Colors.PURPLE +
               os.path.split(outputs['fibers_mapped_length_filtered'])[1] +
               Colors.YELLOW + '!' + Colors._CLEAR)
        fyborg.fyborg(outputs['fibers_mapped'],
                      outputs['fibers_mapped_length_filtered'], [
                          fyborg.FyFilterLengthAction(scalars.index('length'),
                                                      min_length, max_length)
                      ])

        header = io.loadTrkHeaderOnly(outputs['fibers_mapped_length_filtered'])
        new_count = header['n_count']

        c.info(Colors.YELLOW + '  Number of tracks after ' + Colors.PURPLE +
               'length filtering' + Colors.YELLOW + ': ' + str(new_count) +
               Colors.YELLOW + Colors._CLEAR)

        if cortex_only:

            # special cortex filtering

            c.info(
                Colors.YELLOW + '  Filtering for ' + Colors.PURPLE +
                'valid cortex structures' + Colors.YELLOW + ' in ' +
                Colors.PURPLE +
                os.path.split(outputs['fibers_mapped_length_filtered'])[1] +
                Colors.YELLOW + ' and store as ' + Colors.PURPLE +
                os.path.split(
                    outputs['fibers_mapped_length_filtered_cortex_only'])[1] +
                Colors.YELLOW + '!' + Colors._CLEAR)
            c.info(Colors.PURPLE + '    Conditions for valid fibers:' +
                   Colors._CLEAR)
            c.info(
                Colors.PURPLE + '    1.' + Colors.YELLOW +
                ' The fiber track has to pass through the cerebral white matter. (Label values: '
                + Colors.PURPLE + '[2, 41]' + Colors.YELLOW + ')' +
                Colors._CLEAR)
            c.info(
                Colors.PURPLE + '    2.' + Colors.YELLOW +
                ' The fiber track shall only touch sub-cortical structures not more than '
                + Colors.PURPLE + '5 times' + Colors.YELLOW +
                '. (Label values: ' + Colors.PURPLE +
                '[10, 49, 16, 28, 60, 4, 43]' + Colors.YELLOW + ')' +
                Colors._CLEAR)
            c.info(
                Colors.PURPLE + '    3.' + Colors.YELLOW +
                ' The track shall not pass through the corpus callosum (Labels: '
                + Colors.PURPLE + '[251, 255]' + Colors.YELLOW +
                ') and end in the same hemisphere (Labels: ' + Colors.PURPLE +
                '[1000-1035]' + Colors.YELLOW + ' for left, ' + Colors.PURPLE +
                '[2000-2035]' + Colors.YELLOW + ' for right).' + Colors._CLEAR)

            fyborg.fyborg(
                outputs['fibers_mapped_length_filtered'],
                outputs['fibers_mapped_length_filtered_cortex_only'],
                [fyborg.FyFilterCortexAction(scalars.index('segmentation'))])

            header = io.loadTrkHeaderOnly(
                outputs['fibers_mapped_length_filtered_cortex_only'])
            new_count = header['n_count']

            c.info(Colors.YELLOW + '  Number of tracks after ' +
                   Colors.PURPLE + 'cortex filtering' + Colors.YELLOW + ': ' +
                   str(new_count) + Colors.YELLOW + Colors._CLEAR)

            c.info(
                Colors.YELLOW + '  Copied filtered tracks from ' +
                Colors.PURPLE + os.path.split(
                    outputs['fibers_mapped_length_filtered_cortex_only'])[1] +
                Colors.YELLOW + ' to ' + Colors.PURPLE +
                os.path.split(outputs['fibers_final'])[1] + Colors.YELLOW +
                '!' + Colors._CLEAR)
            shutil.copyfile(
                outputs['fibers_mapped_length_filtered_cortex_only'],
                outputs['fibers_final'])

        else:

            c.info(Colors.YELLOW + '  Info: ' + Colors.PURPLE +
                   'Cortical _and_ sub-cortical structures ' + Colors.YELLOW +
                   'will be included..' + Colors._CLEAR)

            c.info(Colors.YELLOW + '  Copied filtered tracks from ' +
                   Colors.PURPLE +
                   os.path.split(outputs['fibers_mapped_length_filtered'])[1] +
                   Colors.YELLOW + ' to ' + Colors.PURPLE +
                   os.path.split(outputs['fibers_final'])[1] + Colors.YELLOW +
                   '!' + Colors._CLEAR)
            shutil.copyfile(outputs['fibers_mapped_length_filtered'],
                            outputs['fibers_final'])
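
The three conditions printed above are enforced by fyborg.FyFilterCortexAction, whose code is not included here. A rough standalone reading of those rules over one track's per-point segmentation labels might look as follows; the label groups are copied from the log messages, while the exact handling of condition 3 (no corpus callosum labels and both endpoints in the same cortical hemisphere) is an interpretation:

WHITE_MATTER = {2, 41}
SUB_CORTICAL = {10, 49, 16, 28, 60, 4, 43}
CORPUS_CALLOSUM = {251, 255}

def is_valid_cortex_track(point_labels, max_subcortical_hits=5):
    '''
    Apply conditions 1-3 to the segmentation labels along one track.
    '''
    labels = [int(l) for l in point_labels]
    # 1. the track has to pass through cerebral white matter
    if not any(l in WHITE_MATTER for l in labels):
        return False
    # 2. it may touch sub-cortical structures only a few times
    if sum(l in SUB_CORTICAL for l in labels) > max_subcortical_hits:
        return False
    # 3. no corpus callosum, and both endpoints in the same cortical hemisphere
    if any(l in CORPUS_CALLOSUM for l in labels):
        return False
    start, end = labels[0], labels[-1]
    left = 1000 <= start <= 1035 and 1000 <= end <= 1035
    right = 2000 <= start <= 2035 and 2000 <= end <= 2035
    return left or right
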
Example #6
    def mapping(self, inputs, outputs, radius):
        '''
    Map all detected scalar volumes to each fiber.
    '''

        # check if we have all required input data
        # we need at least:
        #  - outputs['fibers'] == Track file in T1 space
        #  - outputs['segmentation'] == Label Map
        if not os.path.exists(outputs['fibers']):
            c.error(Colors.RED + 'Could not find ' + Colors.YELLOW +
                    outputs['fibers'] + Colors.RED +
                    ' but we really need it to start with stage 3!!' +
                    Colors._CLEAR)
            sys.exit(2)
        if not os.path.exists(outputs['segmentation']):
            c.error(Colors.RED + 'Could not find ' + Colors.YELLOW +
                    outputs['segmentation'] + Colors.RED +
                    ' but we really need it to start with stage 3!!' +
                    Colors._CLEAR)
            sys.exit(2)

        actions = []

        for i in inputs:

            if i in ('fibers', 'segmentation', 'T1', 'b0', 'T1toB0matrix', 'b0_resampled'):
                # we do not map these
                continue

            if not os.path.exists(outputs[i]):
                # we can't map this since we didn't find the file
                continue

            # for normal scalars: append it to the actions
            actions.append(fyborg.FyMapAction(i, outputs[i]))

            c.info(Colors.YELLOW + '  Configuring mapping of ' +
                   Colors.PURPLE + os.path.split(outputs[i])[1] +
                   Colors.YELLOW + ' to ' + Colors.PURPLE +
                   os.path.split(outputs['fibers'])[1] + Colors.YELLOW + '!' +
                   Colors._CLEAR)

        # now the segmentation with the lookaround radius
        actions.append(
            fyborg.FyRadiusMapAction('segmentation', outputs['segmentation'],
                                     radius))
        c.info(Colors.YELLOW + '  Configuring mapping of ' + Colors.PURPLE +
               os.path.split(outputs['segmentation'])[1] + Colors.YELLOW +
               ' to ' + Colors.PURPLE + os.path.split(outputs['fibers'])[1] +
               Colors.YELLOW + '!' + Colors._CLEAR)

        # and also the fiber length
        actions.append(fyborg.FyLengthAction())
        c.info(Colors.YELLOW + '  Configuring mapping of ' + Colors.PURPLE +
               'fiber length' + Colors.YELLOW + ' to ' + Colors.PURPLE +
               os.path.split(outputs['fibers'])[1] + Colors.YELLOW + '!' +
               Colors._CLEAR)

        # run, forest, run!!
        c.info(Colors.YELLOW + '  Performing configured mapping for ' +
               Colors.PURPLE + os.path.split(outputs['fibers'])[1] +
               Colors.YELLOW + ' and storing as ' + Colors.PURPLE +
               os.path.split(outputs['fibers_mapped'])[1] + Colors.YELLOW +
               ' (~ 30 minutes)!' + Colors._CLEAR)
        if self.__debug:
            fyborg.fyborg(outputs['fibers'], outputs['fibers_mapped'], actions,
                          'debug')
        else:
            fyborg.fyborg(outputs['fibers'], outputs['fibers_mapped'], actions)
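
fyborg.FyLengthAction attaches the fiber length as a mapped scalar; its implementation is not part of this file. Sketched independently, the usual definition is the summed Euclidean distance between consecutive track points, in the units of the track coordinates:

import numpy as np

def track_length(points):
    '''
    Polyline length of one track, given an (N, 3) array of point coordinates.
    '''
    points = np.asarray(points, dtype=float)
    if len(points) < 2:
        return 0.0
    return float(np.sum(np.linalg.norm(np.diff(points, axis=0), axis=1)))

print(track_length([[0, 0, 0], [3, 4, 0], [3, 4, 12]]))  # -> 17.0
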
Example #7
                        action='store_true',
                        dest='verbose',
                        help='Show verbose output')

    # always show the help if no arguments were specified
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    options = parser.parse_args()

    # validate the inputs here
    if not os.path.isdir(options.input):

        c.error(
            Colors.RED +
            'Could not find the input directory! Specify a valid directory using -i $PATH.'
            + Colors._CLEAR)
        sys.exit(2)

    if os.path.exists(options.output) and int(options.stage) == 0:

        if not options.overwrite:
            c.error(
                Colors.RED +
                'The output directory exists! Add --overwrite to erase previous content!'
                + Colors._CLEAR)
            c.error(
                Colors.RED +
                'Or use --stage > 2 to start with a specific stage which re-uses the previous content..'
                + Colors._CLEAR)
            sys.exit(2)
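
Together with the --overwrite and --stage handling shown in the last example of this file, the start-up checks boil down to a few directory rules. A compact, illustrative restatement (not the pipeline's actual code):

import os
import shutil
import sys

def validate_directories(input_dir, output_dir, stage, overwrite):
    '''
    Exit with status 2 if the directory layout does not fit the requested stage.
    '''
    if not os.path.isdir(input_dir):
        sys.exit(2)  # -i must point at an existing directory
    if stage == 0 and os.path.exists(output_dir):
        if not overwrite:
            sys.exit(2)  # refuse to clobber existing output without --overwrite
        shutil.rmtree(output_dir)  # --overwrite given: remove the old output
    if stage > 0 and not os.path.exists(output_dir):
        sys.exit(2)  # resuming at a later stage needs the previous output
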
Example #8
  def roi_extract( self, inputs, outputs ):
    '''
    Extract per-label fiber ROIs and matching label volumes.
    '''
    # check if we have all required input data
    # we need at least: 
    #  - outputs['fibers_mapped'] == Track file in T1 space with mapped scalars
    if not os.path.exists( outputs['fibers_final'] ):
      c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['fibers_final'] + Colors.RED + ' but we really need it to start with stage 6!!' + Colors._CLEAR )
      sys.exit( 2 )

    s = io.loadTrk( outputs['fibers_final'] )
    tracks = s[0]
    header = s[1]

    scalarNames = header['scalar_name'].tolist()
    labels = {}

    # check if the segmentation is mapped
    try:
      seg_index = scalarNames.index( 'segmentation' )
    except ValueError:
      c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + 'segmentation' + Colors.RED + ' as a mapped scalar but we really need it!' )
      sys.exit( 2 )

    # create the roi subfolder
    if not os.path.exists( outputs['roi'] ):
      os.mkdir( outputs['roi'] )

    # parse the color table
    lut = fyborg.colortable.freesurfer.split( '\n' )
    colors = {}
    for color in lut:
      if not color or color[0] == '#':
        continue

      splitted_line = color.split( ' ' )
      splitted_line = list( filter( None, splitted_line ) )
      colors[splitted_line[0]] = splitted_line[1]


    # loop through tracks
    for i, t in enumerate( tracks ):

      tCoordinates = t[0]
      tScalars = t[1]

      # grab the scalars for each point
      for scalar in tScalars:

        # but only the label value
        label_value = str( int( scalar[seg_index] ) )

        if label_value not in labels:

          labels[label_value] = []

        if i not in labels[label_value]:
          # store the unique fiber id for this label
          labels[label_value].append( i )

    # now loop through all detected labels
    for l in labels:

      new_tracks = []

      for t_id in labels[l]:
        # grab the fiber + scalars
        current_fiber = tracks[t_id]

        new_tracks.append( current_fiber )

      # now store the trk file
      trk_outputfile = l + '_' + colors[l] + '.trk'
      nii_outputfile = l + '_' + colors[l] + '.nii'
      c.info( Colors.YELLOW + '  Creating fiber ROI ' + Colors.PURPLE + trk_outputfile + Colors.YELLOW + '!' + Colors._CLEAR )
      io.saveTrk( os.path.join( outputs['roi'], trk_outputfile ), new_tracks, header, None, True )

      # also create a roi label volume for this label value
      c.info( Colors.YELLOW + '  Creating NII ROI ' + Colors.PURPLE + nii_outputfile + Colors.YELLOW + '!' + Colors._CLEAR )
      cmd = 'ss;'
      cmd += 'chb-fsstable;'
      cmd += 'mri_binarize --i ' + outputs['segmentation'] + ' --o ' + os.path.join( outputs['roi'], nii_outputfile ) + ' --match ' + l + ' --binval ' + l + ';'
      self.__logger.debug( cmd )
      sp = subprocess.Popen( ["/bin/bash", "-i", "-c", cmd], stdout=sys.stdout )
      sp.communicate()
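
The mri_binarize call above goes through an interactive bash so that the site-specific 'ss' and 'chb-fsstable' aliases can set up the FreeSurfer environment first. If that environment is already configured in the calling process, the same call (same flags as above) can be issued as a plain argument list; this is a sketch, not a drop-in replacement:

import subprocess

def binarize_label( segmentation_file, output_file, label_value ):
  '''
  Write a volume that keeps only the voxels equal to label_value.
  '''
  cmd = ['mri_binarize',
         '--i', segmentation_file,
         '--o', output_file,
         '--match', str( label_value ),
         '--binval', str( label_value )]
  subprocess.check_call( cmd )
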
Example #9
  def connectivity( self, inputs, outputs, cortex_only ):
    '''
    Generate connectivity matrices using mapped values.
    '''
    # check if we have all required input data
    # we need at least: 
    #  - outputs['fibers_mapped'] == Track file in T1 space with mapped scalars
    if not os.path.exists( outputs['fibers_final'] ):
      c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['fibers_final'] + Colors.RED + ' but we really need it to start with stage 5!!' + Colors._CLEAR )
      sys.exit( 2 )

    s = io.loadTrk( outputs['fibers_final'] )
    tracks = s[0]
    header = s[1]

    scalarNames = header['scalar_name'].tolist()
    matrix = {}
    indices = {}

    # check if the segmentation is mapped
    try:
      indices['segmentation'] = scalarNames.index( 'segmentation' )
    except ValueError:
      c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + 'segmentation' + Colors.RED + ' as a mapped scalar but we really need it!' )
      sys.exit( 2 )

    if cortex_only:
      labels = [2012, 2019, 2032, 2014, 2020, 2018, 2027, 2028, 2003, 2024, 2017, 2026, 2002, 2023, 2010, 2022, 2031, 2029, 2008, 2025, 2005, 2021, 2011, 2013, 2007, 2016, 2006, 2033, 2009, 2015, 2001, 2030, 2034, 2035, 1012, 1019, 1032, 1014, 1020, 1018, 1027, 1028, 1003, 1024, 1017, 1026, 1002, 1023, 1010, 1022, 1031, 1029, 1008, 1025, 1005, 1021, 1011, 1013, 1007, 1016, 1006, 1033, 1009, 1015, 1001, 1030, 1034, 1035]
    else:
      labels = [2012, 2019, 2032, 2014, 2020, 2018, 2027, 2028, 2003, 2024, 2017, 2026, 2002, 2023, 2010, 2022, 2031, 2029, 2008, 2025, 2005, 2021, 2011, 2013, 2007, 2016, 2006, 2033, 2009, 2015, 2001, 2030, 2034, 2035, 49, 50, 51, 52, 58, 53, 54, 1012, 1019, 1032, 1014, 1020, 1018, 1027, 1028, 1003, 1024, 1017, 1026, 1002, 1023, 1010, 1022, 1031, 1029, 1008, 1025, 1005, 1021, 1011, 1013, 1007, 1016, 1006, 1033, 1009, 1015, 1001, 1030, 1034, 1035, 10, 11, 12, 13, 26, 17, 18, 16]

    c.info( Colors.YELLOW + '  Getting ready to create connectivity matrices for the following labels: ' + Colors.PURPLE + str( labels ) + Colors._CLEAR )
    c.info( Colors.YELLOW + '  Note: Mapped scalar values along the points will be averaged for each fiber track.' + Colors._CLEAR )

    # create matrices for the attached scalars
    for i, s in enumerate( scalarNames ):

      if i >= header['n_scalars']:
        break

      if not s or s == 'segmentation':
        continue

      # this is a scalar value for which a matrix will be created
      matrix[s] = np.zeros( [len( labels ), len( labels )] )
      indices[s] = scalarNames.index( s )
      c.info( Colors.YELLOW + '  Preparing matrix (' + Colors.PURPLE + '[' + str( len( labels ) ) + 'x' + str( len( labels ) ) + ']' + Colors.YELLOW + ') for ' + Colors.PURPLE + s + Colors.YELLOW + ' values!' + Colors._CLEAR )

      if s == 'adc':
        s = 'inv_adc'
        matrix[s] = np.zeros( [len( labels ), len( labels )] )
        indices[s] = scalarNames.index( 'adc' )
        c.info( Colors.YELLOW + '  Preparing matrix (' + Colors.PURPLE + '[' + str( len( labels ) ) + 'x' + str( len( labels ) ) + ']' + Colors.YELLOW + ') for ' + Colors.PURPLE + s + Colors.YELLOW + ' values!' + Colors._CLEAR )


    # always create one for the fiber counts
    matrix['fibercount'] = np.zeros( [len( labels ), len( labels )] )
    indices['fibercount'] = 0
    c.info( Colors.YELLOW + '  Preparing matrix (' + Colors.PURPLE + '[' + str( len( labels ) ) + 'x' + str( len( labels ) ) + ']' + Colors.YELLOW + ') for ' + Colors.PURPLE + 'fibercount' + Colors.YELLOW + ' values!' + Colors._CLEAR )

    c.info( Colors.YELLOW + '  Analyzing fibers of ' + Colors.PURPLE + os.path.split( outputs['fibers_final'] )[1] + Colors.YELLOW + '..' + Colors._CLEAR )
    for tCounter, t in enumerate( tracks ):

      tCoordinates = t[0]
      tScalars = t[1]

      # find the segmentation labels for the start and end points
      start_label = tScalars[0, indices['segmentation']]
      end_label = tScalars[-1, indices['segmentation']]

      try:
        # now grab the index of the labels in our label list
        start_index = labels.index( start_label )
        end_index = labels.index( end_label )
      except ValueError:
        # this label is not monitored, so ignore this track
        continue

      # loop through all different scalars
      for m in matrix:

        # calculate the mean for each track
        value = np.mean( tScalars[:, indices[m]] )

        if m == 'inv_adc':
          # invert the mean ADC so the stored value is 1/ADC (inverse ADC)
          value = 1 / value
        elif m == 'fibercount':
          # in the case of fibercount, add 1
          value = 1

        # store value in the matrix
        matrix[m][start_index, end_index] += value
        if start_index != end_index:
          matrix[m][end_index, start_index] += value

    # fiber loop is done, all values are stored
    # now normalize the matrices
    np.seterr( all='ignore' ) # avoid div by 0 warnings
    cbar = None
    for m in matrix:
      if m != 'fibercount':
        # normalize it
        matrix[m][:] /= matrix['fibercount']
        matrix[m] = np.nan_to_num( matrix[m] )

      # store the matrix
      c.info( Colors.YELLOW + '  Storing ' + Colors.PURPLE + m + Colors.YELLOW + ' connectivity matrix as ' + Colors.PURPLE + os.path.split( outputs['matrix_' + m] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )
      np.savetxt( outputs['matrix_' + m], matrix[m], delimiter='\t' )

      # store a picture
      picture_path = os.path.splitext( os.path.split( outputs['matrix_' + m] )[1] )[0] + '.png'
      c.info( Colors.YELLOW + '  Generating ' + Colors.PURPLE + m + ' image' + Colors.YELLOW + ' as ' + Colors.PURPLE + picture_path + Colors.YELLOW + '!' + Colors._CLEAR )
      img = plot.imshow( matrix[m], interpolation='nearest' )
      img.set_cmap( 'jet' )
      img.set_norm( LogNorm() )
      img.axes.get_xaxis().set_visible( False )
      img.axes.get_yaxis().set_visible( False )
      if not cbar:
        cbar = plot.colorbar()
      cbar.set_label( m )
      cbar.set_ticks( [] )
      plot.savefig( os.path.join( os.path.split( outputs['matrix_' + m] )[0], picture_path ) )

    np.seterr( all='warn' ) # reactivate div by 0 warnings

    # now store the matlab version as well
    c.info( Colors.YELLOW + '  Storing ' + Colors.PURPLE + 'matlab data bundle' + Colors.YELLOW + ' containing ' + Colors.PURPLE + 'all matrices' + Colors.YELLOW + ' as ' + Colors.PURPLE + os.path.split( outputs['matrix_all'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )
    scipy.io.savemat( outputs['matrix_all'], matrix, oned_as='row' )
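
For completeness, a small round-trip sketch of the MATLAB bundle written by scipy.io.savemat above (the file name and matrices here are made up): savemat stores every dictionary key as a MATLAB variable, and loadmat returns them as numpy arrays next to a few '__header__'-style metadata keys.

import numpy as np
import scipy.io

bundle = {'fibercount': np.eye( 3 ), 'fa': np.full( ( 3, 3 ), 0.5 )}
scipy.io.savemat( 'matrix_all_example.mat', bundle, oned_as='row' )

restored = scipy.io.loadmat( 'matrix_all_example.mat' )
print( restored['fibercount'].shape )  # -> (3, 3)
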
Example #10
  parser.add_argument( '-co', '--cortex_only', action='store_true', dest='cortex_only', help='Perform filtering for cortex specific analysis and skip sub-cortical structures.' )
  parser.add_argument( '-s', '--stage', action='store', dest='stage', default=0, type=int, help='Start with a specific stage while skipping the ones before. E.g. --stage 3 directly starts the mapping without preprocessing, --stage 4 starts with the filtering' )
  parser.add_argument( '-overwrite', '--overwrite', action='store_true', dest='overwrite', help='Overwrite any existing output. DANGER!!' )
  parser.add_argument( '-v', '--verbose', action='store_true', dest='verbose', help='Show verbose output' )

  # always show the help if no arguments were specified
  if len( sys.argv ) == 1:
    parser.print_help()
    sys.exit( 1 )

  options = parser.parse_args()

  # validate the inputs here
  if not os.path.isdir( options.input ):

    c.error( Colors.RED + 'Could not find the input directory! Specify a valid directory using -i $PATH.' + Colors._CLEAR )
    sys.exit( 2 )

  if os.path.exists( options.output ) and int( options.stage ) == 0:

    if not options.overwrite:
      c.error( Colors.RED + 'The output directory exists! Add --overwrite to erase previous content!' + Colors._CLEAR )
      c.error( Colors.RED + 'Or use --stage > 2 to start with a specific stage which re-uses the previous content..' + Colors._CLEAR )
      sys.exit( 2 )
    else:
      # silently delete the existing output
      shutil.rmtree( options.output )

  if options.stage > 0 and not os.path.exists( options.output ):
    # we start with a specific stage so we need the output stuff
    c.error( Colors.RED + 'The output directory does not exist! We need it when using -s/--stage to resume the process!' + Colors._CLEAR )