Example #1
    def _run_interface(self, runtime):
        dx, dy, dz = get_data_dims(self.inputs.image_file)
        vx, vy, vz = get_vox_dims(self.inputs.image_file)
        image_file = nb.load(self.inputs.image_file)
        affine = image_file.get_affine()
        out_filename = op.abspath(self.inputs.out_filename)

        #Reads MRTrix tracks
        header, streamlines = read_mrtrix_tracks(self.inputs.in_file, as_generator=True)
        iflogger.info('MRTrix Header:')
        iflogger.info(header)
        # Writes to Trackvis
        trk_header = nb.trackvis.empty_header()
        trk_header['dim'] = [dx,dy,dz]
        trk_header['voxel_size'] = [vx,vy,vz]
        trk_header['n_count'] = header['count']

        if isdefined(self.inputs.matrix_file) and isdefined(self.inputs.registration_image_file):
            iflogger.info('Applying transformation from matrix file {m}'.format(m=self.inputs.matrix_file))
            xfm = np.genfromtxt(self.inputs.matrix_file)
            iflogger.info(xfm)
            registration_image_file = nb.load(self.inputs.registration_image_file)
            reg_affine = registration_image_file.get_affine()
            r_dx, r_dy, r_dz = get_data_dims(self.inputs.registration_image_file)
            r_vx, r_vy, r_vz = get_vox_dims(self.inputs.registration_image_file)
            iflogger.info('Using affine from registration image file {r}'.format(r=self.inputs.registration_image_file))
            iflogger.info(reg_affine)
            trk_header['vox_to_ras'] = reg_affine
            trk_header['dim'] = [r_dx,r_dy,r_dz]
            trk_header['voxel_size'] = [r_vx,r_vy,r_vz]

            affine = np.dot(affine,np.diag(1./np.array([vx, vy, vz, 1])))
            transformed_streamlines = transform_to_affine(streamlines, trk_header, affine)

            aff = affine_from_fsl_mat_file(xfm, [vx,vy,vz], [r_vx,r_vy,r_vz])
            iflogger.info(aff)

            axcode = aff2axcodes(reg_affine)
            trk_header['voxel_order'] = axcode[0]+axcode[1]+axcode[2]

            final_streamlines = move_streamlines(transformed_streamlines, aff)
            trk_tracks = ((ii,None,None) for ii in final_streamlines)
            trk.write(out_filename, trk_tracks, trk_header)
            iflogger.info('Saving transformed Trackvis file as {out}'.format(out=out_filename))
            iflogger.info('New TrackVis Header:')
            iflogger.info(trk_header)
        else:
            iflogger.info('Applying transformation from scanner coordinates to {img}'.format(img=self.inputs.image_file))
            axcode = aff2axcodes(affine)
            trk_header['voxel_order'] = axcode[0]+axcode[1]+axcode[2]
            trk_header['vox_to_ras'] = affine
            transformed_streamlines = transform_to_affine(streamlines, trk_header, affine)
            trk_tracks = ((ii,None,None) for ii in transformed_streamlines)
            trk.write(out_filename, trk_tracks, trk_header)
            iflogger.info('Saving Trackvis file as {out}'.format(out=out_filename))
            iflogger.info('TrackVis Header:')
            iflogger.info(trk_header)
        return runtime
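A minimal usage sketch for Example #1, assuming the method above belongs to nipype's MRTrix2TrackVis interface (nipype.interfaces.mrtrix.convert) and that the input names mirror the ones read in the code (in_file, image_file, out_filename, and the optional matrix_file/registration_image_file pair); the filenames are illustrative:

    from nipype.interfaces.mrtrix.convert import MRTrix2TrackVis

    tck2trk = MRTrix2TrackVis()
    tck2trk.inputs.in_file = 'tracks.tck'        # MRTrix streamline file
    tck2trk.inputs.image_file = 'dwi.nii.gz'     # reference image for dims and voxel sizes
    tck2trk.inputs.out_filename = 'tracks.trk'   # TrackVis output
    # Optional: move the streamlines into another space with an FSL affine
    # tck2trk.inputs.matrix_file = 'diff2struct.mat'
    # tck2trk.inputs.registration_image_file = 'struct.nii.gz'
    result = tck2trk.run()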
Example #2
    def _run_interface(self, runtime):
        filename = os.path.basename(self.inputs.gibbs_output)

        dx, dy, dz = get_data_dims(self.inputs.gibbs_input)
        vx, vy, vz = get_vox_dims(self.inputs.gibbs_input)
        image_file = nib.load(self.inputs.gibbs_input)
        affine = image_file.get_affine()
        import numpy as np
        #Reads MITK tracks
        fib, hdr = nib.trackvis.read(self.inputs.gibbs_output)
        trk_header = nib.trackvis.empty_header()
        trk_header['dim'] = [dx, dy, dz]
        trk_header['voxel_size'] = [vx, vy, vz]
        trk_header['origin'] = [0, 0, 0]
        axcode = nib.orientations.aff2axcodes(affine)
        if axcode[0] != str(hdr['voxel_order'])[0]:
            flip_x = -1
        else:
            flip_x = 1
        if axcode[1] != str(hdr['voxel_order'])[1]:
            flip_y = -1
        else:
            flip_y = 1
        if axcode[2] != str(hdr['voxel_order'])[2]:
            flip_z = -1
        else:
            flip_z = 1
        trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2]
        new_fib = []
        for i in range(len(fib)):
            temp_fib = fib[i][0].copy()
            for j in range(len(fib[i][0])):
                temp_fib[j] = [
                    flip_x * (fib[i][0][j][0] - hdr['origin'][0]) + vx / 2,
                    flip_y * (fib[i][0][j][1] - hdr['origin'][1]) + vy / 2,
                    flip_z * (fib[i][0][j][2] - hdr['origin'][2]) + vz / 2
                ]
            new_fib.append((temp_fib, None, None))
        nib.trackvis.write(os.path.abspath(filename),
                           new_fib,
                           trk_header,
                           points_space='voxmm')
        iflogger.info('file written to %s' % os.path.abspath(filename))
        return runtime
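The inner loop in Example #2 flips each coordinate axis whose orientation code disagrees with the target header, removes the source origin, and adds a half-voxel offset. A vectorized NumPy sketch of the same per-streamline transform (realign_points is a hypothetical helper, not part of the interface):

    import numpy as np

    def realign_points(points, flips, origin, voxel_size):
        # points: (N, 3) coordinates of one streamline, i.e. fib[i][0]
        # flips:  (3,) array of +1/-1 per axis (flip_x, flip_y, flip_z above)
        # origin: (3,) origin from the source header; voxel_size: (vx, vy, vz)
        points = np.asarray(points, dtype=float)
        return flips * (points - np.asarray(origin)) + np.asarray(voxel_size) / 2.0

    # e.g. temp_fib = realign_points(fib[i][0],
    #                                np.array([flip_x, flip_y, flip_z]),
    #                                hdr['origin'], [vx, vy, vz])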
Example #3
    def _run_interface(self, runtime):
        filename = os.path.basename(self.inputs.gibbs_output)

        dx, dy, dz = get_data_dims(self.inputs.gibbs_input)
        vx, vy, vz = get_vox_dims(self.inputs.gibbs_input)
        image_file = nib.load(self.inputs.gibbs_input)
        affine = image_file.get_affine()
        import numpy as np
        # Reads MITK tracks
        fib, hdr = nib.trackvis.read(self.inputs.gibbs_output)
        trk_header = nib.trackvis.empty_header()
        trk_header['dim'] = [dx, dy, dz]
        trk_header['voxel_size'] = [vx, vy, vz]
        trk_header['origin'] = [0, 0, 0]
        axcode = nib.orientations.aff2axcodes(affine)
        if axcode[0] != str(hdr['voxel_order'])[0]:
            flip_x = -1
        else:
            flip_x = 1
        if axcode[1] != str(hdr['voxel_order'])[1]:
            flip_y = -1
        else:
            flip_y = 1
        if axcode[2] != str(hdr['voxel_order'])[2]:
            flip_z = -1
        else:
            flip_z = 1
        trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2]
        new_fib = []
        for i in range(len(fib)):
            temp_fib = fib[i][0].copy()
            for j in range(len(fib[i][0])):
                temp_fib[j] = [
                    flip_x * (fib[i][0][j][0] - hdr['origin'][0]) + vx / 2,
                    flip_y * (fib[i][0][j][1] - hdr['origin'][1]) + vy / 2,
                    flip_z * (fib[i][0][j][2] - hdr['origin'][2]) + vz / 2
                ]
            new_fib.append((temp_fib, None, None))
        nib.trackvis.write(os.path.abspath(filename),
                           new_fib,
                           trk_header,
                           points_space='voxmm')
        iflogger.info('file written to %s' % os.path.abspath(filename))
        return runtime
Example #4
    def _run_interface(self, runtime):
        dx, dy, dz = get_data_dims(self.inputs.image_file)
        vx, vy, vz = get_vox_dims(self.inputs.image_file)
        image_file = nb.load(self.inputs.image_file)
        affine = image_file.get_affine()
        out_filename = op.abspath(self.inputs.out_filename)

        #Reads MRTrix tracks
        header, streamlines = read_mrtrix_tracks(self.inputs.in_file,
                                                 as_generator=True)
        iflogger.info('MRTrix Header:')
        iflogger.info(header)
        # Writes to Trackvis
        trk_header = nb.trackvis.empty_header()
        trk_header['dim'] = [dx, dy, dz]
        trk_header['voxel_size'] = [vx, vy, vz]
        trk_header['n_count'] = header['count']

        if isdefined(self.inputs.matrix_file) and isdefined(
                self.inputs.registration_image_file):
            iflogger.info(
                'Applying transformation from matrix file {m}'.format(
                    m=self.inputs.matrix_file))
            xfm = np.genfromtxt(self.inputs.matrix_file)
            iflogger.info(xfm)
            registration_image_file = nb.load(
                self.inputs.registration_image_file)
            reg_affine = registration_image_file.get_affine()
            r_dx, r_dy, r_dz = get_data_dims(
                self.inputs.registration_image_file)
            r_vx, r_vy, r_vz = get_vox_dims(
                self.inputs.registration_image_file)
            iflogger.info(
                'Using affine from registration image file {r}'.format(
                    r=self.inputs.registration_image_file))
            iflogger.info(reg_affine)
            trk_header['vox_to_ras'] = reg_affine
            trk_header['dim'] = [r_dx, r_dy, r_dz]
            trk_header['voxel_size'] = [r_vx, r_vy, r_vz]

            affine = np.dot(affine, np.diag(1. / np.array([vx, vy, vz, 1])))
            transformed_streamlines = transform_to_affine(
                streamlines, trk_header, affine)

            aff = affine_from_fsl_mat_file(xfm, [vx, vy, vz],
                                           [r_vx, r_vy, r_vz])
            iflogger.info(aff)

            axcode = aff2axcodes(reg_affine)
            trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2]

            final_streamlines = move_streamlines(transformed_streamlines, aff)
            trk_tracks = ((ii, None, None) for ii in final_streamlines)
            trk.write(out_filename, trk_tracks, trk_header)
            iflogger.info('Saving transformed Trackvis file as {out}'.format(
                out=out_filename))
            iflogger.info('New TrackVis Header:')
            iflogger.info(trk_header)
        else:
            iflogger.info(
                'Applying transformation from scanner coordinates to {img}'.
                format(img=self.inputs.image_file))
            axcode = aff2axcodes(affine)
            trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2]
            trk_header['vox_to_ras'] = affine
            transformed_streamlines = transform_to_affine(
                streamlines, trk_header, affine)
            trk_tracks = ((ii, None, None) for ii in transformed_streamlines)
            trk.write(out_filename, trk_tracks, trk_header)
            iflogger.info(
                'Saving Trackvis file as {out}'.format(out=out_filename))
            iflogger.info('TrackVis Header:')
            iflogger.info(trk_header)
        return runtime
Example #5
    def _run_interface(self, runtime):
        key = 'congraph'
        edge_key = 'weight'

        iflogger.info(
            'T-value Threshold file: {t}'.format(t=self.inputs.t_value_threshold_file))
        iflogger.info(
            'Independent component to use: {i}'.format(i=self.inputs.component_index))
        path, name, ext = split_filename(self.inputs.t_value_threshold_file)

        if ext == '.mat':
            t_value_dict = sio.loadmat(self.inputs.t_value_threshold_file)
            t_values = t_value_dict['t_value_per_node']
            t_value_per_node = t_values[:, self.inputs.component_index - 1]
            number_of_ICs = np.shape(t_values)[1]
        else:
            iflogger.info(
                "Please save the t-values as a Matlab file with key 't_value_per_node'")

        functional = nb.load(self.inputs.in_file)
        functionaldata = functional.get_data()
        segmentation = nb.load(self.inputs.segmentation_file)
        segmentationdata = segmentation.get_data()
        rois = get_roi_list(self.inputs.segmentation_file)
        number_of_nodes = len(rois)
        iflogger.info(
            'Found {roi} unique region values'.format(roi=number_of_nodes))
        iflogger.info('Significance threshold: {p}'.format(
            p=self.inputs.significance_threshold))
        #number_of_images = self.inputs.number_of_images
        # degrees_of_freedom = number_of_images - \
        #    number_of_ICs - 1
        #sig = self.inputs.significance_threshold
        #threshold = tinv((1-sig), degrees_of_freedom)
        #threshold = 2*threshold
        #iflogger.info('Weight threshold: {w}'.format(w=threshold))

        iflogger.info(
            'Functional image: {img}'.format(img=self.inputs.in_file))
        iflogger.info(
            'Segmentation image: {img}'.format(img=self.inputs.segmentation_file))
        if not get_data_dims(self.inputs.in_file) == get_data_dims(self.inputs.segmentation_file):
            iflogger.error(
                'Image dimensions are not the same, please reslice the images to the same dimensions')
            dx, dy, dz = get_data_dims(self.inputs.in_file)
            iflogger.error('Functional image dimensions: {dimx}, {dimy}, {dimz}'.format(
                dimx=dx, dimy=dy, dimz=dz))
            dx, dy, dz = get_data_dims(self.inputs.segmentation_file)
            iflogger.error('Segmentation image dimensions: {dimx}, {dimy}, {dimz}'.format(
                dimx=dx, dimy=dy, dimz=dz))

        stats = {}

        if self.inputs.give_nodes_values:
            func_mean = []
            for idx, roi in enumerate(rois):
                values = []
                x, y, z = np.where(segmentationdata == roi)
                for index in range(0, len(x)):
                    value = functionaldata[x[index]][y[index]][z[index]]
                    values.append(value)
                func_mean.append(np.mean(values))
                iflogger.info(
                    'Region ID: {id}, Mean Value: {avg}'.format(id=roi, avg=np.mean(values)))
            stats[key] = func_mean

        connectivity_matrix = np.zeros((number_of_nodes, number_of_nodes))
        correlation_matrix = np.zeros((number_of_nodes, number_of_nodes))
        anticorrelation_matrix = np.zeros((number_of_nodes, number_of_nodes))
        iflogger.info('Drawing edges...')
        for idx_i, roi_i in enumerate(rois):
            t_i = t_value_per_node[idx_i]
            #iflogger.info('ROI:{i}, T-value: {t}'.format(i=roi_i, t=t_i))
            for idx_j, roi_j in enumerate(rois):
                t_j = t_value_per_node[idx_j]
                #iflogger.info('...ROI:{j}, T-value: {t}'.format(j=roi_j, t=t_j))
                if idx_j > idx_i:
                    if (t_i > 0 and t_j > 0) or (t_i < 0 and t_j < 0):
                        weight = abs(t_i) + abs(t_j) - abs(t_i - t_j)
                        #iflogger.info('Weight = {w}     T-values for ROIs {i}-{j}'.format(w=weight, i=t_i, j=t_j))
                        # if weight > threshold:
                        connectivity_matrix[idx_i, idx_j] = weight
                        correlation_matrix[idx_i, idx_j] = weight
                        #iflogger.info('Drawing a correlation edge for ROIs {i}-{j} at {id_i},{id_j}'.format(i=roi_i, j=roi_j, id_i=idx_i, id_j=idx_j))
                    elif (t_i < 0 and t_j > 0) or (t_i > 0 and t_j < 0):
                        weight = abs(t_i) + abs(t_j) - abs(t_i + t_j)
                        #iflogger.info('Weight = {w}     T-values for ROIs {i}-{j}'.format(w=weight, i=t_i, j=t_j))
                        # if weight > threshold:
                        connectivity_matrix[idx_i, idx_j] = -weight
                        anticorrelation_matrix[idx_i, idx_j] = weight
                        #iflogger.info('Drawing an anticorrelation edge for ROIs {i}-{j} at {id_i},{id_j}'.format(i=roi_i, j=roi_j, id_i=idx_i, id_j=idx_j))
                        #iflogger.info('Weight = {w}     T-values for ROIs {i}-{j}'.format(w=weight, i=t_i, j=t_j))

        edges = len(np.nonzero(connectivity_matrix)[0])
        cor_edges = len(np.nonzero(correlation_matrix)[0])
        anticor_edges = len(np.nonzero(anticorrelation_matrix)[0])

        iflogger.info('Total edges: {e}'.format(e=edges))
        iflogger.info('Total correlation edges: {c}'.format(c=cor_edges))
        iflogger.info(
            'Total anticorrelation edges: {a}'.format(a=anticor_edges))

        connectivity_matrix = connectivity_matrix + connectivity_matrix.T
        correlation_matrix = correlation_matrix + correlation_matrix.T
        anticorrelation_matrix = anticorrelation_matrix + \
            anticorrelation_matrix.T

        stats[edge_key] = connectivity_matrix
        stats['correlation'] = correlation_matrix
        stats['anticorrelation'] = anticorrelation_matrix

        try:
            gp = nx.read_gpickle(self.inputs.resolution_network_file)
        except IndexError:
            gp = nx.read_graphml(self.inputs.resolution_network_file)
        nodedict = gp.node[gp.nodes()[0]]
        if 'dn_position' not in nodedict:
            iflogger.info("Creating node positions from segmentation")
            G = nx.Graph()
            for u, d in gp.nodes_iter(data=True):
                G.add_node(int(u), d)
                xyz = tuple(
                    np.mean(np.where(np.flipud(segmentationdata) == int(d["dn_correspondence_id"])), axis=1))
                G.node[int(u)]['dn_position'] = xyz
            ntwkname = op.abspath('nodepositions.pck')
            nx.write_gpickle(G, ntwkname)
        else:
            ntwkname = self.inputs.resolution_network_file

        try:
            ntwkname = nx.read_gpickle(ntwkname)
        except IndexError:
            ntwkname = nx.read_graphml(ntwkname)

        newntwk = ntwkname.copy()
        newntwk = remove_all_edges(newntwk)

        if self.inputs.give_nodes_values:
            newntwk = add_node_data(stats[key], newntwk)
            corntwk = add_node_data(stats[key], newntwk)
            anticorntwk = add_node_data(stats[key], newntwk)

            newntwk = add_edge_data(stats[edge_key], newntwk)
            corntwk = add_edge_data(stats['correlation'], corntwk)
            anticorntwk = add_edge_data(stats['anticorrelation'], anticorntwk)
        else:
            newntwk = add_edge_data(stats[edge_key], ntwkname)
            corntwk = add_edge_data(stats['correlation'], ntwkname)
            anticorntwk = add_edge_data(stats['anticorrelation'], ntwkname)

        if isdefined(self.inputs.out_network_file):
            path, name, ext = split_filename(self.inputs.out_network_file)
            if not ext == '.pck':
                ext = '.pck'
            out_network_file = op.abspath(name + ext)
        else:
            if isdefined(self.inputs.subject_id):
                out_network_file = op.abspath(
                    self.inputs.subject_id + '_IC_' + str(self.inputs.component_index) + '.pck')
            else:
                out_network_file = op.abspath(
                    'IC_' + str(self.inputs.component_index) + '.pck')

        path, name, ext = split_filename(out_network_file)
        iflogger.info(
            'Saving output network as {ntwk}'.format(ntwk=out_network_file))
        nx.write_gpickle(newntwk, out_network_file)

        out_correlation_network = op.abspath(name + '_correlation' + ext)
        iflogger.info(
            'Saving correlation network as {ntwk}'.format(ntwk=out_correlation_network))
        nx.write_gpickle(corntwk, out_correlation_network)

        out_anticorrelation_network = op.abspath(
            name + '_anticorrelation' + ext)
        iflogger.info('Saving anticorrelation network as {ntwk}'.format(
            ntwk=out_anticorrelation_network))
        nx.write_gpickle(anticorntwk, out_anticorrelation_network)

        if isdefined(self.inputs.subject_id):
            stats['subject_id'] = self.inputs.subject_id

        if isdefined(self.inputs.out_stats_file):
            path, name, ext = split_filename(self.inputs.out_stats_file)
            if not ext == '.mat':
                ext = '.mat'
            out_stats_file = op.abspath(name + ext)
        else:
            if isdefined(self.inputs.subject_id):
                out_stats_file = op.abspath(
                    self.inputs.subject_id + '_IC_' + str(self.inputs.component_index) + '.mat')
            else:
                out_stats_file = op.abspath(
                    'IC_' + str(self.inputs.component_index) + '.mat')

        iflogger.info(
            'Saving image statistics as {stats}'.format(stats=out_stats_file))
        sio.savemat(out_stats_file, stats)
        return runtime
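For each node pair in this example, a same-sign pair of t-values gets a correlation weight of |t_i| + |t_j| - |t_i - t_j|, while an opposite-sign pair gets an anticorrelation weight of |t_i| + |t_j| - |t_i + t_j|, stored with a negative sign in the combined connectivity matrix. A vectorized sketch of the same edge-weight computation (ic_edge_matrices is a hypothetical helper, not nipype API):

    import numpy as np

    def ic_edge_matrices(t_value_per_node):
        # t_value_per_node: 1-D array of per-node t-values for one component
        t = np.asarray(t_value_per_node, dtype=float)
        ti, tj = t[:, None], t[None, :]
        upper = np.triu(np.ones((t.size, t.size), dtype=bool), k=1)
        same_sign = (ti * tj) > 0          # both positive or both negative
        opposite = (ti * tj) < 0
        w_same = np.abs(ti) + np.abs(tj) - np.abs(ti - tj)
        w_opp = np.abs(ti) + np.abs(tj) - np.abs(ti + tj)
        correlation = np.where(upper & same_sign, w_same, 0.0)
        anticorrelation = np.where(upper & opposite, w_opp, 0.0)
        connectivity = correlation - anticorrelation
        # symmetrize, matching the `matrix + matrix.T` step above
        return (connectivity + connectivity.T,
                correlation + correlation.T,
                anticorrelation + anticorrelation.T)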
Example #6
    def _run_interface(self, runtime):
        key = 'congraph'
        edge_key = 'weight'

        iflogger.info('T-value Threshold file: {t}'.format(
            t=self.inputs.t_value_threshold_file))
        iflogger.info('Independent component to use: {i}'.format(
            i=self.inputs.component_index))
        path, name, ext = split_filename(self.inputs.t_value_threshold_file)

        if ext == '.mat':
            t_value_dict = sio.loadmat(self.inputs.t_value_threshold_file)
            t_values = t_value_dict['t_value_per_node']
            t_value_per_node = t_values[:, self.inputs.component_index - 1]
            number_of_ICs = np.shape(t_values)[1]
        else:
            iflogger.info(
                "Please save the t-values as a Matlab file with key 't_value_per_node'"
            )

        functional = nb.load(self.inputs.in_file)
        functionaldata = functional.get_data()
        segmentation = nb.load(self.inputs.segmentation_file)
        segmentationdata = segmentation.get_data()
        rois = get_roi_list(self.inputs.segmentation_file)
        number_of_nodes = len(rois)
        iflogger.info(
            'Found {roi} unique region values'.format(roi=number_of_nodes))
        iflogger.info('Significance threshold: {p}'.format(
            p=self.inputs.significance_threshold))
        #number_of_images = self.inputs.number_of_images
        # degrees_of_freedom = number_of_images - \
        #    number_of_ICs - 1
        #sig = self.inputs.significance_threshold
        #threshold = tinv((1-sig), degrees_of_freedom)
        #threshold = 2*threshold
        #iflogger.info('Weight threshold: {w}'.format(w=threshold))

        iflogger.info(
            'Functional image: {img}'.format(img=self.inputs.in_file))
        iflogger.info('Segmentation image: {img}'.format(
            img=self.inputs.segmentation_file))
        if not get_data_dims(self.inputs.in_file) == get_data_dims(
                self.inputs.segmentation_file):
            iflogger.error(
                'Image dimensions are not the same, please reslice the images to the same dimensions'
            )
            dx, dy, dz = get_data_dims(self.inputs.in_file)
            iflogger.error(
                'Functional image dimensions: {dimx}, {dimy}, {dimz}'.format(
                    dimx=dx, dimy=dy, dimz=dz))
            dx, dy, dz = get_data_dims(self.inputs.segmentation_file)
            iflogger.error(
                'Segmentation image dimensions: {dimx}, {dimy}, {dimz}'.format(
                    dimx=dx, dimy=dy, dimz=dz))

        stats = {}

        if self.inputs.give_nodes_values:
            func_mean = []
            for idx, roi in enumerate(rois):
                values = []
                x, y, z = np.where(segmentationdata == roi)
                for index in range(0, len(x)):
                    value = functionaldata[x[index]][y[index]][z[index]]
                    values.append(value)
                func_mean.append(np.mean(values))
                iflogger.info('Region ID: {id}, Mean Value: {avg}'.format(
                    id=roi, avg=np.mean(values)))
            stats[key] = func_mean

        connectivity_matrix = np.zeros((number_of_nodes, number_of_nodes))
        correlation_matrix = np.zeros((number_of_nodes, number_of_nodes))
        anticorrelation_matrix = np.zeros((number_of_nodes, number_of_nodes))
        iflogger.info('Drawing edges...')
        for idx_i, roi_i in enumerate(rois):
            t_i = t_value_per_node[idx_i]
            #iflogger.info('ROI:{i}, T-value: {t}'.format(i=roi_i, t=t_i))
            for idx_j, roi_j in enumerate(rois):
                t_j = t_value_per_node[idx_j]
                #iflogger.info('...ROI:{j}, T-value: {t}'.format(j=roi_j, t=t_j))
                if idx_j > idx_i:
                    if (t_i > 0 and t_j > 0) or (t_i < 0 and t_j < 0):
                        weight = abs(t_i) + abs(t_j) - abs(t_i - t_j)
                        #iflogger.info('Weight = {w}     T-values for ROIs {i}-{j}'.format(w=weight, i=t_i, j=t_j))
                        # if weight > threshold:
                        connectivity_matrix[idx_i, idx_j] = weight
                        correlation_matrix[idx_i, idx_j] = weight
                        #iflogger.info('Drawing a correlation edge for ROIs {i}-{j} at {id_i},{id_j}'.format(i=roi_i, j=roi_j, id_i=idx_i, id_j=idx_j))
                    elif (t_i < 0 and t_j > 0) or (t_i > 0 and t_j < 0):
                        weight = abs(t_i) + abs(t_j) - abs(t_i + t_j)
                        #iflogger.info('Weight = {w}     T-values for ROIs {i}-{j}'.format(w=weight, i=t_i, j=t_j))
                        # if weight > threshold:
                        connectivity_matrix[idx_i, idx_j] = -weight
                        anticorrelation_matrix[idx_i, idx_j] = weight
                        #iflogger.info('Drawing an anticorrelation edge for ROIs {i}-{j} at {id_i},{id_j}'.format(i=roi_i, j=roi_j, id_i=idx_i, id_j=idx_j))
                        #iflogger.info('Weight = {w}     T-values for ROIs {i}-{j}'.format(w=weight, i=t_i, j=t_j))

        edges = len(np.nonzero(connectivity_matrix)[0])
        cor_edges = len(np.nonzero(correlation_matrix)[0])
        anticor_edges = len(np.nonzero(anticorrelation_matrix)[0])

        iflogger.info('Total edges: {e}'.format(e=edges))
        iflogger.info('Total correlation edges: {c}'.format(c=cor_edges))
        iflogger.info(
            'Total anticorrelation edges: {a}'.format(a=anticor_edges))

        connectivity_matrix = connectivity_matrix + connectivity_matrix.T
        correlation_matrix = correlation_matrix + correlation_matrix.T
        anticorrelation_matrix = anticorrelation_matrix + \
            anticorrelation_matrix.T

        stats[edge_key] = connectivity_matrix
        stats['correlation'] = correlation_matrix
        stats['anticorrelation'] = anticorrelation_matrix

        try:
            gp = nx.read_gpickle(self.inputs.resolution_network_file)
        except IndexError:
            gp = nx.read_graphml(self.inputs.resolution_network_file)
        nodedict = gp.node[gp.nodes()[0]]
        if 'dn_position' not in nodedict:
            iflogger.info("Creating node positions from segmentation")
            G = nx.Graph()
            for u, d in gp.nodes_iter(data=True):
                G.add_node(int(u), d)
                xyz = tuple(
                    np.mean(np.where(
                        np.flipud(segmentationdata) == int(
                            d["dn_correspondence_id"])),
                            axis=1))
                G.node[int(u)]['dn_position'] = xyz
            ntwkname = op.abspath('nodepositions.pck')
            nx.write_gpickle(G, ntwkname)
        else:
            ntwkname = self.inputs.resolution_network_file

        try:
            ntwkname = nx.read_gpickle(ntwkname)
        except IndexError:
            ntwkname = nx.read_graphml(ntwkname)

        newntwk = ntwkname.copy()
        newntwk = remove_all_edges(newntwk)

        if self.inputs.give_nodes_values:
            newntwk = add_node_data(stats[key], newntwk)
            corntwk = add_node_data(stats[key], newntwk)
            anticorntwk = add_node_data(stats[key], newntwk)

            newntwk = add_edge_data(stats[edge_key], newntwk)
            corntwk = add_edge_data(stats['correlation'], corntwk)
            anticorntwk = add_edge_data(stats['anticorrelation'], anticorntwk)
        else:
            newntwk = add_edge_data(stats[edge_key], ntwkname)
            corntwk = add_edge_data(stats['correlation'], ntwkname)
            anticorntwk = add_edge_data(stats['anticorrelation'], ntwkname)

        if isdefined(self.inputs.out_network_file):
            path, name, ext = split_filename(self.inputs.out_network_file)
            if not ext == '.pck':
                ext = '.pck'
            out_network_file = op.abspath(name + ext)
        else:
            if isdefined(self.inputs.subject_id):
                out_network_file = op.abspath(
                    self.inputs.subject_id + '_IC_' +
                    str(self.inputs.component_index) + '.pck')
            else:
                out_network_file = op.abspath(
                    'IC_' + str(self.inputs.component_index) + '.pck')

        path, name, ext = split_filename(out_network_file)
        iflogger.info(
            'Saving output network as {ntwk}'.format(ntwk=out_network_file))
        nx.write_gpickle(newntwk, out_network_file)

        out_correlation_network = op.abspath(name + '_correlation' + ext)
        iflogger.info('Saving correlation network as {ntwk}'.format(
            ntwk=out_correlation_network))
        nx.write_gpickle(corntwk, out_correlation_network)

        out_anticorrelation_network = op.abspath(name + '_anticorrelation' +
                                                 ext)
        iflogger.info('Saving anticorrelation network as {ntwk}'.format(
            ntwk=out_anticorrelation_network))
        nx.write_gpickle(anticorntwk, out_anticorrelation_network)

        if isdefined(self.inputs.subject_id):
            stats['subject_id'] = self.inputs.subject_id

        if isdefined(self.inputs.out_stats_file):
            path, name, ext = split_filename(self.inputs.out_stats_file)
            if not ext == '.mat':
                ext = '.mat'
            out_stats_file = op.abspath(name + ext)
        else:
            if isdefined(self.inputs.subject_id):
                out_stats_file = op.abspath(self.inputs.subject_id + '_IC_' +
                                            str(self.inputs.component_index) +
                                            '.mat')
            else:
                out_stats_file = op.abspath('IC_' +
                                            str(self.inputs.component_index) +
                                            '.mat')

        iflogger.info(
            'Saving image statistics as {stats}'.format(stats=out_stats_file))
        sio.savemat(out_stats_file, stats)
        return runtime
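A short sketch of reading back the files written in Examples #5 and #6, assuming the legacy NetworkX gpickle reader used above is still available (it was removed in NetworkX 3.0); the filenames are illustrative and the .mat keys match the stats dictionary built above:

    import networkx as nx
    import scipy.io as sio

    ntwk = nx.read_gpickle('subj01_IC_1.pck')   # hypothetical output name
    print(ntwk.number_of_nodes(), ntwk.number_of_edges())

    stats = sio.loadmat('subj01_IC_1.mat')      # hypothetical output name
    print(stats['weight'].shape)                # symmetric connectivity matrix
    print(stats['correlation'].shape, stats['anticorrelation'].shape)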