Example #1
def make_clustered_scheme(start_scheme, scheme_name, subsets_to_cluster, cfg):

    #1. Create a new subset that merges the subsets_to_cluster
    newsub_parts = []

    #log.info("Clustering %d subsets" % len(subsets_to_cluster))

    for s in subsets_to_cluster:
        newsub_parts.extend(s.partitions)
    newsub = subset.Subset(*newsub_parts)

    #2. Then we define a new scheme with those merged subsets
    all_subs = list(start_scheme.subsets)

    #pop out the subsets we're going to join together
    for s in subsets_to_cluster:
        all_subs.remove(s)

    #and now we add back in our new subset...
    all_subs.append(newsub)

    #and finally create the clustered scheme
    final_scheme = scheme.Scheme(cfg, str(scheme_name), all_subs)

    return final_scheme
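
A minimal usage sketch for the function above; start_scheme, its subsets, and cfg are hypothetical PartitionFinder-style objects, named purely for illustration.

# Hedged sketch: merge the first two subsets of a hypothetical starting scheme.
sub_a, sub_b = list(start_scheme.subsets)[:2]
clustered = make_clustered_scheme(start_scheme, "step_1", [sub_a, sub_b], cfg)
# The two clustered subsets collapse into one, so the count drops by one.
assert len(list(clustered.subsets)) == len(list(start_scheme.subsets)) - 1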
def create_high_resolution_scheme( scheme_a, b_scale = 1 ) :
    """Create an high-resolution version of a scheme to be used for kernel rotation (500 directions per shell).
    All other parameters of the scheme remain the same.

    Parameters
    ----------
    scheme_a : Scheme class
        Original acquisition scheme
    b_scale : float
        If needed, apply a scaling to the b-values (default : 1)
    """
    n = len( scheme_a.shells )
    raw = np.zeros( (500*n, 4 if scheme_a.version==0 else 7) )
    row = 0
    for i in xrange(n) :
        # NOTE: `grad` is assumed here to be a module-level (500, 3) array of
        # unit gradient directions (one direction per row); it is not defined
        # in this snippet.
        raw[row:row+500,0:3] = grad
        if scheme_a.version == 0 :
            raw[row:row+500,3] = scheme_a.shells[i]['b'] * b_scale
        else :
            raw[row:row+500,3] = scheme_a.shells[i]['G']
            raw[row:row+500,4] = scheme_a.shells[i]['Delta']
            raw[row:row+500,5] = scheme_a.shells[i]['delta']
            raw[row:row+500,6] = scheme_a.shells[i]['TE']
        row += 500

    return scheme.Scheme( raw )
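
The loop above relies on a module-level grad array that the snippet never defines; a minimal sketch of one way to build such an array, assuming 500 uniformly sampled random unit directions rather than the library's actual direction table:

import numpy as np

# Hypothetical stand-in for the module-level `grad` used above: 500 random
# unit vectors, one direction per row.
rng = np.random.RandomState(0)
grad = rng.randn(500, 3)
grad /= np.linalg.norm(grad, axis=1, keepdims=True)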
    def do_analysis(self):
        '''A kmeans algorithm for heuristic partitioning searches'''

        start_result, start_scheme, tree_path = self.setup()

        step = 0

        # we only work on lists of subsets
        start_subsets = list(start_scheme.subsets)

        self.analyse_list_of_subsets(start_subsets)

        # now we suppress ExternalProgramError for the rest of the algorithm
        the_config.suppress_errors = True

        for s in start_subsets:
            if s.fabricated:
                log.error("""One or more of your starting datablocks could not
                          be analysed. Please check your data and try again.
                          One way to fix this is to join your small datablocks
                          together into larger datablocks""")
                raise AnalysisError

        while True:
            step += 1
            with logtools.indented(log,
                                   "***k-means algorithm step %d***" % step):
                done, start_subsets = self.one_kmeans_step(
                    start_subsets, step, tree_path)

            if done:
                break

        # OK, we're done. We just need to deal with fabricated subsets
        final_scheme = self.finalise_fabrication(start_subsets, step)

        # Finally, for krmeans, we put the invariant sites back with their
        # nearest variable neighbours
        if the_config.search == 'krmeans':
            log.info("Reassigning invariant sites for krmeans algorithm")
            # the definition of krmeans is that we reassign the zero entropies
            final_subsets = self.reassign_invariant_sites(final_scheme.subsets)
            final_scheme = scheme.Scheme(the_config, "final_scheme_reassigned",
                                         final_subsets)

        log.info("Analysing final scheme")

        final_result = self.analyse_scheme(final_scheme)

        self.report(step)

        if not the_config.quick:
            the_config.reporter.write_scheme_summary(final_scheme,
                                                     final_result)

        return final_scheme
def make_clustered_scheme(start_scheme, scheme_name, subsets_to_cluster,
                          merged_sub, cfg):

    # 1. Define the new set of subsets: drop the subsets we are clustering
    #    and add back the merged subset
    new_subsets = start_scheme.subsets - set(subsets_to_cluster)
    new_subsets.add(merged_sub)

    # 2. Create the clustered scheme
    final_scheme = scheme.Scheme(cfg, str(scheme_name), new_subsets)

    return final_scheme
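
This version relies on start_scheme.subsets behaving like a Python set; a standalone illustration of the same replace-by-merge step on plain sets (all names are stand-ins):

# Plain-set illustration of the replace-by-merge step above.
subsets = {"sub1", "sub2", "sub3"}   # stand-ins for subset objects
to_cluster = {"sub1", "sub2"}
merged = "sub1+sub2"

new_subsets = subsets - to_cluster   # drop the originals
new_subsets.add(merged)              # add the merged subset
print(sorted(new_subsets))           # ['sub1+sub2', 'sub3']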
Example #5
    def define_schema(self, text, loc, scheme_def):
        try:
            # Snapshot the subsets, then clear the list so it can be reused
            subs = tuple(self.subsets)
            self.subsets = []

            if not self.ignore_schemes:
                sch = scheme.Scheme(self.cfg, scheme_def.name, subs)
                self.cfg.user_schemes.add_scheme(sch)

        except (scheme.SchemeError, subset.SubsetError):
            raise ParserError(text, loc,
                              "Error found in scheme '%s'" % scheme_def.name)
    def define_scheme(self, text, loc, scheme_def):
        try:
            subs = tuple(self.current_subsets)

            if not self.ignore_schemes:
                sch = scheme.Scheme(self.cfg, scheme_def.name, subs)
                self.cfg.user_schemes.add_scheme(sch)

        except (scheme.SchemeError, subset.SubsetError):
            raise ParserError(text, loc,
                              "Error found in scheme '%s'" % scheme_def.name)
        finally:
            # Clear out the current_subsets as we need to reuse it
            self.current_subsets = []
def make_split_scheme(start_scheme, scheme_name, subset_to_split,
                      split_subsets, cfg):

    # 1. Remove the subset we are splitting from the starting subsets
    new_subsets = start_scheme.subsets - {subset_to_split}

    # 2. Add all of the split subsets
    for s in split_subsets:
        new_subsets.add(s)

    # 3. Create the split scheme
    final_scheme = scheme.Scheme(cfg, str(scheme_name), new_subsets)

    return final_scheme
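
As a side note, the loop over split_subsets is equivalent to a single set.update call; a quick illustration on plain sets (names are stand-ins):

# One-step version of the loop above, shown on plain sets.
new_subsets = {"sub_b", "sub_c"}
split_subsets = ["sub_a1", "sub_a2"]
new_subsets.update(split_subsets)    # same effect as adding one by one
print(sorted(new_subsets))           # ['sub_a1', 'sub_a2', 'sub_b', 'sub_c']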
Example #8
    def load_data(self,
                  dwi_filename='DWI.nii',
                  scheme_filename='DWI.scheme',
                  mask_filename=None,
                  b0_thr=0):
        """Load the diffusion signal and its corresponding acquisition scheme.

        Parameters
        ----------
        dwi_filename : string
            The file name of the DWI data, relative to the subject folder (default : 'DWI.nii')
        scheme_filename : string
            The file name of the corresponding acquisition scheme (default : 'DWI.scheme')
        mask_filename : string
            The file name of the (optional) binary mask (default : None)
        b0_thr : float
            The threshold below which a b-value is considered a b0 (default : 0)
        """

        # Loading data, acquisition scheme and mask (optional)
        tic = time.time()
        print '\n-> Loading data:'

        print '\t* DWI signal...'
        self.set_config('dwi_filename', dwi_filename)
        self.niiDWI = nibabel.load(
            pjoin(self.get_config('DATA_path'), dwi_filename))
        self.niiDWI_img = self.niiDWI.get_data().astype(np.float32)
        hdr = self.niiDWI.header if nibabel.__version__ >= '2.0.0' \
            else self.niiDWI.get_header()
        self.set_config('dim', self.niiDWI_img.shape[:3])
        self.set_config('pixdim', tuple(hdr.get_zooms()[:3]))
        print '\t\t- dim    = %d x %d x %d x %d' % self.niiDWI_img.shape
        print '\t\t- pixdim = %.3f x %.3f x %.3f' % self.get_config('pixdim')
        # Scale signal intensities (if necessary)
        if (np.isfinite(hdr['scl_slope']) and np.isfinite(hdr['scl_inter'])
                and hdr['scl_slope'] != 0
                and (hdr['scl_slope'] != 1 or hdr['scl_inter'] != 0)):
            print '\t\t- rescaling data',
            self.niiDWI_img = self.niiDWI_img * hdr['scl_slope'] \
                + hdr['scl_inter']
            print "[OK]"

        print '\t* Acquisition scheme...'
        self.set_config('scheme_filename', scheme_filename)
        self.set_config('b0_thr', b0_thr)
        self.scheme = scheme.Scheme(
            pjoin(self.get_config('DATA_path'), scheme_filename), b0_thr)
        print '\t\t- %d samples, %d shells' % (self.scheme.nS,
                                               len(self.scheme.shells))
        print '\t\t- %d @ b=0' % (self.scheme.b0_count),
        for i in xrange(len(self.scheme.shells)):
            print ', %d @ b=%.1f' % (len(self.scheme.shells[i]['idx']),
                                     self.scheme.shells[i]['b']),
        print

        if self.scheme.nS != self.niiDWI_img.shape[3]:
            raise ValueError('Scheme does not match with DWI data')

        print '\t* Binary mask...'
        if mask_filename is not None:
            self.niiMASK = nibabel.load(
                pjoin(self.get_config('DATA_path'), mask_filename))
            self.niiMASK_img = self.niiMASK.get_data().astype(np.uint8)
            niiMASK_hdr = self.niiMASK.header if nibabel.__version__ >= '2.0.0' \
                else self.niiMASK.get_header()
            print '\t\t- dim    = %d x %d x %d' % self.niiMASK_img.shape[:3]
            print '\t\t- pixdim = %.3f x %.3f x %.3f' % niiMASK_hdr.get_zooms()[:3]
            if self.get_config('dim') != self.niiMASK_img.shape[:3]:
                raise ValueError('MASK geometry does not match with DWI data')
        else:
            self.niiMASK = None
            self.niiMASK_img = np.ones(self.get_config('dim'))
            print '\t\t- not specified'
        print '\t\t- voxels = %d' % np.count_nonzero(self.niiMASK_img)

        # Preprocessing
        print '\n-> Preprocessing:'

        if self.get_config('doNormalizeSignal'):
            print '\t* Normalizing to b0...',
            sys.stdout.flush()
            if self.scheme.b0_count > 0:
                self.mean_b0s = np.mean(
                    self.niiDWI_img[:, :, :, self.scheme.b0_idx], axis=3)
            else:
                raise ValueError('No b0 volume to normalize signal with')
            norm_factor = self.mean_b0s.copy()
            idx = self.mean_b0s <= 0
            norm_factor[idx] = 1
            norm_factor = 1 / norm_factor
            norm_factor[idx] = 0
            for i in xrange(self.scheme.nS):
                self.niiDWI_img[:, :, :, i] *= norm_factor
            print '[ min=%.2f,  mean=%.2f, max=%.2f ]' % (
                self.niiDWI_img.min(), self.niiDWI_img.mean(),
                self.niiDWI_img.max())

        if self.get_config('doMergeB0'):
            print '\t* Merging multiple b0 volume(s)...',
            mean = np.expand_dims(
                np.mean(self.niiDWI_img[:, :, :, self.scheme.b0_idx], axis=3),
                axis=3)
            self.niiDWI_img = np.concatenate(
                (mean, self.niiDWI_img[:, :, :, self.scheme.dwi_idx]), axis=3)
        else:
            print '\t* Keeping all b0 volume(s)...'

        print '   [ %.1f seconds ]' % (time.time() - tic)
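
The normalization step above guards against non-positive mean-b0 voxels before taking a reciprocal; a minimal standalone numpy sketch of that trick on synthetic data:

import numpy as np

# Synthetic mean-b0 map with a zero voxel to exercise the divide-by-zero guard.
mean_b0s = np.array([2.0, 0.0, 4.0])

norm_factor = mean_b0s.copy()
idx = mean_b0s <= 0
norm_factor[idx] = 1            # placeholder so the reciprocal is safe
norm_factor = 1 / norm_factor   # reciprocal of the valid voxels
norm_factor[idx] = 0            # invalid voxels get a factor of zero
print(norm_factor)              # -> [0.5, 0.0, 0.25]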
Example #9
    def finalise_fabrication(self, start_subsets, step):

        fabricated_subsets = []
        for s in start_subsets:

            # here we put a sensible lower limit on the size of subsets
            if len(s.columns) < the_config.min_subset_size:
                s.fabricated = True
                log.debug("Subset %s with only %d sites found" %
                          (s.subset_id, len(s.columns)))

            # here we can test if the alignment has all states:
            state_probs = self.alignment.check_state_probs(s, the_config)
            if state_probs:
                s.fabricated = True
                log.debug(
                    "Subset %s does not have all states in the alignment",
                    s.subset_id)

            if s.fabricated:
                fabricated_subsets.append(s)
                log.debug("added %s to fabricated subset", s.name)

        if fabricated_subsets:
            with logtools.indented(log, "Finalising partitioning scheme"):
                log.debug("There are %d/%d fabricated subsets" %
                          (len(fabricated_subsets), len(start_subsets)))

                i = 1
                while fabricated_subsets:

                    all_subs = start_subsets

                    # occasionally subsets where every value == 0.0 are given a
                    # centroid of None by scikit-learn. The true entropy here
                    # is 0.0 for all sites, so the true centroid is 0.0
                    for s in all_subs:
                        if s.centroid is None:
                            s.centroid = [0.0]
                            log.debug("Fixed a subset with a centroid of None")
                            log.debug("The subset has %d columns" %
                                      len(s.columns))

                    s = fabricated_subsets.pop(0)

                    log.debug("Working on fabricated subset %s with %d sites" %
                              (s.subset_id, len(s.columns)))
                    log.info("Finalising subset %d", i)
                    i = i + 1

                    all_subs.remove(s)

                    centroid = s.centroid

                    best_match = None

                    # get closest subset to s
                    for sub in all_subs:

                        centroid_array = [sub.centroid, centroid]

                        euclid_dist = spatial.distance.pdist(centroid_array)

                        if best_match is None or euclid_dist < best_match:
                            best_match = euclid_dist
                            closest_sub = sub

                    # join s with closest_sub to make joined_sub
                    merged_sub = subset_ops.merge_subsets([s, closest_sub])

                    # remove closest sub
                    all_subs.remove(closest_sub)

                    # and if closest_sub was fabricated too, we remove it here
                    if closest_sub in fabricated_subsets:
                        fabricated_subsets.remove(closest_sub)

                    # analyse joined sub
                    self.analyse_list_of_subsets([merged_sub])

                    # here we put a sensible lower limit on the size of subsets
                    if len(merged_sub.columns) < the_config.min_subset_size:
                        merged_sub.fabricated = True

                    # if joined has to be fabricated, add to fabricated list
                    if merged_sub.fabricated:
                        fabricated_subsets.append(merged_sub)

                    all_subs.append(merged_sub)
        else:
            all_subs = start_subsets

        # now build a scheme from start_subs, and it should work
        final_scheme = scheme.Scheme(the_config, "final_scheme", all_subs)

        # return final scheme
        return final_scheme
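
The nearest-subset search above measures the Euclidean distance between two centroids with scipy's pdist; a small standalone sketch of that call on toy centroids:

from scipy import spatial

# pdist on a 2-row array returns a length-1 array holding the single
# pairwise distance, exactly as the search loop above computes it.
centroid_a = [0.0, 1.0]
centroid_b = [3.0, 5.0]
dist = spatial.distance.pdist([centroid_a, centroid_b])
print(dist)  # [5.] since sqrt(3**2 + 4**2) == 5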
Example #10
def main():
    # window
    window = gtk.Window()
    window.resize(400, 400)  # TODO this resizes the surface and view?
    window.move(400, 600)
    window.connect('destroy', gtk.main_quit)
    window.realize()

    da = gtk.DrawingArea()
    '''
  This must precede realization of the view, or use add_events() instead.
  The first three are mouse events.
  STRUCTURE is configure-event (resizing the window).
  The last are focus and keyboard events.
  '''
    da.set_events(gdk.BUTTON_PRESS_MASK
                  | gdk.POINTER_MOTION_MASK
                  | gdk.BUTTON_RELEASE_MASK
                  | gdk.STRUCTURE_MASK
                  | gdk.FOCUS_CHANGE_MASK
                  | gdk.KEY_RELEASE_MASK
                  | gdk.KEY_PRESS_MASK)
    '''
  Enable drawing area widget to receive keyboard events: focus_in, key_release, etc.
  We implement our own widgets (controls) including text controls that receive keyboard.
  Also, we implement our own *traversal* (via the tab key per convention)
  among our controls that get the keyboard focus.
  '''
    da.set_flags(da.flags() | gtk.CAN_FOCUS)

    window.add(da)

    # Can draw to several ports.
    a_view = port.ViewPort(da)
    a_printerport = port.PrinterPort()
    a_fileport = port.FilePort()

    # global singletons
    config.viewport = a_view
    config.scheme = scheme.Scheme()

    window.show_all()  # Show now so allocation becomes valid

    # Enforces one control active
    gui.manager.control.control_manager = gui.manager.control.ControlsManager()
    # build singleton controls
    controlinstances.build_all(a_printerport, a_fileport)
    gui.manager.control.control_manager.set_root_control(
        controlinstances.bkgd_control)

    # Initial active control is the background manager. Controlee is the bkgd_control itself.
    gui.manager.control.control_manager.activate_control(
        controlinstances.bkgd_control, controlinstances.bkgd_control)

    a_view.set_model(config.scheme.model)
    a_printerport.set_model(config.scheme.model)
    a_fileport.set_model(config.scheme.model)

    make_test_doc()

    gtk.main()
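
The comment block in main() notes that the event mask must be set before the widget is realized, or add_events() used instead; a minimal sketch of the add_events() variant, which PyGTK permits even on an already-realized widget (illustrative only, PyGTK 2.x assumed):

import gtk
from gtk import gdk

# Hypothetical variant: extend the event mask incrementally with add_events()
# instead of replacing it wholesale with set_events().
da = gtk.DrawingArea()
da.add_events(gdk.BUTTON_PRESS_MASK | gdk.KEY_PRESS_MASK)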