Example #1
    def process_sample(self, test):
        outdir = self.dir_svm + test
        if not os.path.isdir(outdir):
            os.makedirs(outdir)

        prior, mask = load_or_compute_prior_and_mask(
            test, force_recompute=self.force_recompute_prior)

        self.prior = prior
        self.seeds = (-1) * mask.astype(int)

        ## training
        if self.retrain:
            w, xi, info = self.train_svm(test)
            np.savetxt(outdir + 'w.test.txt', w)
            np.savetxt(outdir + 'xi.test.txt', [xi])

            try:
                w_gold = np.loadtxt(outdir + 'w.gold.txt')
                xi_gold = np.loadtxt(outdir + 'xi.gold.txt')

                np.testing.assert_allclose(w, w_gold, err_msg='w mismatch')
                np.testing.assert_allclose(xi, xi_gold, err_msg='xi mismatch')
            except Exception as e:
                print('{}: {}'.format(type(e).__name__, e))
            else:
                print('PASS: learning tests')

        else:
            w = np.loadtxt(outdir + 'w.test.txt')

        self.w = w

        ## inference
        self.run_svm_inference(test, w)
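Note: load_or_compute_prior_and_mask is not shown in these examples. Judging by its name and the force_recompute flag, it follows the usual load-or-compute caching pattern; the sketch below is only an illustration of that pattern (helper name, cache path, and toy data are hypothetical, not the project's actual implementation):

    import os
    import numpy as np

    def load_or_compute(cache_path, compute_fn, force_recompute=False):
        """Hypothetical caching helper: reuse a cached (prior, mask) pair unless asked to recompute."""
        if os.path.isfile(cache_path) and not force_recompute:
            data = np.load(cache_path, allow_pickle=True)
            return data['prior'].item(), data['mask']
        prior, mask = compute_fn()                      # the expensive step
        np.savez(cache_path, prior=prior, mask=mask)    # cache for the next call
        return prior, mask

    # usage with toy data standing in for the real prior computation
    prior, mask = load_or_compute(
        'prior_cache.npz',
        lambda: ({'data': np.ones(4), 'imask': np.arange(4)}, np.zeros((2, 2), dtype=int)))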
Example #3
    def process_sample(self, test, fold=None):

        ## get prior
        prior, mask = load_or_compute_prior_and_mask(
            test, force_recompute=self.force_recompute_prior, pca=True, fold=fold
        )
        seeds = (-1) * mask
        mask = mask.astype(bool)

        ## load image
        file_name = config.dir_reg + test + "gray.hdr"
        logger.info("segmenting data: {}".format(file_name))
        im = io_analyze.load(file_name)
        file_gt = config.dir_reg + test + "seg.hdr"
        seg = io_analyze.load(file_gt)
        seg.flat[~np.in1d(seg, self.labelset)] = self.labelset[0]

        ## normalize image
        nim = im / np.std(im)

        ## init anchor_api
        anchor_api = MetaAnchor(
            prior=prior, prior_models=self.prior_models, prior_weights=self.prior_weights, image=nim
        )

        ## start segmenting
        # import ipdb; ipdb.set_trace()
        sol, impca = rwsegment_pca.segment(nim, anchor_api, seeds=seeds, labelset=self.labelset, **self.params)

        ## compute Dice coefficient per label
        dice = compute_dice_coef(sol, seg, labelset=self.labelset)
        logger.info("Dice: {}".format(dice))

        dice_pca = compute_dice_coef(impca, seg, labelset=self.labelset)
        logger.info("Dice pca only: {}".format(dice_pca))

        if not config.debug:
            if fold is not None:
                test_name = "f{}_{}".format(fold[0][:2], test)
            else:
                test_name = test
            outdir = config.dir_seg + "/{}/{}".format(self.model_name, test_name)

            logger.info("saving data in: {}".format(outdir))
            if not os.path.isdir(outdir):
                os.makedirs(outdir)

            io_analyze.save(outdir + "sol.hdr", sol.astype(np.int32))
            io_analyze.save(outdir + "solpca.hdr", impca.astype(np.int32))

            np.savetxt(outdir + "dice.txt", np.c_[dice.keys(), dice.values()], fmt="%d %.8f")
            np.savetxt(outdir + "dice_pca.txt", np.c_[dice.keys(), dice_pca.values()], fmt="%d %.8f")
Example #4
    def process_sample(self, test, fold=None):
        if fold is not None:
            test_dir = 'f{}_{}'.format(fold[0][:2], test)
        else:
            test_dir = test

        if self.isroot:
            prior, mask = load_or_compute_prior_and_mask(
                test, force_recompute=self.force_recompute_prior, fold=fold)

            if self.use_parallel:
                # have only the root process compute the prior
                # and pass it to the other processes
                self.comm.bcast((dict(prior.items()), mask), root=0)
        else:
            prior, mask = self.comm.bcast(None, root=0)

        self.prior = prior
        self.seeds = (-1) * mask.astype(int)

        ## training set
        self.make_training_set(test, fold=fold)

        ## training
        if self.retrain:
            outdir = self.dir_svm + test_dir
            if not self.debug and not os.path.isdir(outdir):
                os.makedirs(outdir)

            ## instantiate functors
            self.svm_rwmean_api = SVMRWMeanAPI(
                self.prior,
                self.laplacian_functions,
                self.labelset,
                self.rwparams_svm,
                prior_models=self.prior_functions,
                seeds=self.seeds,
                **self.svm_api_params)

            if self.isroot:
                w, xi = self.train_svm(test, outdir=outdir)
                if self.debug:
                    pass
                elif self.isroot:
                    np.savetxt(outdir + 'w', w)
                    np.savetxt(outdir + 'xi', [xi])
            else:
                ## parallel
                rank = self.MPI_rank
                logger.debug('started worker #{}'.format(rank))
                worker = svm_worker.SVMWorker(self.svm_rwmean_api)
                worker.work()

        else:
            if self.isroot and not self.retrain:
                outdir = self.dir_svm + test
                logger.warning('Not retraining svm')
                w = np.loadtxt(outdir + 'w')

        ## inference
        if self.isroot:
            self.w = w
            self.run_svm_inference(test, w, test_dir=test_dir)
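The prior exchange above is the standard mpi4py pattern: only the root rank computes the expensive prior, and every rank (root included) calls comm.bcast so that all of them end up with the same object. A standalone sketch with toy data in place of the real prior:

    from mpi4py import MPI

    comm = MPI.COMM_WORLD

    if comm.Get_rank() == 0:
        # root does the expensive computation once ...
        prior, mask = {'data': [0.2, 0.8], 'imask': [3, 7]}, [0, 1, 1, 0]
        prior, mask = comm.bcast((prior, mask), root=0)   # ... and broadcasts it
    else:
        # the other ranks pass None and receive the broadcast tuple
        prior, mask = comm.bcast(None, root=0)

    print('rank {} received prior keys {}'.format(comm.Get_rank(), sorted(prior)))

Run it under an MPI launcher, e.g. mpirun -n 4 python bcast_sketch.py; the pickle-based bcast handles the (dict, array) tuple directly.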
    def process_sample(self,test,fold=None):

        ## get prior
        prior, mask = load_or_compute_prior_and_mask(
            test,
            fold=fold,
            force_recompute=self.force_recompute_prior)
        seeds   = (-1)*mask
        
        ## load image
        file_name = config.dir_reg + test + 'gray.hdr'        
        logger.info('segmenting data: {}'.format(file_name))
        im      = io_analyze.load(file_name)
        file_gt = config.dir_reg + test + 'seg.hdr'
        seg     = io_analyze.load(file_gt)
        seg.flat[~np.in1d(seg, self.labelset)] = self.labelset[0]
        
           
        ## normalize image
        nim = im/np.std(im)
            
        ## init anchor_api
        anchor_api = MetaAnchor(
            prior=prior,
            prior_models=self.prior_models,
            prior_weights=self.prior_weights,
            image=nim,
            )
           
        ## start segmenting
        #import ipdb; ipdb.set_trace()
        sol,y = rwsegment.segment(
            nim, 
            anchor_api,
            seeds=seeds, 
            labelset=self.labelset, 
            weight_function=self.weight_function,
            **self.params
            )

        ## compute losses
        z = seg.ravel()==np.c_[self.labelset]
        flatmask = mask.ravel()*np.ones((len(self.labelset),1))
        
        ## loss 0 : 1 - Dice(y,z)
        loss0 = loss_functions.ideal_loss(z,y,mask=flatmask)
        logger.info('Tloss = {}'.format(loss0))
        
        ## loss2: squared difference with ztilde
        loss1 = loss_functions.anchor_loss(z,y,mask=flatmask)
        logger.info('SDloss = {}'.format(loss1))
        
        ## loss3: laplacian loss
        loss2 = loss_functions.laplacian_loss(z,y,mask=flatmask)
        logger.info('LAPloss = {}'.format(loss2))
 
        ## loss4: linear loss
        loss3 = loss_functions.linear_loss(z,y,mask=flatmask)
        logger.info('LINloss = {}'.format(loss3))
        
        ## compute Dice coefficient per label
        dice    = compute_dice_coef(sol, seg,labelset=self.labelset)
        logger.info('Dice: {}'.format(dice))
        
        if not config.debug:
            if fold is not None:
                test_name = 'f{}_{}'.format(fold[0][:2], test)
            else:
                test_name = test
            outdir = config.dir_seg + \
                '/{}/{}'.format(self.model_name,test_name)
            logger.info('saving data in: {}'.format(outdir))
            if not os.path.isdir(outdir):
                os.makedirs(outdir)
        
            f = open(outdir + 'losses.txt', 'w')
            f.write('ideal_loss\t{}\n'.format(loss0))
            f.write('anchor_loss\t{}\n'.format(loss1))
            f.write('laplacian_loss\t{}\n'.format(loss2))
            f.close()
            
            io_analyze.save(outdir + 'sol.hdr', sol.astype(np.int32)) 
            np.savetxt(
                outdir + 'dice.txt', np.c_[list(dice.keys()), list(dice.values())], fmt='%d %.8f')
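loss_functions.ideal_loss is described in the comment above as 1 - Dice(y, z) over the flattened one-hot label matrices. Its exact masking convention is not shown, so the sketch below is only an assumption of how such a loss could be written:

    import numpy as np

    def ideal_loss(z, y, mask=None):
        """Hypothetical sketch: 1 - Dice between the hard labelings encoded by z and y.

        z, y : (n_labels, n_voxels) one-hot / soft label assignments
        mask : same shape, nonzero where a voxel should be counted
        """
        if mask is None:
            mask = np.ones_like(z, dtype=float)
        zhard = (z == z.max(axis=0)) & (mask > 0)   # hard assignment from z, restricted to the mask
        yhard = (y == y.max(axis=0)) & (mask > 0)
        inter = (zhard & yhard).sum()
        total = zhard.sum() + yhard.sum()
        return 1.0 - 2.0 * float(inter) / total if total else 0.0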
    def compute_mean_segmentation(self, sample_list):
        for test in sample_list:
            file_gt = config.dir_reg + test + 'seg.hdr'
            seg     = io_analyze.load(file_gt)
            seg.flat[~np.in1d(seg, self.labelset)] = self.labelset[0]
           

            ## get prior
            prior, mask = load_or_compute_prior_and_mask(
                test,force_recompute=self.force_recompute_prior)
            mask = mask.astype(bool)            
           

            y = np.zeros((len(self.labelset),seg.size))
            y[:,0] = 1
            y.flat[prior['imask']] = prior['data']
 
            sol = np.zeros(seg.shape,dtype=np.int32)
            sol[mask] = self.labelset[np.argmax(prior['data'],axis=0)]

            ## compute losses
            z = seg.ravel()==np.c_[self.labelset]
            flatmask = mask.ravel()*np.ones((len(self.labelset),1))
 
            ## loss 0 : 1 - Dice(y,z)
            loss0 = loss_functions.ideal_loss(z,y,mask=flatmask)
            logger.info('Tloss = {}'.format(loss0))
            
            ## loss2: squared difference with ztilde
            #loss1 = loss_functions.anchor_loss(z,y,mask=flatmask)
            #logger.info('SDloss = {}'.format(loss1))
            
            ## loss3: laplacian loss
            #loss2 = loss_functions.laplacian_loss(z,y,mask=flatmask)
            #logger.info('LAPloss = {}'.format(loss2))
 
            ## loss4: linear loss
            #loss3 = loss_functions.linear_loss(z,y,mask=flatmask)
            #logger.info('LINloss = {}'.format(loss3))
            
            ## compute Dice coefficient per label
            dice    = compute_dice_coef(sol, seg,labelset=self.labelset)
            logger.info('Dice: {}'.format(dice))
            
            if not config.debug:
                outdir = config.dir_seg + \
                    '/{}/{}'.format('mean',test)
                logger.info('saving data in: {}'.format(outdir))
                if not os.path.isdir(outdir):
                    os.makedirs(outdir)
            
                #f = open(outdir + 'losses.txt', 'w')
                #f.write('ideal_loss\t{}\n'.format(loss0))
                #f.write('anchor_loss\t{}\n'.format(loss1))
                #f.write('laplacian_loss\t{}\n'.format(loss2))
                #f.close()
                
                io_analyze.save(outdir + 'sol.hdr', sol.astype(np.int32)) 

                np.savetxt(
                    outdir + 'dice.txt', np.c_[list(dice.keys()), list(dice.values())], fmt='%d %.8f')
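The one-liner z = seg.ravel() == np.c_[self.labelset] used in both methods builds the one-hot ground-truth matrix by broadcasting: np.c_[labelset] is an (n_labels, 1) column, so comparing it against the flattened segmentation yields an (n_labels, n_voxels) boolean matrix. A toy illustration (label ids are made up):

    import numpy as np

    labelset = np.array([0, 13, 14, 15])     # toy label ids
    seg = np.array([[13, 0], [15, 14]])      # toy segmentation

    # (n_labels, 1) column compared against (n_voxels,) row -> (n_labels, n_voxels)
    z = seg.ravel() == np.c_[labelset]
    print(z.astype(int))
    # [[0 1 0 0]
    #  [1 0 0 0]
    #  [0 0 0 1]
    #  [0 0 1 0]]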