def compute_losses(z, y, mask):
    ## loss 0: 1 - Dice(y,z)
    loss0 = loss_functions.ideal_loss(z, y, mask=mask)
    logger.info('Tloss = {}'.format(loss0))

    ## loss 1: squared difference with ztilde
    loss1 = loss_functions.anchor_loss(z, y, mask=mask)
    logger.info('SDloss = {}'.format(loss1))

    ## loss 2: laplacian loss
    loss2 = loss_functions.laplacian_loss(z, y, mask=mask)
    logger.info('LAPloss = {}'.format(loss2))

    ## loss 3: linear loss
    loss3 = loss_functions.linear_loss(z, y, mask=mask)
    logger.info('LINloss = {}'.format(loss3))

    return loss0, loss1, loss2, loss3
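## ----------------------------------------------------------------------------
## Illustrative sketch (not part of the original module): loss_functions is
## defined elsewhere, so this only shows how a masked Dice loss of the form
## 1 - Dice(y, z) could be written in plain NumPy. The name and signature are
## assumptions; numpy is assumed to be imported as np, as in the rest of this
## module.
def _example_dice_loss(z, y, mask):
    """Toy 1 - Dice loss; z, y and mask all have shape (nlabel, npixel)."""
    z = np.asarray(z, dtype=float) * mask
    y = np.asarray(y, dtype=float) * mask
    denom = np.sum(z) + np.sum(y)
    dice = 2.0 * np.sum(z * y) / denom if denom > 0 else 1.0
    return 1.0 - dice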
def run_svm_inference(self, test, w, test_dir):
    logger.info('running inference on: {}'.format(test))

    ## normalize w
    # w = w / np.sqrt(np.dot(w,w))
    strw = ' '.join('{:.3}'.format(val) for val in np.asarray(w) * self.psi_scale)
    logger.debug('scaled w=[{}]'.format(strw))

    weights_laplacians = np.asarray(w)[self.indices_laplacians]
    weights_laplacians_h = np.asarray(self.hand_tuned_w)[self.indices_laplacians]
    weights_priors = np.asarray(w)[self.indices_priors]
    weights_priors_h = np.asarray(self.hand_tuned_w)[self.indices_priors]

    ## segment test image with trained w
    '''
    def meta_weight_functions(im,i,j,_w):
        data = 0
        for iwf,wf in enumerate(self.laplacian_functions):
            _data = wf(im,i,j)
            data += _w[iwf]*_data
        return data
    weight_function = lambda im: meta_weight_functions(im,i,j,weights_laplacians)
    weight_function_h = lambda im: meta_weight_functions(im,i,j,weights_laplacians_h)
    '''
    weight_function = MetaLaplacianFunction(
        weights_laplacians, self.laplacian_functions)
    weight_function_h = MetaLaplacianFunction(
        weights_laplacians_h, self.laplacian_functions)

    ## load image and ground truth
    file_seg = self.dir_reg + test + 'seg.hdr'
    file_im = self.dir_reg + test + 'gray.hdr'
    im = io_analyze.load(file_im)
    seg = io_analyze.load(file_seg)
    seg.flat[~np.in1d(seg.ravel(), self.labelset)] = self.labelset[0]
    nim = im / np.std(im)  # normalize image by std

    ## also run inference on training data?
    inference_train = True
    if inference_train:
        train_ims, train_segs, train_metas = self.training_set
        for tim, tz, tmeta in zip(train_ims, train_segs, train_metas):
            ## retrieve metadata
            islices = tmeta.pop('islices', None)
            imask = tmeta.pop('imask', None)
            iimask = tmeta.pop('iimask', None)
            if islices is not None:
                tseeds = self.seeds[islices]
                tprior = {
                    'data': np.asarray(self.prior['data'])[:, iimask],
                    'imask': imask,
                    'variance': np.asarray(self.prior['variance'])[:, iimask],
                    'labelset': self.labelset,
                }
                if 'intensity' in self.prior:
                    tprior['intensity'] = self.prior['intensity']
            else:
                tseeds = self.seeds
                tprior = self.prior

            ## prior
            tseg = self.labelset[np.argmax(tz, axis=0)].reshape(tim.shape)
            tanchor_api = MetaAnchor(
                tprior,
                self.prior_functions,
                weights_priors,
                image=tim,
            )
            tsol, ty = rwsegment.segment(
                tim,
                tanchor_api,
                seeds=tseeds,
                weight_function=weight_function,
                **self.rwparams_inf
            )

            ## compute Dice coefficient
            tdice = compute_dice_coef(tsol, tseg, labelset=self.labelset)
            logger.info('Dice coefficients for train: \n{}'.format(tdice))

            nlabel = len(self.labelset)
            tflatmask = np.zeros(ty.shape, dtype=bool)
            tflatmask[:, imask] = True

            ## loss 0: 1 - Dice(y,z)
            loss0 = loss_functions.ideal_loss(tz, ty, mask=tflatmask)
            logger.info('Tloss = {}'.format(loss0))
            ## loss 1: squared difference with ztilde
            loss1 = loss_functions.anchor_loss(tz, ty, mask=tflatmask)
            logger.info('SDloss = {}'.format(loss1))
            ## loss 2: laplacian loss
            loss2 = loss_functions.laplacian_loss(tz, ty, mask=tflatmask)
            logger.info('LAPloss = {}'.format(loss2))

            ## same training image, hand-tuned weights
            tanchor_api_h = MetaAnchor(
                tprior,
                self.prior_functions,
                weights_priors_h,
                image=tim,
            )
            tsol, ty = rwsegment.segment(
                tim,
                tanchor_api_h,
                seeds=tseeds,
                weight_function=weight_function_h,
                **self.rwparams_inf
            )

            ## compute Dice coefficient
            tdice = compute_dice_coef(tsol, tseg, labelset=self.labelset)
            logger.info('Dice coefficients for train (hand-tuned): \n{}'.format(tdice))

            loss0 = loss_functions.ideal_loss(tz, ty, mask=tflatmask)
            logger.info('Tloss (hand-tuned) = {}'.format(loss0))
            ## loss 1: squared difference with ztilde
            loss1 = loss_functions.anchor_loss(tz, ty, mask=tflatmask)
            logger.info('SDloss (hand-tuned) = {}'.format(loss1))
            ## loss 2: laplacian loss
            loss2 = loss_functions.laplacian_loss(tz, ty, mask=tflatmask)
            logger.info('LAPloss (hand-tuned) = {}'.format(loss2))
            break

    ## prior
    anchor_api = MetaAnchor(
        self.prior,
        self.prior_functions,
        weights_priors,
        image=nim,
    )

    sol, y = rwsegment.segment(
        nim,
        anchor_api,
        seeds=self.seeds,
        weight_function=weight_function,
        **self.rwparams_inf
    )

    ## compute Dice coefficient
    dice = compute_dice_coef(sol, seg, labelset=self.labelset)
    logger.info('Dice coefficients: \n{}'.format(dice))

    ## objective
    en_rw = rwsegment.energy_rw(
        nim, y,
        seeds=self.seeds,
        weight_function=weight_function,
        **self.rwparams_inf)
    en_anchor = rwsegment.energy_anchor(
        nim, y, anchor_api,
        seeds=self.seeds,
        **self.rwparams_inf)
    obj = en_rw + en_anchor
    logger.info('Objective = {:.3}'.format(obj))

    ## compute losses
    z = seg.ravel() == np.c_[self.labelset]
    mask = self.seeds < 0
    flatmask = mask.ravel() * np.ones((len(self.labelset), 1))

    ## loss 0: 1 - Dice(y,z)
    loss0 = loss_functions.ideal_loss(z, y, mask=flatmask)
    logger.info('Tloss = {}'.format(loss0))
    ## loss 1: squared difference with ztilde
    loss1 = loss_functions.anchor_loss(z, y, mask=flatmask)
    logger.info('SDloss = {}'.format(loss1))
    ## loss 2: laplacian loss
    loss2 = loss_functions.laplacian_loss(z, y, mask=flatmask)
    logger.info('LAPloss = {}'.format(loss2))
    ## loss 3: linear loss
    loss3 = loss_functions.linear_loss(z, y, mask=flatmask)
    logger.info('LINloss = {}'.format(loss3))

    ## saving
    if self.debug:
        pass
    elif self.isroot:
        outdir = self.dir_inf + test_dir
        logger.info('saving data in: {}'.format(outdir))
        if not os.path.isdir(outdir):
            os.makedirs(outdir)

        # io_analyze.save(outdir + 'im.hdr', im.astype(np.int32))
        # np.save(outdir + 'y.npy', y)
        # io_analyze.save(outdir + 'sol.hdr', sol.astype(np.int32))
        np.savetxt(outdir + 'objective.txt', [obj])
        np.savetxt(
            outdir + 'dice.txt',
            np.c_[dice.keys(), dice.values()], fmt='%d %f')

        f = open(outdir + 'losses.txt', 'w')
        f.write('ideal_loss\t{}\n'.format(loss0))
        f.write('anchor_loss\t{}\n'.format(loss1))
        f.write('laplacian_loss\t{}\n'.format(loss2))
        f.close()
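## ----------------------------------------------------------------------------
## Illustrative sketch (not part of the original module): compute_dice_coef is
## imported from elsewhere. A minimal per-label Dice on hard label maps could
## look like the function below; the name and the dict-of-label-to-Dice return
## type are assumptions, and numpy is assumed to be imported as np.
def _example_dice_per_label(sol, seg, labelset):
    """Toy per-label Dice; sol and seg are integer label arrays of equal shape."""
    dice = {}
    for label in labelset:
        a = (sol == label)
        b = (seg == label)
        denom = float(a.sum() + b.sum())
        dice[label] = 2.0 * np.logical_and(a, b).sum() / denom if denom > 0 else 1.0
    return dice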
def process_sample(self, test, fold=None):
    ## get prior
    prior, mask = load_or_compute_prior_and_mask(
        test, fold=fold, force_recompute=self.force_recompute_prior)
    seeds = (-1) * mask

    ## load image
    file_name = config.dir_reg + test + 'gray.hdr'
    logger.info('segmenting data: {}'.format(file_name))
    im = io_analyze.load(file_name)

    ## load ground truth and restrict it to the label set
    file_gt = config.dir_reg + test + 'seg.hdr'
    seg = io_analyze.load(file_gt)
    seg.flat[~np.in1d(seg, self.labelset)] = self.labelset[0]

    ## normalize image
    nim = im / np.std(im)

    ## init anchor_api
    anchor_api = MetaAnchor(
        prior=prior,
        prior_models=self.prior_models,
        prior_weights=self.prior_weights,
        image=nim,
    )

    ## start segmenting
    # import ipdb; ipdb.set_trace()
    sol, y = rwsegment.segment(
        nim,
        anchor_api,
        seeds=seeds,
        labelset=self.labelset,
        weight_function=self.weight_function,
        **self.params
    )

    ## compute losses
    z = seg.ravel() == np.c_[self.labelset]
    flatmask = mask.ravel() * np.ones((len(self.labelset), 1))

    ## loss 0: 1 - Dice(y,z)
    loss0 = loss_functions.ideal_loss(z, y, mask=flatmask)
    logger.info('Tloss = {}'.format(loss0))
    ## loss 1: squared difference with ztilde
    loss1 = loss_functions.anchor_loss(z, y, mask=flatmask)
    logger.info('SDloss = {}'.format(loss1))
    ## loss 2: laplacian loss
    loss2 = loss_functions.laplacian_loss(z, y, mask=flatmask)
    logger.info('LAPloss = {}'.format(loss2))
    ## loss 3: linear loss
    loss3 = loss_functions.linear_loss(z, y, mask=flatmask)
    logger.info('LINloss = {}'.format(loss3))

    ## compute Dice coefficient per label
    dice = compute_dice_coef(sol, seg, labelset=self.labelset)
    logger.info('Dice: {}'.format(dice))

    ## saving
    if not config.debug:
        if fold is not None:
            test_name = 'f{}_{}'.format(fold[0][:2], test)
        else:
            test_name = test
        outdir = config.dir_seg + \
            '/{}/{}'.format(self.model_name, test_name)
        logger.info('saving data in: {}'.format(outdir))
        if not os.path.isdir(outdir):
            os.makedirs(outdir)

        f = open(outdir + 'losses.txt', 'w')
        f.write('ideal_loss\t{}\n'.format(loss0))
        f.write('anchor_loss\t{}\n'.format(loss1))
        f.write('laplacian_loss\t{}\n'.format(loss2))
        f.close()

        io_analyze.save(outdir + 'sol.hdr', sol.astype(np.int32))
        np.savetxt(
            outdir + 'dice.txt',
            np.c_[dice.keys(), dice.values()], fmt='%d %.8f')
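## ----------------------------------------------------------------------------
## Illustrative usage sketch (not part of the original module): the z / flatmask
## construction and the four loss calls above mirror compute_losses defined
## earlier in this section. On a toy 2x2 segmentation it could be exercised as
## below (assumes numpy imported as np and compute_losses importable here; the
## posterior y is faked from the ground truth).
def _example_compute_losses_usage():
    labelset = np.array([0, 1, 2])
    seg = np.array([[0, 1], [2, 1]])                        # toy ground-truth labels
    mask = np.array([[True, True], [True, False]])          # toy unseeded-pixel mask
    z = seg.ravel() == np.c_[labelset]                      # (nlabel, npixel) one-hot
    flatmask = mask.ravel() * np.ones((len(labelset), 1))   # mask repeated per label
    y = z.astype(float)                                     # stand-in for the segmentation posterior
    return compute_losses(z, y, flatmask)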