Example #1
def train_irs(data_list, dataset):
    logthis("Train LGG IRS Started")
    irs = IntensityRangeStandardization()
    # Five files per case (presumably the four modalities plus a ground-truth volume).
    imgcnt = len(data_list[hl[dataset]]) // 5
    for i in range(imgcnt):
        for mod in range(MODCNT):
            curimg = get_img(ORIG_READ_PATH, data_list[hl[dataset]][i, mod])
            # Train on foreground (nonzero) voxels only.
            irs = irs.train([curimg[curimg > 0]])
        print("\rIRS Train", i + 1, "/", imgcnt, end="")
    # Persist the trained model (a pickle, despite the .txt extension).
    with open(os.path.join(WRITE_PATH, "intensitymodel.txt"), 'wb') as f:
        pickle.dump(irs, f)
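Applying the model saved above would mirror the training loop. A minimal sketch, reusing the same hypothetical helpers and globals from this example (get_img, hl, MODCNT, ORIG_READ_PATH, WRITE_PATH) and assuming, as in training, that nonzero voxels mark the foreground:

def apply_irs(data_list, dataset):
    # Reload the model pickled by train_irs above.
    with open(os.path.join(WRITE_PATH, "intensitymodel.txt"), 'rb') as f:
        irs = pickle.load(f)
    imgcnt = len(data_list[hl[dataset]]) // 5
    for i in range(imgcnt):
        for mod in range(MODCNT):
            curimg = get_img(ORIG_READ_PATH, data_list[hl[dataset]][i, mod])
            mask = curimg > 0  # foreground only, matching the training step
            curimg[mask] = irs.transform(curimg[mask])
            # ... write curimg back out as required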
def _percentilemodelstandardisation(trainingfiles, brainmaskfiles, destfiles, destmodel):
    r"""
    Train an intensity standardisation model and apply it. All values outside of the
    brain mask are set to zero.
    
    Parameters
    ----------
    trainingfiles : sequence of strings
        All images to use for training and to which subsequently apply the trained model.
    brainmaskfiles : sequence of strings
        The brain masks corresponding to ``trainingfiles``.
    destfiles : sequence of strings
        The intensity-standardised target locations corresponding to ``trainingfiles``.
    destmodel : string
        The target model location.
    """
    # check arguments
    if not len(trainingfiles) == len(brainmaskfiles):
        raise ValueError('The number of supplied trainingfiles must be equal to the number of brainmaskfiles.')
    elif not len(trainingfiles) == len(destfiles):
        raise ValueError('The number of supplied trainingfiles must be equal to the number of destfiles.')
    
    # loading input images (as image, header pairs)
    images = []
    headers = []
    for image_name in trainingfiles:
        i, h = load(image_name)
        images.append(i)
        headers.append(h)
        
    # loading brainmasks
    masks = [load(mask_name)[0].astype(bool) for mask_name in brainmaskfiles]  # numpy.bool was removed in NumPy 1.24; use the builtin
        
    # train the model
    irs = IntensityRangeStandardization()
    trained_model, transformed_images = irs.train_transform([i[m] for i, m in zip(images, masks)])
    
    # condense outliers in the image (extreme peak values at both end-points of the histogram)
    transformed_images = [_condense(i) for i in transformed_images]
    
    # saving the model
    with open(destmodel, 'wb') as f:
        pickle.dump(trained_model, f)
    
    # save the transformed images
    for ti, i, m, h, dest in zip(transformed_images, images, masks, headers, destfiles):
        i[~m] = 0
        i[m] = ti
        save(i, dest, h)
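The _condense helper is referenced above but not defined in this snippet. A plausible minimal sketch that clips the extreme peak values at both end-points of the histogram to percentile bounds (an assumption, not the original implementation):

import numpy

def _condense(img, percentiles=(1, 99)):
    # Hypothetical sketch: clip extreme outlier intensities at both
    # histogram end-points to the given percentile bounds.
    lower, upper = numpy.percentile(img, percentiles)
    return numpy.clip(img, lower, upper)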
def main():
    args = getArguments(getParser())

    # prepare logger
    logger = Logger.getInstance()
    if args.debug: logger.setLevel(logging.DEBUG)
    elif args.verbose: logger.setLevel(logging.INFO)
    
    # loading input images (as image, header pairs)
    images = []
    headers = []
    for image_name in args.images:
        i, h = load(image_name)
        images.append(i)
        headers.append(h)
    
    # loading binary foreground masks if supplied, else create masks from threshold value
    if args.masks:
        masks = [load(mask_name)[0].astype(bool) for mask_name in args.masks]  # numpy.bool was removed in NumPy 1.24
    else:
        masks = [i > args.threshold for i in images]
    
    # if in application mode, load the supplied model and apply it to the images
    if args.lmodel:
        logger.info('Loading the model and transforming images...')
        with open(args.lmodel, 'rb') as f:  # pickles must be read in binary mode
            trained_model = pickle.load(f)
            if not isinstance(trained_model, IntensityRangeStandardization):
                raise ArgumentError('{} does not seem to be a valid pickled instance of an IntensityRangeStandardization object'.format(args.lmodel))
            # note: surpress_mapping_check is the parameter's actual (misspelled) name in MedPy
            transformed_images = [trained_model.transform(i[m], surpress_mapping_check=args.ignore) for i, m in zip(images, masks)]
            
    # if in training mode, train the model, apply it to the images and save it
    else:
        logger.info('Training the average intensity model...')
        irs = IntensityRangeStandardization()
        trained_model, transformed_images = irs.train_transform([i[m] for i, m in zip(images, masks)], surpress_mapping_check=args.ignore)
        logger.info('Saving the trained model as {}...'.format(args.smodel))
        with open(args.smodel, 'wb') as f:
            pickle.dump(trained_model, f)
                
    # save the transformed images
    if args.simages:
        logger.info('Saving intensity transformed images to {}...'.format(args.simages))
        for ti, i, m, h, image_name in zip(transformed_images, images, masks, headers, args.images):
            i[m] = ti
            save(i, '{}/{}'.format(args.simages, image_name.split('/')[-1]), h, args.force)
    
    logger.info('Terminated.')
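For the same train/save/load round trip without the CLI scaffolding, here is a minimal self-contained sketch against the documented MedPy API, using synthetic data (the file name model.pkl is arbitrary):

import pickle

import numpy
from medpy.filter import IntensityRangeStandardization

# Train on a few synthetic images, using only foreground (nonzero) voxels.
images = [numpy.random.rand(64, 64) * 100 for _ in range(5)]
irs, transformed = IntensityRangeStandardization().train_transform(
    [i[i > 0] for i in images])

# Persist the trained model ...
with open('model.pkl', 'wb') as f:
    pickle.dump(irs, f)

# ... and reload it later to standardise a new image's foreground.
with open('model.pkl', 'rb') as f:
    irs = pickle.load(f)
new_image = numpy.random.rand(64, 64) * 100
standardised = irs.transform(new_image[new_image > 0],
                             surpress_mapping_check=True)  # mirrors args.ignore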
Example #5
def train_IRS():
    # Memory-map the preprocessed HL volumes; the last axis indexes the modality.
    hl_data = np.memmap(filename=HDD_OUTPUT_PATH + "hl_orig.dat",
                        dtype=np.float32,
                        mode="r").reshape(-1, SHAPE[0], SHAPE[1], SHAPE[2],
                                          SHAPE[3])

    logthis("HL IRS training started!")
    irs = IntensityRangeStandardization()
    for cur_cnt in range(hl_data.shape[0]):
        for mod_cnt in range(MOD_CNT):
            curmod = hl_data[cur_cnt, ..., mod_cnt]
            # Train on foreground (nonzero) voxels only.
            irs = irs.train([curmod[curmod > 0]])
        print("\rHL", cur_cnt, end="")
    with open(os.path.join(HDD_OUTPUT_PATH, "hl_irs.dat"), 'wb') as f:
        pickle.dump(irs, f)
    logthis("HL IRS training ended!")
def test_ValidInitializationCases(self):
    """Test valid initialization cases."""
    IntensityRangeStandardization()
    IntensityRangeStandardization(landmarkp=IntensityRangeStandardization.L2)
    IntensityRangeStandardization(landmarkp=IntensityRangeStandardization.L3)
    IntensityRangeStandardization(landmarkp=IntensityRangeStandardization.L4)
    IntensityRangeStandardization(landmarkp=(50,))
    IntensityRangeStandardization(landmarkp=[50])
    IntensityRangeStandardization(landmarkp=numpy.asarray([50]))
Example #7
def get_trained_irs(
    data_list,
    output_path,
    cutoffp=(1, 20),
    landmarkp=[2, 3, 4, 5, 6, 8, 10, 12, 14, 15, 16, 17, 18, 19]
):  # Default : cutoffp = (1, 99), landmarkp = [10, 20, 30, 40, 50, 60, 70, 90]
    flair_irs = IntensityRangeStandardization(cutoffp=cutoffp,
                                              landmarkp=landmarkp)
    t1_irs = IntensityRangeStandardization(cutoffp=cutoffp,
                                           landmarkp=landmarkp)
    t1c_irs = IntensityRangeStandardization(cutoffp=cutoffp,
                                            landmarkp=landmarkp)
    t2_irs = IntensityRangeStandardization(cutoffp=cutoffp,
                                           landmarkp=landmarkp)
    irs_list = [flair_irs, t1_irs, t1c_irs, t2_irs]
    for dataClass in ['HGG', 'LGG', 'TEST']:
        mods = MODS[dataClass]
        total = len(data_list[DATATYPE[dataClass]]) // mods
        if mods == 5:
            # The fifth channel is the OT ground truth, which needs no
            # intensity standardisation (see Example #11 below).
            mods = 4
        fp = np.memmap(output_path + dataClass + '_orig.dat',
                       dtype=np.float32,
                       mode='r',
                       shape=(total, mods, SHAPE[0], SHAPE[1], SHAPE[2]))
        print(get_time() + ': %s StandardIntensityModel training STARTED' %
              (dataClass))
        for mod in range(mods):
            images = fp[:, mod, :, :, :]
            irs_list[mod] = irs_train_image(images,
                                            irs_list[mod],
                                            train_mode=True)
        print(get_time() + ': %s StandardIntensityModel training ENDED' %
              (dataClass))
    with open(output_path + 'Flair_irs.pkl', 'wb') as f1:
        pickle.dump(irs_list[0], f1)
    with open(output_path + 'T1_irs.pkl', 'wb') as f2:
        pickle.dump(irs_list[1], f2)
    with open(output_path + 'T1c_irs.pkl', 'wb') as f3:
        pickle.dump(irs_list[2], f3)
    with open(output_path + 'T2_irs.pkl', 'wb') as f4:
        pickle.dump(irs_list[3], f4)
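The irs_train_image helper is not shown in this example. A plausible sketch of what it might do, assuming it simply feeds the nonzero voxels of each volume to the model one at a time (an assumption, not the original helper):

def irs_train_image(images, irs, train_mode=True):
    # Hypothetical helper: train the IRS model on the foreground
    # (nonzero) voxels of each volume and return the updated model.
    if not train_mode:
        return irs
    for img in images:
        foreground = img[img > 0]
        if foreground.size:
            irs = irs.train([foreground])
    return irs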
def test_Method(self):
    """Test the normal functioning of the method."""
    # test training with good and bad images
    irs = IntensityRangeStandardization()
    irs.train(TestIntensityRangeStandardization.good_trainingset + [TestIntensityRangeStandardization.bad_image])
    irs.transform(TestIntensityRangeStandardization.bad_image)

    # test equal methods
    irs = IntensityRangeStandardization()
    irs_ = irs.train(TestIntensityRangeStandardization.good_trainingset)
    self.assertEqual(irs, irs_)

    irs = IntensityRangeStandardization()
    irs.train(TestIntensityRangeStandardization.good_trainingset)
    timages = []
    for i in TestIntensityRangeStandardization.good_trainingset:
        timages.append(irs.transform(i))

    irs = IntensityRangeStandardization()
    irs_, timages_ = irs.train_transform(TestIntensityRangeStandardization.good_trainingset)

    self.assertEqual(irs, irs_, 'instance returned by transform() method is not the same as the one initialized')
    for ti, ti_ in zip(timages, timages_):
        numpy.testing.assert_allclose(ti, ti_, err_msg='train_transform() failed to produce the same results as transform()')

    # test pickling
    irs = IntensityRangeStandardization()
    irs_ = irs.train(TestIntensityRangeStandardization.good_trainingset)
    timages = []
    for i in TestIntensityRangeStandardization.good_trainingset:
        timages.append(irs.transform(i))

    with tempfile.TemporaryFile() as f:
        pickle.dump(irs, f)
        f.seek(0, 0)
        irs_ = pickle.load(f)

    timages_ = []
    for i in TestIntensityRangeStandardization.good_trainingset:
        timages_.append(irs_.transform(i))

    for ti, ti_ in zip(timages, timages_):
        numpy.testing.assert_allclose(ti, ti_, err_msg="pickling failed to preserve the instance's model")
def test_MethodLimits(self):
    """Test the limits of the method."""
    irs = IntensityRangeStandardization()
    irs.train(TestIntensityRangeStandardization.good_trainingset)
    self.assertRaises(InformationLossException, irs.transform, image=TestIntensityRangeStandardization.bad_image)

    irs = IntensityRangeStandardization()
    irs.train(TestIntensityRangeStandardization.good_trainingset)
    self.assertRaises(SingleIntensityAccumulationError, irs.transform, image=TestIntensityRangeStandardization.uniform_image)

    irs = IntensityRangeStandardization()
    irs.train(TestIntensityRangeStandardization.good_trainingset)
    self.assertRaises(SingleIntensityAccumulationError, irs.transform, image=TestIntensityRangeStandardization.single_intensity_image)

    irs = IntensityRangeStandardization()
    self.assertRaises(SingleIntensityAccumulationError, irs.train, images=[TestIntensityRangeStandardization.uniform_image] * 10)

    irs = IntensityRangeStandardization()
    self.assertRaises(SingleIntensityAccumulationError, irs.train, images=[TestIntensityRangeStandardization.single_intensity_image] * 10)
def test_InvalidUseCases(self):
    """Test invalid use-cases."""
    irs = IntensityRangeStandardization()
    self.assertRaises(UntrainedException, irs.transform, image=TestIntensityRangeStandardization.good_image)
Example #11
def get_trained_irs(
    data_list,
    output_path,
    cutoffp=(1, 20),
    landmarkp=[2, 3, 4, 5, 6, 8, 10, 12, 14, 15, 16, 17, 18, 19]
):  # Default : cutoffp = (1, 99), landmarkp = [10, 20, 30, 40, 50, 60, 70, 90]
    flair_irs = IntensityRangeStandardization(cutoffp=cutoffp,
                                              landmarkp=landmarkp)
    t1_irs = IntensityRangeStandardization(cutoffp=cutoffp,
                                           landmarkp=landmarkp)
    t1c_irs = IntensityRangeStandardization(cutoffp=cutoffp,
                                            landmarkp=landmarkp)
    t2_irs = IntensityRangeStandardization(cutoffp=cutoffp,
                                           landmarkp=landmarkp)

    for dataClass in ['HGG', 'LGG', 'TEST']:
        mods = MODS[dataClass]
        total = len(data_list[DATATYPE[dataClass]]) // mods
        if mods == 5:  # OT (ground-truth) data needs no intensity preprocessing, so handle it separately
            mods = 4
        fp = np.memmap(output_path + dataClass + '_orig.dat',
                       dtype=np.float32,
                       mode='r',
                       shape=(total, mods, SHAPE[0], SHAPE[1], SHAPE[2]))
        print('\r',
              get_time() + ': training irs with {} images'.format(dataClass))
        # If a previously trained standard intensity space already exists, load it and continue training from it.
        for mod in range(mods):
            images = fp[:, mod, :, :, :]
            if mod == MOD['MR_Flair']:
                flair_irs = flair_irs.train([images[images > 0]])
            elif mod == MOD['MR_T1']:
                t1_irs = t1_irs.train([images[images > 0]])
            elif mod == MOD['MR_T1c']:
                t1c_irs = t1c_irs.train([images[images > 0]])
            elif mod == MOD['MR_T2']:
                t2_irs = t2_irs.train([images[images > 0]])
    with open(output_path + 'Flair_irs.pkl', 'wb') as f1:
        pickle.dump(flair_irs, f1)
    with open(output_path + 'T1_irs.pkl', 'wb') as f2:
        pickle.dump(t1_irs, f2)
    with open(output_path + 'T1c_irs.pkl', 'wb') as f3:
        pickle.dump(t1c_irs, f3)
    with open(output_path + 'T2_irs.pkl', 'wb') as f4:
        pickle.dump(t2_irs, f4)
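A matching loader for the four per-modality pickles written above; the helper names are hypothetical, and foreground is again taken to be the nonzero voxels:

import pickle

def load_irs_models(output_path):
    # Reload the four per-modality models pickled by get_trained_irs.
    models = {}
    for name in ('Flair', 'T1', 'T1c', 'T2'):
        with open(output_path + name + '_irs.pkl', 'rb') as f:
            models[name] = pickle.load(f)
    return models

def standardise_volume(volume, irs):
    # Standardise the foreground (nonzero) voxels in place.
    mask = volume > 0
    volume[mask] = irs.transform(volume[mask])
    return volume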