Example 1
    def __init__(self, conf):

        # -----------   define model --------------- #
        build_model = PreBuildConverter(in_channels=1, out_classes=5, add_soft_max=True, pretrained=conf.pre_train)
        self.models = []
        for _ in range(conf.n_models):
            self.models.append(build_model.get_by_str(conf.net_mode).to(conf.device))
        print('{} {} models generated'.format(conf.n_models, conf.net_mode))

        # ------------  define params -------------- #
        self.milestones = conf.milestones
        self.writer = SummaryWriter(logdir=conf.log_path)
        self.step = 0
        self.epoch = 0
        print('model heads generated')

        self.get_opt(conf)
        """
        paras_only_bn = []
        paras_wo_bn = []
        for model in self.models:
            paras_only_bn_, paras_wo_bn_ = separate_bn_paras(model)
            paras_only_bn.append(paras_only_bn_)
            paras_wo_bn.append(paras_wo_bn_)

        self.optimizer = optim.SGD([
                                       {'params': paras_wo_bn[model_num],
                                        'weight_decay': 5e-4}
                                       for model_num in range(conf.n_models)
                                   ] + [
                                       {'params': paras_only_bn[model_num]}
                                       for model_num in range(conf.n_models)
                                   ], lr=conf.lr, momentum=conf.momentum)
        """
        print(self.optimizer)

        # ------------  define loaders -------------- #

        self.loader = CBIS_Dataloader(n_patches=2, conf=conf,
                                      og_resize=(1152, 896), patch_size=225, roi_sampling_ratio=.5)

        self.eval_loader = CBIS_Dataloader(n_patches=2, conf=conf,
                                           og_resize=(1152, 896), patch_size=225, roi_sampling_ratio=.5)

        print('optimizers generated')
        self.running_loss = 0.
        self.running_pearson_loss = 0.
        self.running_ensemble_loss = 0.

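        # push running losses to the SummaryWriter roughly ten times per epoch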
        self.board_loss_every = max(self.loader.train_len // 10, 1)
        self.evaluate_every = conf.evaluate_every
        self.save_every = max(conf.epoch_per_save, 1)
        assert self.save_every >= self.evaluate_every
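
The commented-out optimizer block above separates BatchNorm parameters so they skip weight decay, a common trick since decaying BN scale/shift parameters tends to hurt. A minimal sketch of one way to do the split, with a hypothetical split_bn_params standing in for the project's separate_bn_paras:

    from torch import nn, optim

    def split_bn_params(model):
        # BN parameters in one list, everything else in another
        bn, rest = [], []
        for m in model.modules():
            params = list(m.parameters(recurse=False))
            if isinstance(m, nn.modules.batchnorm._BatchNorm):
                bn += params
            else:
                rest += params
        return bn, rest

    model = nn.Sequential(nn.Conv2d(1, 8, 3), nn.BatchNorm2d(8))
    bn_params, other_params = split_bn_params(model)
    optimizer = optim.SGD(
        [{'params': other_params, 'weight_decay': 5e-4},
         {'params': bn_params}],  # BN group keeps the default weight_decay of 0
        lr=0.1, momentum=0.9)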
Example 2
    def __init__(self, conf, inference=False):

        # -----------   define model --------------- #
        self.n_classes = 10
        build_model = PreBuildConverter(in_channels=3, out_classes=self.n_classes, add_func=True, softmax=True,
                                        pretrained=conf.pre_train)
        self.models = []
        for _ in range(conf.n_models):
            self.models.append(build_model.get_by_str(conf.net_mode).to(conf.device))
        print('{} {} models generated'.format(conf.n_models, conf.net_mode))

        # ------------  define params -------------- #
        if not inference:
            self.milestones = conf.milestones
            if not os.path.exists(conf.log_path):
                os.mkdir(conf.log_path)
            if not os.path.exists(conf.save_path):
                os.mkdir(conf.save_path)
            self.writer = SummaryWriter(logdir=conf.log_path)
            self.step = 0
            self.epoch = 0
            print('model heads generated')

            self.get_opt(conf)
            print(self.optimizer)

        # ------------  define loaders -------------- #

        dloader_args = {
            'batch_size': conf.batch_size,
            'pin_memory': True,
            'num_workers': conf.num_workers,
            'drop_last': False,
        }
        self.ds = datasets.CIFAR10('data', transform=trans.ToTensor(), download=True)  # fetch the data on first run
        self.loader = DataLoader(self.ds, **dloader_args)
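        # evaluation draws a fresh random 10% subsample (with replacement) on every pass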
        eval_sampler = RandomSampler(self.ds, replacement=True, num_samples=len(self.ds) // 10)
        self.eval_loader = DataLoader(self.ds, sampler=eval_sampler, **dloader_args)

        self.cka_loss = conf.cka_loss(self.models, conf.cka_layers) if conf.cka else None
        if not inference:
            print('optimizers generated')
            self.running_loss = 0.
            self.running_pearson_loss = 0.
            self.running_ensemble_loss = 0.
            self.running_cka_loss = 0.

            self.board_loss_every = max(len(self.loader) // 4, 1)
            self.evaluate_every = conf.epoch_per_eval
            self.save_every = max(conf.epoch_per_save, 1)
            assert self.save_every >= self.evaluate_every
Example 3
    def __init__(self,
                 model_path_list=None,
                 n_classes=0,
                 force=0,
                 dst_trans=None,
                 lr=0.01,
                 iterations=300):
        # model skeleton
        self.devices = 3  #[0,1,2,3]
        build_model = PreBuildConverter(in_channels=3,
                                        out_classes=n_classes,
                                        add_func=True,
                                        softmax=False,
                                        pretrained=False)

        # load weights
        def load_fix(target_path):
            a = torch.load(
                target_path,
                map_location=lambda storage, loc: storage.cuda(self.devices))
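            # drop the 'module.' prefix that nn.DataParallel adds to state_dict keys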
            fixed_a = {k.split('module.')[-1]: a[k] for k in a}
            torch.save(fixed_a, target_path)

        self.models = []
        for target_path in model_path_list:
            model = build_model.get_by_str('densenet121').cuda(self.devices)
            load_fix(target_path)
            model.load_state_dict(torch.load(target_path))
            model.eval()
            self.models.append(model)

        # process params
        self.force_regeneration = force
        self.dst_trans = dst_trans
        self.lr = lr
        self.it_num = iterations
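
The load_fix helper above exists because torch.nn.DataParallel registers the wrapped network under the attribute module, so every state_dict key gains a 'module.' prefix that a bare model cannot load. A minimal round-trip sketch of the issue:

    from torch import nn

    net = nn.Linear(4, 2)
    wrapped = nn.DataParallel(net)        # keys become 'module.weight', 'module.bias'
    state = {k.split('module.')[-1]: v for k, v in wrapped.state_dict().items()}
    net.load_state_dict(state)            # loads cleanly once the prefix is stripped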
Example 4
    def __init__(self, conf, inference=False):

        # ------------  define dataset -------------- #
        if conf.dat_mode == 'nih':
            self.ds_train, self.ds_test = get_nih()
        elif conf.dat_mode == 'isic':
            dat_args = {'ood': conf.ood, 'with_rank': conf.rank}
            self.ds_train, self.ds_test = get_isic(**dat_args)
        elif conf.dat_mode.lower() == 'chexpert':
            chexpert_args = {
                'No_finding': conf.use_clean,
                'parenchymal': conf.use_int,
                'extraparenchymal': conf.use_ext,
                'limit_out_labels': conf.ood_limit,
                'with_rank': conf.rank
            }
            self.ds_train, self.ds_test, self.out_in_ds, self.out_out_ds = get_chexpert(
                **chexpert_args)
            self.ds_morph = get_chexpert(morph_load=conf.morph,
                                         **chexpert_args)
        else:
            raise ValueError('no such dataset')

        self.n_classes = len(self.ds_train.label_names)

        # ------------  define loaders -------------- #
        dloader_args = {
            'batch_size': conf.batch_size,
            'pin_memory': True,
            'num_workers': conf.num_workers,
            'drop_last': False,
        }
        self.loader = DataLoader(self.ds_train, **dloader_args)
        eval_sampler = RandomSampler(self.ds_test,
                                     replacement=True,
                                     num_samples=len(self.ds_test) // 5)
        self.eval_loader = DataLoader(self.ds_test,
                                      sampler=eval_sampler,
                                      **dloader_args)
        self.morph_loader = [] if not conf.morph else DataLoader(
            self.ds_morph, **dloader_args)

        # -----------   define models --------------- #
        build_model = PreBuildConverter(
            in_channels=3,
            out_classes=self.n_classes,
            add_rank=conf.rank,  # add_func=True, softmax=False (sigmoid)
            rank_out_features=(None if not conf.rank
                               else self.ds_train.n_rank_labels),
            pretrained=conf.pre_train,
            cat=conf.cat)
        self.models = []
        for _ in range(conf.n_models):
            self.models.append(
                build_model.get_by_str(conf.net_mode).to(conf.device))
        print('{} {} models generated'.format(conf.n_models, conf.net_mode))

        # ------------  define params -------------- #
        if not inference:
            self.milestones = conf.milestones
            if not os.path.exists(conf.log_path):
                os.mkdir(conf.log_path)
            if not os.path.exists(conf.save_path):
                os.mkdir(conf.save_path)

            self.writer = SummaryWriter(logdir=conf.log_path)
            if conf.dat_mode.lower() == 'chexpert':
                tables = [
                    self.ds_train.table, self.ds_test.table,
                    None if self.out_in_ds is None else self.out_in_ds.table,
                    self.out_out_ds.table
                ]
                names = ['ds_train', 'ds_test', 'out_in', 'out_out']
                for name, table in zip(names, tables):
                    if table is None:
                        continue
                    table.to_csv(os.path.join(conf.save_path, name))

            self.step = 0
            self.epoch = 0

            self.get_opt(conf)
            print(self.optimizer)
            print('optimizers generated')

            # ------------  define loss -------------- #
            self.cka_loss = conf.cka_loss(
                self.models, conf.cka_layers) if conf.cka else None
            self.running_loss = 0.
            self.running_pearson_loss = 0.
            self.running_ensemble_loss = 0.
            self.running_cka_loss = 0.
            self.running_ncl_loss = 0.
            self.running_morph_loss = 0.
            self.running_rank_loss = 0.
            self.running_rank_pearson_loss = 0.

            # ------------  define save/log times -------------- #
            self.board_loss_every = max(len(self.loader) // 4, 1)
            self.evaluate_every = conf.epoch_per_eval
            self.save_every = max(conf.epoch_per_save, 1)
            assert self.save_every >= self.evaluate_every
Example 5
    def __init__(self, conf, inference=False):

        # -----------   define model --------------- #
        self.n_classes = conf.n_shapes * conf.n_colors
        if conf.shape_only:
            self.n_classes = conf.n_shapes
        elif conf.color_only:
            self.n_classes = conf.n_colors
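        # one extra output class is reserved for background unless no_bkg is set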
        self.n_classes = self.n_classes + (0 if conf.no_bkg else 1)

        build_model = PreBuildConverter(in_channels=3,
                                        out_classes=self.n_classes,
                                        add_soft_max=True,
                                        pretrained=conf.pre_train,
                                        half=conf.half)
        self.models = []
        for _ in range(conf.n_models):
            self.models.append(
                build_model.get_by_str(conf.net_mode).to(conf.device))
        print('{} {} models generated'.format(conf.n_models, conf.net_mode))

        # ------------  define params -------------- #
        if not inference:
            self.milestones = conf.milestones
            if not os.path.exists(conf.log_path):
                os.mkdir(conf.log_path)
            if not os.path.exists(conf.save_path):
                os.mkdir(conf.save_path)
            self.writer = SummaryWriter(logdir=conf.log_path)
            self.step = 0
            self.epoch = 0
            print('model heads generated')

            self.get_opt(conf)
            print(self.optimizer)

        # ------------  define loaders -------------- #

        dloader_args = {
            'batch_size': conf.batch_size,
            'pin_memory': True,
            'num_workers': conf.num_workers,
            'drop_last': False,
        }
        self.ds = ShapeDataSet(prob_bkg=conf.bkg_prob,
                               no_bkg=conf.no_bkg,
                               shape_only=conf.shape_only,
                               color_only=conf.color_only,
                               n_shapes=conf.n_shapes,
                               n_colors=conf.n_colors)
        self.loader = DataLoader(self.ds, **dloader_args)
        eval_sampler = RandomSampler(self.ds,
                                     replacement=True,
                                     num_samples=len(self.ds) // 10)
        self.eval_loader = DataLoader(self.ds,
                                      sampler=eval_sampler,
                                      **dloader_args)

        if not inference:
            print('optimizers generated')
            self.running_loss = 0.
            self.running_pearson_loss = 0.
            self.running_ensemble_loss = 0.

            self.board_loss_every = max(len(self.loader) // 4, 1)
            self.evaluate_every = conf.evaluate_every
            self.save_every = max(conf.epoch_per_save, 1)
            assert self.save_every >= self.evaluate_every
Example 6
    def __init__(self, conf, inference=False):
        print(conf)

        # -----------   define model --------------- #
        build_model = PreBuildConverter(in_channels=1,
                                        out_classes=2,
                                        add_soft_max=True)
        self.models = []
        for _ in range(conf.n_models):
            self.models.append(
                build_model.get_by_str(conf.net_mode).to(conf.device))
        print('{} {} models generated'.format(conf.n_models, conf.net_mode))

        # ------------  define loaders -------------- #
        dloader_args = {
            'batch_size': conf.batch_size,
            'pin_memory': True,
            'num_workers': conf.num_workers,
            'drop_last': False,  # check that it fits in mem
            'shuffle': True
        }

        grey_loader = partial(cv2.imread, flags=cv2.IMREAD_GRAYSCALE)
        file_ext = ('.png', )
        im_trans = conf.im_transform
        self.dataset = DatasetFolder(conf.train_folder,
                                     extensions=file_ext,
                                     loader=grey_loader,
                                     transform=im_trans)
        self.train_loader = DataLoader(self.dataset, **dloader_args)

        self.test_ds = DatasetFolder(conf.test_folder,
                                     extensions=file_ext,
                                     loader=grey_loader,
                                     transform=im_trans)
        self.test_loader = DataLoader(self.test_ds, **dloader_args)

        if conf.morph_dir:
            self.morph_ds = DatasetFolder(conf.morph_dir,
                                          extensions=file_ext,
                                          loader=grey_loader,
                                          transform=im_trans)
            self.morph_loader = DataLoader(self.morph_ds, **dloader_args)
        else:
            self.morph_loader = []

        # ------------  define params -------------- #
        self.inference = inference
        if not inference:
            self.milestones = conf.milestones
            self.writer = SummaryWriter(conf.log_path)
            self.step = 0
            print('model heads generated')

            paras_only_bn = []
            paras_wo_bn = []
            for model in self.models:
                paras_only_bn_, paras_wo_bn_ = separate_bn_paras(model)
                paras_only_bn.append(paras_only_bn_)
                paras_wo_bn.append(paras_wo_bn_)

            self.optimizer = optim.SGD(
                [{'params': paras_wo_bn[model_num], 'weight_decay': 5e-4}
                 for model_num in range(conf.n_models)] +
                [{'params': paras_only_bn[model_num]}
                 for model_num in range(conf.n_models)],
                lr=conf.lr, momentum=conf.momentum)
            print(self.optimizer)

            print('optimizers generated')
            self.board_loss_every = max(len(self.train_loader) // 5, 1)
            self.evaluate_every = conf.evaluate_every
            self.save_every = max(conf.epoch_per_save, 1)
            assert self.save_every >= self.evaluate_every
        else:
            self.threshold = conf.threshold
Example 7
    def __init__(self, conf, inference=False):
        # ------------  define dataset -------------- #
        self.do_valid = conf.valid_size > 0
        if self.do_valid:
            self.ds_train, self.ds_test, self.final = get_isic(ood=conf.ood, with_valid=conf.valid_size)
        else:
            self.ds_train, self.final = get_isic(ood=conf.ood, with_valid=conf.valid_size, with_path=True)
            self.ds_test = self.ds_train

        self.n_classes = len(self.ds_train.classes)

        # ------------  define loaders -------------- #
        dloader_args = {
            'batch_size': conf.batch_size,
            'pin_memory': True,
            'num_workers': conf.num_workers,
            'drop_last': False,
        }
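        # StratifiedSampler is project code; presumably it keeps class proportions stable across batches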
        self.loader = DataLoader(self.ds_train, sampler=StratifiedSampler(self.ds_train), **dloader_args)

        eval_sampler = RandomSampler(self.ds_test, replacement=True, num_samples=len(self.ds_test) // 2)
        dloader_args = {
            'batch_size': conf.batch_size, #int(np.ceil(conf.batch_size / 2)),
            'pin_memory': False,
            'num_workers': conf.num_workers,
            'drop_last': False,
        }
        self.final_loader = DataLoader(self.final, **dloader_args)
        self.eval_loader = DataLoader(self.ds_test, sampler=eval_sampler, **dloader_args)
        # self.eval_loader = DataLoader(self.ds_test, **dloader_args)

        train_eval_sampler = RandomSampler(self.ds_train, replacement=True, num_samples=len(self.ds_train) // 10)
        dloader_args = {
            'batch_size': conf.batch_size, #int(np.ceil(conf.batch_size / 2)),
            'pin_memory': False,
            'num_workers': conf.num_workers,
            'drop_last': False,
        }
        self.train_eval_loader = DataLoader(self.ds_train, sampler=train_eval_sampler, **dloader_args)

        # -----------   define model --------------- #
        build_model = PreBuildConverter(in_channels=3, out_classes=self.n_classes, add_func=True, softmax=True,
                                        pretrained=conf.pre_train)
        self.models = []
        for _ in range(conf.n_models):
            self.models.append(build_model.get_by_str(conf.net_mode).to(conf.device))
        print('{} {} models generated'.format(conf.n_models, conf.net_mode))

        # ------------  define params -------------- #
        if not inference:
            # rebalance loss
            conf.ce_loss = CrossEntropyLoss(weight=self.ds_train.class_weights.to(conf.device))

            self.milestones = conf.milestones
            if not os.path.exists(conf.log_path):
                os.mkdir(conf.log_path)
            if not os.path.exists(conf.save_path):
                os.mkdir(conf.save_path)
            self.writer = SummaryWriter(logdir=conf.log_path)
            self.step = 0
            self.epoch = 0
            print('model heads generated')

            self.get_opt(conf)
            #self.scheduler = StepLR(self.optimizer, step_size=25, gamma=0.2)

            print('optimizers generated')
            self.running_loss = 0.
            self.running_pearson_loss = 0.
            self.running_ensemble_loss = 0.
            self.running_cka_loss = 0.

            self.board_loss_every = max(len(self.loader) // 2, 1)
            self.evaluate_every = conf.epoch_per_eval
            self.save_every = max(conf.epoch_per_save, 1)
            assert self.save_every >= self.evaluate_every
Example 8
    def __init__(self, conf):

        # -----------   define model --------------- #
        build_model = PreBuildConverter(in_channels=1, out_classes=5, add_soft_max=True)
        self.models = []
        for _ in range(conf.n_models):
            self.models.append(build_model.get_by_str(conf.net_mode).to(conf.device))
        print('{} {} models generated'.format(conf.n_models, conf.net_mode))

        # ------------  define params -------------- #
        self.milestones = conf.milestones
        if not os.path.exists(conf.log_path):
            os.mkdir(conf.log_path)
        rank_log_path = conf.log_path / str(conf.local_rank)
        if not os.path.exists(rank_log_path):
            os.mkdir(rank_log_path)
        self.writer = SummaryWriter(logdir=rank_log_path)
        self.step = 0
        self.epoch = 0
        print('model heads generated')

        paras_only_bn = []
        paras_wo_bn = []
        for model in self.models:
            paras_only_bn_, paras_wo_bn_ = separate_bn_paras(model)
            paras_only_bn.append(paras_only_bn_)
            paras_wo_bn.append(paras_wo_bn_)

        self.optimizer = optim.SGD(
            [{'params': paras_wo_bn[model_num], 'weight_decay': 5e-4}
             for model_num in range(conf.n_models)] +
            [{'params': paras_only_bn[model_num]}
             for model_num in range(conf.n_models)],
            lr=conf.lr, momentum=conf.momentum)
        print(self.optimizer)

        # ------------  define loaders -------------- #

        self.train_ds = CBIS_PatchDataSet_INMEM(mode='train', nb_abn=conf.n_patch, nb_bkg=conf.n_patch)
        self.test_ds = CBIS_PatchDataSet_INMEM(mode='test', nb_abn=conf.n_patch, nb_bkg=conf.n_patch)

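        # shard the data across the 4 distributed ranks (num_replicas is hard-coded to 4)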
        self.train_sampler = DistributedSampler(self.train_ds, num_replicas=4, rank=conf.local_rank)
        self.test_sampler = DistributedSampler(self.test_ds, num_replicas=4, rank=conf.local_rank)

        dloader_args = {
            'batch_size': conf.batch_size,
            'pin_memory': True,
            'num_workers': conf.num_workers,
            'drop_last': False,
        }

        self.train_loader = DataLoader(self.train_ds, sampler=self.train_sampler, **dloader_args)
        self.test_loader = DataLoader(self.test_ds, sampler=self.test_sampler, **dloader_args)

        eval_train_sampler = RandomSampler(self.train_ds, replacement=True, num_samples=len(self.train_ds) // 10)
        eval_test_sampler = RandomSampler(self.test_ds, replacement=True, num_samples=len(self.test_ds) // 2)
        self.eval_train = DataLoader(self.train_ds, sampler=eval_train_sampler, **dloader_args)
        self.eval_test = DataLoader(self.test_ds, sampler=eval_test_sampler, **dloader_args)

        print('optimizers generated')
        self.board_loss_every = max(len(self.train_loader) // 4, 1)
        self.evaluate_every = conf.evaluate_every
        self.save_every = max(conf.epoch_per_save, 1)
        assert self.save_every >= self.evaluate_every
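
One caveat with DistributedSampler that this snippet cannot show: it derives its shuffle seed from the epoch, so the training loop must call set_epoch once per epoch or every rank replays the same sample order. A sketch of the usual pattern, with a hypothetical epoch loop around the sampler and loader built above:

    for epoch in range(num_epochs):        # hypothetical epoch count
        train_sampler.set_epoch(epoch)     # reseed the per-rank shuffle
        for x, y in train_loader:
            ...                            # forward/backward as usual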
Example 9
    def __init__(self, conf, inference=False):

        # -----------   define model --------------- #
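        # binary task in type-/cancer-/single-type modes, otherwise 4 classes, plus an optional background class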
        self.n_classes = (2 if (conf.type_only or conf.cancer_only or conf.single_type) else 4) + (0 if conf.no_bkg else 1)
        build_model = PreBuildConverter(in_channels=1, out_classes=self.n_classes, add_soft_max=True,
                                        pretrained=conf.pre_train, half=conf.half)
        self.models = []
        for _ in range(conf.n_models):
            self.models.append(build_model.get_by_str(conf.net_mode).to(conf.device))
        print('{} {} models generated'.format(conf.n_models, conf.net_mode))

        # ------------  define params -------------- #
        if not inference:
            self.milestones = conf.milestones
            if not os.path.exists(conf.log_path):
                os.mkdir(conf.log_path)
            if not os.path.exists(conf.save_path):
                os.mkdir(conf.save_path)
            self.writer = SummaryWriter(logdir=conf.log_path)
            self.step = 0
            self.epoch = 0
            print('model heads generated')

            self.get_opt(conf)
            print(self.optimizer)

        # ------------  define loaders -------------- #

        self.train_ds = CBIS_PatchDataSet_INMEM(mode='train', patch_num=conf.n_patch, with_roi=conf.with_roi,
                                                prob_bkg=conf.bkg_prob, no_bkg=conf.no_bkg,
                                                single_type=conf.single_type,
                                                cancer_only=conf.cancer_only, type_only=conf.type_only)
        self.test_ds = CBIS_PatchDataSet_INMEM(mode='test', patch_num=conf.n_patch, with_roi=conf.with_roi,
                                               prob_bkg=conf.bkg_prob, no_bkg=conf.no_bkg,
                                               single_type=conf.single_type,
                                               cancer_only=conf.cancer_only, type_only=conf.type_only)

        dloader_args = {
            'batch_size': conf.batch_size,
            'pin_memory': True,
            'num_workers': conf.num_workers,
            'drop_last': False,
        }

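        # inverse-frequency sample weights: weight = len(dataset) / count(label), so rare classes are drawn more often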
        train_weights = pd.Series(self.train_ds.label_arr).value_counts()
        train_weights = (len(self.train_ds.label_arr) / pd.Series(self.train_ds.label_arr).map(train_weights)).values
        test_weights = pd.Series(self.test_ds.label_arr).value_counts()
        test_weights = (len(self.test_ds.label_arr) / pd.Series(self.test_ds.label_arr).map(test_weights)).values

        self.train_loader = DataLoader(self.train_ds,
                                       sampler=WeightedRandomSampler(train_weights, len(train_weights)), **dloader_args)
        self.test_loader = DataLoader(self.test_ds,
                                      sampler=WeightedRandomSampler(test_weights, len(test_weights)), **dloader_args)

        self.eval_train = DataLoader(self.train_ds,
                                     sampler=WeightedRandomSampler(train_weights, len(train_weights) // 10),
                                     **dloader_args)
        self.eval_test = DataLoader(self.test_ds,
                                    sampler=WeightedRandomSampler(test_weights, len(test_weights) // 2),
                                    **dloader_args)

        """
        #eval_train_sampler = RandomSampler(self.train_ds, replacement=True, num_samples=len(self.train_ds) // 10)
        #eval_test_sampler = RandomSampler(self.test_ds, replacement=True, num_samples=len(self.test_ds) // 2)
        #self.eval_train = DataLoader(self.train_ds, sampler=eval_train_sampler, **dloader_args)
        #self.eval_test = DataLoader(self.test_ds, sampler=eval_test_sampler, **dloader_args)
        """

        if not inference:
            print('optimizers generated')
            self.running_loss = 0.
            self.running_pearson_loss = 0.
            self.running_ensemble_loss = 0.

            self.board_loss_every = max(len(self.train_loader) // 4, 1)
            self.evaluate_every = conf.evaluate_every
            self.save_every = max(conf.epoch_per_save, 1)
            assert self.save_every >= self.evaluate_every