Example #1
    def epoch_valid(self, epoch, valid, reduction='avg'):
        test_time_augmentation = False
        self.model.eval()
        num_steps = len(valid)
        metrics = []
        tau = self.tau

        with torch.no_grad():
            for step, (examples, labels) in enumerate(valid):
                original_labels = labels
                if not self.is_multiclass():
                    labels = labels.argmax(dim=-1)

                batch_size = examples.size(0)

                # test-time augmentation: append horizontally flipped copies
                if self.use_test_time_augmentation and test_time_augmentation:
                    examples = torch.cat([examples, torch.flip(examples, dims=[-1])], dim=0)
                    labels = torch.cat([labels, labels], dim=0)

                logits, loss = self.model(examples, labels, tau=tau, reduction=reduction)

                # average the logits of the original and flipped views
                if self.use_test_time_augmentation and test_time_augmentation:
                    logits1, logits2 = torch.split(logits, batch_size, dim=0)
                    logits = (logits1 + logits2) / 2.0

                logits, prediction = self.activation(logits.float())
                tpr, tnr, nbac = NBAC(prediction, original_labels.float())
                if reduction == 'avg':
                    auc = AUC(logits, original_labels.float())
                else:
                    # best AUC over disjoint 16-sample chunks
                    auc = max([AUC(logits[i:i + 16], original_labels[i:i + 16].float())
                               for i in range(0, len(logits), 16)])

                score = auc if self.hyper_params['conditions']['score_type'] == 'auc' else float(nbac.detach().float())
                metrics.append({
                    'loss': loss.detach().float().cpu(),
                    'score': score,
                })

                LOGGER.debug(
                    '[valid] [%02d] [%03d/%03d] loss:%.6f AUC:%.3f NBAC:%.3f tpr:%.3f tnr:%.3f, lr:%.8f',
                    epoch, step, num_steps, loss, auc, nbac, tpr, tnr,
                    self.optimizer.get_learning_rate()
                )
            if reduction == 'avg':
                valid_loss = np.average([m['loss'] for m in metrics])
                valid_score = np.average([m['score'] for m in metrics])
            elif reduction in ['min', 'max']:
                valid_loss = np.min([m['loss'] for m in metrics])
                valid_score = np.max([m['score'] for m in metrics])
            else:
                raise ValueError('unsupported reduction method: %s' % reduction)
        self.optimizer.update(valid_loss=valid_loss)

        return {
            'loss': valid_loss,
            'score': valid_score,
        }
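
A minimal, self-contained sketch of the flip-based test-time augmentation used above, assuming model is any callable that maps a batch to logits: the batch is doubled with a horizontal flip and the two halves of the logits are averaged back.

import torch

def tta_flip_logits(model, examples):
    # Double the batch with horizontally flipped copies.
    batch_size = examples.size(0)
    doubled = torch.cat([examples, torch.flip(examples, dims=[-1])], dim=0)
    # Average the logits of the original and flipped views.
    logits1, logits2 = torch.split(model(doubled), batch_size, dim=0)
    return (logits1 + logits2) / 2.0
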
Example #2
    def epoch_valid(self, epoch, valid, reduction='avg'):
        test_time_augmentation = False
        self.model.eval()
        num_steps = len(valid)
        metrics = []
        tau = self.tau

        with torch.no_grad():
            for step, (examples, labels) in enumerate(valid):
                original_labels = labels
                if not self.is_multiclass():
                    labels = labels.argmax(dim=-1)

                batch_size = examples.size(0)

                if self.use_test_time_augmentation and test_time_augmentation:
                    examples = torch.cat(
                        [examples, torch.flip(examples, dims=[-1])], dim=0)
                    labels = torch.cat([labels, labels], dim=0)

                logits, loss = self.model(examples,
                                          labels,
                                          tau=tau,
                                          reduction=reduction)

                if self.use_test_time_augmentation and test_time_augmentation:
                    logits1, logits2 = torch.split(logits, batch_size, dim=0)
                    logits = (logits1 + logits2) / 2.0

                logits, prediction = self.activation(logits.float())
                if reduction == 'avg':
                    auc = AUC(logits, original_labels.float())
                else:
                    # best AUC over disjoint 16-sample chunks
                    auc = max([
                        AUC(logits[i:i + 16],
                            original_labels[i:i + 16].float())
                        for i in range(0, len(logits), 16)
                    ])

                score = auc
                metrics.append({
                    'loss': loss.detach().float().cpu(),
                    'score': score,
                })

            if reduction == 'avg':
                valid_loss = np.average([m['loss'] for m in metrics])
                valid_score = np.average([m['score'] for m in metrics])
            elif reduction in ['min', 'max']:
                valid_loss = np.min([m['loss'] for m in metrics])
                valid_score = np.max([m['score'] for m in metrics])
            else:
                raise ValueError('unsupported reduction method: %s' % reduction)
        self.optimizer.update(valid_loss=valid_loss)

        return {
            'loss': valid_loss,
            'score': valid_score,
        }
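
The 'max' reduction above scores AUC over disjoint 16-sample chunks and keeps the best chunk. A sketch of that reduction, with sklearn's roc_auc_score standing in for the AUC helper (an assumption; the helper itself is not shown in these examples):

import numpy as np
from sklearn.metrics import roc_auc_score

def chunked_max_auc(scores, labels, chunk=16):
    aucs = []
    for i in range(0, len(scores), chunk):
        s, y = scores[i:i + chunk], labels[i:i + chunk]
        if len(np.unique(y)) < 2:
            continue  # AUC is undefined when a chunk contains a single class
        aucs.append(roc_auc_score(y, s))
    return max(aucs) if aucs else float('nan')
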
Example #3
    def epoch_train(self, epoch, train, model=None, optimizer=None):
        model = model if model is not None else self.model
        if epoch < 0:
            optimizer = optimizer if optimizer is not None else self.optimizer_fc
        else:
            optimizer = optimizer if optimizer is not None else self.optimizer

        model.train()
        model.zero_grad()

        num_steps = len(train)
        metrics = []
        for step, (examples, labels) in enumerate(train):
            if examples.shape[0] == 1:
                examples = examples[0]
                labels = labels[0]
            original_labels = labels
            if not self.is_multiclass():
                labels = labels.argmax(dim=-1)

            skeleton.nn.MoveToHook.to((examples, labels), self.device,
                                      self.is_half)
            logits, loss, features = model(examples,
                                           labels,
                                           tau=self.tau,
                                           reduction='avg')
            loss = loss.sum()
            loss.backward()

            max_epoch = self.hyper_params['dataset']['max_epoch']
            optimizer.update(maximum_epoch=max_epoch)
            optimizer.step()
            model.zero_grad()

            logits, prediction = self.activation(logits.float())
            tpr, tnr, nbac = NBAC(prediction, original_labels.float())
            auc = AUC(logits, original_labels.float())

            score = auc if self.hyper_params['conditions'][
                'score_type'] == 'auc' else float(nbac.detach().float())
            metrics.append({
                'loss': loss.detach().float().cpu(),
                'score': score,
            })

            LOGGER.debug(
                '[train] [%02d] [%03d/%03d] loss:%.6f AUC:%.3f NBAC:%.3f tpr:%.3f tnr:%.3f, lr:%.8f',
                epoch, step, num_steps, loss, auc, nbac, tpr, tnr,
                optimizer.get_learning_rate())

        train_loss = np.average([m['loss'] for m in metrics])
        train_score = np.average([m['score'] for m in metrics])
        optimizer.update(train_loss=train_loss)

        return {
            'loss': train_loss,
            'score': train_score,
        }
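
The epoch < 0 branch above trains with a head-only optimizer before handing over to the full-network one. A sketch of how such a pair might be built, assuming the classifier head is exposed as model.fc (the attribute name is an assumption):

import torch

def build_optimizers(model, lr=0.01):
    # Head-only optimizer for the warm-up phase (epoch < 0).
    optimizer_fc = torch.optim.SGD(model.fc.parameters(), lr=lr, momentum=0.9)
    # Full-network optimizer for regular epochs.
    optimizer_all = torch.optim.SGD(model.parameters(), lr=lr, momentum=0.9)
    return optimizer_fc, optimizer_all
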
Example #4
    def epoch_valid(self, epoch, valid, reduction='avg'):
        self.model.eval()
        num_steps = len(valid)
        metrics = []
        tau = self.tau

        with torch.no_grad():  # no gradients are needed during validation
            for step, (examples, labels) in enumerate(valid):
                original_labels = labels
                if not self.is_multiclass():
                    labels = labels.argmax(dim=-1)

                logits, loss = self.model(examples,
                                          labels,
                                          tau=tau,
                                          reduction=reduction)

                logits, prediction = self.activation(logits.float())
                tpr, tnr, nbac = NBAC(prediction, original_labels.float())
                auc = AUC(logits, original_labels.float())

                score = auc if self.hyper_params['conditions'][
                    'score_type'] == 'auc' else float(nbac.detach().float())
                metrics.append({
                    'loss': loss.detach().float().cpu(),
                    'score': score,
                })

                LOGGER.debug(
                    '[valid] [%02d] [%03d/%03d] loss:%.6f AUC:%.3f NBAC:%.3f tpr:%.3f tnr:%.3f, lr:%.8f',
                    epoch, step, num_steps, loss, auc, nbac, tpr, tnr,
                    self.optimizer.get_learning_rate())
        if reduction == 'avg':
            valid_loss = np.average([m['loss'] for m in metrics])
            valid_score = np.average([m['score'] for m in metrics])
        elif reduction == 'max':
            valid_loss = np.max([m['loss'] for m in metrics])
            valid_score = np.max([m['score'] for m in metrics])
        elif reduction == 'min':
            valid_loss = np.min([m['loss'] for m in metrics])
            valid_score = np.min([m['score'] for m in metrics])
        else:
            raise ValueError('unsupported reduction method: %s' % reduction)
        self.optimizer.update(valid_loss=valid_loss)

        return {
            'loss': valid_loss,
            'score': valid_score,
        }
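
The NBAC helper is not shown in these examples; a plausible sketch, assuming hard binary predictions and the normalized balanced accuracy 2 * BAC - 1 (so chance level maps to 0). Both the definition and the 0.5 thresholds are assumptions:

import torch

def nbac(prediction, labels):
    # Assumes both classes are present; otherwise a mean over an empty
    # tensor yields NaN.
    positive = labels > 0.5
    tpr = (prediction[positive] > 0.5).float().mean()    # true positive rate
    tnr = (prediction[~positive] <= 0.5).float().mean()  # true negative rate
    return tpr, tnr, tpr + tnr - 1.0                     # 2 * BAC - 1
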
Example #5
    def epoch_train(self, epoch, train, model=None, optimizer=None):
        model = model if model is not None else self.model
        if epoch < 0:
            optimizer = optimizer if optimizer is not None else self.optimizer_fc
        else:
            optimizer = optimizer if optimizer is not None else self.optimizer
        model.train()
        model.zero_grad()

        num_steps = len(train)
        metrics = []
        if self.switch:
            self.checkpoints = []
            step = 0
            for (examples, labels, original_labels) in (self.pre_data * 2):
                logits, loss = model(examples, labels, tau=self.tau, reduction='avg')
                loss = loss.sum()
                loss.backward()

                max_epoch = self.hyper_params['dataset']['max_epoch']
                optimizer.update(maximum_epoch=max_epoch)
                optimizer.step()
                model.zero_grad()

                logits, prediction = self.activation(logits.float())
                auc = AUC(logits, original_labels.float())
                score = auc
                metrics.append({
                    'loss': loss.detach().float().cpu(),
                    'score': score,
                })

                step += 1
            self.switch = False
            del self.pre_data
        else:
            for step, (examples, labels) in enumerate(train):
                if examples.shape[0] == 1:
                    examples = examples[0]
                    labels = labels[0]
                original_labels = labels
                if not self.is_multiclass():
                    labels = labels.argmax(dim=-1)

                skeleton.nn.MoveToHook.to((examples, labels), self.device, self.is_half)
                logits, loss = model(examples, labels, tau=self.tau, reduction='avg')
                loss = loss.sum()
                loss.backward()

                max_epoch = self.hyper_params['dataset']['max_epoch']
                optimizer.update(maximum_epoch=max_epoch)
                optimizer.step()
                model.zero_grad()
                if self.info['loop']['epoch'] < 2:
                    self.pre_data.append((examples, labels, original_labels))

                logits, prediction = self.activation(logits.float())
                auc = AUC(logits, original_labels.float())
                score = auc
                metrics.append({
                    'loss': loss.detach().float().cpu(),
                    'score': score,
                })

        train_loss = np.average([m['loss'] for m in metrics])
        train_score = np.average([m['score'] for m in metrics])
        optimizer.update(train_loss=train_loss)

        return {
            'loss': train_loss,
            'score': train_score,
        }
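
A sketch of the switch/pre_data pattern above: batches from the first two epochs are cached, and when the switch fires (for instance after swapping in a new backbone) the cache is replayed twice before normal training resumes. The class below only mirrors the bookkeeping; the names follow the example:

class ReplayCache:
    def __init__(self):
        self.pre_data = []
        self.switch = False

    def maybe_cache(self, epoch, batch):
        if epoch < 2:  # only batches from the first two epochs are kept
            self.pre_data.append(batch)

    def replay_batches(self):
        batches = self.pre_data * 2  # every cached batch is replayed twice
        self.switch = False
        self.pre_data = []           # free the cache after the replay
        return batches
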
Example #6
    def epoch_train(self, epoch, train, model=None, optimizer=None):
        model = model if model is not None else self.model

        if epoch == 0:  # update image mode
            self.build_optimizer()

        if self.image_mode == 'small_image':
            self.start_fc_epoch = 1
            self.start_conv_epoch = 1
            batch_num_per_epoch = [10, 20, 25, 30]
        elif self.image_mode == 'large_image':
            self.start_fc_epoch = 1
            self.start_conv_epoch = 1
            batch_num_per_epoch = [15, 15, 20, 25]
        else:
            self.start_fc_epoch = 1
            self.start_conv_epoch = 1
            batch_num_per_epoch = [10, 15, 20, 25]

        if epoch < self.start_fc_epoch and not self.is_video():
            optimizer = optimizer if optimizer is not None else self.optimizer_simple
        elif epoch < self.start_conv_epoch and not self.is_video():
            optimizer = optimizer if optimizer is not None else self.optimizer_fc
        else:
            optimizer = optimizer if optimizer is not None else self.optimizer_all

        if epoch >= self.start_fc_epoch:
            self.model.set_use_linear_only(False)

        if epoch < len(batch_num_per_epoch):
            train.set_steps(batch_num_per_epoch[epoch])
        else:
            train.change_steps(step_num=2)

        model.train()
        model.zero_grad()

        num_steps = len(train)
        metrics = []
        for step, (examples, labels) in enumerate(train):
            if examples.shape[0] == 1:
                examples = examples[0]
                labels = labels[0]
            original_labels = labels
            if not self.is_multiclass():
                labels = labels.argmax(dim=-1)

            skeleton.nn.MoveToHook.to((examples, labels), self.device,
                                      self.is_half)
            logits, loss = model(examples,
                                 labels,
                                 tau=self.tau,
                                 reduction='avg')
            loss = loss.sum()
            loss.backward()
            optimizer.step()
            model.zero_grad()

            logits, prediction = self.activation(logits.float())
            tpr, tnr, nbac = NBAC(prediction, original_labels.float())
            auc = AUC(logits, original_labels.float())

            score = auc if self.hyper_params['conditions'][
                'score_type'] == 'auc' else float(nbac.detach().float())
            metrics.append({
                'loss': loss.detach().float().cpu(),
                'score': score,
            })

            LOGGER.debug(
                '[train] [%02d] [%03d/%03d] loss:%.6f AUC:%.3f NBAC:%.3f tpr:%.3f tnr:%.3f, lr:%s',
                epoch, step, num_steps, loss, auc, nbac, tpr, tnr,
                [x['lr'] for x in optimizer.param_groups])

        train_loss = np.average([m['loss'] for m in metrics])
        train_score = np.average([m['score'] for m in metrics])
        # optimizer.update(train_loss=train_loss)

        return {
            'loss': train_loss,
            'score': train_score,
        }
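
The batch_num_per_epoch lookup above ramps the per-epoch step budget over the first few epochs and then falls back to a small constant. The set_steps/change_steps calls belong to this code base's loader, so only the schedule itself is sketched here:

def steps_for_epoch(epoch, ramp=(10, 15, 20, 25), tail=2):
    # A fixed ramp of step budgets, then a constant tail budget.
    return ramp[epoch] if epoch < len(ramp) else tail
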
Example #7
    def epoch_train(self, epoch, train, model=None):
        model = model if model is not None else self.model
        model.round_idx += 1
        if epoch < 0:
            optimizer = model.optimizer_fc
        else:
            optimizer = model.optimizer

        model.train()
        model.zero_grad()
        if model.info['condition']['first']['train']:
            num_steps = 10000
        else:
            num_steps = model.hyper_params['dataset']['steps_per_epoch']
        # train
        metrics = []
        scores = []
        score = 0
        step = 0

        for step, (examples, labels) in zip(range(num_steps), train):
            if examples.shape[0] == 1:
                examples = examples[0]
                labels = labels[0]
            original_labels = labels
            if not self.is_multiclass():
                labels = labels.argmax(dim=-1)
            skeleton.nn.MoveToHook.to((examples, labels), self.device, self.is_half)
            logits, loss = model(examples, labels, tau=self.tau, reduction='avg')
            loss = loss.sum()
            loss.backward()

            max_epoch = model.hyper_params['dataset']['max_epoch']
            optimizer.update(maximum_epoch=max_epoch)
            optimizer.step()
            model.zero_grad()
            if model.info['condition']['first']['train']:
                logits, prediction = self.activation(logits.float())
                score = AUC(logits, original_labels.float())
                scores.append(score)
                if step > 10 and sum(scores[-10:]) > 2.0:
                    break

            if step == num_steps - 1:
                logits, prediction = self.activation(logits.float())
                score = AUC(logits, original_labels.float())

            metrics.append({
                'loss': loss.detach().float().cpu(),
                'score': 0,
            })

        train_loss = np.average([m['loss'] for m in metrics])
        train_score = score
        optimizer.update(train_loss=train_loss)

        self.train_loss = train_loss
        self.train_score = train_score

        return {
            'loss': train_loss,
            'score': train_score,
            'run_steps': step
        }
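
On the first training call the loop above may run up to 10,000 steps but breaks early once the trailing AUC window clears a threshold (sum of the last 10 scores above 2.0, i.e. a mean above 0.2). A sketch of that stopping rule:

def should_stop(scores, step, window=10, threshold=2.0):
    # Stop once at least `window` steps have run and the trailing sum of
    # per-step scores passes the bar.
    return step > window and sum(scores[-window:]) > threshold
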
Example #8
    def epoch_train(self, epoch, train, model=None, optimizer=None):
        model = model if model is not None else self.model
        if epoch < 0:
            optimizer = optimizer if optimizer is not None else self.optimizer_fc
        else:
            optimizer = optimizer if optimizer is not None else self.optimizer
        model.train()
        model.zero_grad()

        num_steps = len(train)
        metrics = []
        if self.switch:
            LOGGER.info('Switch to ResNet9')
            self.checkpoints = []
            step = 0
            for (examples, labels, original_labels) in (self.pre_data * 2):
                logits, loss = model(examples, labels, tau=self.tau, reduction='avg')
                loss = loss.sum()
                loss.backward()

                max_epoch = self.hyper_params['dataset']['max_epoch']
                optimizer.update(maximum_epoch=max_epoch)
                optimizer.step()
                model.zero_grad()

                logits, prediction = self.activation(logits.float())
                auc = AUC(logits, original_labels.float())
                score = auc
                metrics.append({
                    'loss': loss.detach().float().cpu(),
                    'score': score,
                })

                LOGGER.debug(
                    '[train] [%02d] [%03d/%03d] loss:%.6f AUC:%.3f, lr:%.8f',
                    epoch, step, num_steps, loss, auc,
                    optimizer.get_learning_rate()
                )
                step += 1
            self.switch = False
            del self.pre_data
            LOGGER.info('Switch to ResNet9 ended')
        else:
            for step, (examples, labels) in enumerate(train):
                if examples.shape[0] == 1:
                    examples = examples[0]
                    labels = labels[0]
                original_labels = labels
                if not self.is_multiclass():
                    labels = labels.argmax(dim=-1)

                skeleton.nn.MoveToHook.to((examples, labels), self.device, self.is_half)
                logits, loss = model(examples, labels, tau=self.tau, reduction='avg')
                loss = loss.sum()
                loss.backward()

                max_epoch = self.hyper_params['dataset']['max_epoch']
                optimizer.update(maximum_epoch=max_epoch)
                optimizer.step()
                model.zero_grad()
                if self.info['loop']['epoch'] < 2:
                    self.pre_data.append((examples, labels, original_labels))

                logits, prediction = self.activation(logits.float())
                auc = AUC(logits, original_labels.float())
                score = auc
                metrics.append({
                    'loss': loss.detach().float().cpu(),
                    'score': score,
                })

                LOGGER.debug(
                    '[train] [%02d] [%03d/%03d] loss:%.6f AUC:%.3f, lr:%.8f',
                    epoch, step, num_steps, loss, auc,
                    optimizer.get_learning_rate()
                )

        train_loss = np.average([m['loss'] for m in metrics])
        train_score = np.average([m['score'] for m in metrics])
        optimizer.update(train_loss=train_loss)

        return {
            'loss': train_loss,
            'score': train_score,
        }
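
A side note on the LOGGER.debug calls used throughout these examples: the format arguments are passed separately rather than pre-formatted, so the logging module only interpolates the string when the DEBUG level is actually enabled. A minimal standalone illustration:

import logging

logging.basicConfig(level=logging.DEBUG)
LOGGER = logging.getLogger('trainer')

# Interpolation happens inside debug(), and only if DEBUG is enabled.
LOGGER.debug('[train] [%02d] [%03d/%03d] loss:%.6f AUC:%.3f',
             0, 1, 25, 0.123456, 0.987)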