Example #1
    def pretrain_reid(self):
        """Training reid, and may validate on val set."""

        start_ep = cfg.pre_reid_resume_ep if cfg.pre_reid_resume else 0
        for ep in range(start_ep, cfg.pre_reid_num_epochs):

            # Force all BN layers to use global mean and variance
            utils.may_set_mode(self.modules_optims, 'eval')
            # Optionally re-enable dropout on the top layers (left disabled here)
            # utils.may_set_mode(self.reidTop.dropout, 'train')

            epoch_done = False
            ep_losses = []
            ep_st = time.time()
            step = 0
            while not epoch_done:

                step += 1
                step_st = time.time()

                ims, im_names, labels, ims_mirrored, epoch_done = \
                    self.train_set.next_batch()
                ims = TVT(Variable(torch.from_numpy(ims).float()))
                labels = TVT(Variable(torch.LongTensor(labels)))
                logits = self.googlenet(ims)

                loss = self.reid_criterion(logits, labels)
                self.optimReID.zero_grad()
                loss.backward()
                self.optimReID.step()

                ep_losses.append(utils.to_scalar(loss))

                # Step logs (step is already 1-based after the increment above)
                if step % cfg.pre_reid_log_steps == 0:
                    print('[Step {}/Ep {}], [{:.2f}s], [loss: {}]'.format(
                        step, ep + 1,
                        time.time() - step_st, utils.to_scalar(loss)))

            # Epoch logs
            print('===========> [Epoch {}], [{:.2f}s], [ep_avg_loss: {}]'.format(
                ep + 1,
                time.time() - ep_st, np.mean(ep_losses)))

            # Validation (the val set only exists when training on the 'train' partition)
            if cfg.train_val_part == 'train':
                self.val_set.eval_single_query(True)
                self.val_set.eval_multi_query(False)

            # Save a checkpoint every few epochs and at the final epoch
            if (ep + 1) % cfg.pre_reid_epochs_per_saving_ckpt == 0 \
                    or ep + 1 == cfg.pre_reid_num_epochs:
                utils.may_save_modules_optims_state_dicts(
                    self.modules_optims,
                    cfg.pre_reid_ckpt_saving_tmpl.format(ep + 1))

        self.train_set.stop_prefetching_threads()
        if cfg.train_val_part == 'train':
            self.val_set.stop_prefetching_threads()
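
The loop above follows the standard classification-pretraining step: forward pass, cross-entropy loss, zero_grad, backward, optimizer step, plus periodic logging. For comparison only, here is a minimal, self-contained sketch of that same step pattern in current PyTorch (no Variable wrapper); the stand-in model, data, and hyper-parameters are placeholders, not part of the original repository.

 import torch
 import torch.nn as nn

 model = nn.Linear(128, 10)          # stand-in for the ReID network
 criterion = nn.CrossEntropyLoss()   # plays the role of self.reid_criterion
 optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

 for step in range(1, 101):
     ims = torch.randn(32, 128)               # placeholder batch
     labels = torch.randint(0, 10, (32,))     # placeholder identity labels
     logits = model(ims)
     loss = criterion(logits, labels)
     optimizer.zero_grad()
     loss.backward()
     optimizer.step()
     if step % 20 == 0:
         # loss.item() is the modern counterpart of utils.to_scalar(loss)
         print('[Step {}], [loss: {:.4f}]'.format(step, loss.item()))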
Example #2
 def feature_func(ims):
     """A function to be called in the test/val set, to extract features."""
     # Set eval mode
     utils.may_set_mode(self.models, 'eval')
     ims = TVT(Variable(torch.from_numpy(ims).float()))
     feats = self.googlenet(ims)
     feats = feats.data.cpu().numpy()
     return feats
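
Note that Variable was merged into Tensor in PyTorch 0.4, so a comparable extraction function today would typically switch the model to eval mode and wrap the forward pass in torch.no_grad(). The sketch below is an illustrative stand-alone version; the model and device arguments are placeholders rather than the original self.googlenet.

 import numpy as np
 import torch

 def extract_features(model, ims, device='cpu'):
     """Hypothetical stand-alone variant of feature_func for PyTorch >= 0.4."""
     model.eval()                      # BN uses running stats, dropout disabled
     with torch.no_grad():             # no autograd graph during inference
         t = torch.from_numpy(ims).float().to(device)
         feats = model(t)
     return feats.cpu().numpy()

 # Example usage with a stand-in model:
 model = torch.nn.Linear(3 * 8 * 8, 16)
 batch = np.random.rand(4, 3 * 8 * 8).astype(np.float32)
 print(extract_features(model, batch).shape)   # -> (4, 16)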
Example #3
 def feature_func(ims):
     """A function to be called in the val set, to extract features."""
     # Set eval mode
     utils.may_set_mode(self.models, 'eval')
     ims = TVT(Variable(torch.FloatTensor(ims)))
     feats = self.reidTop(self.reidBot(ims))
     feats = feats.data.cpu().numpy()
     return feats
Example #4
 def feature_func(ims):
   """A function to be called in the val set, to extract features."""
   # Set eval mode
   # Force all BN layers to use global mean and variance, also disable
   # dropout.
   utils.may_set_mode(self.modules_optims, 'eval')
   ims = TVT(Variable(torch.from_numpy(ims).float()))
   feats, _ = self.reidTop(self.reidBot(ims))
   feats = feats.data.cpu().numpy()
   return feats
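
Features produced by such a feature_func are typically consumed by the val/test set's retrieval evaluation (e.g. the eval_single_query call in Example #1). The sketch below only illustrates the generic ranking step with made-up query/gallery arrays; the distance metric and L2 normalisation are assumptions, not the repository's actual evaluation code.

 import numpy as np

 query_feats = np.random.rand(5, 256).astype(np.float32)      # hypothetical
 gallery_feats = np.random.rand(100, 256).astype(np.float32)  # hypothetical

 # L2-normalise so Euclidean distance tracks cosine similarity
 query_feats /= np.linalg.norm(query_feats, axis=1, keepdims=True)
 gallery_feats /= np.linalg.norm(gallery_feats, axis=1, keepdims=True)

 # Pairwise squared Euclidean distances, shape (num_query, num_gallery)
 dists = (np.sum(query_feats ** 2, axis=1, keepdims=True)
          + np.sum(gallery_feats ** 2, axis=1)
          - 2 * query_feats.dot(gallery_feats.T))

 ranking = np.argsort(dists, axis=1)   # best-matching gallery index first
 print(ranking[:, :5])                 # top-5 gallery matches per query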