def train_loop(self, train_w_loader, train_thetas_loader, test_loader,
                   model):

        best_top1 = 0.0

        # firstly, train weights only
        for epoch in range(self.train_thetas_from_the_epoch):
            self.writer.add_scalar('learning_rate/weights',
                                   self.w_optimizer.param_groups[0]['lr'],
                                   epoch)

            self.logger.info("Firstly, start to train weights for epoch %d" %
                             (epoch))
            self._training_step(model,
                                train_w_loader,
                                self.w_optimizer,
                                epoch,
                                info_for_logger="_w_step_")
            self.w_scheduler.step()

        for epoch in range(self.train_thetas_from_the_epoch, self.cnt_epochs):
            self.writer.add_scalar('learning_rate/weights',
                                   self.w_optimizer.param_groups[0]['lr'],
                                   epoch)
            self.writer.add_scalar('learning_rate/theta',
                                   self.theta_optimizer.param_groups[0]['lr'],
                                   epoch)

            self.logger.info("Start to train weights for epoch %d" % (epoch))
            self._training_step(model,
                                train_w_loader,
                                self.w_optimizer,
                                epoch,
                                info_for_logger="_w_step_")
            self.w_scheduler.step()

            self.logger.info("Start to train theta for epoch %d" % (epoch))
            self._training_step(model,
                                train_thetas_loader,
                                self.theta_optimizer,
                                epoch,
                                info_for_logger="_theta_step_")

            top1_avg = self._validate(model, test_loader, epoch)
            if best_top1 < top1_avg:
                best_top1 = top1_avg
                self.logger.info("Best top1 acc by now. Save model")
                save(model, self.path_to_save_model)

            self.temperature = self.temperature * self.exp_anneal_rate
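
# A minimal sketch (not from the source) of how the annealed `temperature`
# above is typically consumed in FBNet-style supernets: the architecture
# logits ("thetas") are relaxed with a Gumbel-softmax whose tau is the
# current temperature, so operator selection sharpens as training proceeds.
# The names `thetas`, `op_outputs`, and the anneal rate are illustrative.
import torch
import torch.nn.functional as F

def mix_candidate_ops(thetas, op_outputs, temperature):
    """Weight candidate-op outputs by a Gumbel-softmax over the thetas."""
    weights = F.gumbel_softmax(thetas, tau=temperature)   # shape: (num_ops,)
    stacked = torch.stack(op_outputs, dim=0)               # (num_ops, ...)
    return (weights.view(-1, *([1] * (stacked.dim() - 1))) * stacked).sum(dim=0)

# Example: anneal tau exactly as the loop above does, once per epoch.
temperature, exp_anneal_rate = 5.0, 0.956
thetas = torch.zeros(4, requires_grad=True)                # 4 candidate ops
op_outputs = [torch.randn(2, 8) for _ in range(4)]         # placeholder outputs
for _ in range(3):
    mixed = mix_candidate_ops(thetas, op_outputs, temperature)
    temperature *= exp_anneal_rate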
    def train_loop(self, train_w_loader, train_thetas_loader, test_loader,
                   model):

        # Lower is better for all three quantities tracked here (the validation
        # metric, the latency, and the composite score), so start from +inf.
        best_acc = float("inf")
        best_lat = float("inf")
        score = float("inf")

        # firstly, train weights only
        for epoch in range(self.train_thetas_from_the_epoch):
            # self.writer.add_scalar('learning_rate/weights', self.w_optimizer.param_groups[0]['lr'], epoch)
            self.logger.info("Prepare train  %d" % (epoch))
            self._training_step(model, train_w_loader, self.w_optimizer)
            self.w_scheduler.step()

        for epoch in range(self.train_thetas_from_the_epoch, self.cnt_epochs):
            # self.writer.add_scalar('learning_rate/weights', self.w_optimizer.param_groups[0]['lr'], epoch)
            # self.writer.add_scalar('learning_rate/theta', self.theta_optimizer.param_groups[0]['lr'], epoch)
            self.logger.info("Epoch %d" % (epoch))
            self.logger.info("Train Weights for epoch %d" % (epoch))
            self._training_step(model, train_w_loader, self.w_optimizer)
            self.w_scheduler.step()
            self.logger.info("Train Thetas for epoch %d" % (epoch))
            self._training_step(model, train_thetas_loader,
                                self.theta_optimizer)

            acc, lat = self._validate(model, test_loader)
            # Composite score: validation metric plus the absolute deviation from
            # the target latency; requires `import numpy as np` at module level.
            current_score = acc + np.abs(lat - self.target_latency)
            if acc < best_acc or lat < best_lat or current_score < score:
                if acc < best_acc:
                    best_acc = acc
                if lat < best_lat:
                    best_lat = lat
                if current_score < score:
                    score = current_score
                self.logger.info("Best top1 score by now. Save model")
                save(
                    model, self.path_to_save_model +
                    "score%.3f_acc%.8f_lat%.3f.pth" % (score, acc, lat))

            self.temperature = self.temperature * self.exp_anneal_rate
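
# A minimal sketch (assumed helper, not shown in the source) of the selection
# criterion used above: the score is the validation metric plus the absolute
# deviation from the target latency, and lower is better for every quantity.
def selection_score(acc, lat, target_latency):
    return acc + abs(lat - target_latency)

# e.g. with a target latency of 15.0 ms:
# selection_score(0.31, 17.2, 15.0) == 0.31 + 2.2 == 2.51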
    def train_loop(self, train_loader, valid_loader, model):
        best_top1 = 0.0

        for epoch in range(self.cnt_epochs):
            
            self.writer.add_scalar('learning_rate', self.optimizer.param_groups[0]['lr'], epoch)
            
            #if epoch and epoch % self.lr_decay_period == 0:
            #    self.optimizer.param_groups[0]['lr'] *= self.lr_decay

            # training
            self._train(train_loader, model, epoch)
            # validation
            top1_avg = self._validate(valid_loader, model, epoch)

            if best_top1 < top1_avg:
                best_top1 = top1_avg
                self.logger.info("Best top1 accuracy by now. Save model")
                save(model, self.path_to_save_model)
            self.scheduler.step()
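
# A minimal sketch of the kind of `_validate` helper the loop above assumes:
# it returns the average top-1 accuracy over the validation loader. The exact
# signature and metric bookkeeping in the source may differ.
import torch

@torch.no_grad()
def validate_top1(valid_loader, model, device="cpu"):
    model.eval()
    correct, total = 0, 0
    for images, labels in valid_loader:
        images, labels = images.to(device), labels.to(device)
        preds = model(images).argmax(dim=1)
        correct += (preds == labels).sum().item()
        total += labels.size(0)
    return correct / max(total, 1)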
    def train_loop(self, train_w_loader, train_thetas_loader, test_loader,
                   model):
        best_top1 = 0.0
        best_lat = float("inf")
        best_energy = float("inf")
        # Index appended to the checkpoint file name; the increment further down
        # is commented out, so every save currently overwrites the same file.
        n = 1
        # firstly, train weights only
        for epoch in range(self.train_thetas_from_the_epoch):
            self.writer.add_scalar('learning_rate/weights',
                                   self.w_optimizer.param_groups[0]['lr'],
                                   epoch)

            self.logger.info("Firstly, start to train weights for epoch %d" %
                             (epoch))
            self._training_step(model,
                                train_w_loader,
                                self.w_optimizer,
                                epoch,
                                info_for_logger="_w_step_")
            self.w_scheduler.step()

        for epoch in range(self.train_thetas_from_the_epoch, self.cnt_epochs):
            self.writer.add_scalar('learning_rate/weights',
                                   self.w_optimizer.param_groups[0]['lr'],
                                   epoch)
            self.writer.add_scalar('learning_rate/theta',
                                   self.theta_optimizer.param_groups[0]['lr'],
                                   epoch)

            self.logger.info("Start to train weights for epoch %d" % (epoch))
            self._training_step(model,
                                train_w_loader,
                                self.w_optimizer,
                                epoch,
                                info_for_logger="_w_step_")
            self.w_scheduler.step()

            self.logger.info("Start to train theta for epoch %d" % (epoch))
            self._training_step(model,
                                train_thetas_loader,
                                self.theta_optimizer,
                                epoch,
                                info_for_logger="_theta_step_")

            top1_avg, top3_avg, lat_avg, energy_avg = self._validate(
                model, test_loader, epoch)
            #if best_top1 < top1_avg and lat_avg < best_lat:
            #if best_top1 < top1_avg: #original

            # Note: the condition below reduces to `best_top1 < top1_avg`; the
            # latency/energy comparisons only control the extra "Best" messages.
            if (best_top1 < top1_avg and lat_avg < best_lat
                    and energy_avg < best_energy) or best_top1 < top1_avg:
                if best_top1 < top1_avg:
                    best_top1 = top1_avg
                    print("Best Acc!!")
                if lat_avg < best_lat:
                    best_lat = lat_avg
                    print("Best Speed!!")
                if energy_avg < best_energy:
                    best_energy = energy_avg
                    print("Best Energy!!")
                self.logger.info("Best top1 acc by now. Save model")
                #print("Over Acc: 0.70")
                #print("Model Number = " + str(n))
                save(model, self.path_to_save_model + str(n) + '.pth')
                #n += 1
            '''
            if (top1_avg >= 0.75) or (top1_avg >= 0.75  and lat_avg < best_lat) or (top1_avg >= 0.75  and energy_avg < best_energy) :
                if lat_avg < best_lat:
                    best_lat = lat_avg
                    print("Best Latency!!")
                if energy_avg < best_energy:
                     best_energy = energy_avg
                     print("Best Energy!!")
                self.logger.info("Best top1 acc by now. Save model")
                print("Over Acc: 0.75")
                #print("Model Number = " + str(n))
                save(model, self.path_to_save_model + str(n) + '.pth')
                #n+=1
            '''
            self.temperature = self.temperature * self.exp_anneal_rate
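
# The `save(model, path)` helper called throughout these loops is not shown in
# this listing; a minimal sketch of what such a helper commonly does is to
# persist the state dict (the actual implementation in the source may differ).
import torch

def save(model, model_path):
    torch.save(model.state_dict(), model_path)

# Usage matching the call above:
# save(model, self.path_to_save_model + str(n) + '.pth')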