Example #1
0
    def start(self):
        """Build the network and its training components, then run training."""
        loaders = self.read_dataset()
        tr_loader, va_loader = loaders

        # Instantiate the model and report its total parameter count.
        self.NET = self.get_network()
        n_params = sum(p.numel() for p in self.NET.parameters())
        print("#net params:", n_params)
        if self.USE_CUDA:
            self.NET.cuda()

        # Optimizer, loss criterion, and exponential LR decay (gamma=0.95).
        opt = Optimizer.get_optimizer(self.OPTIMIZER, self.NET, self.LEARNING_RATE)
        loss_fn = Loss.get_loss(self.LOSS)
        lr_sched = ExponentialLR(opt, gamma=0.95)

        self.__train(tr_loader, va_loader, opt, loss_fn, lr_sched)
Example #2
0
    def train(self, loader):
        """Train an autoencoder on *loader* for self.EPOCH epochs and plot the loss.

        Args:
            loader: iterable yielding (X, y) mini-batches.

        Side effects: prints the per-epoch training loss and calls self.plot()
        with the collected loss curve.
        """
        net = Network_AE(vocab_size=len(self.VOCAB), drop_out=self.DROP_OUT)
        if self.USE_CUDA:
            net.cuda()

        optimizer = Optimizer.get_optimizer(self.OPTIMIZER, net,
                                            self.LEARNING_RATE)
        criterion = Loss.get_loss(self.LOSS)

        # Per-epoch training losses, collected for the final plot.
        plot_train_loss = []

        net.train()
        for epoch in range(1, self.EPOCH + 1):
            train_loss = 0.0
            for X, y in loader:
                # Drop singleton dims so targets match the criterion's shape.
                y = torch.squeeze(y)
                # NOTE(review): Variable is a no-op wrapper since PyTorch 0.4;
                # kept here for consistency with the rest of this codebase.
                if self.USE_CUDA:
                    X = Variable(X.cuda())
                    y = Variable(y.cuda())
                else:
                    X = Variable(X)
                    y = Variable(y)

                output = net(X)
                loss = criterion(output, y)
                # Fix: `loss.data[0]` raises IndexError on 0-dim loss tensors
                # (PyTorch >= 0.4); `.item()` is the supported scalar accessor.
                train_loss += loss.item()

                optimizer.zero_grad()
                loss.backward()
                optimizer.step()

            print("epoch:{}, train_loss:{}".format(epoch, train_loss))
            plot_train_loss.append(train_loss)

        # Plot the loss curve accumulated over all epochs.
        self.plot(plot_train_loss)
Example #3
0
 def start(self):
     """Assemble optimizer, loss, and LR scheduler, then launch training."""
     loaders = self.read_dataset()
     train_dl, valid_dl = loaders
     # Assumes self.NET was already constructed elsewhere.
     opt = Optimizer.get_optimizer(self.OPTIMIZER, self.NET, self.LEARNING_RATE)
     loss_fn = Loss.get_loss(self.LOSS)
     decay = ExponentialLR(opt, gamma=0.95)
     self.__train(train_dl, valid_dl, opt, loss_fn, decay)