Example #1
    def __init__(self, validate=False):
        super(DiscriminativeLearningRateTask, self).__init__()
        bs, steps = 8, 64
        dataset = XORDataset(bs * steps)
        if validate:
            self.train_loader = DataLoader(Subset(dataset, range(steps - bs)))
            self.val_loader = DataLoader(
                Subset(dataset, range(steps - bs, steps)))
        else:
            self.train_loader = DataLoader(dataset)
            self.val_loader = None

        # note: this second dataset is created but never used by the task
        dataset = XORDataset(128)
        self.model = LinearMLP([8, 4, 1])
        self.optimizer = optim.SGD(
            [
                {
                    "params": self.model.net[0].parameters(),
                    "lr": 1e-3
                },
                {
                    "params": self.model.net[1].parameters(),
                    "lr": 1e-5
                },
            ],
            lr=1e-5,
            momentum=0.5,
        )
        self.criterion = nn.MSELoss()
        self.device = torch.device("cuda")
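Example #1 assigns two parameter groups with different learning rates. Below is a minimal, self-contained sketch of the same per-group pattern, using plain nn.Linear layers as a hypothetical stand-in for the LinearMLP above (whose definition is not shown in these snippets):

from torch import nn, optim

# Hypothetical two-layer stand-in for LinearMLP([8, 4, 1]); only the
# per-group learning-rate mechanics are the point here.
layer1 = nn.Linear(8, 4)
layer2 = nn.Linear(4, 1)
optimizer = optim.SGD(
    [
        {"params": layer1.parameters(), "lr": 1e-3},  # faster early layer
        {"params": layer2.parameters(), "lr": 1e-5},  # slower later layer
    ],
    lr=1e-5,      # default used by any group that omits "lr"
    momentum=0.5,
)
for group in optimizer.param_groups:
    print(group["lr"])  # prints 0.001, then 1e-05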
Example #2
    def __init__(self, batch_size=8, steps=100, validate=False):
        super(DiscriminativeLearningRateTask, self).__init__()
        n_total = batch_size * steps
        dataset = XORDataset(n_total)
        if validate:
            n_train = int(n_total * 0.9)
            self.train_loader = DataLoader(
                Subset(dataset, range(n_train)),
                batch_size=batch_size
            )
            self.val_loader = DataLoader(
                Subset(dataset, range(n_train, n_total)),
                batch_size=batch_size
            )
        else:
            self.train_loader = DataLoader(dataset, batch_size=batch_size)
            self.val_loader = None

        self.batch_size = batch_size
        self.model = LinearMLP([8, 4, 1])
        self.optimizer = optim.SGD(
            [
                {"params": self.model.net[0].parameters(), "lr": 1e-3},
                {"params": self.model.net[1].parameters(), "lr": 1e-5},
            ],
            lr=1e-5,
            momentum=0.5,
        )
        self.criterion = nn.MSELoss()
        self.device = torch.device("cuda")
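The 90/10 split in Example #2 is plain index arithmetic over one dataset. A small sketch of the same Subset/DataLoader pattern follows, with a TensorDataset standing in for XORDataset (whose definition is not shown here) and toy parity data chosen purely for illustration:

import torch
from torch.utils.data import DataLoader, Subset, TensorDataset

batch_size, steps = 8, 100
n_total = batch_size * steps                       # 800 samples in total
X = torch.randint(0, 2, (n_total, 8)).float()      # toy binary inputs
y = X.sum(dim=1, keepdim=True) % 2                 # toy parity targets
dataset = TensorDataset(X, y)

n_train = int(n_total * 0.9)                       # first 90% for training
train_loader = DataLoader(Subset(dataset, range(n_train)), batch_size=batch_size)
val_loader = DataLoader(Subset(dataset, range(n_train, n_total)), batch_size=batch_size)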
Example #3
    def __init__(self, validate=False):
        super(XORTask, self).__init__()
        bs, steps = 8, 64
        dataset = XORDataset(bs * steps)
        if validate:
            self.train_loader = DataLoader(Subset(dataset, range(steps - bs)))
            self.val_loader = DataLoader(
                Subset(dataset, range(steps - bs, steps)))
        else:
            self.train_loader = DataLoader(dataset)
            self.val_loader = None

        self.batch_size = bs
        self.model = LinearMLP([8, 4, 1])
        self.optimizer = optim.SGD(self.model.parameters(), lr=1e-5)
        self.criterion = nn.MSELoss()
        self.device = torch.device("cuda")
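Example #3, like the others, hard-codes torch.device("cuda"), which fails on machines without a GPU as soon as tensors or the model are moved to it. A common guard, shown here as a suggestion rather than as part of the original code:

import torch

# Fall back to CPU when CUDA is not available.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")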
Example #4
    def __init__(self, batch_size=8, steps=100, validate=False):
        super(XORTask, self).__init__()
        n_total = batch_size * steps
        dataset = XORDataset(n_total)
        if validate:
            n_train = int(n_total * 0.9)
            self.train_loader = DataLoader(Subset(dataset, range(n_train)),
                                           batch_size=batch_size)
            self.val_loader = DataLoader(Subset(dataset,
                                                range(n_train, n_total)),
                                         batch_size=batch_size)
        else:
            self.train_loader = DataLoader(dataset, batch_size=batch_size)
            self.val_loader = None

        self.batch_size = batch_size
        self.model = LinearMLP([8, 4, 1])
        self.optimizer = optim.SGD(self.model.parameters(), lr=1e-5)
        self.criterion = nn.MSELoss()
        self.device = torch.device("cuda")
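None of the examples above show how the stored attributes are consumed. Here is a minimal training-loop sketch, assuming a task object shaped like Example #4: a train_loader yielding (inputs, targets) pairs, plus model, optimizer, criterion, and device attributes.

def train_one_epoch(task):
    # Move the model to the task's device and switch to training mode.
    task.model.to(task.device)
    task.model.train()
    for inputs, targets in task.train_loader:
        inputs = inputs.to(task.device)
        targets = targets.to(task.device)
        task.optimizer.zero_grad()
        loss = task.criterion(task.model(inputs), targets)
        loss.backward()
        task.optimizer.step()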
Example #5
        print("{} - [Epoch {}] - END\n\n".format(tag, epoch))

def calculate_correct_predictions(predictions, labels):
    # Threshold the sigmoid of the raw outputs at 0.5 and count matches with the labels.
    predictions = torch.round(torch.sigmoid(predictions))
    correct_predictions = torch.sum(predictions == labels).item()

    return correct_predictions

if __name__ == "__main__":
    # Open dataset from CSV file
    training_df = pd.read_csv("dataset/training.csv")
    validation_df = pd.read_csv("dataset/validation.csv")
    X_train, y_train = training_df["inputs"].tolist(), training_df["labels"].tolist()
    X_val, y_val = validation_df["inputs"].tolist(), validation_df["labels"].tolist()

    # Start creating the datasets
    training_ds = XORDataset(X_train, y_train)
    validation_ds = XORDataset(X_val, y_val)

    # Create model, optimizer, and loss function
    lstm_xor_model = LSTMXOR(1, 512).to(DEVICE)
    optimizer = optim.Adam(lstm_xor_model.parameters(), lr=1e-2)
    loss_function = nn.BCEWithLogitsLoss()

    for epoch in range(3):
        # Train the model
        run_model(lstm_xor_model, optimizer, loss_function, training_ds, epoch + 1, TRAINING_TAG)

        # Validate the model
        run_model(lstm_xor_model, optimizer, loss_function, validation_ds, epoch + 1, VALIDATION_TAG, is_evaluation_mode=True, shuffle=False)
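For reference, a quick check of the calculate_correct_predictions helper defined above, on hand-made logits and labels (values chosen here purely for illustration):

import torch

logits = torch.tensor([2.0, -1.0, 0.5, -3.0])   # raw model outputs, pre-sigmoid
labels = torch.tensor([1.0, 0.0, 1.0, 1.0])
# sigmoid -> round gives predictions 1, 0, 1, 0; three of them match the labels.
print(calculate_correct_predictions(logits, labels))  # 3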