Exemplo n.º 1
0
    def training_step(self, train_batch, batch_idx):
        """
        Run one optimization step over a single batch.

        :param train_batch: Batch data as an (inputs, targets) pair
        :param batch_idx: Batch indices

        :return: dict holding the batch loss plus accuracy and per-class
                 IoU statistics for this batch
        """
        inputs, targets = train_batch
        logits = self.forward(inputs)

        # Hard class predictions; cast to float to match the metric helpers.
        predictions = torch.argmax(logits, dim=1).float()
        iou_vals, iou_counts = iou_fnc(predictions, targets,
                                       self.args['n_class'])

        metrics = {'acc': torch.tensor(accuracy(predictions, targets))}
        for cls in range(self.args['n_class']):
            metrics['iou_' + str(cls)] = torch.tensor(iou_vals[cls])
            metrics['iou_cnt_' + str(cls)] = torch.tensor(iou_counts[cls])

        # Criterion expects integer class targets.
        metrics['loss'] = self.criterion(logits, targets.type(torch.long))

        return metrics
Exemplo n.º 2
0
    def validation_step(self, test_batch, batch_idx):
        """
        Evaluate the model on one validation batch.

        :param test_batch: Batch data as an (inputs, targets) pair
        :param batch_idx: Batch indices

        :return: dict holding the validation loss plus accuracy and
                 per-class IoU statistics for this batch
        """
        inputs, targets = test_batch
        logits = self.forward(inputs)

        # Hard class predictions; cast to float to match the metric helpers.
        predictions = torch.argmax(logits, dim=1).float()
        iou_vals, iou_counts = iou_fnc(predictions, targets,
                                       self.args['n_class'])

        metrics = {'val_acc': torch.tensor(accuracy(predictions, targets))}
        for cls in range(self.args['n_class']):
            metrics['val_iou_' + str(cls)] = torch.tensor(iou_vals[cls])
            metrics['val_iou_cnt_' + str(cls)] = torch.tensor(iou_counts[cls])

        # Criterion expects integer class targets.
        metrics['val_loss'] = self.criterion(logits, targets.type(torch.long))

        return metrics
Exemplo n.º 3
0
def classification():
    """Demo: PCA dimensionality reduction followed by logistic regression."""
    # Build a synthetic, well-separated two-class dataset.
    data, labels = make_classification(
        n_samples=1000,
        n_features=20,
        n_informative=10,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    data_train, data_test, labels_train, labels_test = train_test_split(
        data, labels, test_size=0.1, random_state=1111)

    for solver in ('svd', 'eigen'):
        pca = PCA(10, solver=solver)

        # Fit PCA on the training split only so no test data leaks in.
        pca.fit(data_train)
        train_reduced = pca.transform(data_train)
        test_reduced = pca.transform(data_test)

        clf = LogisticRegression(lr=0.01, max_iters=10, penalty='l2', C=0.01)
        clf.fit(train_reduced, labels_train)
        preds = clf.predict(test_reduced)
        print('classification accuracy', accuracy(labels_test, preds))
Exemplo n.º 4
0
def classificationfm():
    """Demo: factorization-machine classifier on a synthetic binary task."""
    # Build a small, well-separated two-class dataset.
    data, labels = make_classification(
        n_samples=100,
        n_features=20,
        n_informative=10,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    data_train, data_test, labels_train, labels_test = train_test_split(
        data, labels, test_size=0.1, random_state=1111)

    clf = FMClassifier(n_components=2, max_iter=20)
    clf.fit(data_train, labels_train)
    preds = clf.predict(data_test)
    print('classification accuracy', accuracy(labels_test, preds))