def test_one_d_shape_change(self):
     y = self.y.flatten()
     test_size = 0.2
     train_n, test_n, d = 16, 4, 3
     x_train, x_test, y_train, y_test = train_test_split(self.x, y, test_size=test_size)
     self.assertTupleEqual(y_train.shape, (train_n, 1), 'Shape of y_train did not change 2d')
     self.assertTupleEqual(y_test.shape, (test_n, 1), 'Shape of y_test did not change to 2d')
    def test_valid_shapes_floor(self):
        """A fractional test_size should floor the test-set row count.

        With test_size=0.28 the split is expected to yield 15 training rows
        and 5 test rows (d=3 feature columns; y as a single column).
        """
        test_size = 0.28
        train_n, test_n, d = 15, 5, 3

        x_train, x_test, y_train, y_test = train_test_split(self.x, self.y, test_size=test_size)

        # Check every returned array against its expected shape.
        cases = (
            (x_train, (train_n, d), 'x_train'),
            (x_test, (test_n, d), 'x_test'),
            (y_train, (train_n, 1), 'y_train'),
            (y_test, (test_n, 1), 'y_test'),
        )
        for array, expected_shape, label in cases:
            self.assertTupleEqual(array.shape, expected_shape,
                                  'Shape of ' + label + ' did not match')
    @classmethod
    def setUpClass(cls):
        """Build a synthetic regression fixture and fit Lasso models once per class.

        NOTE(review): the ``@classmethod`` decorator was missing. unittest
        calls ``cls.setUpClass()`` with no arguments, so the undecorated
        version would raise TypeError before any test runs.
        """
        cls.n, cls.d = 1000, 10
        np.random.seed(7)  # deterministic fixture data

        # Design matrix, per-entry noise, and a positive weight vector.
        x = np.random.uniform(0, 50, cls.n * cls.d).reshape(cls.n, cls.d)
        eps = np.random.normal(loc=0, scale=2, size=cls.n * cls.d).reshape(cls.n, cls.d)
        w = np.random.uniform(1, 5, cls.d).reshape(cls.d, 1)
        y = (x + eps).dot(w)  # noisy linear response, shape (n, 1)

        x_train, x_test, y_train, y_test = train_test_split(x, y, random_state=7, test_size=0.25)

        # Standardize features and targets with statistics from the training
        # split only, so the test split sees no leakage.
        x_standardizer = Standardizer(x_train)
        cls.x_train_std = x_standardizer.standardize(x_train)
        cls.x_test_std = x_standardizer.standardize(x_test)

        y_standardizer = Standardizer(y_train)
        cls.y_train_std = y_standardizer.standardize(y_train)
        cls.y_test_std = y_standardizer.standardize(y_test)

        # Fit with both coordinate-descent variants so tests can inspect
        # whichever result the second call leaves on the model.
        cls.model = LassoRegression(cls.x_train_std, cls.y_train_std, lambd=0.5 / 2)
        cls.model.cycliccoorddescent()
        cls.model.randcoorddescent()
    @classmethod
    def setUpClass(cls):
        """Build a synthetic two-class fixture and train a logistic model once per class.

        NOTE(review): the ``@classmethod`` decorator was missing. unittest
        calls ``cls.setUpClass()`` with no arguments, so the undecorated
        version would raise TypeError before any test runs.
        """
        cls.n, cls.d = 500, 10
        np.random.seed(7)  # deterministic fixture data

        # Two Gaussian clouds that differ only in spread; labels are -1 / +1.
        x_class1 = np.random.normal(0, 5, cls.n * cls.d).reshape(cls.n, cls.d)
        x_class2 = np.random.normal(0, 10, cls.n * cls.d).reshape(cls.n, cls.d)
        # NOTE(review): ``w`` is never used below; kept so the RNG draw
        # sequence (and thus any seeded downstream data) is unchanged.
        w = np.random.uniform(1, 5, cls.d).reshape(cls.d, 1)
        y_class1 = np.full((cls.n, 1), -1)
        y_class2 = np.full((cls.n, 1), 1)

        x = np.concatenate((x_class1, x_class2))
        y = np.concatenate((y_class1, y_class2))

        x_train, x_test, cls.y_train, cls.y_test = train_test_split(
            x, y, random_state=7, test_size=0.25)

        # Standardize features with training-split statistics only.
        x_standardizer = Standardizer(x_train)

        cls.x_train_std = x_standardizer.standardize(x_train)
        cls.x_test_std = x_standardizer.standardize(x_test)

        # NOTE(review): keyword is spelled ``lamd`` here but ``lambd`` in the
        # Lasso fixture -- verify against LogisticRegression's signature.
        cls.model = LogisticRegression(cls.x_train_std, cls.y_train, lamd=1)
        cls.model.train()