# Assumed imports for the tests in this file (module paths follow the Mars
# project layout; adjust if the package structure differs).
import os

import numpy as np
import pandas as pd
import pytest

import mars.dataframe as md
import mars.tensor as mt
from mars.learn.metrics import accuracy_score
from mars.session import new_session
from sklearn.metrics import accuracy_score as sklearn_accuracy_score


def testAccuracyScore(self):
    y_pred = [0, 2, 1, 3]
    y_true = [0, 1, 2, 3]

    score = accuracy_score(y_true, y_pred)
    result = self.executor.execute_tileables([score])[0]
    expected = sklearn_accuracy_score(y_true, y_pred)
    self.assertAlmostEqual(result, expected)

    score = accuracy_score(y_true, y_pred, normalize=False)
    result = self.executor.execute_tileables([score])[0]
    expected = sklearn_accuracy_score(y_true, y_pred, normalize=False)
    self.assertAlmostEqual(result, expected)

    # multilabel case
    y_pred = np.array([[0, 1], [1, 1]])
    y_true = np.ones((2, 2))
    score = accuracy_score(y_true, y_pred)
    result = self.executor.execute_tileables([score])[0]
    expected = sklearn_accuracy_score(y_true, y_pred)
    self.assertAlmostEqual(result, expected)

    sample_weight = [0.7, 0.3]
    score = accuracy_score(y_true, y_pred, sample_weight=sample_weight)
    result = self.executor.execute_tileables([score])[0]
    expected = sklearn_accuracy_score(y_true, y_pred, sample_weight=sample_weight)
    self.assertAlmostEqual(result, expected)

    score = accuracy_score(mt.tensor(y_true), mt.tensor(y_pred),
                           sample_weight=mt.tensor(sample_weight), normalize=False)
    result = self.executor.execute_tileables([score])[0]
    expected = sklearn_accuracy_score(y_true, y_pred, sample_weight=sample_weight,
                                      normalize=False)
    self.assertAlmostEqual(result, expected)
def test_accuracy_score(setup):
    y_pred = [0, 2, 1, 3]
    y_true = [0, 1, 2, 3]

    score = accuracy_score(y_true, y_pred)
    result = score.execute().fetch()
    expected = sklearn_accuracy_score(y_true, y_pred)
    assert pytest.approx(result) == expected

    score = accuracy_score(y_true, y_pred, normalize=False)
    result = score.execute().fetch()
    expected = sklearn_accuracy_score(y_true, y_pred, normalize=False)
    assert pytest.approx(result) == expected

    # multilabel case
    y_pred = np.array([[0, 1], [1, 1]])
    y_true = np.ones((2, 2))
    score = accuracy_score(y_true, y_pred)
    result = score.execute().fetch()
    expected = sklearn_accuracy_score(y_true, y_pred)
    assert pytest.approx(result) == expected

    sample_weight = [0.7, 0.3]
    score = accuracy_score(y_true, y_pred, sample_weight=sample_weight)
    result = score.execute().fetch()
    expected = sklearn_accuracy_score(y_true, y_pred, sample_weight=sample_weight)
    assert pytest.approx(result) == expected

    score = accuracy_score(mt.tensor(y_true), mt.tensor(y_pred),
                           sample_weight=mt.tensor(sample_weight), normalize=False)
    result = score.execute().fetch()
    expected = sklearn_accuracy_score(y_true, y_pred, sample_weight=sample_weight,
                                      normalize=False)
    assert pytest.approx(result) == expected
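# Standalone usage sketch (illustrative, not part of the test suite): the same
# lazy execute()/fetch() pattern exercised above, assuming a default local Mars
# session is available.
#
#     y_true = mt.tensor([0, 1, 2, 3])
#     y_pred = mt.tensor([0, 2, 1, 3])
#     acc = accuracy_score(y_true, y_pred)   # lazy Mars object, nothing computed yet
#     print(acc.execute().fetch())           # triggers computation -> 0.5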
def test_dataframe_accuracy_score(setup):
    rs = np.random.RandomState(0)
    raw = pd.DataFrame({'a': rs.randint(0, 10, (10,)),
                        'b': rs.randint(0, 10, (10,))})

    df = md.DataFrame(raw)
    y = df['a'].to_tensor().astype('int')
    pred = df['b'].astype('int')

    score = accuracy_score(y, pred)
    expect = sklearn_accuracy_score(raw['a'].to_numpy().astype('int'),
                                    raw['b'].to_numpy().astype('int'))
    assert pytest.approx(score.fetch()) == expect
def testAccuracyScore(self):
    service_ep = 'http://127.0.0.1:' + self.web_port
    timeout = 120 if 'CI' in os.environ else -1
    with new_session(service_ep) as sess:
        run_kwargs = {'timeout': timeout}

        rs = np.random.RandomState(0)
        raw = pd.DataFrame({'a': rs.randint(0, 10, (10,)),
                            'b': rs.randint(0, 10, (10,))})

        df = md.DataFrame(raw)
        y = df['a'].to_tensor().astype('int')
        pred = df['b'].astype('int')

        score = accuracy_score(y, pred, session=sess, run_kwargs=run_kwargs)
        expect = sklearn_accuracy_score(raw['a'].to_numpy().astype('int'),
                                        raw['b'].to_numpy().astype('int'))
        self.assertAlmostEqual(score.fetch(session=sess), expect)
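# Sketch of the same check against a running Mars web service outside the test
# harness; the endpoint and port are placeholder assumptions, and the
# session/run_kwargs usage mirrors the web test above.
#
#     with new_session('http://127.0.0.1:<web_port>') as sess:
#         score = accuracy_score(y, pred, session=sess,
#                                run_kwargs={'timeout': 120})
#         print(score.fetch(session=sess))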