def test_fit(self):
    # given
    data = constant_timeseries(0, 3)
    transformers = [self.DataTransformerMock1() for _ in range(10)] \
        + [self.DataTransformerMock2() for _ in range(10)]
    p = Pipeline(transformers)

    # when
    p.fit(data)

    # then
    for i in range(10):
        self.assertFalse(transformers[i].fit_called)
    for i in range(10, 20):
        self.assertTrue(transformers[i].fit_called)
def test_transform(self):
    # given
    mock1 = self.DataTransformerMock1()
    mock2 = self.DataTransformerMock2()
    data = constant_timeseries(0, 3)
    transformers = [mock1] * 10 + [mock2] * 10
    p = Pipeline(transformers)

    # when
    p.fit(data)
    transformed = p.transform(data)

    # then
    # 3 original values plus 3 values appended by each of the 20 mocks = 63
    self.assertEqual(63, len(transformed))
    self.assertEqual([0] * 3 + [1] * 30 + [2] * 30, list(transformed.values()))

    for t in transformers:
        self.assertTrue(t.transform_called)
        self.assertFalse(t.inverse_transform_called)
def test_fit_skips_superfluous_transforms(self):
    # given
    data = constant_timeseries(0, 100)
    transformers = [self.DataTransformerMock1() for _ in range(10)] \
        + [self.DataTransformerMock2()] \
        + [self.DataTransformerMock1() for _ in range(10)]
    p = Pipeline(transformers)

    # when
    p.fit(data)

    # then
    for i in range(10):
        self.assertTrue(transformers[i].transform_called)
    self.assertTrue(transformers[10].fit_called)
    self.assertFalse(transformers[10].transform_called)
    for i in range(11, 21):
        self.assertFalse(transformers[i].transform_called)
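
# The tests above reference two mock transformers (accessed via self.DataTransformerMock1
# and self.DataTransformerMock2) that are defined elsewhere in this test class. Below is a
# minimal sketch of what they are assumed to look like, inferred from the assertions above:
# both mocks record which of their methods were called and append three constant values on
# transform(); only Mock2 is fittable. In the actual suite they would subclass the library's
# data transformer base classes so that Pipeline can distinguish fittable from non-fittable
# transformers; these standalone classes and the append_values() usage are illustrative
# assumptions, not the real definitions.
class DataTransformerMock1:
    def __init__(self):
        self.fit_called = False
        self.transform_called = False
        self.inverse_transform_called = False

    def transform(self, data, *args, **kwargs):
        self.transform_called = True
        # each Mock1 appends three 1-values to the series
        return data.append_values(constant_timeseries(1, 3).values())


class DataTransformerMock2:
    def __init__(self):
        self.fit_called = False
        self.transform_called = False
        self.inverse_transform_called = False

    def fit(self, data):
        self.fit_called = True
        return self

    def transform(self, data, *args, **kwargs):
        self.transform_called = True
        # each Mock2 appends three 2-values to the series
        return data.append_values(constant_timeseries(2, 3).values())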