Example 1
0
 def test_already_flat(self):
     """Reducing a chain that is already flat should leave shapes intact."""
     chain = TransformChain(VariableSignalGenerator(fs=10000), Reduce())
     data, labels = chain.fit_transform([{'sin': 100}, {'cos': 150}], [0, 1])
     # one 10000-sample signal per input spec, labels passed through
     self.assertEqual(len(data), 2)
     self.assertEqual(len(data[0]), 10000)
     self.assertEqual(list(labels), [0, 1])
     return
Example 2
0
    def test_inverse_fit_transform(self):
        """Inverting a flattened chain should match inverting its non-flat twin."""
        # chain without a flattening layer
        base = TransformChain(VariableSignalGenerator(fs=10000),
                              WhiteNoise(sigma=0.1, clones=2),
                              SegmentSignal(chunksize=200))

        # identical chain plus a flattening layer
        flat = base.clone()
        flat.add(Reduce())

        # round-trip through the flattening chain
        fx, fy = flat.fit_transform([{'sin': 100}, {'cos': 150}], [0, 1])
        tx, ty = flat.inverse_fit_transform(fx, fy)

        # expected inversion from the non-flattening chain
        fx, fy = base.fit_transform([{'sin': 100}, {'cos': 150}], [0, 1])
        ox, oy = base.inverse_fit_transform(fx, fy)

        # both inversions must agree in dimensionality and content
        self.assertEqual(len(ox), len(tx))
        self.assertEqual(len(ox[0]), len(tx[0]))
        self.assertEqual(len(oy), len(ty))
        self.assertEqual(list(ox[3]), list(tx[3]))
        self.assertEqual(list(oy), list(ty))
        return
Example 3
0
 def test_chained(self):
     """A chain ending in feature extraction yields one feature row per segment."""
     chain = TransformChain(
         VariableSignalGenerator(fs=1000), SegmentSignal(chunksize=200),
         Reduce(),
         FeatureTransform(NormalizedPower(), DominantFrequency(fs=1000)))
     chain.fit(self.data)
     # first/last rows carry [normalized power, dominant frequency]
     first = [round(value, 2) for value in chain._X[0]]
     last = [round(value, 2) for value in chain._X[-1]]
     self.assertEqual(first, [0.64, 5])
     self.assertEqual(last, [0.64, 15])
     # 1000-sample signals in 200-sample chunks -> 5 rows per input
     self.assertEqual(len(chain._X), len(self.data) * 5)
     return
Example 4
0
 def test_predict_with_simulator(self):
     """Predictions should work for single- and multi-signal inputs alike."""
     clf = Learner(transform=[
         VariableSignalGenerator(),
         WhiteNoise(clones=2),
         SegmentSignal(),
         Reduce(),
     ])
     clf.fit(self.data, self.truth)
     # one low-periodicity signal -> one negative prediction
     pred = clf.predict([{'sin': 2}])
     self.assertEqual(len(pred), 1)
     self.assertEqual(pred[0], False)
     # two high-periodicity signals -> two predictions, first positive
     pred = clf.predict([{'sin': 12}, {'sin': 13}])
     self.assertEqual(len(pred), 2)
     self.assertEqual(pred[0], True)
     return
Example 5
0
class TestValidator(unittest.TestCase):
    """
    Tests for jade.Validator object.
    """
    # Fixture: 40 sine/cosine signals; the first 20 (periodicity 5-10)
    # are labelled False, the last 20 (periodicity 11-15) True, so the
    # learner predicts whether a signal's periodicity exceeds 10.
    truth = [False] * 20 + [True] * 20
    data = [
        {wave: freq}
        for low, high in ((5, 10), (11, 15))
        for wave in ('sin', 'cos')
        for freq in numpy.linspace(low, high, 10)
    ]

    # shared learner under validation
    learner = Learner(
        transform=[
            VariableSignalGenerator(),
            WhiteNoise(clones=2),
            SegmentSignal(),
            Reduce()
        ], model=SVC(kernel='rbf')
    )

    def test_cv_score(self):
        """Cross-validated scores are reproducible with a fixed seed."""
        validator = Validator(self.learner, self.data, self.truth)
        scores = validator.cv_score(folds=3, seed=42)
        self.assertEqual([round(score, 2) for score in scores], [1, 0.92, 0.99])
        return

    def test_split_score(self):
        """A single seeded train/test split gives a stable score."""
        validator = Validator(self.learner, self.data, self.truth)
        score = validator.split_score(test_size=0.3, seed=42)
        self.assertEqual(round(score, 2), 0.92)
        return

    def test_bootstrap_score(self):
        """Seeded bootstrap resampling gives stable per-split scores."""
        validator = Validator(self.learner, self.data, self.truth)
        scores = validator.bootstrap_score(splits=3, test_size=0.3, seed=42)
        self.assertEqual([round(score, 2) for score in scores], [0.92, 1, 0.99])
        return

    def test_identity_score(self):
        """Scoring on the training data itself should be perfect."""
        validator = Validator(self.learner, self.data, self.truth)
        score = validator.identity_score()
        self.assertEqual(score, 1)
        return
Example 6
0
    def test_fit_transform(self):
        """Fitting with and without a Reduce layer yields nested vs flat output."""
        # multi-layer chain with no flattening layer
        chain = TransformChain(VariableSignalGenerator(fs=10000),
                               WhiteNoise(sigma=0.1, clones=2),
                               SegmentSignal(chunksize=200))
        data, labels = chain.fit_transform([{'sin': 100}, {'cos': 150}], [0, 1])
        # 2 signals x (1 original + 2 noise clones) = 6 entries,
        # each segmented into 50 chunks of 200 samples
        self.assertEqual(len(data), len(labels))
        self.assertEqual(len(data), 6)
        self.assertEqual(len(data[0]), len(labels[0]))
        self.assertEqual(len(data[0]), 50)
        self.assertEqual(len(data[0][0]), 200)

        # appending a flattening layer collapses the nesting: 6 x 50 = 300
        chain.add(Reduce())
        data, labels = chain.fit_transform([{'sin': 100}, {'cos': 150}], [0, 1])
        self.assertEqual(len(data), len(labels))
        self.assertEqual(len(data), 300)
        self.assertEqual(len(data[0]), 200)
        return
Example 7
0
 def test_save(self):
     """A learner restored from disk should reproduce its pre-save predictions."""
     clf = Learner(
         transform=[
             VariableSignalGenerator(),
             WhiteNoise(clones=2),
             SegmentSignal(),
             Reduce()
         ], model=SVC(kernel='rbf')
     )
     clf.fit(self.data, self.truth)
     # predict on a shuffled copy so ordering is not an accident of fit
     shuffled = list(self.data)
     random.shuffle(shuffled)
     expected = clf.predict(shuffled)
     # round-trip through a temporary pickle file
     path = tmpfile('.pkl')
     clf.save(path)
     del clf
     restored = Learner.load(path)
     self.assertEqual(list(restored.predict(shuffled)), list(expected))
     return
Example 8
0
 def test_keras_save(self):
     """
     Save/load round-trip for a Learner wrapping a Keras classifier:
     a learner restored from disk must reproduce its pre-save predictions.
     Skipped when Keras is not installed.
     """
     try:
         from keras.models import Sequential
         from keras.layers import Dense, Dropout, Flatten, Reshape
         from keras.layers import Conv1D, Conv2D, MaxPooling2D, MaxPooling1D
         from keras.wrappers.scikit_learn import KerasClassifier
     except ImportError:
         self.skipTest('Keras not installed on this system.')
     # model factory for KerasClassifier: small CNN over 100-sample
     # input vectors with a sigmoid binary-classification head
     def build():
         cnn = Sequential([
             Reshape((1, 100, 1), input_shape=(100,)),
             Conv2D(64, (3, 1), padding="same", activation="relu"),
             MaxPooling2D(pool_size=(1, 2)),
             Flatten(),
             Dense(128, activation="relu"),
             Dropout(0.2),
             Dense(1, activation='sigmoid'),
         ])
         cnn.compile(
             loss='binary_crossentropy',
             optimizer='adam',
             metrics=['accuracy']
         )
         return cnn
     learner = Learner(
         transform=[
             VariableSignalGenerator(),
             WhiteNoise(clones=2),
             SegmentSignal(),
             Reduce()
         ], model=KerasClassifier(build)
     )
     learner.fit(self.data, self.truth, verbose=0)
     # predict on a shuffled copy, then round-trip through a pickle file
     test = list(self.data)
     random.shuffle(test)
     pred = learner.predict(test, verbose=0)
     tmp = tmpfile('.pkl')
     learner.save(tmp)
     del learner
     learner = Learner.load(tmp)
     # restored learner must reproduce the original predictions exactly
     self.assertEqual(list(learner.predict(test, verbose=0)), list(pred))
     return
Example 9
0
 def test_transform_features(self):
     """Feature extraction should produce named, two-column output rows."""
     learner = Learner(transform=[
         VariableSignalGenerator(fs=1000),
         SegmentSignal(chunksize=200),
         Reduce(),
         FeatureTransform(
             NormalizedPower(),
             DominantFrequency(fs=1000)
         )
     ])
     features, labels = learner.transform(self.data, self.truth)
     # 200 segments, each reduced to two scalar features
     self.assertEqual(len(labels), 200)
     self.assertEqual(len(features), 200)
     self.assertEqual(len(features[0]), 2)
     # feature_names should line up with the configured extractors
     frame = pandas.DataFrame(features, columns=learner.feature_names)
     self.assertEqual(len(frame), 200)
     self.assertEqual(list(frame.columns), ['NormalizedPower', 'DominantFrequency'])
     return