# Imports for these examples (module paths inferred from the code below; the
# original excerpt omits its import header). _build_tf_model, _freeze_graph,
# and _unfreeze_graph are helpers from the original test module that are not
# shown here; an illustrative sketch of two of them follows the last example.
from os.path import join as path_join
from tempfile import TemporaryDirectory

import pandas as pd
import tensorflow as tf
from keras.layers import Dense
from keras.models import Sequential
from sklearn.datasets import load_iris
from sklearn.svm import SVC

from acumos.pickler import AcumosContextManager, dump_model, load_model


def test_pickler_keras():
    '''Tests keras dump / load functionality'''
    iris = load_iris()
    X = iris.data
    y_onehot = pd.get_dummies(iris.target).values

    model = Sequential()
    model.add(Dense(3, input_dim=4, activation='relu'))
    model.add(Dense(3, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    model.fit(X, y_onehot, verbose=0)

    with TemporaryDirectory() as root:

        with AcumosContextManager(root) as context:
            model_path = context.build_path('model.pkl')
            with open(model_path, 'wb') as f:
                dump_model(model, f)

            assert {'keras', 'dill', 'acumos', 'h5py',
                    'tensorflow'} == context.package_names

        with AcumosContextManager(root) as context:
            with open(model_path, 'rb') as f:
                loaded_model = load_model(f)

    assert (model.predict_classes(
        X, verbose=0) == loaded_model.predict_classes(X, verbose=0)).all()


# Example #2
def test_function_import():
    '''Tests that a module used by a function is captured correctly'''
    import numpy as np

    def foo():
        return np.arange(5)

    with AcumosContextManager() as context:
        model_path = context.build_path('model.pkl')
        with open(model_path, 'wb') as f:
            dump_model(foo, f)

        assert {'dill', 'acumos', 'numpy'} == context.package_names

        with open(model_path, 'rb') as f:
            loaded_model = load_model(f)

    assert (loaded_model() == np.arange(5)).all()


# Example #3
def test_nested_model():
    '''Tests nested models'''
    iris = load_iris()
    X = iris.data
    y = iris.target
    y_onehot = pd.get_dummies(iris.target).values

    m1 = Sequential()
    m1.add(Dense(3, input_dim=4, activation='relu'))
    m1.add(Dense(3, activation='softmax'))
    m1.compile(loss='categorical_crossentropy',
               optimizer='adam',
               metrics=['accuracy'])
    m1.fit(X, y_onehot, verbose=0)

    m2 = SVC()
    m2.fit(X, y)

    # note: a lambda is used deliberately here; the dill-based pickler can serialize it
    crazy_good_model = lambda x: m1.predict_classes(x) + m2.predict(x)  # noqa
    out1 = crazy_good_model(X)

    with TemporaryDirectory() as root:

        with AcumosContextManager(root) as context:
            model_path = context.build_path('model.pkl')
            with open(model_path, 'wb') as f:
                dump_model(crazy_good_model, f)

            assert {
                'sklearn', 'keras', 'dill', 'acumos', 'numpy', 'h5py',
                'tensorflow'
            } == context.package_names

        with AcumosContextManager(root) as context:
            with open(model_path, 'rb') as f:
                loaded_model = load_model(f)

    out2 = loaded_model(X)
    assert (out1 == out2).all()


# Example #4
def test_pickler_sklearn():
    '''Tests sklearn dump / load functionality'''
    iris = load_iris()
    X = iris.data
    y = iris.target

    model = SVC()
    model.fit(X, y)

    with TemporaryDirectory() as root:

        with AcumosContextManager(root) as context:
            model_path = context.build_path('model.pkl')
            with open(model_path, 'wb') as f:
                dump_model(model, f)

            assert {'sklearn', 'dill', 'acumos',
                    'numpy'} == context.package_names

        with AcumosContextManager(root) as context:
            with open(model_path, 'rb') as f:
                loaded_model = load_model(f)

    assert (model.predict(X) == loaded_model.predict(X)).all()


# Example #5
def test_pickler_tensorflow():
    '''Tests tensorflow session and graph serialization'''
    tf.set_random_seed(0)

    iris = load_iris()
    data = iris.data
    target = iris.target
    target_onehot = pd.get_dummies(target).values.astype(float)

    with tf.Graph().as_default():

        # test pickling a session with trained weights

        session = tf.Session()
        x, y, prediction = _build_tf_model(session, data, target_onehot)
        yhat = session.run([prediction], {x: data})[0]

        with TemporaryDirectory() as model_root:
            with AcumosContextManager(model_root) as context:
                model_path = context.build_path('model.pkl')
                with open(model_path, 'wb') as f:
                    dump_model(session, f)

                assert {'acumos', 'dill',
                        'tensorflow'} == context.package_names

            with AcumosContextManager(model_root) as context:
                with open(model_path, 'rb') as f:
                    loaded_session = load_model(f)

            loaded_graph = loaded_session.graph
            loaded_prediction = loaded_graph.get_tensor_by_name(
                prediction.name)
            loaded_x = loaded_graph.get_tensor_by_name(x.name)
            loaded_yhat = loaded_session.run([loaded_prediction],
                                             {loaded_x: data})[0]

            assert loaded_session is not session
            assert loaded_graph is not session.graph
            assert (yhat == loaded_yhat).all()

        # test pickling a session with a frozen graph

        with TemporaryDirectory() as frozen_root:
            save_path = path_join(frozen_root, 'model')

            with loaded_session.graph.as_default():
                saver = tf.train.Saver()
                saver.save(loaded_session, save_path)

            frozen_path = _freeze_graph(frozen_root, ['prediction'])
            frozen_graph = _unfreeze_graph(frozen_path)
            frozen_session = tf.Session(graph=frozen_graph)

        with TemporaryDirectory() as model_root:
            with AcumosContextManager(model_root) as context:
                model_path = context.build_path('model.pkl')
                with open(model_path, 'wb') as f:
                    dump_model(frozen_session, f)

            with AcumosContextManager(model_root) as context:
                with open(model_path, 'rb') as f:
                    loaded_frozen_session = load_model(f)

            loaded_frozen_graph = loaded_frozen_session.graph
            loaded_frozen_prediction = loaded_frozen_graph.get_tensor_by_name(
                prediction.name)
            loaded_frozen_x = loaded_frozen_graph.get_tensor_by_name(x.name)
            loaded_frozen_yhat = loaded_frozen_session.run(
                [loaded_frozen_prediction], {loaded_frozen_x: data})[0]

            assert loaded_frozen_session is not frozen_session
            assert loaded_frozen_graph is not frozen_session.graph
            assert (yhat == loaded_frozen_yhat).all()
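

# The sketch below is illustrative only: _build_tf_model and _unfreeze_graph
# are assumed implementations written against the TF1-style API used above,
# not the original helpers from the acumos test suite. _freeze_graph (which
# would write a frozen GraphDef from the checkpoint saved above) is omitted.

def _build_tf_model(session, data, target_onehot):
    '''Illustrative sketch: builds and trains a small softmax classifier'''
    x = tf.placeholder(tf.float32, shape=(None, data.shape[1]), name='x')
    y = tf.placeholder(tf.float32, shape=(None, target_onehot.shape[1]),
                       name='y')

    hidden = tf.layers.dense(x, 3, activation=tf.nn.relu)
    logits = tf.layers.dense(hidden, target_onehot.shape[1])

    # the output op is named 'prediction' so the frozen-graph test can look it
    # up by name after freezing and reloading the graph
    prediction = tf.nn.softmax(logits, name='prediction')

    loss = tf.losses.softmax_cross_entropy(onehot_labels=y, logits=logits)
    train_op = tf.train.AdamOptimizer().minimize(loss)

    session.run(tf.global_variables_initializer())
    for _ in range(100):  # a few training steps suffice for the assertions
        session.run(train_op, {x: data, y: target_onehot})

    return x, y, prediction


def _unfreeze_graph(frozen_path):
    '''Illustrative sketch: loads a frozen GraphDef into a new tf.Graph'''
    graph_def = tf.GraphDef()
    with tf.gfile.GFile(frozen_path, 'rb') as f:
        graph_def.ParseFromString(f.read())

    graph = tf.Graph()
    with graph.as_default():
        # name='' keeps the original tensor names so get_tensor_by_name works
        tf.import_graph_def(graph_def, name='')
    return graph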