Example #1
0
    def test_merge_method_seq_concat(self):
        """Merge a functional-API branch with a Sequential branch via
        ``mode="concat"`` and check the BigDL model against the Keras model
        on random input using ``compare_newapi``."""
        # Branch 1 (BigDL): a one-Dense functional Model applied as a node.
        # NOTE(review): the BigDL graph is fed through a fresh input node
        # bx1_1, while the Keras side below reuses kx1 directly — presumably
        # BigDL requires a distinct input when re-applying a Model; confirm.
        bx1 = BLayer.Input(shape=(10, ))
        bx1_1 = BLayer.Input(shape=(10, ))
        bx2 = BLayer.Input(shape=(10, ))
        by1 = BLayer.Dense(12, activation="sigmoid")(bx1)
        bbranch1_node = BModel(bx1, by1)(bx1_1)
        # Branch 2 (BigDL): a Sequential with a single Dense, applied to bx2.
        bbranch2 = BSequential()
        bbranch2.add(BLayer.Dense(12, input_dim=10))
        bbranch2_node = bbranch2(bx2)
        # Concatenate both branches and wrap into a two-input graph model.
        bz = BLayer.merge([bbranch1_node, bbranch2_node], mode="concat")
        bmodel = BModel([bx1_1, bx2], bz)

        # Mirror topology in plain Keras for the reference output.
        kx1 = KLayer.Input(shape=(10, ))
        kx2 = KLayer.Input(shape=(10, ))
        ky1 = KLayer.Dense(12, activation="sigmoid")(kx1)
        kbranch1_node = KModel(kx1, ky1)(kx1)
        kbranch2 = KSequential()
        kbranch2.add(KLayer.Dense(12, input_dim=10))
        kbranch2_node = kbranch2(kx2)
        kz = KLayer.merge([kbranch1_node, kbranch2_node], mode="concat")
        kmodel = KModel([kx1, kx2], kz)

        # Two random batches of size 2, one per model input.
        input_data = [np.random.random([2, 10]), np.random.random([2, 10])]
        self.compare_newapi(kmodel, bmodel, input_data,
                            self.convert_two_dense_model)
Example #2
0
 def compare_newapi(self,
                    klayer,
                    blayer,
                    input_data,
                    weight_converter=None,
                    is_training=False,
                    rtol=1e-6,
                    atol=1e-6):
     """Wrap a Keras layer and a BigDL layer each in a one-layer Sequential
     model, copy the Keras weights onto the BigDL side, and assert the two
     forward passes agree within the given tolerances."""
     from keras.models import Sequential as KSequential
     from bigdl.nn.keras.layer import Sequential as BSequential

     # Keras reference output.
     kmodel = KSequential()
     kmodel.add(klayer)
     expected = kmodel.predict(input_data)

     # BigDL model under test.
     bmodel = BSequential()
     bmodel.add(blayer)
     # BatchNormalization keeps running statistics that are buffers, not
     # trainable weights, so they must be transferred explicitly.
     if isinstance(blayer, BLayer.BatchNormalization):
         blayer.set_running_mean(K.eval(klayer.running_mean))
         blayer.set_running_std(K.eval(klayer.running_std))
     kweights = kmodel.get_weights()
     if kweights:
         bmodel.set_weights(weight_converter(klayer, kweights))
     bmodel.training(is_training)
     actual = bmodel.forward(input_data)
     self.assert_allclose(actual, expected, rtol=rtol, atol=atol)
Example #3
0
 def compare_layer(self,
                   klayer,
                   zlayer,
                   input_data,
                   weight_converter=None,
                   is_training=False,
                   rtol=1e-6,
                   atol=1e-6):
     """
     Compare forward results for Keras layer against Zoo Keras API layer.

     Both layers are wrapped in single-layer Sequential models; the Keras
     weights are converted and copied onto the Zoo model before comparing.
     """
     from keras.models import Sequential as KSequential
     from zoo.pipeline.api.keras.models import Sequential as ZSequential
     from zoo.pipeline.api.keras.layers import BatchNormalization

     # Keras reference output.
     kmodel = KSequential()
     kmodel.add(klayer)
     expected = kmodel.predict(input_data)

     # Zoo model under test.
     zmodel = ZSequential()
     zmodel.add(zlayer)
     # Running mean/std are buffers rather than weights; copy them by hand.
     if isinstance(zlayer, BatchNormalization):
         zlayer.set_running_mean(K.eval(klayer.running_mean))
         zlayer.set_running_std(K.eval(klayer.running_std))
     kweights = kmodel.get_weights()
     if kweights:
         zmodel.set_weights(weight_converter(klayer, kweights))
     zmodel.training(is_training)
     actual = zmodel.forward(input_data)
     self.assert_allclose(actual, expected, rtol=rtol, atol=atol)
    def test_load_keras(self):
        """Round-trip a small Keras model through JSON (architecture only)
        and HDF5 (architecture + weights), reloading each via
        ``Net.load_keras``."""
        model = KSequential()
        model.add(KLayer.Dense(32, activation='relu', input_dim=100))

        # Architecture-only round trip through a JSON file.
        json_path = create_tmp_path() + ".json"
        with open(json_path, "w") as json_file:
            json_file.write(model.to_json())
        reloaded_json_model = Net.load_keras(json_path=json_path)

        # Full round trip (weights included) through an HDF5 file.
        hdf5_path = create_tmp_path() + ".h5"
        model.save(hdf5_path)
        reloaded_hdf5_model = Net.load_keras(hdf5_path=hdf5_path)
Example #5
0
def KRlt_MLPR_training(X, y, input_dim, metric, solver, hidden_layer,
                       activation, Epochs):
    """Build, compile and fit a Keras MLP regressor.

    The network is: an input-dimension-preserving Dense layer, one Dense
    layer per entry of ``hidden_layer``, and a single linear output unit.
    The same ``metric`` string is used as both loss and reported metric.

    Returns the fitted model and the Keras training ``History`` object.
    """
    model = KSequential()

    # Input projection that keeps the feature dimension.
    model.add(KDense(units=input_dim, input_shape=(input_dim, )))

    # Hidden stack: one Dense layer per requested width.
    for units in hidden_layer:
        model.add(KDense(units=units, activation=activation))

    # Single linear output for regression.
    model.add(KDense(units=1, activation='linear'))

    model.compile(loss=metric, optimizer=solver, metrics=[metric])
    history = model.fit(x=X, y=y, epochs=Epochs)

    return model, history
Example #6
0
def KRlt_MLPR_trainingREV(X, y, input_dim, output_dim, metric, solver,
                          hidden_layer, activation, Epochs):
    """Build, compile and fit a fixed-depth Keras MLP regressor.

    Unlike ``KRlt_MLPR_training``, ``hidden_layer`` here is a single layer
    width used for exactly three hidden Dense layers, and ``output_dim``
    sets the width of the linear output layer.

    Returns the fitted model and the Keras training ``History`` object.
    """
    net = KSequential()
    # First hidden layer also declares the input shape.
    net.add(
        KDense(units=hidden_layer,
               input_shape=(input_dim, ),
               activation=activation))
    # Two more hidden layers of the same width.
    for _ in range(2):
        net.add(KDense(units=hidden_layer, activation=activation))
    # Linear output of the requested dimensionality.
    net.add(KDense(units=output_dim, activation="linear"))

    net.compile(loss=metric, optimizer=solver, metrics=[metric])
    history = net.fit(x=X, y=y, epochs=Epochs)

    return net, history
Example #7
0
    def test_merge_method_seq_concat(self):
        """Concatenate a functional-API branch with a Sequential branch and
        compare the Zoo model against the Keras reference on random input."""
        # Keras reference: a wrapped one-Dense Model branch merged with a
        # Sequential Dense branch.
        kin1 = KLayer.Input(shape=(10, ))
        kin2 = KLayer.Input(shape=(10, ))
        kdense = KLayer.Dense(12, activation="sigmoid")(kin1)
        knode1 = KModel(kin1, kdense)(kin1)
        kseq = KSequential()
        kseq.add(KLayer.Dense(12, input_dim=10))
        knode2 = kseq(kin2)
        kout = KLayer.merge([knode1, knode2], mode="concat")
        kmodel = KModel([kin1, kin2], kout)

        # Zoo mirror of the same topology.
        zin1 = ZLayer.Input(shape=(10, ))
        zin2 = ZLayer.Input(shape=(10, ))
        zdense = ZLayer.Dense(12, activation="sigmoid")(zin1)
        znode1 = ZModel(zin1, zdense)(zin1)
        zseq = ZSequential()
        zseq.add(ZLayer.Dense(12, input_dim=10))
        znode2 = zseq(zin2)
        zout = ZLayer.merge([znode1, znode2], mode="concat")
        zmodel = ZModel([zin1, zin2], zout)

        # One random batch of size 2 per model input.
        data = [np.random.random([2, 10]), np.random.random([2, 10])]
        self.compare_layer(kmodel, zmodel, data, self.convert_two_dense)