Example #1
def train_and_predict(train_df, test_df):

    # data cleaning: inspect columns without NaNs, drop unneeded columns, fill remaining NaNs
    cleaner = DataCleaner()
    cleaner.columns_with_no_nan(train_df)
    cleaner.columns_with_no_nan(test_df)
    train_df = cleaner.drop_columns(train_df)
    train_df = cleaner.resolve_nan(train_df)
    test_df = cleaner.drop_columns(test_df)
    test_df = cleaner.resolve_nan(test_df)


    # features engineering
    train_df, test_df = engineer_features(train_df, test_df)

    # build the classifier defined in Model
    model = Classifier()
    model = model.model()

    # encode categorical features (label encoding or one-hot encoding)
    train_df = model.encode(train_df)
    test_df = model.encode(test_df)

    # train the model on the prepared training data
    model = model.train(model, train_df)

    # predict on test_df with predict method from Model
    y_test = model.predict(model, test_df)
    return y_test
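A minimal usage sketch for the pipeline above, assuming pandas DataFrames loaded from Kaggle-style train.csv/test.csv files; the file names and the 'PassengerId'/'Survived' columns are illustrative assumptions, not part of the original code.

import pandas as pd

# load the raw data (file names are assumptions)
train_df = pd.read_csv('train.csv')
test_df = pd.read_csv('test.csv')
test_ids = test_df['PassengerId']  # kept aside before the pipeline drops columns

# clean, engineer, encode, train, and predict in one call
predictions = train_and_predict(train_df, test_df)

# pair the predictions with their ids for a submission file
pd.DataFrame({'PassengerId': test_ids, 'Survived': predictions}).to_csv(
    'submission.csv', index=False)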
Example #2
def test_coupled_weights_of_backbone():
    """
    This function will fail because there are multiple models defined 
    in the keras/tensorflow graph which are not used during training. 

    Returns:
        bool -- [description]
    """
    data = mnist_data()
    backbone = ResNet()

    # call the backbone once so its weights are built before it is shared
    preds = backbone(data.get_test()[0])
    gen = ResGen(backbone)
    input_shape = gen.get_input_shape()
    rand_data_shape = (50,) + input_shape[1:] + (1,)
    random_noise_data = np.random.normal(size=rand_data_shape)

    discriminator = Discriminator(backbone)
    classifier = Classifier(backbone, 10)

    discriminator_predictions_1 = discriminator(data.get_test()[0])
    classifier_predictions_1 = classifier.predict(data.get_test()[0])
    generator_predictions_1 = gen.predict(random_noise_data)[0]

    classifier.compile(optimizer='adam',
                       loss='sparse_categorical_crossentropy',
                       metrics=['accuracy'])
    classifier.summary()
    # classifier.fit(x=x_train,y=y_train,batch_size=6000,epochs=1, validation_data=(x_vali,y_vali),callbacks=[checkpoint])
    classifier.fit(x=data.get_n_samples(35)[0],
                   y=data.get_n_samples(35)[1],
                   batch_size=6000,
                   epochs=1,
                   validation_data=data.get_vali())

    discriminator_predictions_2 = discriminator(data.get_test()[0])
    classifier_predictions_2 = classifier.predict(data.get_test()[0])
    generator_predictions_2 = gen.predict(random_noise_data)[0]

    # output differences before vs. after training the classifier
    discriminator_diff = discriminator_predictions_1 - discriminator_predictions_2
    classifier_diff = classifier_predictions_1 - classifier_predictions_2
    generator_diff = generator_predictions_1 - generator_predictions_2

    return True
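The test above computes the prediction differences but never asserts on them; below is a minimal sketch of how those diffs could be checked, assuming the model outputs convert to NumPy arrays. The helper name and the zero threshold are illustrative assumptions, not part of the original code.

import numpy as np

def assert_backbone_is_coupled(diffs):
    # if the backbone is truly shared, training the classifier should also
    # change the discriminator and generator outputs
    for name, diff in diffs.items():
        if np.abs(np.asarray(diff)).max() == 0:
            raise AssertionError(f"{name} outputs did not change; backbone may not be shared")

# usage at the end of test_coupled_weights_of_backbone, before `return True`:
# assert_backbone_is_coupled({'discriminator': discriminator_diff,
#                             'classifier': classifier_diff,
#                             'generator': generator_diff})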
Example #3
def train_classifier_depricated(tpu=False):

    # `strategy` is assumed to be a tf.distribute strategy defined at module level
    scope = strategy.scope()

    print("Number of accelerators: ", strategy.num_replicas_in_sync)

    data = mnist_data()

    backbone = ResNet()
    discriminator = Discriminator(backbone)
    classifier = Classifier(backbone, 10)
    preds = classifier.predict(data.get_test()[0])

    classifier.compile(optimizer='adam',
                       loss='sparse_categorical_crossentropy',
                       metrics=['accuracy'])
    classifier.summary()

    if tpu:
        classifier = convert_model_for_tpu(classifier)

    checkpoint = keras.callbacks.ModelCheckpoint(
        './checkpoints/classifier/classifier_{epoch:.2f}.h5',
        monitor='val_loss',
        verbose=0,
        save_best_only=False,
        save_weights_only=True)
    # classifier.fit(x=x_train,y=y_train,batch_size=6000,epochs=1, validation_data=(x_vali,y_vali),callbacks=[checkpoint])
    classifier.fit(x=data.get_n_samples(35)[0],
                   y=data.get_n_samples(35)[1],
                   batch_size=6000,
                   epochs=20,
                   validation_data=data.get_vali(),
                   callbacks=[checkpoint])
    # import pdb; pdb.set_trace()  # breakpoint 396fe169 //
    backbone = classifier.get_backbone()
    backbone.save_weights('backbone_weights.h5')
    # return the trained classifier together with the test inputs
    return (classifier, data.get_test()[0])
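A short sketch of how the backbone weights saved by train_classifier_depricated could be restored later, assuming ResNet is a Keras model that builds its weights on the first call; the dummy input shape is an MNIST-style assumption.

import numpy as np

# build a fresh backbone and create its weights by calling it once
new_backbone = ResNet()
dummy_input = np.zeros((1, 28, 28, 1), dtype='float32')  # assumed MNIST-shaped input
_ = new_backbone(dummy_input)

# restore the weights written out at the end of training
new_backbone.load_weights('backbone_weights.h5')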