Example #1
# Assumed imports for this snippet: train_test_split from scikit-learn and
# NASNetLarge from the Keras applications module.
from sklearn.model_selection import train_test_split
from keras.applications import NASNetLarge

# Hold out 10% of the data for testing; `input` is the image array
# (note that the name shadows the Python built-in).
X_train, X_test, y_train, y_test = train_test_split(input,
                                                    y,
                                                    test_size=0.1,
                                                    random_state=42)

# Build NASNetLarge from scratch (no pretrained weights) with 7 output classes.
model = NASNetLarge(weights=None, classes=7)

model.summary()
model.compile(loss='categorical_crossentropy',
              optimizer='sgd',
              metrics=['accuracy'])
# TimeHistory is a custom callback that records per-epoch training times
# (a sketch of it is given after this example).
time_callback = TimeHistory()
model.fit(X_train,
          y_train,
          epochs=5,
          batch_size=16,
          validation_data=(X_test, y_test),
          callbacks=[time_callback])
# Evaluate on the held-out test set and save the trained weights.
name = 'results/UHCS_NASNetLarge_Weights'
score = model.evaluate(X_test, y_test, batch_size=16)
print('Test score:', score[0])
print('Test accuracy:', score[1])
model.save_weights(name + '.h5')

# Write the evaluation results and per-epoch times to a text file.
times = time_callback.times
with open('NASNetLarge.txt', 'w') as file:
    file.write('Test score:' + str(score[0]) + '\n')
    file.write('Test accuracy:' + str(score[1]) + '\n')
    file.write(str(times))
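The example above relies on a TimeHistory callback that is not defined in the snippet. A minimal sketch of what such a callback might look like, assuming it only needs to record per-epoch wall-clock times (the class name and the `times` attribute are taken from the usage above):

import time
from keras.callbacks import Callback

class TimeHistory(Callback):
    """Records the wall-clock duration of each training epoch in self.times."""

    def on_train_begin(self, logs=None):
        self.times = []

    def on_epoch_begin(self, epoch, logs=None):
        self._epoch_start = time.time()

    def on_epoch_end(self, epoch, logs=None):
        self.times.append(time.time() - self._epoch_start)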
Example #2
    # Load our model
    # model = densenet169_model(img_rows=img_rows, img_cols=img_cols, color_type=channel, num_classes=num_classes)

    # Load the Keras NASNetLarge model, trained from scratch on 10 classes.
    # Assumed imports: NASNetLarge from keras.applications, SGD from
    # keras.optimizers and log_loss from sklearn.metrics.
    model = NASNetLarge(weights=None, classes=10)
    # SGD with a small learning rate, weight decay and Nesterov momentum.
    sgd = SGD(lr=1e-3, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(optimizer=sgd,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    model.summary()

    # Start Fine-tuning
    model.fit(
        X_train,
        Y_train,
        batch_size=batch_size,
        epochs=nb_epoch,
        shuffle=True,
        verbose=1,
        validation_data=(X_valid, Y_valid),
    )

    # Make predictions
    predictions_valid = model.predict(X_valid,
                                      batch_size=batch_size,
                                      verbose=1)

    # Cross-entropy loss score
    score = log_loss(Y_valid, predictions_valid)
Example #3
# In[ ]:


# Import assumed from standalone Keras (lr= is the legacy argument name).
from keras.optimizers import RMSprop

opt = RMSprop(lr=0.0001)
model.compile(loss='mean_squared_error', optimizer=opt, metrics=['mae'])


# **Fitting the model**
# 
# **NOTE: The number of epochs is set to 100**

# In[11]:


network_history = model.fit(x_train, y_train,
                            batch_size=8,
                            epochs=100,
                            verbose=1,
                            validation_data=(x_val, y_val))


# ### Save the Trained Model
# 
# 

# In[ ]:


#model.save('/content/drive/My Drive/ColabNotebooks/AllmodeloRMSpropXception.h5')

model.save('/content/drive/My Drive/ColabNotebooks/NasNet/modelNasNet.h5')


# ### Load the Trained Model
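# The loading cell itself is not included above; a minimal sketch, assuming the
# same file path that model.save() wrote to, would be:

# In[ ]:


from keras.models import load_model

model = load_model('/content/drive/My Drive/ColabNotebooks/NasNet/modelNasNet.h5')
model.summary()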