def mlp(epochs: int):
    """Train a small MLP regressor on the Boston housing dataset.

    Side effects: saves the trained model to ``model.h5`` and the per-epoch
    training history to ``history.csv``.

    Args:
        epochs: Number of training epochs.
    """
    set_seed(42)
    (x_train, y_train), (x_test, y_test) = boston_housing.load_data()
    model = Sequential([
        InputLayer(input_shape=(x_train.shape[1],)),
        # The redundant input_shape kwarg on the first Dense was dropped:
        # the preceding InputLayer already fixes the input shape.
        Dense(10, activation="relu"),
        Dense(10, activation="relu"),
        Dense(10, activation="relu"),
        Dense(10, activation="relu"),
        Dropout(0.2),
        # NOTE(review): relu on the output clips predictions at 0 — plausible
        # for non-negative house prices, but "linear" is the usual regression
        # head; confirm this is intentional.
        Dense(1, activation="relu"),
    ])
    model.compile(optimizer="nadam", loss="mse")
    history = model.fit(
        x_train,
        y_train,
        validation_data=(x_test, y_test),
        epochs=epochs,
        batch_size=100,
        shuffle=True,
    ).history
    model.save("model.h5")
    pd.DataFrame(history).to_csv("history.csv")
    # Bug fix: plot_history was commented out, yet plt.savefig still ran and
    # wrote an empty (or stale) figure to history.png. The two calls belong
    # together — both stay disabled until plotting is re-enabled.
    # plot_history(history)
    # plt.savefig("history.png")
def model(input_size: int):
    """Build and compile a multi-layer perceptron for binary classification.

    Args:
        input_size: Number of input features.

    Returns:
        A compiled Keras ``Sequential`` model with a sigmoid output unit.
    """
    set_seed(42)
    hidden_layers = [Dense(80, activation="relu") for _ in range(2)]
    net = Sequential(
        [InputLayer(input_shape=(input_size,))]
        + hidden_layers
        + [Dropout(0.2), Dense(1, activation="sigmoid")]
    )
    net.compile(
        optimizer="nadam",
        loss='binary_crossentropy',
        metrics=["auprc", "auroc", "accuracy"],
    )
    return net
def get_batch_sizes(resolution: int, minimum: int, size: int, seed: int,
                    base: float = 1.1, delta: int = 10):
    """Return ``resolution`` shuffled, geometrically-spaced batch sizes.

    Powers of ``base`` (exponents ``delta .. delta+resolution-1``) are
    shuffled, then rescaled into the range ``(minimum, minimum + size]``
    and rounded up to integers.

    Args:
        resolution: How many batch sizes to produce.
        minimum: Lower bound added to every scaled value.
        size: Span of the scaled values above ``minimum``.
        seed: Seed passed to ``set_seed`` for a reproducible shuffle.
        base: Base of the geometric progression.
        delta: Smallest exponent used.

    Returns:
        An integer numpy array of length ``resolution``.
    """
    set_seed(seed)
    exponents = np.arange(delta, delta + resolution)
    raw = base ** exponents
    np.random.shuffle(raw)
    # Shuffling does not change the maximum, so normalizing afterwards is safe.
    scaled = np.ceil(raw / raw.max() * (size - minimum)).astype(int)
    return minimum + scaled
def test_set_seed():
    """Smoke-test ``set_seed`` with its second (flag) argument enabled."""
    set_seed(42, True)