```python
import tensorflow as tf
from tensorflow.keras import layers, Model, regularizers

# Define the input layer
inputs = layers.Input(shape=(784,))

# Add a hidden layer with L2 regularization on its weights
hidden_layer = layers.Dense(
    128,
    activation='relu',
    kernel_regularizer=regularizers.l2(0.01)
)(inputs)

# Add the output layer with softmax activation
outputs = layers.Dense(10, activation='softmax')(hidden_layer)

# Define the model
model = Model(inputs=inputs, outputs=outputs)

# Add an extra L2 penalty on the hidden activations
# (activity regularization, separate from the kernel
# regularizer applied to the weights above)
model.add_loss(0.01 * tf.reduce_sum(tf.square(hidden_layer)))
```
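The model above is only defined, never compiled or trained. As a minimal sketch of how it might be used, assuming MNIST-style inputs flattened to 784 features (the dataset loading here is illustrative and not part of the original example), both the kernel-regularizer penalty and the `add_loss` penalty are folded into the total loss automatically:

```python
import tensorflow as tf

# Load and flatten MNIST as an illustrative dataset (assumption)
(x_train, y_train), _ = tf.keras.datasets.mnist.load_data()
x_train = x_train.reshape(-1, 784).astype('float32') / 255.0

# Compile with a standard classification loss; Keras adds the
# regularization penalties registered above to this loss
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

model.fit(x_train, y_train, epochs=5, batch_size=32)
```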
```python
from tensorflow.keras import layers, Model, backend as K

# Define the custom loss function
def custom_loss(y_true, y_pred, alpha=0.5):
    # Calculate the binary cross-entropy loss
    bce_loss = K.binary_crossentropy(y_true, y_pred)
    # Calculate the custom part of the loss
    # (renamed from custom_loss to avoid shadowing the function)
    penalty = alpha * K.square(y_pred - y_true)
    # Return the sum of the two losses
    return bce_loss + penalty

# Define the input layer
inputs = layers.Input(shape=(784,))

# Add a hidden layer with 64 neurons and ReLU activation
hidden_layer = layers.Dense(64, activation='relu')(inputs)

# Add the output layer with sigmoid activation
outputs = layers.Dense(1, activation='sigmoid')(hidden_layer)

# Define the model
model = Model(inputs=inputs, outputs=outputs)

# Set the model's loss function to the custom loss function
model.compile(loss=custom_loss, optimizer='adam')

# Train the model using the custom loss function
model.fit(X_train, y_train, epochs=10, batch_size=32)
```

In this example, we define a custom loss function that combines binary cross-entropy with a squared-error term that penalizes predictions far from their targets more heavily. We then define a model with an input layer, a hidden layer with ReLU activation, and an output layer with sigmoid activation. We set the model's loss function to the custom loss function using `model.compile`, and then train the model using `model.fit`.
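Note that Keras invokes the loss as `loss(y_true, y_pred)`, so the `alpha` argument above always keeps its default value of 0.5. If you want to tune `alpha` without editing the function, one common pattern (a sketch, not part of the original example) is to wrap the loss in a closure that bakes the parameter in:

```python
def make_custom_loss(alpha=0.5):
    """Return a two-argument loss function with alpha captured via a closure."""
    def loss_fn(y_true, y_pred):
        bce_loss = K.binary_crossentropy(y_true, y_pred)
        penalty = alpha * K.square(y_pred - y_true)
        return bce_loss + penalty
    return loss_fn

# Compile with a heavier penalty term
model.compile(loss=make_custom_loss(alpha=1.0), optimizer='adam')
```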