def learn1():
    """Train a small MLP classifier on MNIST with Keras and export it as a SavedModel.

    Side effects: loads MNIST via the project's MNISTLoader, trains for
    `num_epochs` epochs, and writes the trained model to "saved/1".
    Relies on `tf` and `MNISTLoader` being in scope at module level.
    """
    num_epochs = 1
    batch_size = 50
    learning_rate = 0.001

    # Simple MLP: flatten the input image, one 100-unit hidden layer,
    # 10 logits, then an explicit Softmax layer so the exported model
    # emits probabilities.
    model = tf.keras.models.Sequential([
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(100, activation=tf.nn.relu),
        tf.keras.layers.Dense(10),
        tf.keras.layers.Softmax()
    ])
    data_loader = MNISTLoader()
    # Fix: use the `learning_rate` variable instead of a duplicated 0.001
    # literal, so editing the variable actually changes the optimizer.
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
        loss=tf.keras.losses.sparse_categorical_crossentropy,
        metrics=[tf.keras.metrics.sparse_categorical_accuracy]
    )
    model.fit(data_loader.train_data, data_loader.train_label,
              epochs=num_epochs, batch_size=batch_size)
    # Export in SavedModel format for serving (e.g. TF Serving).
    tf.saved_model.save(model, "saved/1")
import json

import numpy as np
import requests

from zh.model.utils import MNISTLoader

# Query a TensorFlow Serving REST endpoint with the first 3 MNIST test
# images and compare the served predictions against the ground-truth labels.
data_loader = MNISTLoader()
# TF Serving's predict API expects {"instances": [...]} with JSON-serializable
# rows, hence the .tolist() conversion.
data = json.dumps({"instances": data_loader.test_data[0:3].tolist()})
headers = {"content-type": "application/json"}
json_response = requests.post(
    'http://localhost:8501/v1/models/MLP:predict',
    data=data, headers=headers)
predictions = np.array(json.loads(json_response.text)['predictions'])
print(np.argmax(predictions, axis=-1))
# Fix: only 3 instances were sent, so print the first 3 labels — the
# original printed 10 labels, misaligning predictions vs. ground truth.
print(data_loader.test_label[0:3])
x = tf.keras.layers.MaxPool2D(pool_size=[2, 2], strides=2)(x) x = tf.keras.layers.Conv2D(filters=64, kernel_size=[5, 5], padding="same", activation=tf.nn.relu)(x) x = tf.keras.layers.MaxPool2D(pool_size=[2, 2], strides=2)(x) x = tf.keras.layers.Reshape(target_shape=(7 * 7 * 64, ))(x) x = tf.keras.layers.Dense(units=1024, activation=tf.nn.relu)(x) x = tf.keras.layers.Dense(units=10)(x) outputs = tf.keras.layers.Softmax()(x) model = tf.keras.Model(inputs=inputs, outputs=outputs) if mode == 'subclassing': from zh.model.mnist.cnn import CNN model = CNN() data_loader = MNISTLoader() if training_loop == 'keras': model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001), loss=tf.keras.losses.sparse_categorical_crossentropy, metrics=[tf.keras.metrics.sparse_categorical_accuracy]) model.fit(data_loader.train_data, data_loader.train_label, epochs=num_epochs, batch_size=batch_size) print(model.evaluate(data_loader.test_data, data_loader.test_label)) if training_loop == 'custom': optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate) num_batches = int(data_loader.num_train_data // batch_size * num_epochs) for batch_index in range(num_batches): X, y = data_loader.get_batch(batch_size) with tf.GradientTape() as tape: