Example #1: dump the graph of a tf.keras MNIST classifier
import numpy as np
import tensorflow as tf

import tfcg


def main():
    # Load MNIST and scale pixel values to [0, 1].
    (X_train, y_train), (X_test, y_test) = tf.keras.datasets.mnist.load_data()
    X_train = X_train.astype(np.float32).reshape((60000, 28, 28, 1)) / 255.0
    X_test = X_test.astype(np.float32).reshape((10000, 28, 28, 1)) / 255.0

    model = create_model()
    loss = tf.keras.losses.SparseCategoricalCrossentropy()
    acc = tf.keras.metrics.SparseCategoricalAccuracy()
    optim = tf.keras.optimizers.Adam()

    # train
    model.compile(optimizer=optim, loss=loss, metrics=[acc])
    model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=1, batch_size=2048)
    val_loss, val_acc = model.evaluate(X_test, y_test, batch_size=2048)

    # Parse the default graph and export it as an image and a YAML description.
    parser = tfcg.from_graph_def(tf.get_default_graph().as_graph_def())
    parser.dump_img("outputs/mnist_graph.png")
    parser.dump_yml("outputs/mnist_graph.yml")

    print(val_loss, val_acc)


if __name__ == "__main__":
    main()
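The example above assumes a create_model() helper that is not shown. A minimal sketch of what it might look like for 28x28x1 MNIST inputs (the architecture below is only an illustration, not the original implementation):

# Hypothetical create_model(): any tf.keras model ending in a 10-way softmax
# works with the SparseCategoricalCrossentropy loss above; this particular
# architecture is an assumption for illustration only.
def create_model():
    return tf.keras.Sequential([
        tf.keras.layers.Conv2D(32, 3, activation="relu", input_shape=(28, 28, 1)),
        tf.keras.layers.MaxPool2D(),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(10, activation="softmax"),
    ])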
Example #2: dump a multi-head graph built with tf.placeholder and tf.Session
import numpy as np
import tensorflow as tf

import tfcg


def build(x):
    # Shared convolutional backbone
    x = tf.keras.layers.Conv2D(16, 3, input_shape=[28, 28, 3])(x)
    x = tf.keras.layers.Conv2D(32, 1)(x)
    x = tf.keras.layers.Conv2D(64, 2)(x)
    x = tf.keras.layers.Conv2D(128, 2)(x)
    x = tf.keras.layers.Flatten()(x)
    # Head 1: shallow dense branch
    x1 = tf.keras.layers.Dense(32)(x)
    x1 = tf.keras.layers.ReLU()(x1)
    x1 = tf.keras.layers.Dense(16)(x1)

    # Head 2: deeper dense branch
    x2 = tf.keras.layers.Dense(128)(x)
    x2 = tf.keras.layers.ReLU()(x2)
    x2 = tf.keras.layers.Dense(64)(x2)
    x2 = tf.keras.layers.ReLU()(x2)
    x2 = tf.keras.layers.Dense(32)(x2)
    x2 = tf.keras.layers.ReLU()(x2)
    x2 = tf.keras.layers.Dense(16)(x2)
    return x1, x2


with tf.Graph().as_default() as graph:
    # Random input batch and a placeholder with a matching shape
    x = np.random.rand(128, 28, 28, 3)
    x_p = tf.placeholder(tf.float32, [None, 28, 28, 3])
    out1, out2 = build(x_p)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        o = sess.run((out1, out2), feed_dict={x_p: x})
        # Parse the session's graph and export it as an image, YAML, and GML.
        parser = tfcg.from_graph_def(sess.graph_def)
        parser.dump_img('outputs/multitask_graph.png')
        parser.dump_yml('outputs/multitask_graph.yml')
        parser.dump_gml('outputs/multitask_graph.gml')
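If the file written by dump_gml is standard GML (an assumption; the exact node and edge attributes are not shown here), the exported graph can be loaded back for programmatic inspection, for example with networkx:

# Hypothetical post-processing, assuming the dumped file is standard GML
# that networkx can parse.
import networkx as nx

g = nx.read_gml('outputs/multitask_graph.gml')
print(g.number_of_nodes(), g.number_of_edges())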