Example #1
import os
import webbrowser

import numpy as np
import tensorflow as tf
import tfgraphviz as tfg
from IPython.display import display, HTML


def show_graph(graph_def, max_const_size=32):
    """Visualize a TensorFlow graph inline, as an HTML file, and with Graphviz.

    Assumes the helper `strip_consts` and the constant `PROJECT_DIR` are
    defined elsewhere in the surrounding notebook/module.
    """

    graph_path = os.path.join(PROJECT_DIR, 'graph.html')

    if hasattr(graph_def, 'as_graph_def'):
        graph_def = graph_def.as_graph_def()
    strip_def = strip_consts(graph_def, max_const_size=max_const_size)
    code = """
        <script>
          function load() {{
            document.getElementById("{id}").pbtxt = {data};
          }}
        </script>
        <link rel="import" href="https://tensorboard.appspot.com/tf-graph-basic.build.html"
        onload=load()>
        <div style="height:600px">
          <tf-graph-basic id="{id}"></tf-graph-basic>
        </div>
    """.format(data=repr(str(strip_def)), id='graph' + str(np.random.rand()))

    iframe = """
        <iframe seamless style="width:1800px;height:1620px;border:0" srcdoc="{}"></iframe>
    """.format(code.replace('"', '&quot;'))
    display(HTML(iframe))

    # Also write the HTML to disk and open it in the default browser
    with open(graph_path, 'w') as g:
        g.write(iframe)
    webbrowser.open_new_tab(graph_path)

    # Use Graphviz to view graph
    graph = tf.get_default_graph()
    tfg.board(graph, depth=2).view()
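

# A minimal usage sketch (not part of the original snippet): build a tiny
# graph and render it with show_graph(); assumes strip_consts, PROJECT_DIR,
# and an IPython display environment are available.
a = tf.constant(1.0, name="a")
b = tf.constant(2.0, name="b")
c = tf.add(a, b, name="add")
show_graph(tf.get_default_graph())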

Example #2
import tensorflow as tf

# Graph visualization
import tfgraphviz as tfg


# Declare Functions
def relu(X):
    # Rectified Linear Unit
    # h_{w,b} (X) = max ( X \cdot w + b, 0 )
    with tf.name_scope("relu"):
        w_shape = (int(X.get_shape()[1]), 1)
        w = tf.Variable(tf.random_normal(w_shape), name="weights")
        b = tf.Variable(0.0, name="bias")
        z = tf.add(tf.matmul(X, w), b, name="z")
        return tf.maximum(z, 0, name="relu")


# Create nodes
n_features = 3
X = tf.placeholder(tf.float32, shape=(None, n_features), name="X")
relus = [relu(X) for _ in range(5)]  # Build five ReLU outputs from the same input
output = tf.add_n(relus, name="output")  # Sum a list of tensors

# Write out files
file_writer = tf.summary.FileWriter("logs/relu1", tf.get_default_graph())
file_writer.close()

# Generate Graph
tfg.board(tf.get_default_graph()).view()
Example #3
#!/usr/bin/env python
import tensorflow as tf
import tfgraphviz as tfg

a = tf.constant(1, name="a")
b = tf.constant(2, name="b")
c = tf.add(a, b, name="add")
g = tfg.board(tf.get_default_graph())
g.view()

import numpy as np
import tensorflow as tf
import tfgraphviz as tfg

# The original snippet omits the data-loading step; tf.keras.datasets.mnist
# is assumed here as one way to obtain the arrays.
(X_train, y_train), (X_test, y_test) = tf.keras.datasets.mnist.load_data()

# Flatten to 784 features, scale pixels to [0, 1], and split off a validation set
X_train = X_train.astype(np.float32).reshape(-1, 28 * 28) / 255.0
X_test = X_test.astype(np.float32).reshape(-1, 28 * 28) / 255.0
y_train = y_train.astype(np.int32)
y_test = y_test.astype(np.int32)
X_valid, X_train = X_train[:5000], X_train[5000:]
y_valid, y_train = y_train[:5000], y_train[5000:]

# Load Model
saver = tf.train.import_meta_graph("./tf_mnist_model_final.ckpt.meta")

# Display the operations in the imported graph
for op in tf.get_default_graph().get_operations():
    print(op.name)

# Display graph
tfg.board(tf.get_default_graph(), depth=3).view()

# Getting tensors by name
X = tf.get_default_graph().get_tensor_by_name("X:0")
y = tf.get_default_graph().get_tensor_by_name("y:0")

accuracy = tf.get_default_graph().get_tensor_by_name("eval/acc:0")
training_op = tf.get_default_graph().get_operation_by_name("GradientDescent")
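
# Sketch (assumption, not in the original snippet): the one-line collection
# lookup below only works if these handles were stored in the saved model,
# e.g. via tf.add_to_collection; populate the collection here if it is empty.
if not tf.get_collection("my_important_ops"):
    for op in (X, y, accuracy, training_op):
        tf.add_to_collection("my_important_ops", op)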

# To restore training model operations from imported graph
X, y, accuracy, training_op = tf.get_collection("my_important_ops")

# To restore model
with tf.Session() as sess:
    saver.restore(sess, "./tf_mnist_model_final.ckpt")
    # Continue training model
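    # Sketch of continuing training (hypothetical hyperparameters; assumes the
    # X_train/y_train/X_valid/y_valid arrays prepared above):
    n_epochs = 5
    batch_size = 50
    for epoch in range(n_epochs):
        for start in range(0, len(X_train), batch_size):
            X_batch = X_train[start:start + batch_size]
            y_batch = y_train[start:start + batch_size]
            sess.run(training_op, feed_dict={X: X_batch, y: y_batch})
        acc_val = accuracy.eval(feed_dict={X: X_valid, y: y_valid})
        print(epoch, "Validation accuracy:", acc_val)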

import sys

import tfgraphviz as tfg


def graph2pdf(sess, directory, **kw):
    # Render the session's graph to <directory>/graph.pdf with tfgraphviz
    print('Saving graph PDF in', directory, end=' ... '); sys.stdout.flush()
    g = tfg.board(sess.graph, **kw)
    g.render(filename='graph', directory=directory)
    print('done.')
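

# A minimal usage sketch (hypothetical output directory), assuming a live
# session whose graph should be exported:
import tensorflow as tf

with tf.Session() as sess:
    graph2pdf(sess, './graph_pdfs', depth=2)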