np.allclose(best_theta, best_theta_restored)

# This means that you can import a pretrained model without having to have the corresponding Python code to build the graph. This is very handy when you keep tweaking and saving your model: you can load a previously saved model without having to search for the version of the code that built it.
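# A minimal sketch of that pattern, assuming the checkpoint was saved earlier as "/tmp/my_model_final.ckpt" and the variable was named "theta":

reset_graph()
# import_meta_graph() rebuilds the graph from the .meta file and returns a Saver
saver = tf.train.import_meta_graph("/tmp/my_model_final.ckpt.meta")
theta = tf.get_default_graph().get_tensor_by_name("theta:0")

with tf.Session() as sess:
    saver.restore(sess, "/tmp/my_model_final.ckpt")  # restores the variable values
    best_theta_restored = theta.eval()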

# # Visualizing the graph
# ## inside Jupyter

# To visualize the graph within Jupyter, we will use a TensorBoard server available online at https://tensorboard.appspot.com/ (so this will not work if you do not have Internet access).  As far as I can tell, this code was originally written by Alex Mordvintsev in his [DeepDream tutorial](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/examples/tutorials/deepdream/deepdream.ipynb). Alternatively, you could use a tool like [tfgraphviz](https://github.com/akimach/tfgraphviz).

# In[57]:

from tensorflow_graph_in_jupyter import show_graph

# In[58]:

show_graph(tf.get_default_graph())

# ## Using TensorBoard

# In[59]:

reset_graph()

from datetime import datetime

now = datetime.utcnow().strftime("%Y%m%d%H%M%S")
root_logdir = "tf_logs"
logdir = "{}/run-{}/".format(root_logdir, now)
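
# This run directory is then handed to a summary writer so TensorBoard can display the graph; a minimal sketch (assuming a graph has already been built in the default graph):
file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())  # writes the graph definition to logdir
file_writer.close()

# Then run `tensorboard --logdir tf_logs` and point a browser at the address it prints.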

# # Example 2

import tensorflow as tf

# Build a small graph g computing f = x*x*y + y + 2 with x = 3 and y = 4
g = tf.Graph()
with g.as_default():
    x = tf.Variable(3, name="x")
    y = tf.Variable(4, name="y")
    f = x * x * y + y + 2


# I got a deprecation warning when I ran the above code on my own laptop, but I don't know whether you'll get it on the lab machines. If you do, ignore it. As best I can tell, there is no problem with the code above; the library functions we're calling directly just rely on an out-of-date function in a lower-level library.

# The `tensorflow` library has a visualization tool called TensorBoard. We can use it to look at the graph that we made. To make full use of TensorBoard we would need to modify the code to write information about the graph (and other things, like training statistics) to a log file, then start a TensorBoard server and point our browser at it.
# But we can shortcut all that and get the basic TensorBoard functionality with the following code.
# 
# (Be aware, though, that this is not part of the `tensorflow` library itself but something found in the file `tensorflow_graph_in_jupyter.py`, which you grabbed along with the Jupyter notebook. It's part of the code that accompanies Géron's book.)

# In[9]:


from tensorflow_graph_in_jupyter import show_graph
show_graph(g)


# We have not executed this computation yet, just set it up as a graph. To run it, we start a `tensorflow` session:

# In[6]:


sess = tf.Session(graph=g)
sess.run(x.initializer)
sess.run(y.initializer)
result = sess.run(f)
print(result)


# Of course this seems like overkill just to compute $3^2 \times 4 + 4 + 2$, but perhaps you can begin to see how this setup will be useful in much more complicated computations... like artificial neural nets.
# # Example 3

# Mini-batch Gradient Descent
# (m, n and the scaled housing data are assumed from earlier in the notebook)
import numpy as np
import tensorflow as tf

X = tf.placeholder(tf.float32, shape=(None, n + 1), name="X")
y = tf.placeholder(tf.float32, shape=(None, 1), name="y")

batch_size = 100
n_batches = int(np.ceil(m/batch_size))

def fetch_batch(epoch, batch_index, batch_size):
    # Sample a reproducible random mini-batch; `scaled_housing_data_plus_bias`
    # and `housing` are assumed from earlier in the notebook
    np.random.seed(epoch * n_batches + batch_index)
    indices = np.random.randint(m, size=batch_size)
    X_batch = scaled_housing_data_plus_bias[indices]
    y_batch = housing.target.reshape(-1, 1)[indices]
    return X_batch, y_batch
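
# This snippet assumes the training graph was built earlier in the notebook; a
# minimal sketch of the missing pieces (the names follow Géron's notebook, and
# the learning rate and epoch count are assumptions):
n_epochs = 10
learning_rate = 0.01
theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0), name="theta")
y_pred = tf.matmul(X, theta, name="predictions")
error = y_pred - y
mse = tf.reduce_mean(tf.square(error), name="mse")
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
training_op = optimizer.minimize(mse)
init = tf.global_variables_initializer()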

with tf.Session() as sess:
    sess.run(init)
    
    for epoch in range(n_epochs):
        for batch_index in range(n_batches):
            X_batch, y_batch = fetch_batch(epoch, batch_index, batch_size)
            sess.run(training_op, feed_dict={X: X_batch, y:y_batch})
    
    best_theta = theta.eval()        


#--------------------------------
# Visualize graph within Jupyter 
#--------------------------------
from tensorflow_graph_in_jupyter import show_graph
show_graph(tf.get_default_graph())   


#-------------------------
# Using TensorBoard
#------------------------- 
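
# A minimal sketch of the usual setup, following the same pattern as the
# "Using TensorBoard" section above (timestamped run directory under tf_logs/,
# then a FileWriter that dumps the graph for the TensorBoard server):
from datetime import datetime

now = datetime.utcnow().strftime("%Y%m%d%H%M%S")
logdir = "tf_logs/run-{}/".format(now)
file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())
file_writer.close()
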
# # Example 4
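
# This example assumes the California housing data was loaded earlier; a minimal
# sketch of that setup (the standard scikit-learn loader, as in Géron's notebook):
import numpy as np
import tensorflow as tf
from sklearn.datasets import fetch_california_housing

housing = fetch_california_housing()
m, n = housing.data.shape
housing_data_plus_bias = np.c_[np.ones((m, 1)), housing.data]
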
print(housing.target)
print(housing.target.reshape(-1, 1))

# Build the computation graph
X = tf.constant(housing_data_plus_bias, dtype=tf.float32, name='X')
y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name='y')

# Transpose X and solve the Normal Equation: theta = (X^T X)^{-1} X^T y
XT = tf.transpose(X)
theta = tf.matmul(tf.matmul(tf.matrix_inverse(tf.matmul(XT, X)), XT), y)

with tf.Session() as session:
    theta_value = theta.eval()
    print(theta_value)

print(housing_data_plus_bias[0])

values = np.array([
    1., 8.3252, 41., 6.98412698, 1.02380952, 322., 2.55555556, 37.88, -122.23
]).reshape(1, 9)
Values = tf.constant(values, dtype=tf.float32, name='values')
with tf.Session() as session:
    predict = tf.matmul(Values, theta_value).eval()
    print(predict[0][0])  # 4.131298

print(housing.target[0])  # 4.526

from tensorflow_graph_in_jupyter import show_graph
show_graph(tf.get_default_graph())