TensorBoard Visualization
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 5 09:29:36 2017
@author: Admin
"""
import tensorflow as tf

def add_layer(inputs, in_size, out_size, activation_function=None):
    # Build one fully connected layer inside its own name scope so that
    # TensorBoard groups its weights, biases and output into a single node.
    with tf.name_scope('layer'):
        with tf.name_scope('weights'):
            Weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')
        with tf.name_scope('biases'):
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
        with tf.name_scope('Wx_plus_b'):
            Wx_plus_b = tf.add(tf.matmul(inputs, Weights), biases)
        if activation_function is None:
            outputs = Wx_plus_b
        else:
            outputs = activation_function(Wx_plus_b)
        return outputs

with tf.name_scope('inputs'):
    xs = tf.placeholder(tf.float32, [None, 1], name='x_input')
    ys = tf.placeholder(tf.float32, [None, 1], name='y_input')

# Hidden layer with 10 ReLU units, followed by a linear output layer.
l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)
prediction = add_layer(l1, 10, 1, activation_function=None)

with tf.name_scope('loss'):
    loss = tf.reduce_mean(
        tf.reduce_sum(tf.square(ys - prediction, name='square'),
                      reduction_indices=[1], name='reduce_sum'),
        name='reduce_mean')

with tf.name_scope('train'):
    train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

sess = tf.Session()
# Old API (TensorFlow < 1.0):
# writer = tf.train.SummaryWriter("/logs", sess.graph)
writer = tf.summary.FileWriter("D://path/to/log", sess.graph)
sess.run(tf.global_variables_initializer())
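The listing above only builds the graph and writes it to the log directory; train_step is defined but never executed. A minimal training-loop sketch (assuming synthetic data generated with numpy, which is not part of the original listing):

import numpy as np

# Assumed synthetic data: y = x^2 - 0.5 plus noise, shaped to match the [None, 1] placeholders.
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise

for i in range(1000):
    sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
    if i % 50 == 0:
        print(sess.run(loss, feed_dict={xs: x_data, ys: y_data}))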
Run TensorBoard from the command line (cmd): tensorboard --logdir=D://path/to/log
Then open the browser at http://localhost:6006/#graphs
(Each run of the script in the console trains one neural network and adds its graph to the visualization.)
(Each additional file you run in the console adds one more graph to the visualization; to clear them, close the console and restart it.)
(D://path/to/log means a path folder is created on drive D; that folder can only be deleted after the console is closed.)
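A note on the locked log folder: the FileWriter keeps the event file open, which is why the folder cannot be deleted while the console is still running. Calling the writer's close() method (a standard tf.summary.FileWriter method) flushes pending events and releases the file, after which the folder can usually be deleted without restarting the console:

# Flush buffered events and release the event file handle.
writer.close()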