莫煩 TensorFlow_07: TensorBoard Visualization
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt  # imported by the original script, not used in this snippet

def add_layer(inputs, in_size, out_size, activation_function=None):
    # group every layer's ops under its own name scope so TensorBoard draws them as one node
    with tf.name_scope('layer'):
        with tf.name_scope('Weights'):
            # weights shape: [in_size rows, out_size columns]
            Weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')
        with tf.name_scope('biases'):
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
        with tf.name_scope('Wx_plus_b'):
            Wx_plus_b = tf.matmul(inputs, Weights) + biases
        if activation_function is None:
            outputs = Wx_plus_b
        else:
            outputs = activation_function(Wx_plus_b)
        return outputs

# define placeholders for the network inputs
with tf.name_scope('inputs'):
    xs = tf.placeholder(tf.float32, [None, 1], name='x_input')
    ys = tf.placeholder(tf.float32, [None, 1], name='y_input')

# add hidden layer
l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)
# add output layer
prediction = add_layer(l1, 10, 1, activation_function=None)

# the error between prediction and real data
with tf.name_scope('loss'):
    loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction),
                                        reduction_indices=[1]))
with tf.name_scope('train'):
    train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

sess = tf.Session()
# write the graph definition to logs/ so TensorBoard can display it
writer = tf.summary.FileWriter("logs/", sess.graph)
# important step
sess.run(tf.global_variables_initializer())
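The script above only builds the graph and writes it to logs/ for TensorBoard; it never actually trains the network. If you also want to run it, a minimal sketch like the following can be appended. The toy data (y = x² − 0.5 plus noise) and the 1000-step loop are assumptions here, mirroring the regression example used earlier in this tutorial series, not part of this section's code.

# assumed toy data: y = x^2 - 0.5 with Gaussian noise (illustrative only)
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise

for i in range(1000):
    # feed the placeholders defined in the 'inputs' name scope
    sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
    if i % 50 == 0:
        # print the loss so you can watch it decrease
        print(sess.run(loss, feed_dict={xs: x_data, ys: y_data}))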
Note: some browsers do not display TensorBoard well; the latest version of Chrome is recommended.
Enter the following on the command line:
tensorboard --logdir=logs/
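TensorBoard reads the event files that tf.summary.FileWriter wrote into logs/. If the graph page comes up empty, a quick, purely illustrative check from Python that an event file was actually produced (assuming the script was run from the current working directory):

import glob
# the FileWriter above creates files named events.out.tfevents.<timestamp>.<hostname>
print(glob.glob("logs/events.out.tfevents.*"))

Once TensorBoard starts, it prints a local address (http://localhost:6006 by default); open it in the browser and switch to the GRAPHS tab to see the network structure built from the tf.name_scope blocks above.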