TensorBoard Visualization

# -*- coding: utf-8 -*-
"""
Created on Sun Nov 5 09:29:36 2017

@author: Admin
"""

import tensorflow as tf

def add_layer(inputs, in_size, out_size, activation_function=None):
    # Build one fully connected layer; grouping its ops under name scopes
    # makes them appear as collapsible nodes in the TensorBoard graph.
    with tf.name_scope('layer'):
        with tf.name_scope('weights'):
            Weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')
        with tf.name_scope('biases'):
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
        with tf.name_scope('Wx_plus_b'):
            Wx_plus_b = tf.add(tf.matmul(inputs, Weights), biases)
        if activation_function is None:
            outputs = Wx_plus_b
        else:
            outputs = activation_function(Wx_plus_b)
        return outputs

with tf.name_scope('inputs'):
    xs = tf.placeholder(tf.float32, [None, 1], name='x_input')
    ys = tf.placeholder(tf.float32, [None, 1], name='y_input')

l1 = add_layer(xs, 1, 10, activation_function = tf.nn.relu)
prediction = add_layer(l1, 10, 1, activation_function = None)

with tf.name_scope('loss'):
    loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction, name='square'),
                                        reduction_indices=[1], name='reduce_sum'),
                          name='reduce_mean')
with tf.name_scope('train'):
    train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

sess = tf.Session()
#writer = tf.train.SummaryWriter("/logs",sess.graph)
writer = tf.summary.FileWriter("D://path/to/log", sess.graph)
sess.run(tf.global_variables_initializer())
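
The script above only builds the graph and writes it to the log directory; it never actually trains. As a minimal sketch of how the same graph could be fitted, assuming hypothetical synthetic data x_data and y_data generated with numpy (none of this appears in the original post), a simple loop feeding the placeholders might look like this:

import numpy as np

# Hypothetical synthetic data: y = x^2 - 0.5 plus noise (not part of the original script)
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise

for i in range(1000):
    # One gradient-descent step on the whole dataset
    sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
    if i % 50 == 0:
        # Print the current loss so training progress is visible in the console
        print(sess.run(loss, feed_dict={xs: x_data, ys: y_data}))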

cmd  >>  tensorboard --logdir=D://path/to/log

Browser: http://localhost:6006/#graphs

(Each time the script is run from the console, one more neural network graph is trained and added to the visualization.)

(Each additional file you run from the console adds that file's graph to the visualization; to clear them, close the console and restart. See the sketch below for keeping runs separate.)

(D://path/to/log means a path folder is created on the D drive; the folder can only be deleted after the console has been closed.)
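
One common way to keep graphs from different runs from piling up (and to avoid restarting the console just to clear them) is to write each run into its own timestamped subdirectory and point --logdir at the parent folder. This is a general TensorBoard workflow, not part of the original post; a small sketch, reusing the same D://path/to/log base directory as above:

import os
import time

# Hypothetical per-run subdirectory, e.g. D://path/to/log/run-1509845376
log_dir = os.path.join("D://path/to/log", "run-%d" % int(time.time()))
writer = tf.summary.FileWriter(log_dir, sess.graph)

In the TensorBoard web page each run then shows up as a separately selectable entry instead of overwriting or mixing with earlier graphs.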

Original article: https://www.cnblogs.com/aqianer/p/7787095.html