TensorFlow Notes 2: A Short Note on Forward and Backward Propagation in a Neural Network
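
The script below builds a minimal 2-3-1 fully connected network with no activation functions. Forward propagation computes a = x·w1 and y = a·w2; backward propagation minimizes the mean-squared-error loss mean((y - y_)^2) with plain gradient descent (learning rate 0.001), feeding batches of 8 samples drawn cyclically from a 32-sample synthetic dataset.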

import tensorflow as tf
import numpy as np

BATCH_SIZE = 8
seed = 23455

# Reproducible synthetic dataset: 32 samples with 2 features each.
# Label is int(x0 + x1): 0 when x0 + x1 < 1, otherwise 1.
rng = np.random.RandomState(seed)
X = rng.rand(32, 2)
Y = [[int(x0 + x1)] for (x0, x1) in X]

print("X: \n", X)
print("Y: \n", Y)

# Placeholders for inputs and labels; the first dimension is left open for batching
x = tf.placeholder(tf.float32, shape=[None, 2])
y_ = tf.placeholder(tf.float32, shape=[None, 1])
# Weights of a 2-3-1 network, initialized from a seeded normal distribution
w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))

# Forward propagation: hidden layer a, output y (no activation functions)
a = tf.matmul(x, w1)
y = tf.matmul(a, w2)

# Mean-squared-error loss and a plain gradient-descent training op (lr = 0.001)
loss = tf.reduce_mean(tf.square(y - y_))
train_step = tf.train.GradientDescentOptimizer(0.001).minimize(loss)

with tf.Session() as sess:
    # Initialize all variables, then inspect the starting weights
    init_op = tf.global_variables_initializer()
    sess.run(init_op)
    print("w1:\n", sess.run(w1))
    print("w2:\n", sess.run(w2))
    print("\n")

    STEPS = 3000
    for i in range(STEPS):
        # Cycle through the 32 samples in batches of BATCH_SIZE
        start = (i * BATCH_SIZE) % 32
        end = start + BATCH_SIZE
        sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})
        if i % 500 == 0:
            total_loss = sess.run(loss, feed_dict={x: X, y_: Y})
            print("After %d training step(s), loss on all data is %g" % (i, total_loss))
    print("\n")
    print("w1:\n", sess.run(w1))
    print("w2:\n", sess.run(w2))