3.1 TensorFlow 2.0 实战:前向传播

import tensorflow as tf
from tensorflow.keras import datasets

# Suppress TensorFlow C++ INFO/WARNING log output (keep only errors).
import  os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

# Load the MNIST handwritten-digit dataset; it is downloaded automatically
# on first use (a proxy may be needed in some regions for download speed).
(x,y),_=datasets.mnist.load_data()#`_` discards the test split — only the training set is used here
# print(x.shape,y.shape)  # inspect the shapes of x and y
x=tf.convert_to_tensor(x,dtype=tf.float32)/255.#scale pixel values from [0, 255] to [0, 1] and convert to a tensor
y=tf.convert_to_tensor(y,dtype=tf.int32)

train_db=tf.data.Dataset.from_tensor_slices((x,y)).batch(256)#feed 256 images per batch

# Network parameters for a 784 -> 512 -> 256 -> 64 -> 10 MLP.
# Weights use a truncated normal init (stddev controls the spread);
# biases start at zero. Weight matrices are drawn first, then biases,
# so the RNG call order matches a plain variable-by-variable init.
_layer_sizes = [784, 512, 256, 64, 10]
w1, w2, w3, w4 = (
    tf.Variable(tf.random.truncated_normal([fan_in, fan_out], stddev=0.1))
    for fan_in, fan_out in zip(_layer_sizes, _layer_sizes[1:])
)
b1, b2, b3, b4 = (tf.Variable(tf.zeros([units])) for units in _layer_sizes[1:])

lr=0.001  # learning rate for the manual SGD update
for epoch in range(100):
    for (x,y) in train_db:
        # Run the forward pass inside tf.GradientTape so that
        # tape.gradient() can later compute gradients automatically.
        with tf.GradientTape() as tape:
            # Flatten each 28x28 image into a 784-dim vector.
            x=tf.reshape(x,[-1,784])
            # NOTE(review): there is no nonlinearity between layers, so the
            # stacked matmuls collapse to one linear map; consider
            # tf.nn.relu(...) between layers — confirm intent.
            h1=x@w1+b1
            h2=h1@w2+b2
            h3=h2@w3+b3
            out=h3@w4+b4

            # MSE loss against one-hot targets over the 10 digit classes.
            y_onehot=tf.one_hot(y,depth=10)
            loss=tf.reduce_mean(tf.square(y_onehot-out))

        # Manual SGD step: theta <- theta - lr * d(loss)/d(theta).
        # zip keeps each gradient paired with its variable, avoiding
        # the error-prone hand-indexed grad[0]..grad[7] updates.
        params = [w1, w2, w3, w4, b1, b2, b3, b4]
        grads = tape.gradient(loss, params)
        for param, grad in zip(params, grads):
            param.assign_sub(lr * grad)
    # `loss` here is the loss of the final batch of the epoch.
    print("epoch:",epoch,"loss:",float(loss))


发布了43 篇原创文章 · 获赞 3 · 访问量 3339

猜你喜欢

转载自blog.csdn.net/a1920993165/article/details/105025880