tensorflow-chp05
Posted by rongyongfeikai2
Preface: this post walks through the tensorflow-chp05 example, a three-layer fully connected MNIST classifier trained with tf.GradientTape and hand-written gradient-descent updates; hopefully it is of some reference value to you.
#coding:utf-8
import tensorflow as tf

lr = 0.01  # learning rate for plain gradient descent

def preprocess(x, y):
    # Scale pixels to [0, 1] and flatten each 28x28 image to a 784-vector.
    x = tf.cast(x, dtype=tf.float32) / 255.
    x = tf.reshape(x, [-1, 28*28])
    # Convert integer labels to length-10 one-hot vectors.
    y = tf.cast(y, dtype=tf.int32)
    y = tf.one_hot(y, depth=10)
    return x, y
if __name__ == '__main__':
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()

    # Training pipeline: shuffle, batch, then preprocess each batch.
    train_db = tf.data.Dataset.from_tensor_slices((x_train, y_train))
    train_db = train_db.shuffle(10000)
    train_db = train_db.batch(128)
    train_db = train_db.map(preprocess)

    # Parameters of a 784 -> 256 -> 128 -> 10 fully connected network.
    w1 = tf.Variable(tf.random.truncated_normal([784, 256], stddev=0.1))
    b1 = tf.Variable(tf.zeros([256]))
    w2 = tf.Variable(tf.random.truncated_normal([256, 128], stddev=0.1))
    b2 = tf.Variable(tf.zeros([128]))
    w3 = tf.Variable(tf.random.truncated_normal([128, 10], stddev=0.1))
    b3 = tf.Variable(tf.zeros([10]))
    for epoch in range(20):
        for step, (x, y) in enumerate(train_db):
            with tf.GradientTape() as tape:
                # Forward pass: two ReLU hidden layers, linear output.
                h1 = tf.nn.relu(x @ w1 + b1)
                h2 = tf.nn.relu(h1 @ w2 + b2)
                out = h2 @ w3 + b3
                # MSE between one-hot labels and raw outputs, averaged over the batch.
                loss = tf.reduce_mean(tf.keras.losses.mse(y, out))
            # Manual gradient-descent step on every parameter.
            grads = tape.gradient(loss, [w1, b1, w2, b2, w3, b3])
            w1.assign_sub(lr * grads[0])
            b1.assign_sub(lr * grads[1])
            w2.assign_sub(lr * grads[2])
            b2.assign_sub(lr * grads[3])
            w3.assign_sub(lr * grads[4])
            b3.assign_sub(lr * grads[5])
    # Evaluate on the test set. Batching here tidies the original code, which
    # fed one sample at a time and took argmax over axis 0 of the one-hot label.
    test_db = tf.data.Dataset.from_tensor_slices((x_test, y_test))
    test_db = test_db.batch(128)
    test_db = test_db.map(preprocess)
    total_correct = 0
    total = x_test.shape[0]
    for x, y in test_db:
        h1 = tf.nn.relu(x @ w1 + b1)
        h2 = tf.nn.relu(h1 @ w2 + b2)
        out = h2 @ w3 + b3
        pred = tf.argmax(out, axis=1)  # predicted class per sample
        y = tf.argmax(y, axis=1)       # one-hot label back to a class index
        correct = tf.equal(pred, y)
        total_correct += tf.reduce_sum(tf.cast(correct, dtype=tf.int32)).numpy()
    print("acc:" + str(total_correct / total))
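
For comparison, a sketch of the same 784-256-128-10 network written against the high-level Keras API. This is not part of the original post; it uses sparse categorical cross-entropy on logits, so the integer labels need no one-hot step.

import tensorflow as tf

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
# Same preprocessing as the manual version: flatten and scale to [0, 1].
x_train = x_train.reshape(-1, 784).astype('float32') / 255.
x_test = x_test.reshape(-1, 784).astype('float32') / 255.

model = tf.keras.Sequential([
    tf.keras.layers.Dense(256, activation='relu'),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10),  # raw logits
])
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.01),
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])
model.fit(x_train, y_train, batch_size=128, epochs=20)
print("test acc:", model.evaluate(x_test, y_test)[1])

The fit/evaluate calls replace the hand-written epoch loop, the assign_sub updates, and the manual accuracy count from the listing above.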