tensorflow-chp06
Posted by rongyongfeikai2
# coding: utf-8
import tensorflow as tf


def pre_process(x, y):
    # Scale pixel values to [0, 1], flatten each 28x28 image into a
    # 784-dim vector, and one-hot encode the integer labels.
    x = tf.cast(x, tf.float32) / 255.
    x = tf.reshape(x, [-1, 28 * 28])
    y = tf.cast(y, tf.int32)
    y = tf.one_hot(y, depth=10)
    return x, y
def load_dataset():
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
    train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
    test_dataset = tf.data.Dataset.from_tensor_slices((x_test, y_test))
    train_dataset = train_dataset.shuffle(10000)
    train_dataset = train_dataset.batch(128)
    train_dataset = train_dataset.map(pre_process)
    # Batch the test set too, so evaluation processes 128 images at a time
    # and yields the same [batch, 784] / [batch, 10] shapes as training.
    test_dataset = test_dataset.batch(128)
    test_dataset = test_dataset.map(pre_process)
    return (x_train, y_train, x_test, y_test, train_dataset, test_dataset)
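
# Optional pipeline check (an illustrative addition, not in the original
# listing): each training batch should yield x of shape (128, 784) and y of
# shape (128, 10); the final batch of an epoch may have fewer rows.
#
#   xb, yb = next(iter(load_dataset()[4]))
#   print(xb.shape, yb.shape)  # e.g. (128, 784) (128, 10)
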
if __name__ == '__main__':
    x_train, y_train, x_test, y_test, train_dataset, test_dataset = load_dataset()
    # Three-layer MLP: 784 -> 256 -> 128 -> 10 (the last layer outputs raw logits).
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(256, activation=tf.nn.relu),
        tf.keras.layers.Dense(128, activation=tf.nn.relu),
        tf.keras.layers.Dense(10, activation=None)
    ])
    model.build(input_shape=[None, 28 * 28])
    optimizer = tf.keras.optimizers.SGD(0.001)
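    # Optional sanity check (an addition, not in the original listing):
    # after build(), model.summary() prints layer shapes and parameter
    # counts; with the sizes above that is
    # 784*256+256 + 256*128+128 + 128*10+10 = 235,146 trainable parameters.
    # model.summary()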
    for epoch in range(20):
        for step, (x, y) in enumerate(train_dataset):
            with tf.GradientTape() as tape:
                out = model(x)
                # MSE between one-hot labels and logits, averaged over the
                # batch (a cross-entropy variant is sketched after this listing).
                loss = tf.reduce_mean(tf.keras.losses.MSE(y, out))
            grads = tape.gradient(loss, model.trainable_variables)
            optimizer.apply_gradients(zip(grads, model.trainable_variables))
            if step % 100 == 0:
                print("epoch:", epoch, "step:", step, "loss:", loss.numpy())
    # Evaluate accuracy on the batched test set.
    total_correct = 0
    total = y_test.shape[0]
    for x, y in test_dataset:
        out_test = model(x)
        pred = tf.argmax(out_test, axis=1)
        # Labels are one-hot with shape [batch, 10], so argmax over axis=1
        # recovers the class index for every sample in the batch.
        y_true = tf.argmax(y, axis=1)
        correct = tf.equal(pred, y_true)
        total_correct += tf.reduce_sum(tf.cast(correct, dtype=tf.int32)).numpy()
    print("acc=" + str(total_correct / total))