python tf_eager.py
Posted
tags:
篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了python tf_eager.py相关的知识,希望对你有一定的参考价值。
# Wrap the train/test splits as eager Variables so they can be fed to the model.
# NOTE(review): `tfe` is presumably tf.contrib.eager (TF 1.x) — confirm against
# the file's import block, which is not visible in this chunk.
# NOTE(review): the data is never trained, so plain tensors would likely suffice
# here — Variables are not required just to feed a forward pass; verify.
X = tfe.Variable(X_train)
Y = tfe.Variable(y_train)
X_t = tfe.Variable(X_test)
Y_t = tfe.Variable(y_test)
def compute_accuracy(preds, label):
    """Return the fraction of rows of `preds` whose argmax equals `label`.

    Args:
        preds: 2-D tensor of per-class scores/logits, shape (batch, classes).
        label: 1-D tensor of integer class ids, length `batch`.

    Returns:
        A scalar accuracy in [0, 1] as a host-side numpy value.
    """
    predicted = tf.argmax(preds, axis=1)
    targets = tf.cast(label, tf.int64)
    # reduce_mean over a float cast replaces the original
    # sum / int(p.shape[0]) — equivalent result, but it does not crash
    # when the batch dimension is unknown (shape[0] is None).
    correct = tf.cast(tf.equal(predicted, targets), tf.float32)
    return tf.reduce_mean(correct).numpy()
class Model(tf.keras.Model):
    """A 3-layer fully-connected classifier trained in eager mode.

    The output width of the final layer is the number of distinct classes
    found in the module-level `y_train`.

    NOTE(review): `tf.layers.Dense` and `tf.train.AdamOptimizer` are TF 1.x
    APIs; a TF 2.x migration would use `tf.keras.layers.Dense` and
    `tf.keras.optimizers.Adam` — confirm the TF version before changing.
    """

    def __init__(self):
        super(Model, self).__init__()
        # Two hidden ReLU layers, then a linear layer producing raw logits.
        self.dense1 = tf.layers.Dense(30, activation=tf.nn.relu)
        self.dense2 = tf.layers.Dense(20, activation=tf.nn.relu)
        self.dense3 = tf.layers.Dense(len(np.unique(y_train)))
        self.optimizer = tf.train.AdamOptimizer()
        # Loss expects integer labels and unnormalized logits.
        self.cross_entropy = tf.losses.sparse_softmax_cross_entropy

    def predict(self, input_data):
        """Return unnormalized logits for a batch of inputs.

        NOTE(review): this shadows tf.keras.Model.predict with a different
        contract; kept as-is because other code in this file calls it.
        """
        out = self.dense1(input_data)
        out = self.dense2(out)
        logits = self.dense3(out)
        return logits

    def forward_pass(self, input_data):
        """Thin alias for predict(); kept for symmetry with backward_pass()."""
        preds = self.predict(input_data)
        return preds

    def backward_pass(self, loss, tape):
        """Compute gradients of `loss` w.r.t. all variables and apply one step."""
        grad = tape.gradient(loss, self.variables)
        self.optimizer.apply_gradients(zip(grad, self.variables))

    def fit(self, X, Y, val_X=None, val_Y=None, epoch=1000, print_every=100):
        """Train for `epoch` steps, logging metrics every `print_every` steps.

        Args:
            X, Y: training inputs and integer labels.
            val_X, val_Y: optional validation split; when both are given,
                validation accuracy is logged alongside training metrics.
            epoch: number of full-batch gradient steps.
            print_every: logging interval in steps.
        """
        for i in range(epoch):
            # Record the forward pass; gradients are taken after the
            # tape's context exits, which GradientTape permits.
            with tfe.GradientTape() as tape:
                preds = self.forward_pass(X)
                loss = self.cross_entropy(labels=Y, logits=preds)
            self.backward_pass(loss, tape)
            if (i + 1) % print_every == 0:
                loss_val = tf.cast(loss, tf.float64).numpy()
                # BUGFIX: the original rounded acc only in the no-validation
                # branch and printed it raw in the validation branch; round
                # once here so both log lines are consistent.
                acc = round(compute_accuracy(preds, Y), 2)
                if val_X is not None and val_Y is not None:
                    val_acc = compute_accuracy(self.predict(val_X), val_Y)
                    print("epoch", i + 1, "loss", round(loss_val, 5),
                          "acc", acc, "val_acc", round(val_acc, 2))
                else:
                    print("epoch", i + 1, "loss", round(loss_val, 5),
                          "acc", acc)
# Build the classifier and train it, validating on the held-out split.
model = Model()
model.fit(X, Y, val_X=X_t, val_Y=Y_t, epoch=500, print_every=50)
以上是关于python tf_eager.py的主要内容,如果未能解决你的问题,请参考以下文章
001--python全栈--基础知识--python安装
Python代写,Python作业代写,代写Python,代做Python
Python开发
Python,python,python
Python 介绍
Python学习之认识python