with tf.GradientTape() as tape

TensorFlow 会把在 `tf.GradientTape` 上下文中执行的所有操作都记录在一张"磁带"("tape")上,然后基于这张磁带和每次操作产生的导数,使用反向模式微分(reverse mode differentiation)计算已记录运算的梯度。

import tensorflow as tf

# Constants (tf.constant) are not trainable, so the tape does not track them
# automatically; tape.watch() must be called explicitly to record operations
# on them.  Nesting two tapes yields a second-order derivative.
x = tf.constant(3.0)
with tf.GradientTape() as outer_tape:
    outer_tape.watch(x)
    with tf.GradientTape() as inner_tape:
        inner_tape.watch(x)
        y = x * x
    # First derivative of x**2 is 2x -> 6.0 at x = 3.
    # Note: this gradient() call happens INSIDE the outer tape's context,
    # so the outer tape records it and can differentiate it again.
    dy_dx = inner_tape.gradient(y, x)
# Second derivative of x**2 is 2 -> 2.0.
d2y_dx2 = outer_tape.gradient(dy_dx, x)
print(dy_dx)
print(d2y_dx2)
import tensorflow as tf

# tf.Variable objects are trainable and are watched by the tape
# automatically, so no tape.watch() calls are needed here.
x = tf.Variable(4.0)
y = tf.Variable(2.0)

# persistent=True allows gradient() to be called multiple times on the same
# tape; a non-persistent tape frees its resources after the first call.
with tf.GradientTape(persistent=True) as tape:
    z = x + y          # dz/dx = 1, dz/dy = 1
    w = tf.pow(x, 2)   # dw/dx = 2x

# Fixed: the original computed tape.gradient(z, x) here but labeled it
# dz_dy; the derivative with respect to y was intended.
dz_dy = tape.gradient(z, y)
print(dz_dy)   # tf.Tensor(1.0, ...)
dz_dx = tape.gradient(z, x)
print(dz_dx)   # tf.Tensor(1.0, ...)
dw_dx = tape.gradient(w, x)
print(dw_dx)   # tf.Tensor(8.0, ...)  (2 * 4.0)
# Explicitly release the persistent tape's resources once done.
del tape

猜你喜欢

转载自:blog.csdn.net/qq_40107571/article/details/131347715