def fisher_vec_fw(ys, xs, vs):
    """Compute a Fisher vector product using forward-then-backward AD.

    Forward-mode AD first yields Jv (the Jacobian-vector product of ys
    w.r.t. xs in direction vs); feeding Jv back through reverse-mode
    `tf.gradients` as the upstream gradient then yields J'Jv.

    Args:
        ys: Loss function or output variables.
        xs: Weights, list of tensors.
        vs: List of tensors to multiply, one for each weight tensor.

    Returns:
        J'Jv: Fisher vector product, a list of tensors matching xs.

    Raises:
        ValueError: If xs is a list and vs does not have the same length.
    """
    # Validate the input. isinstance is the idiomatic type check and also
    # accepts list subclasses, unlike `type(xs) == list`.
    if isinstance(xs, list):
        if len(vs) != len(xs):
            raise ValueError("xs and vs must have the same length.")
    # Forward-mode pass: Jv. NOTE(review): `forward_gradients` is a
    # project-local helper (double-backward R-op trick) — assumed to mirror
    # tf.gradients' signature; confirm against its definition.
    jv = forward_gradients(ys, xs, vs, gate_gradients=True)
    # Reverse-mode pass with Jv as grad_ys: J'(Jv) = J'Jv.
    jjv = tf.gradients(ys, xs, jv, gate_gradients=True)
    return jjv