def testPathwiseDerivativeDoesNotAddSurrogateLosses(self):
  with self.test_session():
    mu = [0.0, 0.1, 0.2]
    sigma = constant_op.constant([1.1, 1.2, 1.3])
    # Both prior and likelihood are Normals, which sample via the
    # reparameterization trick, so gradients flow through the samples.
    with st.value_type(st.SampleValue()):
      prior = st.StochasticTensor(distributions.Normal(loc=mu, scale=sigma))
      likelihood = st.StochasticTensor(
          distributions.Normal(loc=prior, scale=sigma))
      self.assertEqual(
          prior.distribution.reparameterization_type,
          distributions.FULLY_REPARAMETERIZED)
      self.assertEqual(
          likelihood.distribution.reparameterization_type,
          distributions.FULLY_REPARAMETERIZED)

    loss = math_ops.square(array_ops.identity(likelihood) - [0.0, 0.1, 0.2])
    sum_loss = math_ops.reduce_sum(loss)

    surrogate_loss = sg.surrogate_loss([loss])
    # A scalar loss is rejected: surrogate_loss requires loss tensors of
    # dimensionality 1 or greater.
    with self.assertRaisesRegexp(ValueError, "dimensionality 1 or greater"):
      _ = sg.surrogate_loss([sum_loss])
    surrogate_from_both = sg.surrogate_loss(
        [loss, sum_loss * array_ops.ones_like(loss)])

    # Pathwise derivative terms do not require add'l surrogate loss terms.
    with self.test_session() as sess:
      self.assertAllClose(*sess.run([loss, surrogate_loss]))
      self.assertAllClose(*sess.run([(loss + sum_loss), surrogate_from_both]))
Source file: stochastic_graph_test.py (Python)
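The excerpt relies on module aliases (st, sg, distributions, constant_op, array_ops, math_ops) defined at the top of the test file. A minimal sketch of those imports, assuming a TensorFlow 1.x build with contrib.bayesflow; exact module paths shifted between releases, so treat them as approximate:

# Assumed imports for the test harness (TF 1.x contrib; paths may differ by release).
from tensorflow.contrib import distributions
from tensorflow.contrib.bayesflow.python.ops import stochastic_graph as sg
from tensorflow.contrib.bayesflow.python.ops import stochastic_tensor as st
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test  # provides the TestCase base class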