import tensorflow as tf


def base_conditional(Kmn, Kmm, Knn, f, *, full_cov=False, q_sqrt=None, white=False):
    """Compute the conditional mean and (co)variance of the GP at N test points,
    given values f (M x K) at M inducing points.
    Kmn is M x N, Kmm is M x M, Knn is N x N (full_cov) or N.
    Returns fmean (N x K) and fvar (N x N x K if full_cov else N x K)."""
    # number of latent functions
    num_func = tf.shape(f)[1]  # K

    # Cholesky factor of Kmm and the projection matrix A = Lm^{-1} Kmn
    Lm = tf.cholesky(Kmm)
    A = tf.matrix_triangular_solve(Lm, Kmn, lower=True)

    # compute the covariance due to the conditioning: Knn - Knm Kmm^{-1} Kmn
    if full_cov:
        fvar = Knn - tf.matmul(A, A, transpose_a=True)
        shape = tf.stack([num_func, 1, 1])
    else:
        fvar = Knn - tf.reduce_sum(tf.square(A), 0)
        shape = tf.stack([num_func, 1])
    fvar = tf.tile(tf.expand_dims(fvar, 0), shape)  # K x N x N or K x N

    # another backsubstitution in the unwhitened case, so that A = Kmm^{-1} Kmn
    if not white:
        A = tf.matrix_triangular_solve(tf.transpose(Lm), A, lower=False)

    # construct the conditional mean
    fmean = tf.matmul(A, f, transpose_a=True)

    # add the contribution of the approximate posterior covariance S = q_sqrt q_sqrt^T
    if q_sqrt is not None:
        if q_sqrt.get_shape().ndims == 2:
            # q_sqrt holds K diagonal square roots, shape M x K
            LTA = A * tf.expand_dims(tf.transpose(q_sqrt), 2)  # K x M x N
        elif q_sqrt.get_shape().ndims == 3:
            # q_sqrt holds K lower-triangular square roots, shape M x M x K
            L = tf.matrix_band_part(tf.transpose(q_sqrt, (2, 0, 1)), -1, 0)  # K x M x M
            A_tiled = tf.tile(tf.expand_dims(A, 0), tf.stack([num_func, 1, 1]))
            LTA = tf.matmul(L, A_tiled, transpose_a=True)  # K x M x N
        else:  # pragma: no cover
            raise ValueError("Bad dimension for q_sqrt: %s" %
                             str(q_sqrt.get_shape().ndims))
        if full_cov:
            fvar = fvar + tf.matmul(LTA, LTA, transpose_a=True)  # K x N x N
        else:
            fvar = fvar + tf.reduce_sum(tf.square(LTA), 1)  # K x N

    fvar = tf.transpose(fvar)  # N x K or N x N x K
    return fmean, fvar
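
For reference, here is a minimal usage sketch, not part of the original listing: it builds toy squared-exponential Gram matrices with plain TensorFlow ops and evaluates the conditional in a TF 1.x session. The helper rbf and the inputs Z, X, f below are illustrative placeholders, not GPflow API.

import numpy as np
import tensorflow as tf

def rbf(X1, X2, variance=1.0, lengthscale=1.0):
    # Toy squared-exponential kernel matrix, shape [len(X1), len(X2)]
    sq_dist = (tf.expand_dims(tf.reduce_sum(tf.square(X1), 1), 1)
               - 2.0 * tf.matmul(X1, X2, transpose_b=True)
               + tf.expand_dims(tf.reduce_sum(tf.square(X2), 1), 0))
    return variance * tf.exp(-0.5 * sq_dist / lengthscale ** 2)

M, N, K = 5, 7, 2                        # inducing points, test points, latent functions
Z = tf.constant(np.random.randn(M, 1))   # inducing inputs, M x 1
X = tf.constant(np.random.randn(N, 1))   # test inputs, N x 1
f = tf.constant(np.random.randn(M, K))   # function values at the inducing points, M x K

Kmm = rbf(Z, Z) + 1e-6 * tf.eye(M, dtype=tf.float64)  # add jitter before the Cholesky
Kmn = rbf(Z, X)
Knn = rbf(X, X)

fmean, fvar = base_conditional(Kmn, Kmm, Knn, f, full_cov=True)

with tf.Session() as sess:
    mean, var = sess.run([fmean, fvar])
print(mean.shape, var.shape)  # (7, 2) and (7, 7, 2)

With full_cov=True the returned covariance carries the full N x N block per latent function, which matches the N x N x K shape noted in the final transpose above.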