def test_gradE(adjcube):
    """Tests the gradient of `E` using finite difference methods.

    Verifies that the directional derivative predicted by `gradE`
    matches the actual change in `E` along a random direction `dW`,
    with the agreement improving as the step size shrinks.

    Args:
        adjcube: pytest fixture providing the simulation cell.
    """
    from pydft.bases.fourier import gradE, E
    cell = adjcube
    V = QHO(cell)
    Ns = 4
    # The reference (MATLAB) implementation set a random seed; we could do
    # the same, but the PRNG implementation differs between numpy and
    # matlab, so the streams would not match anyway:
    # randn('seed', 0.2004)
    # NOTE: numpy.matlib is deprecated; use np.random directly instead.
    W = np.random.randn(np.prod(cell.S), Ns) + 1j*np.random.randn(np.prod(cell.S), Ns)
    # Compute initial energy and gradient.
    E0 = E(V, W, cell)
    g0 = gradE(V, W, cell)
    # Choose a random direction to explore.
    dW = np.random.randn(*W.shape) + 1j*np.random.randn(*W.shape)
    # Explore a range of step sizes decreasing by powers of ten.
    steps = np.logspace(-3, -7, 8)
    for delta in steps:
        # Directional derivative formula: dE = 2 Re Tr(g0^H (delta*dW)).
        dE = 2*np.real(np.trace(np.dot(g0.conjugate().T, delta*dW)))
        # Print ratio of actual change to expected change, along with estimate
        # of the error in this quantity due to rounding.
        ratio = abs(1.-(E(V, W+delta*dW, cell)-E0)/dE)
        print(int(np.log10(ratio)), int(np.log10(delta)), ratio)
        # The error magnitude should track the step size to within ~2 orders.
        assert abs(int(np.log10(ratio)) - int(np.log10(delta))) <= 2