def gradcheck_naive(f, x):
    """Numerically verify the gradient returned by f at the point x.

    Args:
        f: callable taking a numpy array and returning a tuple
           (fx, grad), where fx is the scalar function value and grad
           is an array of the same shape as x (e.g. for f = sum(x**2),
           grad = 2*x).
        x: numpy array at which to check the gradient. It is perturbed
           in place during the check but restored to its original
           values before the function returns.

    Prints the first index where the analytic gradient disagrees with
    the centered numerical estimate [f(x+h) - f(x-h)] / (2h) beyond a
    relative tolerance of 1e-5, or a success message if all components
    match. Returns None in both cases.
    """
    # Snapshot the RNG state so that every evaluation of f sees the
    # exact same random draws. Without this, a stochastic f would make
    # f(x+h) and f(x-h) incomparable and the check would be meaningless.
    rndstate = random.getstate()
    fx, grad = f(x)  # analytic value and gradient at x
    h = 1e-4         # step size for the centered difference

    # nditer walks every index of x regardless of dimensionality;
    # multi_index yields tuples like (0, 0), (0, 1), ...
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        ix = it.multi_index

        # Centered difference: evaluate f at x[ix]+h and x[ix]-h,
        # replaying the saved RNG state before each call, then restore
        # x[ix] to its original value.
        x[ix] += h
        random.setstate(rndstate)
        fxh, _ = f(x)
        x[ix] -= 2 * h
        random.setstate(rndstate)
        fxnh, _ = f(x)
        x[ix] += h
        numgrad = (fxh - fxnh) / (2 * h)

        # Relative difference between analytic and numerical gradients;
        # the max(1, ...) denominator avoids blowing up near zero.
        reldiff = abs(numgrad - grad[ix]) / max(1, abs(numgrad), abs(grad[ix]))
        if reldiff > 1e-5:
            print("Gradient check failed.")
            print("First gradient error found at index %s" % str(ix))
            print("Your gradient: %f \t Numerical gradient: %f" % (grad[ix], numgrad))
            return

        it.iternext()

    print("Gradient check passed")
# (Stripped blog-page navigation artifacts, translated: "Comment list" / "Article table of contents")