# (Exported from a Jupyter notebook, cell [1])
import numpy as np
import random

# First implement a gradient checker by filling in the following functions
def gradcheck_naive(f, x):
    """Gradient check for a function f using centered finite differences.

    Arguments:
    f -- a function that takes a single argument and outputs the
         loss (a scalar) and its gradient with respect to that argument
    x -- the point (numpy array) to check the gradient at; modified
         in place during the check but restored before returning
    """
    rndstate = random.getstate()
    random.setstate(rndstate)
    fx, grad = f(x)  # Evaluate function value at original point
    h = 1e-4         # Do not change this!

    # Iterate over all indexes in x
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        ix = it.multi_index
        # Try modifying x[ix] with h defined above to compute
        # numerical gradients. Make sure you call random.setstate(rndstate)
        # before calling f(x) each time. This will make it possible
        # to test cost functions with built in randomness later.

        x[ix] += h
        random.setstate(rndstate)
        new_f1 = f(x)[0]
        x[ix] -= 2 * h
        random.setstate(rndstate)
        new_f2 = f(x)[0]
        x[ix] += h  # restore the original value of x[ix]
        numgrad = (new_f1 - new_f2) / (2 * h)

        # Relative difference guards against both tiny and huge gradients;
        # the max(1, ...) term avoids division by values near zero.
        reldiff = abs(numgrad - grad[ix]) / max(1, abs(numgrad), abs(grad[ix]))
        if reldiff > 1e-5:
            print("First gradient error found at index %s" % str(ix))
            print("Your gradient: %f \t Numerical gradient: %f" % (grad[ix], numgrad))
            return

        it.iternext()  # Step to next dimension

    print("Gradient check passed!")

def sanity_check():
    """
    Some basic sanity checks: run the gradient checker on a simple
    quadratic f(x) = sum(x**2) whose analytic gradient 2*x is known,
    at a scalar, a 1-D, and a 2-D point.
    """
    quad = lambda x: (np.sum(x ** 2), x * 2)

    print("Running sanity checks...")
    gradcheck_naive(quad, np.array(123.456))    # scalar (0-d array) test
    gradcheck_naive(quad, np.random.randn(3,))  # 1-D test
    gradcheck_naive(quad, np.random.randn(4, 5))  # 2-D test
    print("")

# Run the self-checks only when executed as a script, not on import.
if __name__ == "__main__":
    sanity_check()

# Expected output when run: "Running sanity checks..." followed by the check results.