## Linear Regression With Theano
import theano
from theano import tensor as T
import numpy as np
import matplotlib.pylab as plt
from scipy.optimize import fmin_cg
## data: n_samples points, n_feats features; linear target w=(2, -0.15, 0), b=11 plus uniform noise
n_samples, n_feats = 50, 3
X = np.random.uniform(low = 0.0, high = 10.0, size = (n_samples, n_feats))
y = X[:, 0]* 2 + X[:, 1]*(-.15) + 11. + np.random.random(size = n_samples)
## equations: theta packs the weight vector (first n_feats entries) and the bias (last entry)
var_X = theano.shared(value = X, name = 'X', borrow = True)
var_y = theano.shared(value = y, name = 'y', borrow = True)
var_theta = theano.shared(value = np.zeros(shape = (n_feats+1, ), dtype = theano.config.floatX),
                          name = 'theta', borrow = True)
var_W = var_theta[:n_feats]  # the slice already has shape (n_feats,); no reshape needed
# FIX: the bias was reshaped to (1, 1), which broadcast var_pred up to the
# 2-d shape (1, n_samples); a scalar bias keeps var_pred a plain vector.
# The cost and gradient values are unchanged (mean over all elements).
var_b = var_theta[n_feats]
var_pred = T.dot(var_X, var_W) + var_b
# mean squared error; called "negative log-likelihood" because it is equivalent
# (up to additive/multiplicative constants) under a Gaussian noise model
var_negative_ll = T.mean((var_pred - var_y) ** 2)
var_grad_negative_ll = T.grad(var_negative_ll, var_theta)
## optimization with scipy optimization
def f(theta_value):
    """Objective for fmin_cg: load the candidate theta and return the current cost."""
    # push the optimizer's parameter vector into the shared variable,
    # then evaluate the symbolic cost at that point
    var_theta.set_value(theta_value, borrow = True)
    cost = var_negative_ll.eval()
    return cost
def fprime(theta_value):
    """Gradient for fmin_cg: load the candidate theta and return d(cost)/d(theta)."""
    # same protocol as f(): write the parameters, then evaluate the
    # symbolic gradient expression at that point
    var_theta.set_value(theta_value, borrow = True)
    grad = var_grad_negative_ll.eval()
    return grad
def callback(theta_value):
    """Per-iteration hook for fmin_cg: report the training error at the current theta."""
    var_theta.set_value(theta_value, borrow = True)
    # FIX: the original Python 2 `print` statement is a syntax error on Python 3;
    # %-formatting via the print() function produces identical output on both.
    print('current error %s' % var_negative_ll.eval())
# random (non-zero) starting point for the n_feats weights plus the bias
theta0 = np.random.random(size = ((n_feats+1, )))
# FIX: capture the fitted parameter vector instead of discarding fmin_cg's result
theta_opt = fmin_cg(f = f, x0 = theta0, fprime = fprime, callback=callback)