In [224]:
import numpy as np
import pylab as pl
# NOTE(review): `sklearn.linear_model.sgd_fast` is a private Cython module that
# was renamed to `sklearn.linear_model._sgd_fast` in scikit-learn 0.22 —
# confirm the installed version still exposes this path.
from sklearn.linear_model.sgd_fast import SquaredHinge  # imported but unused here
from sklearn.linear_model.sgd_fast import Hinge
from sklearn.linear_model.sgd_fast import ModifiedHuber  # imported but unused here
from sklearn.linear_model.sgd_fast import SquaredLoss    # imported but unused here

###############################################################################
# Define loss functions
xmin, xmax = -4, 4
hinge = Hinge(1)  # hinge loss with margin threshold 1
# Logistic loss scaled by 1/log(2) so it passes through (0, 1); the second
# argument is unused but kept so the call shape matches `hinge.loss(p, y)`.
log_loss = lambda z, p: np.log2(1.0 + np.exp(-z))

###############################################################################
# Plot loss functions
xx = np.linspace(xmin, xmax, 100)
pl.plot([xmin, 0, 0, xmax], [1, 1, 0, 0], 'k-',
        label="Zero-one loss", linewidth=2)
pl.plot(xx, [hinge.loss(x, 1) for x in xx], 'g-',
        label="Hinge loss", linewidth=2)
pl.plot(xx, [log_loss(x, 1) for x in xx], 'r-',
        label="Logistic loss", linewidth=2)
pl.ylim((0, 8))

# BUG FIX: the original called `matplotlib.pyplot.gcf()` without ever importing
# matplotlib (NameError on a fresh kernel); pylab re-exports gcf().
fig = pl.gcf()
fig.set_size_inches(8.5, 6.0)
pl.legend(loc="upper right", fontsize='xx-large', fancybox=True)
pl.xlabel(r"$y_i \cdot f(x_i)$", fontsize='xx-large')
pl.ylabel("$L(y_i, f(x_i))$", fontsize='xx-large')
pl.axis('tight')
# BUG FIX: set the title BEFORE saving so it appears in the exported PNG
# (the original saved first and titled afterwards).
pl.title('Common loss functions', fontsize='xx-large')
pl.savefig('loss_functions.png')
pl.show()

In [242]:
import numpy as np
import pylab as pl
# NOTE(review): `sklearn.linear_model.sgd_fast` is a private Cython module that
# was renamed to `sklearn.linear_model._sgd_fast` in scikit-learn 0.22 —
# confirm the installed version still exposes this path.
from sklearn.linear_model.sgd_fast import SquaredHinge  # imported but unused here
from sklearn.linear_model.sgd_fast import Hinge
from sklearn.linear_model.sgd_fast import ModifiedHuber  # imported but unused here
from sklearn.linear_model.sgd_fast import SquaredLoss    # imported but unused here

###############################################################################
# Define loss functions
xmin, xmax = -1, 5
hinge = Hinge(.8)  # margin 0.8 so adjacent shifted hinges stay visually distinct
# Defined for parity with the first cell; not plotted in this figure.
log_loss = lambda z, p: np.log2(1.0 + np.exp(-z))

###############################################################################
# Plot loss functions
xx = np.linspace(xmin, xmax, 100)
# One shifted hinge per threshold theta_0..theta_5: thresholds at or below the
# true label penalise f(x_i) from the left, those above from the right.
pl.plot(xx, [hinge.loss(x + 1, 1) for x in xx],
        label="$f(x_i) > \\theta_0$", linewidth=2)
pl.plot(xx, [hinge.loss(x, 1) for x in xx],
        label="$f(x_i) > \\theta_1$", linewidth=2)
pl.plot(xx, [hinge.loss(x - 1, 1) for x in xx],
        label="$f(x_i) > \\theta_2$", linewidth=2)
pl.plot(xx, [hinge.loss(-(x - 2) + 1, 1) for x in xx],
        label="$f(x_i) < \\theta_3$", linewidth=2)
pl.plot(xx, [hinge.loss(-(x - 3) + 1, 1) for x in xx],
        label="$f(x_i) < \\theta_4$", linewidth=2)
pl.plot(xx, [hinge.loss(-(x - 4) + 1, 1) for x in xx],
        label="$f(x_i) < \\theta_5$", linewidth=2)

# BUG FIX: the original called `matplotlib.pyplot.gcf()` without ever importing
# matplotlib (NameError on a fresh kernel); pylab re-exports gcf().
fig = pl.gcf()
fig.set_size_inches(8.5, 6.0)
pl.xticks(np.arange(6) - .5, ["$\\theta_%i$" % i for i in range(6)], fontsize='xx-large')
pl.legend(loc="upper center", fontsize='xx-large', fancybox=True, ncol=2)
pl.xlabel(r"$f(x_i)$", fontsize='xx-large')
pl.ylabel("$L(y_i, f(x_i))$", fontsize='xx-large')
pl.title('Loss functions in the pairwise problem', fontsize='xx-large')
pl.yticks(())
pl.savefig('loss_functions_ordinal.png')
pl.show()

In [244]:
import numpy as np
import pylab as pl
# NOTE(review): `sklearn.linear_model.sgd_fast` is a private Cython module that
# was renamed to `sklearn.linear_model._sgd_fast` in scikit-learn 0.22 —
# confirm the installed version still exposes this path.
from sklearn.linear_model.sgd_fast import SquaredHinge  # imported but unused here
from sklearn.linear_model.sgd_fast import Hinge
from sklearn.linear_model.sgd_fast import ModifiedHuber  # imported but unused here
from sklearn.linear_model.sgd_fast import SquaredLoss    # imported but unused here

###############################################################################
# Define loss functions
xmin, xmax = -1, 5
K = 7   # number of ordinal thresholds summed over
yi = 2  # true ordinal label of the illustrated sample


def hinge(x):
    """Cumulative hinge loss for ordinal regression at prediction ``x``.

    Sums one shifted hinge term per threshold j in range(K): thresholds at
    or below the true label ``yi`` penalise predictions that fall short,
    thresholds above it penalise predictions that overshoot.
    """
    out = 0.
    # BUG FIX: the original read `h = h = Hinge(.8)` — a redundant double
    # assignment; a single assignment is intended.
    h = Hinge(.8)
    for j in range(K):
        if j <= yi:
            out += h.loss(x + 1 - j, 1)
        else:
            out += h.loss(-(x + 1 - j) + 1, 1)
    return out

def log(x):
    """Cumulative logistic loss for ordinal regression at prediction ``x``.

    Mirrors ``hinge`` above but with the (base-2) logistic loss: thresholds
    j <= yi penalise predictions that fall short, thresholds j > yi penalise
    predictions that overshoot.
    """
    # Signed margin for each of the K thresholds.
    margins = [(x + 1 - j) if j <= yi else (-(x + 1 - j) + 1)
               for j in range(K)]
    return sum(np.log2(1.0 + np.exp(-m)) for m in margins)

###############################################################################
# Plot loss functions
xx = np.linspace(xmin, xmax, 100)

pl.plot(xx, [hinge(x) for x in xx], 'g-',
        label="Hinge loss", linewidth=2)
pl.plot(xx, [log(x) for x in xx], 'r-',
        label="Logistic loss", linewidth=2)
# Zero-one loss drawn as two flat segments around the true-label interval.
pl.plot([xmin, 1.5, 1.5, 1.5], [.3, .3, 0, 0], 'k-',
        label="Zero-one loss", linewidth=2)
pl.plot([2.5, 2.5, 4, xmax], [0, .3, .3, .3], 'k-', linewidth=2)
pl.ylim((0, 8))

# BUG FIX: the original called `matplotlib.pyplot.gcf()` without ever importing
# matplotlib (NameError on a fresh kernel); pylab re-exports gcf().
fig = pl.gcf()
fig.set_size_inches(8.5, 6.0)
pl.legend(loc="upper center", fontsize='xx-large', fancybox=True)
pl.xlabel(r"$f(x_i)$", fontsize='xx-large')
pl.ylabel("$L(y_i, f(x_i))$", fontsize='xx-large')
pl.xticks(np.arange(6) - .5, ["$\\theta_%i$" % i for i in range(6)], fontsize='xx-large')
pl.axis('tight')
pl.yticks(())
pl.title('Ordinal regression loss function', fontsize='xx-large')
pl.savefig('loss_functions_ordinal2.png')
pl.show()