# 활성화 함수 (Activation Function)¶

## 1. 시그모이드 함수 (Sigmoid Function)¶

In [2]:
import numpy as np
import matplotlib.pylab as plt
In [3]:
def sigmoid(x):
    """Sigmoid activation: 1 / (1 + e^-x), squashing x into (0, 1).

    Works element-wise on NumPy arrays as well as on scalars.
    """
    # Body indentation restored: the exported source had the return
    # statement at column 0, which is a SyntaxError.
    return 1 / (1 + np.exp(-x))

# Plot the sigmoid curve over [-5.0, 5.0) at 0.1 steps.
xs = np.arange(-5.0, 5.0, 0.1)
plt.plot(xs, sigmoid(xs))
plt.ylim(-0.1, 1.1)  # small margin around the (0, 1) output range
plt.show()

## 2. 계단 함수 (Step Function)¶

In [4]:
def step_function(x):
    """Step activation: 1 where x > 0, else 0 (element-wise on arrays).

    Expects a NumPy array; the boolean mask is cast to integers so the
    plot shows 0/1 levels rather than True/False.
    """
    y = x > 0
    # np.int was a deprecated alias removed in NumPy 1.24; np.int64
    # preserves the original integer output dtype.
    return y.astype(np.int64)

# Plot the step function on the same input range as the sigmoid.
xs = np.arange(-5.0, 5.0, 0.1)
plt.plot(xs, step_function(xs))
plt.ylim(-0.1, 1.1)
plt.show()
In [5]:
def sigmoid(x):
    """Return the sigmoid 1 / (1 + e^-x) of x (scalar or array)."""
    neg_exp = np.exp(-x)
    return 1 / (1 + neg_exp)

def step_function(x):
    """Step activation: array of 1s where x > 0 and 0s elsewhere."""
    # np.int was a deprecated alias removed in NumPy 1.24; np.int64
    # keeps the original integer dtype of the result.
    return np.array(x > 0, dtype=np.int64)

# Overlay the sigmoid (solid) and the step function (dashed black)
# to compare the smooth vs. hard threshold behavior.
inputs = np.arange(-5.0, 5.0, 0.1)
smooth = sigmoid(inputs)
hard = step_function(inputs)

plt.plot(inputs, smooth)
plt.plot(inputs, hard, 'k--')
plt.ylim(-0.1, 1.1)
plt.show()

## 3. ReLU 함수 (Rectified Linear Unit Function)¶

In [6]:
def relu(x):
    """ReLU activation: max(0, x), element-wise on arrays."""
    return np.maximum(0, x)

# Plot the ReLU over [-5.0, 5.0); y-limits chosen to show the flat
# negative region and the linear positive region.
xs = np.arange(-5.0, 5.0, 0.1)
plt.plot(xs, relu(xs))
plt.ylim(-1.0, 5.5)
plt.show()

## 4. 소프트맥스 함수 (Softmax Function)¶

In [21]:
def softmax_(x):
    """Naive softmax: exp(x) normalized to sum to 1.

    WARNING: np.exp(x) overflows for large inputs (roughly x > 709);
    kept only for comparison with the numerically stable softmax().
    """
    exps = np.exp(x)
    return exps / np.sum(exps)

def softmax(x):
    """Numerically stable softmax: probabilities that sum to 1.

    Subtracting max(x) before exponentiating leaves the result
    unchanged mathematically but prevents exp() overflow.
    """
    shifted = x - np.max(x)
    exps = np.exp(shifted)
    return exps / np.sum(exps)

# Demonstrate softmax on a small vector; the outputs are a valid
# probability distribution (non-negative, summing to 1).
vec = np.array([0.3, 2.9, 4.0])
probs = softmax(vec)
print(vec)
print(probs)
print("np.sum(y):", np.sum(probs))
[ 0.3  2.9  4. ]
[ 0.01821127  0.24519181  0.73659691]
np.sum(y): 1.0

## 5. 항등 함수 (Identity Function)¶

In [23]:
def identity(x):
    """Identity activation: returns its input unchanged (used at the
    output layer for regression)."""
    return x

# Plot the identity function; the result is the straight line y = x.
xs = np.arange(-5.0, 5.0, 0.1)
plt.plot(xs, identity(xs))
plt.ylim(-5.5, 5.5)
plt.show()