# 深度学习入门 — Introduction to deep learning
# 基础知识代码 — basic code examples
# Fixed: the originals ("import numpyas np" / "import matplotlib.pyplotas plt")
# are SyntaxErrors — a space was lost between the module name and "as".
import numpy as np
import matplotlib.pyplot as plt
# 感知机：与门 — perceptron AND gate (weights 0.5, 0.5, threshold 0.7)
def AND(x1, x2):
    """Perceptron AND gate: fires (returns 1) only when the weighted sum
    x1*w1 + x2*w2 exceeds the threshold theta = 0.7.

    Fixes: the original's elif compared the constant 0.5*w1 + 0.5*w2
    against the threshold (always true here, but meaningless) and there
    was no guaranteed return path — temp == theta fell through to None.
    A leftover debug print of temp is also removed.
    """
    w1, w2, theta = 0.5, 0.5, 0.7
    temp = x1 * w1 + x2 * w2
    return 1 if temp > theta else 0

print(AND(1, 0), AND(0, 0), AND(1, 1), AND(0, 0))
# numpy 数组实现与门 — AND gate implemented with numpy arrays
def AND(x1, x2):
    """AND gate, numpy form: weighted sum plus bias, fire when positive.

    Fix: the original had `if temp > 0 ... elif temp < 0 ...` with no
    branch for temp == 0, silently returning None at the boundary.
    """
    delta = -0.7  # bias: only x1 = x2 = 1 pushes the sum above zero
    x = np.array([x1, x2])
    w = np.array([0.5, 0.5])
    temp = np.sum(x * w) + delta
    return 1 if temp > 0 else 0
def OR(x1, x2):
    """OR gate, numpy form: weighted sum plus bias, fire when positive.

    Fix: same missing-else defect as AND — temp == 0 returned None.
    """
    delta = -0.2  # bias: any single active input is enough to fire
    x = np.array([x1, x2])
    w = np.array([0.5, 0.5])
    temp = np.sum(x * w) + delta
    return 1 if temp > 0 else 0
print(AND(1, 0), AND(0, 0), AND(1, 1), AND(0, 0))
# NAND gate
def NAND(x1, x2):
    """NAND gate: negated weights with a positive bias.

    Fixes: the original bias 0.2 was too small — it made NAND(1, 0) and
    NAND(0, 1) return 0 instead of 1 (and broke the XOR built on top).
    With w = (-0.5, -0.5) the bias must lie in (0.5, 1.0); 0.7 is the
    conventional choice. The temp == 0 missing-else defect is fixed too.
    """
    delta = 0.7
    x = np.array([x1, x2])
    w = np.array([-0.5, -0.5])
    temp = np.sum(x * w) + delta
    return 1 if temp > 0 else 0
# XOR gate: a non-linear function expressed by stacking NAND, AND and OR —
# a single-layer perceptron cannot represent it.
def XOR(y1, y2):
    """Two-layer perceptron XOR: AND of OR(y1, y2) and NAND(y1, y2)."""
    either = OR(y1, y2)
    not_both = NAND(y1, y2)
    return AND(either, not_both)

print(XOR(1, 1))
# 阶跃函数 — step function (fixed typo: 阶越 → 阶跃)
def step_function(x):
    """Heaviside step function: 1 where x > 0, else 0, element-wise.

    Fix: `dtype=np.int` — the `np.int` alias was deprecated in NumPy 1.20
    and removed in 1.24, raising AttributeError; the builtin `int` is the
    correct replacement.
    """
    return np.array(x > 0, dtype=int)
# Plot the step function over [-9, 9) with a small y margin so the
# 0/1 plateaus are visible.
x = np.arange(-9, 9, 0.1)
y = step_function(x)
plt.plot(x, y)
plt.ylim(-0.1, 1.1)
plt.show()
# Sigmoid function; accepts scalars or numpy arrays (element-wise).
def sigmoid(x):
    """Logistic sigmoid 1 / (1 + e^(-x)), applied element-wise."""
    neg_exp = np.exp(-x)
    return 1 / (1 + neg_exp)
# Plot the sigmoid curve over [-9, 9); same axes as the step-function plot
# so the two activation shapes can be compared.
#print(sigmoid(np.array([-1, 2, 1, 2, 3, 4])))
x = np.arange(-9, 9, 0.1)
y = sigmoid(x)
plt.plot(x, y)
plt.ylim(-0.1, 1.1)
plt.show()
# ReLU 函数，最近使用较多 — ReLU, the most widely used activation recently
def relu(x):
    """Rectified Linear Unit: element-wise max(x, 0)."""
    # clip with only a lower bound is equivalent to np.maximum(0, x)
    return np.clip(x, 0, None)
# numpy 的多维数组使用 — working with numpy multi-dimensional arrays
# Multi-dimensional array basics: ndim, shape, and dot products.
A = np.array([[1, 2], [3, 4]])
print(np.ndim(A), A.shape)  # ndim: number of dimensions; A.shape is a tuple
B = np.array([[5, 6], [7, 8]])  # NOTE(review): B is defined but never used
C = np.array([5, 6])
print(np.ndim(C), C.shape)
D = np.dot(A, C)  # (2,2) @ (2,) -> vector of shape (2,)
print(D)
# 神经网络的内积 — the network's matrix product: numpy computes it in one
# call, no Python for-loop needed
# One layer's weighted sums as a single matrix product.
X = np.array([1, 2])
print(X.shape)
W = np.array([[1, 3, 5], [2, 4, 6]])
print(W)
Y = np.dot(X, W)  # (2,) @ (2,3) -> (3,): all three outputs at once
print(Y)
# 信号传递 — signal propagation between layers
# Input-to-hidden propagation: affine transform, then sigmoid activation.
X = np.array([1, 0.5])
W1 = np.array([[0.1, 0.3, 0.5], [0.2, 0.4, 0.6]])
B1 = np.array([0.1, 0.2, 0.3])
A = np.dot(X, W1) + B1  # weighted sums: (2,) @ (2,3) + (3,) -> (3,)
print(A)
print(sigmoid(A))  # activation applied element-wise
# 代码实现小结 — implementation summary: full 3-layer forward pass
def init_network():
    """Build the weights and biases of a 2-3-2-2 fully connected network.

    Returns a dict keyed 'W1'/'b1' .. 'W3'/'b3' holding numpy arrays.
    """
    return {
        'W1': np.array([[0.1, 0.2, 0.3], [0.2, 0.4, 0.6]]),
        'b1': np.array([0.1, 0.2, 0.3]),
        'W2': np.array([[0.1, 0.4], [0.2, 0.5], [0.3, 0.6]]),
        'b2': np.array([0.1, 0.2]),
        'W3': np.array([[0.1, 0.3], [0.2, 0.4]]),
        'b3': np.array([0.1, 0.2]),
    }
def forward(network, x):
    """Propagate input x through the 3-layer network.

    Each layer is an affine transform followed by sigmoid; the output
    layer uses the identity activation. Prints each layer's activation
    (tutorial debug output) and returns the final output vector.
    """
    W1, W2, W3 = network['W1'], network['W2'], network['W3']
    b1, b2, b3 = network['b1'], network['b2'], network['b3']
    h1 = sigmoid(np.dot(x, W1) + b1)   # hidden layer 1
    print(h1)
    h2 = sigmoid(np.dot(h1, W2) + b2)  # hidden layer 2
    print(h2)
    out = np.dot(h2, W3) + b3          # output layer: identity activation
    print(out)
    return out
# Build the network and run one forward pass on the input (1, 0.5).
network = init_network()
x = np.array([1, 0.5])
y = forward(network, x)
print(y)
# softmax 函数，用于多分类 — softmax, for multi-class classification
def softmax(a):
    """Return exp(a) normalized so the outputs sum to 1.

    NOTE(review): naive form kept on purpose for the tutorial — np.exp
    overflows to inf for large inputs (e.g. a ~ 1000), yielding nan.
    The numerically stable variant subtracts max(a) before exponentiating.
    """
    exp_a = np.exp(a)
    print(exp_a)  # debug: un-normalized exponentials
    sum_exp_a = np.sum(exp_a)
    y = exp_a / sum_exp_a
    return y
# Small inputs — the naive form works fine here.
a = np.array([0.3, 2.9, 4.0])
print(softmax(a))
# Overflow-safe softmax: subtracting max(a) before exp keeps the arguments
# non-positive, so np.exp never overflows; the outputs still sum to 1
# because the shift cancels in the normalization.
def softmax(a):
    """Return the softmax of array a, guarded against overflow."""
    shifted = a - np.max(a)  # overflow countermeasure
    exp_vals = np.exp(shifted)
    print(exp_vals)
    total = np.sum(exp_vals)
    probs = exp_vals / total
    print(probs)
    return probs

# Inputs this large would overflow the naive softmax; the stable one is fine.
a = np.array([1010, 1000, 990])
print(softmax(a))
print(np.sum(softmax(a)))