Neural Networks 01

2020-09-14  平头哥2

Activation Functions

import numpy as np
import matplotlib.pyplot as plt

def step_function(x):
    # element-wise: the boolean mask x > 0 is cast to 0/1 integers
    return np.array(x > 0, dtype=int)
    
def sigmoid(x):
    return 1/(1 + np.exp(-x))

x = np.arange(-6, 6, 0.1)   # sample points from -6 to 6 in steps of 0.1
print(x)

y1 = step_function(x)
print(y1)
y2 = sigmoid(x)
print(y2)

 
fig, ax1 = plt.subplots(2, 1, figsize=(12, 9))   # two stacked subplots

# twinx() on an Axes would add a secondary y-axis sharing its x-axis; not needed here.

ax1[0].plot(x, y1)   # step function
ax1[1].plot(x, y2)   # sigmoid

plt.tight_layout()
plt.show()
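A quick numeric check of the two activation functions defined above (the sample values are arbitrary): the comparison x > 0 yields a boolean array that the int cast turns into 0/1, while sigmoid squashes each value into (0, 1).

sample = np.array([-1.0, 0.0, 2.0])
print(step_function(sample))   # [0 0 1]
print(sigmoid(sample))         # approx [0.2689 0.5    0.8808]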

Matrix Multiplication

import numpy as np
#import matplotlib.pylab as plt



A = np.array([[1,2],[3,4]])

B = np.array([[5,6],[7,8]])  

print(np.dot(A,B))   # [[19 22] [43 50]]


A1 = np.array([[1,2,3],[3,4,5]])    # shape (2, 3)
B1 = np.array([[1,2],[3,4],[4,5]])  # shape (3, 2)

C = np.dot(A1,B1)


print(C)
print(C.shape) # (2, 2)
print(np.ndim(C)) # 2


A2 = np.array([7,8])        # 1-D array, shape (2,)
print(A2)   # [7 8]
A3 = np.array([[7],[8]])    # column vector, shape (2, 1)
print(A3)   # [[7]
            #  [8]]


print(np.dot(B1,A2))   # (3,2)·(2,)  -> [23 53 68]

print(np.dot(B1,A3))   # (3,2)·(2,1) -> [[23] [53] [68]], shape (3, 1)

# Matrix product inside a neural network
W = np.array([[1,3,5],[2,4,6]])
X = np.array([1,2])
print(np.dot(X, W))   # [ 5 11 17]

W1 = np.array([[1,2],[3,4],[5,6]])
X1 = np.array([[1],[2]])
print(np.dot(W1, X1))   # [[ 5] [11] [17]]
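The rule behind all of the examples above: np.dot(A, B) requires the column count of A to equal the row count of B, and the result has A's rows and B's columns. A minimal sketch of the error raised when the shapes do not line up (M is just an illustrative (2, 2) matrix, not part of the code above):

M = np.array([[1, 2], [3, 4]])   # shape (2, 2)
try:
    np.dot(A1, M)                # A1 is (2, 3): inner dimensions 3 and 2 do not match
except ValueError as err:
    print(err)                   # shapes (2,3) and (2,2) not aligned ...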

Neural Network Forward Computation

import numpy as np

def sigmoid(x):
    return 1/(1 + np.exp(-x))


X = np.array([1.0,0.5])
W1 = np.array([[0.1,0.3,0.5],[0.2,0.4,0.6]])
B1 = np.array([0.1,0.2,0.3])


print(X.shape)   # (2,)
print(W1.shape) # (2, 3)
print(B1.shape) # (3,)
  

# Forward pass through layer 1
A1 = np.dot(X, W1) + B1

print(A1) # [0.3 0.7 1.1]

# Apply the activation function
Z1 = sigmoid(A1)

print(Z1) #  [0.57444252 0.66818777 0.75026011]


# Forward pass through layer 2
W2 = np.array([[0.1,0.4],[0.2,0.5],[0.3,0.6]])
B2 = np.array([0.1,0.2])
A2 = np.dot(Z1, W2) + B2
print(A2) # [0.51615984 1.21402696]
Z2 = sigmoid(A2)
print(Z2) #[0.62624937 0.7710107 ]
    
    
    
# Transmission from layer 2 to the output layer

# Output-layer activation function.
# As a general rule: regression problems can use the identity function,
# binary classification can use the sigmoid function,
# and multi-class classification can use the softmax function
# (a minimal softmax sketch follows after this listing).
def identity_function(x):
    return x
     
W3 = np.array([[0.1,0.3],[0.2,0.4]])    
B3 = np.array([0.1,0.2]) 
A3 = np.dot(Z2, W3) + B3
print(A3)  # [0.31682708 0.69627909]
Y = identity_function(A3) 
print(Y)  # [0.31682708 0.69627909]
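The comments above mention softmax for multi-class classification, but the listing only uses the identity function. A minimal softmax sketch, using the standard max-subtraction trick for numerical stability (this function is not part of the code above; the input values are arbitrary):

def softmax(a):
    c = np.max(a)                # subtract the max before exponentiating to avoid overflow
    exp_a = np.exp(a - c)
    return exp_a / np.sum(exp_a)

a = np.array([0.3, 2.9, 4.0])
y_soft = softmax(a)
print(y_soft)          # approx [0.0182 0.2452 0.7366]
print(np.sum(y_soft))  # 1.0 -- the outputs can be read as probabilities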

Code Summary

import numpy as np

def sigmoid(x):
    return 1/(1 + np.exp(-x))

def identity_function(x):
    return x

def init_network():
    network = {}
    network['W1'] = np.array([[0.1,0.3,0.5],[0.2,0.4,0.6]])
    network['B1'] = np.array([0.1,0.2,0.3])
    network['W2'] = np.array([[0.1,0.4],[0.2,0.5],[0.3,0.6]])
    network['B2'] = np.array([0.1,0.2])
    network['W3'] = np.array([[0.1,0.3],[0.2,0.4]])
    network['B3'] = np.array([0.1,0.2]) 
    
    return network


def forward(network, X):
    W1, W2, W3 = network['W1'], network['W2'], network['W3']
    B1, B2, B3 = network['B1'], network['B2'], network['B3']
    
    A1 = np.dot(X, W1) + B1
    Z1 = sigmoid(A1)
    A2 = np.dot(Z1, W2) + B2
    Z2 = sigmoid(A2)
    A3 = np.dot(Z2, W3) + B3
    Y = identity_function(A3)
    
    return Y

network2 = init_network()

X = np.array([1.0,0.5])
Y = forward(network2, X)
print(Y)  # [0.31682708 0.69627909]
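Because np.dot also handles 2-D inputs and the bias vectors broadcast across rows, the same forward function should work on a mini-batch where each row is one input. This is an assumption about usage, not something the code above does; a quick sketch:

X_batch = np.array([[1.0, 0.5],
                    [0.2, 0.8]])          # two inputs, one per row
Y_batch = forward(network2, X_batch)
print(Y_batch.shape)   # (2, 2): one output row per input
print(Y_batch[0])      # matches the single-input result above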