ML:自己动手实现单变量线性回归算法

2018-09-05  本文已影响19人  ACphart

介绍

这里是实现单变量线性回归算法的训练过程

import numpy as np
import matplotlib.pyplot as plt
from IPython.core.interactiveshell import InteractiveShell

# Make Jupyter/IPython echo the value of EVERY bare expression in a cell,
# not just the last one (used at the end to display `theta`).
InteractiveShell.ast_node_interactivity = 'all'

生成模拟数据

# Reproducible simulated data for the model y = 2.0 + 2.9*x + Gaussian noise.
np.random.seed(20180822)

m = 100                    # number of samples
Theta = [[2.0], [2.9]]     # true parameters: [intercept, slope]

x0 = np.ones((m, 1))                      # bias column of ones
x1 = np.linspace(-2, 5, m).reshape(m, 1)  # feature column

X = np.hstack((x0, x1))                   # design matrix, shape (m, 2)
# Fix: the noise length was hard-coded as 100; use m so it always matches
# the sample count if m is changed above.
y = np.dot(X, Theta) + 2.0*np.random.randn(m, 1)

_ = plt.scatter(x1, y)

损失函数、梯度函数

def loss_func(X, y, theta):
    """Return the mean-squared-error cost J(theta) = ||X@theta - y||^2 / (2m).

    Parameters
    ----------
    X : ndarray, shape (m, n) -- design matrix (first column is the bias).
    y : ndarray, shape (m, 1) -- target values.
    theta : ndarray, shape (n, 1) -- parameter vector.

    Returns a 1x1 ndarray containing the scalar cost.

    Fix: derive m from X.shape[0] instead of reading the global `m`,
    so the function works for any sample count; also drop the stray
    trailing comma in the signature.
    """
    m = X.shape[0]
    residual = np.dot(X, theta) - y
    return 1./(2*m) * np.dot(residual.T, residual)

def grad_func(X, y, theta):
    """Return the gradient of the MSE cost: (1/m) * X^T (X@theta - y).

    Parameters
    ----------
    X : ndarray, shape (m, n) -- design matrix.
    y : ndarray, shape (m, 1) -- target values.
    theta : ndarray, shape (n, 1) -- parameter vector.

    Returns an (n, 1) ndarray, one partial derivative per parameter.

    Fix: derive m from X.shape[0] instead of reading the global `m`,
    consistent with loss_func, so the function is self-contained.
    """
    m = X.shape[0]
    residual = np.dot(X, theta) - y
    return 1./m * np.dot(X.T, residual)

梯度下降

# Learning rate and the convergence threshold that stops the loop.
alpha = 0.01
accuracy = 1e-5

# Initialize the parameters with small random values.
theta = np.random.randn(2,1)*0.1

i = 1
index = 1
c = np.array([0.8, 0.8, 0.8])   # plot color; darkened step by step as training progresses
grad = grad_func(X, y, theta)   # initial gradient
# Batch gradient descent: iterate until every component of the gradient
# is smaller than `accuracy` in absolute value.
while not np.all(abs(grad) < accuracy):
    theta = theta - alpha*grad
    grad = grad_func(X, y, theta)
    
    # Visualize the learning process: plot the current fitted line at
    # exponentially spaced iterations (index grows 1, 4, 16, ...), each
    # drawn in a slightly darker shade than the last.
    i = i+1
    if i%index == 0:
        _ = plt.plot(x1, np.dot(X, theta), color=c)
        c = c - 0.1
        index = index*4

# Overlay the raw data on the learning-process plot, then echo the
# fitted parameters (displayed thanks to ast_node_interactivity = 'all').
_ = plt.scatter(x1, y, alpha=0.7)
theta    
    array([[ 2.0953245],
           [ 2.9086616]])
上一篇下一篇

猜你喜欢

热点阅读