A simple demonstration of error back-propagation for linear regression, implemented with least squares
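In outline, the script fits the model y = k·x + b by a coarse coordinate search rather than by solving the normal equations: the error measure is E(k, b) = |Σᵢ (yᵢ − (k·xᵢ + b))|, and at every step the error at the four diagonal neighbours (k ± step, b ± step) is compared with the error at the current (k, b). The smallest of the five values decides whether k and b each move up or down by one step, and the search stops once the current pair is already the best.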

import numpy as np
import matplotlib.pyplot as plt
import time

x_data = np.arange(100,)
print(x_data)
k0 = np.random.random()
b0 = np.random.random()

y_data = x_data*k0 + b0 + np.random.random(100,)*3   # y = k0*x + b0 plus uniform noise in [0, 3)
print()
print(y_data)
# plt.ion()
plt.figure(figsize=(12,9))
plt.scatter(x_data,y_data,label='true data')
# hypothesis: y = k * x + b

def wucha(x_真值, y_真值, k, b):
    # error measure: absolute value of the summed residuals (y_true - y_pred)
    y_预测 = x_真值*k + b
    误差 = y_真值 - y_预测
    return abs(误差.sum())


def tiaocan(x_真值, y_真值, k, b, 步长):
    # evaluate the error at the four diagonal neighbours (k±步长, b±步长)
    # and at the current (k, b), then return the direction of the best move
    s1 = wucha(x_真值, y_真值, k+步长, b+步长)
    s2 = wucha(x_真值, y_真值, k-步长, b-步长)
    s3 = wucha(x_真值, y_真值, k+步长, b-步长)
    s4 = wucha(x_真值, y_真值, k-步长, b+步长)
    s = wucha(x_真值, y_真值, k, b)
    s_list = [s1, s2, s3, s4, s]
    print(s_list)
    s_min = min(s_list)          # smallest of the five errors (avoid shadowing the built-in min)
    if s_min == s1:
        return True, True        # increase k, increase b
    elif s_min == s2:
        return False, False      # decrease k, decrease b
    elif s_min == s3:
        return True, False       # increase k, decrease b
    elif s_min == s4:
        return False, True       # decrease k, increase b
    else:
        return None              # current (k, b) is already the best: stop


if __name__ == '__main__':

    # search until no neighbouring step improves the error (at most 1,000,000 iterations)
    k = np.random.random()
    b = np.random.random()
    k1 = k
    b1 = b
    print('randomly initialised k, b:', k1, b1)
    for i in range(1000000):
        n = 0.01                                   # step size for both k and b
        s = tiaocan(x_data, y_data, k1, b1, n)
        print(s)
        if s:
            if s[0]:
                k1 += n
            else:
                k1 -= n
            if s[1]:
                b1 += n
            else:
                b1 -= n
        else:
            break                                  # current (k1, b1) beats all neighbours: stop
        print('iteration {}: k, b = {} {}'.format(i, k1, b1))
        lines = plt.plot(x_data, x_data*k1+b1, color='black')

    # note: the added noise is U(0, 3) with mean ~1.5, so the fitted intercept b1 tends toward b0 + 1.5
    print('true values of k0, b0:', k0, b0)
    print('random initial values of k, b:', k, b)
    print('total number of iterations: {}'.format(i+1))
    print('estimated k, b:', k1, b1)
    plt.plot(x_data, x_data*k+b, color='green', label='first')
    plt.plot(x_data, k1*x_data+b1, color='red', label='final')
    plt.legend()
    # plt.ioff()
    plt.show()


Running the code above fits the randomly generated data: the line after fitting is red, the line before fitting is green, and the lines drawn during the fitting process are black. To make the code easier to follow, some parameter names are in Chinese; Python 3 uses UTF-8 as its default encoding.
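As a quick sanity check (an addition, not part of the original post), the searched k1, b1 can be compared against NumPy's closed-form least-squares fit. Below is a minimal sketch, assuming the same data model as in the listing above; np.polyfit with degree 1 returns the slope first, then the intercept:

# Sanity-check sketch (an addition, not from the original post): compare the
# coordinate-search result with a closed-form least-squares straight-line fit.
import numpy as np

x_data = np.arange(100,)
k0, b0 = np.random.random(), np.random.random()
y_data = x_data*k0 + b0 + np.random.random(100,)*3   # same data model as above

k_ls, b_ls = np.polyfit(x_data, y_data, 1)            # degree-1 polynomial = straight line
print('least-squares fit: k =', k_ls, 'b =', b_ls)
# k_ls should be close to k0; b_ls close to b0 + 1.5, the mean of the U(0, 3) noise

Because the script's error measure is the absolute value of the summed residuals rather than the sum of squared residuals, the two results need not coincide exactly.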

Original post: https://www.cnblogs.com/lycsdhr/p/9606640.html