References:

随机梯度下降法_通俗易懂讲解梯度下降法_weixin_39653442的博客-CSDN博客

梯度下降(Gradient Descent)_AI耽误的大厨的博客-CSDN博客

梯度下降法_踢开新世界的大门的博客-CSDN博客

These are just my study notes.

# Given the sample data, find the best-fitting w and b
import matplotlib.pyplot as plot
import numpy as np

# Prediction function for the target variable y
def fun(w,b,x):
    f = w*x+b
    return f

# Loss function (mean squared error): J = 1/m*∑(f(x)-y)^2, where f(x) = w*x+b
# Differentiating J, and folding the constant factor 2 into the learning rate alpha, gives the update rules:
#   w = w - alpha*1/m*∑[(f(x)-y)*x]   where (f(x)-y)*x is the per-sample partial derivative of J w.r.t. w
#   b = b - alpha*1/m*∑[(f(x)-y)]     where (f(x)-y) is the per-sample partial derivative of J w.r.t. b
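# Derivation sketch (chain rule per sample i), added here for reference:
#   ∂/∂w (f(x_i)-y_i)^2 = 2*(f(x_i)-y_i)*x_i
#   ∂/∂b (f(x_i)-y_i)^2 = 2*(f(x_i)-y_i)
# Averaging over the m samples (with the constant 2 folded into alpha) gives the sums computed below.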

# Loop over the whole sample set and accumulate the gradient terms and the loss (batch gradient descent)
def loopsum(w,b,x,y):
    m = len(x)
    w_sum = 0
    b_sum = 0
    loss_sum = 0
    for i in range(m):
        w_ = (fun(w,b,x[i])-y[i])*x[i] # per-sample gradient term for w
        b_ = fun(w,b,x[i])-y[i]        # per-sample gradient term for b (the residual)
        loss = b_*b_                   # squared error for this sample
        w_sum += w_
        b_sum += b_
        loss_sum += loss
    return (w_sum,b_sum,loss_sum)
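# Quick check (my own example, not from the original post): with w=1, b=0 and the perfectly
# linear sample x=[1,2], y=[1,2], every prediction is exact, so loopsum(1,0,[1,2],[1,2])
# returns (0, 0, 0).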

# Batch gradient descent: repeatedly update w and b with the averaged gradients
def batch_update_gradient(w,b,x,y,alpha):
    m = len(x)
    loss = 0
    w_tmp = (loopsum(w,b,x,y)[0]/m)
    while abs(w_tmp)>1e-5: # stop updating w and b once the gradient w.r.t. w is approximately 0
      result = loopsum(w,b,x,y)
      w_tmp = (result[0]/m) # averaged partial derivative of J w.r.t. w
      b_tmp = (result[1]/m) # averaged partial derivative of J w.r.t. b
      loss  = (result[2]/m) # mean squared error over the batch
      w = w-alpha*w_tmp
      b = b-alpha*b_tmp

    return (w,b,loss)
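# Note (my own observation, not from the original post): the stopping test above only checks
# the gradient w.r.t. w, so b may still be changing slightly when the loop exits; and if alpha
# is too large the condition may never be reached, so a maximum iteration count is a common safeguard.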


def main():

    # Sample data
    x = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]
    y = [1,4,7,4,1,6,7,3,9,6,11,10,13,14,11,16,19,18,19,20]
    alpha = 0.01 # learning rate (step size)
    w = 1 # initial value of w
    b = 1 # initial value of b

    # Fit w and b with batch gradient descent
    result = batch_update_gradient(w,b,x,y,alpha)
    print(" w=%f\n b=%f\n loss=%f" %(result[0],result[1],result[2]))
    plot.figure(figsize=(6,4)) # create a 6x4-inch figure
    plot.scatter(x,y,label='y') # scatter plot of the samples
    #plot.xlim(0,21)
    #plot.ylim(0,21)
    plot.xlabel('x',fontsize=20)
    plot.ylabel('y',fontsize=20)

    w,b = result[0],result[1] # use the fitted w and b for the regression line
    x = np.array(x)
    f = w*x+b

    plot.plot(x,f,color ='red') # plot the fitted line
    plot.show()

if __name__=="__main__":
    main()
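For comparison, the same batch-gradient computation can also be vectorized with NumPy instead of looping over the samples one by one. The sketch below is my own addition (vectorized_step is not part of the original script); it performs one update step using the same averaged gradients and mean squared error as loopsum and batch_update_gradient.

import numpy as np

# One vectorized batch-gradient-descent step (a sketch, assuming x and y are the sample lists above)
def vectorized_step(w, b, x, y, alpha):
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    err = w*x + b - y            # f(x)-y for every sample at once
    grad_w = np.mean(err*x)      # averaged partial derivative of J w.r.t. w
    grad_b = np.mean(err)        # averaged partial derivative of J w.r.t. b
    loss = np.mean(err**2)       # mean squared error over the batch
    return (w - alpha*grad_w, b - alpha*grad_b, loss)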