# Gradient descent implemented with numpy (用numpy实现梯度下降法).
# Reference article: https://www.cnblogs.com/noluye/p/11108513.html
import numpy as np
import matplotlib.pyplot as plt
def get_loss(x):
    """Noisy quadratic loss evaluated element-wise on array *x*.

    Returns (x**2 - 2*x + 3) plus uniform noise drawn from (-0.01, 0.01),
    one noise sample per element. *x* must be a 2-D numpy array.
    """
    rows, cols = x.shape
    # Uniform noise in (-0.01, 0.01): rand gives [0, 1), rescale and shift.
    noise = 0.01 * (2 * np.random.rand(rows, cols) - 1)
    return x ** 2 - 2 * x + 3 + noise
# Evaluation grid: 300 points in [0, 3) with step 0.01, shaped as a column vector.
x = np.arange(0, 3, 0.01).reshape(-1, 1)
# Disabled preview plot of the loss surface, kept for reference.
# (Was a no-op triple-quoted string expression; comments are the idiomatic
# way to keep disabled code.)
# plt.title("loss")
# plt.plot(get_loss(np.array(x)))
# plt.show()
def get_grad(x):
    """Analytic gradient of the quadratic x**2 - 2*x + 3 (noise term ignored)."""
    return x * 2 - 2
# Fixed seed so the random starting point (and the loss noise) is reproducible.
np.random.seed(31415)

x_ = np.random.rand(1) * 3  # random starting point in [0, 3)
x_s = []                    # trajectory of iterates, one entry per step
alp = 0.001                 # learning rate

print("X0", x_)

# Plain gradient descent: 2000 fixed-size steps toward the minimum at x = 1.
# Note: x_ is rebound (not mutated in place) so each appended iterate is a
# distinct array.
for e in range(2000):
    x_ = x_ - get_grad(x_) * alp
    x_s.append(x_)
    if not e % 100:
        print(e, "steps,x_ = ", x_)

# Plot the (noisy) loss along the trajectory of iterates.
plt.title("loss")
plt.plot(get_loss(np.array(x_s)))
plt.show()