[关闭]
@leona1992 2022-08-03T15:38:10.000000Z 字数 2105 阅读 100

梯度下降法2

python 机器学习 梯度下降法


生成数据集

  1. import numpy as np
  2. import matplotlib.pyplot as plt
  3. np.random.seed(666)
  4. x = 2 * np.random.random(size=100)
  5. y = x * 3. + 4. + np.random.normal(size=100)
  6. X = x.reshape(-1, 1)
  7. plt.scatter(x,y,marker='.')
  8. plt.title('database')
  9. plt.show()

数据集

梯度下降法进行训练

目标:使得损失函数 $J(\theta) = \frac{1}{m}\sum_{i=1}^{m}\left(y^{(i)} - X_b^{(i)}\theta\right)^2$ 尽可能小


定义损失函数 J 及其梯度 dJ:

  1. def J(theta, X_b, y):
  2. try:
  3. return np.sum((y - X_b.dot(theta))**2) / len(X_b)
  4. except:
  5. return float('inf')

  1. def dJ(theta, X_b, y):
  2. res = np.empty(len(theta))
  3. res[0] = np.sum(X_b.dot(theta) - y)
  4. for i in range(1, len(theta)):
  5. res[i] = (X_b.dot(theta) - y).dot(X_b[:, i])
  6. return res * 2 / len(X_b)
  1. def gradient_descent(X_b, y, initial_theta, eta, n_iters=1e4, epsilon=1e-8):
  2. theta = initial_theta
  3. i_iter = 0
  4. xb = (np.array(X_b)[:, 1])
  5. while i_iter < n_iters:
  6. plt.ion()
  7. gradient = dJ(theta, X_b, y)
  8. last_theta = theta
  9. theta = theta - eta * gradient
  10. #while (i_iter % 1000) == 1:
  11. while (i_iter < 150) & ((i_iter % 5) == 1):
  12. print(i_iter)
  13. xq = [0, 2]
  14. yq = theta[0] * np.array(xq) + theta[1] * np.ones(np.shape(theta))
  15. fig = plt.figure()
  16. fig = plt.scatter((np.array(X_b)[:, 1]), y)
  17. fig = plt.plot(xq, yq, linewidth=4, color='darkgoldenrod')
  18. fig = plt.legend(['data', 'simulate'], loc='upper left')
  19. fig = plt.title('gradient')
  20. fig = plt.xlabel(i_iter)
  21. plt.savefig(r"C:\\Users\\xiaoy\\Pictures\\s\\" + format(i_iter+100) +
  22. '.png')
  23. plt.show()
  24. print(theta)
  25. plt.close()
  26. i_iter = i_iter + 1
  27. if (abs(J(theta, X_b, y) - J(last_theta, X_b, y)) < epsilon):
  28. break
  29. i_iter = i_iter + 1
  30. return theta
  31. theta = gradient_descent(X_b, y, initial_theta, eta)
  32. print("荣耀不朽")

1000多次迭代结束
1001.png-26.9kB

gradient

python 生成Gif动图程序(网上找的)

添加新批注
在作者公开此批注前,只有你和作者可见。
回复批注