@Pigmon 2017-04-05

Hard-coded single-hidden-layer ANN for the XOR problem
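
The listing below wires the network by hand: inputs $x_1, x_2$, three sigmoid hidden units (nodes 3, 4, and a third unit labelled b2 in the code), and a single sigmoid output (node 5). In the notation of the code's comments, the forward pass it implements is

$$
\begin{aligned}
o_3 &= \sigma(w_{13}x_1 + w_{23}x_2 + w_{b13}), \\
o_4 &= \sigma(w_{14}x_1 + w_{24}x_2 + w_{b14}), \\
o_{b2} &= \sigma(w_{1b2}x_1 + w_{2b2}x_2 + w_{b1b2}), \\
o_5 &= \sigma(w_{35}\,o_3 + w_{45}\,o_4 + w_{b25}\,o_{b2}),
\qquad \sigma(x) = \frac{1}{1 + e^{-x}}.
\end{aligned}
$$

Training stops once the mean squared error over the four XOR samples drops below the threshold of 0.1, or after 20000 iterations.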

```python
# -*- coding: utf-8 -*-
import random
import numpy as np

# training samples: (x1, x2, y)
train_set = ((0, 0, 0), (1, 1, 0), (0, 1, 1), (1, 0, 1))
# learning rate
eta = 0.2
# error threshold for early stopping
threshold = 0.1
# weight layout:
# [
#   [w_13,  w_23,  w_b13 ],   # inputs -> hidden node 3
#   [w_14,  w_24,  w_b14 ],   # inputs -> hidden node 4
#   [w_1b2, w_2b2, w_b1b2],   # inputs -> hidden node b2
#   [w_35,  w_45,  w_b25 ]    # hidden -> output node 5
# ]
w = [[1, 1, 1], [1, 1, 1], [1, 1, 1], [1, 1, 1]]


def sigmoid(x):
    return 1. / (1. + np.e ** (-x))


def total_err_var():
    """Mean squared error over the whole training set."""
    global train_set
    total = 0
    for sample in train_set:
        total += (result_of(sample[0], sample[1]) - sample[2]) ** 2
    return total / len(train_set)


def make_data():
    """Initialize every weight to a random value in [-1, 1)."""
    global w
    for arr in w:
        r = 2 * np.random.random((3, 1)) - 1
        for i in range(0, 3):
            arr[i] = r[i][0]


def result_of(x1, x2):
    """Forward pass, returning only the output of node 5."""
    global w
    o3 = sigmoid(w[0][0] * x1 + w[0][1] * x2 + w[0][2])
    o4 = sigmoid(w[1][0] * x1 + w[1][1] * x2 + w[1][2])
    ob2 = sigmoid(w[2][0] * x1 + w[2][1] * x2 + w[2][2])
    o5 = sigmoid(w[3][0] * o3 + w[3][1] * o4 + w[3][2] * ob2)
    return o5


def outputs(x1, x2):
    """Forward pass, returning the activation of every node."""
    global w
    # O3 = Sigmoid[w_13 * x1 + w_23 * x2 + w_b13 * 1]
    o3 = sigmoid(w[0][0] * x1 + w[0][1] * x2 + w[0][2])
    # O4 = Sigmoid[w_14 * x1 + w_24 * x2 + w_b14 * 1]
    o4 = sigmoid(w[1][0] * x1 + w[1][1] * x2 + w[1][2])
    # Ob2 = Sigmoid[w_1b2 * x1 + w_2b2 * x2 + w_b1b2 * 1]
    ob2 = sigmoid(w[2][0] * x1 + w[2][1] * x2 + w[2][2])
    # O5 = Sigmoid[w_35 * O3 + w_45 * O4 + w_b25 * Ob2]
    o5 = sigmoid(w[3][0] * o3 + w[3][1] * o4 + w[3][2] * ob2)
    arr = [x1, x2, o3, o4, ob2, o5]
    return x1, x2, o3, o4, ob2, o5, arr


def err_of(y, o3, o4, ob2, o5):
    """Backpropagated error terms (delta rule with sigmoid derivative)."""
    global w
    e5 = o5 * (1 - o5) * (y - o5)
    e3 = o3 * (1 - o3) * e5 * w[3][0]
    e4 = o4 * (1 - o4) * e5 * w[3][1]
    eb2 = ob2 * (1 - ob2) * e5 * w[3][2]
    return e5, e3, e4, eb2, [e5, e3, e4, eb2]


def train():
    global train_set, w
    go_through = False
    cnter = 0
    while cnter < 20000:
        if total_err_var() < threshold:
            go_through = True
            print("Counter: %d" % cnter)
            break
        # pick one training sample at random
        sample = random.choice(train_set)
        # forward pass: activation of every node
        x1, x2 = sample[0], sample[1]
        O1, O2, O3, O4, Ob2, O5, arr_o = outputs(x1, x2)
        # backward pass: error terms
        e5, e3, e4, eb2, arr_e = err_of(sample[2], O3, O4, Ob2, O5)
        # weight update
        param = [x1, x2, 1]
        errs = [e3, e4, eb2]
        outs = [O3, O4, Ob2]
        for i in range(0, 3):
            for j in range(0, 3):
                w[i][j] += eta * errs[i] * param[j]
            w[3][i] += eta * e5 * outs[i]
        # iteration counter
        cnter += 1
    test_output()
    print(go_through)
    return go_through


def test_output():
    print("Result of (0, 0) = %f" % result_of(0, 0))
    print("Result of (1, 1) = %f" % result_of(1, 1))
    print("Result of (0, 1) = %f" % result_of(0, 1))
    print("Result of (1, 0) = %f" % result_of(1, 0))


if __name__ == '__main__':
    make_data()
    train()
```
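
The error terms computed in `err_of` and the updates applied in `train` are the online delta rule for squared error with sigmoid activations:

$$
\begin{aligned}
\delta_5 &= o_5(1 - o_5)(y - o_5), &
\delta_h &= o_h(1 - o_h)\,\delta_5\,w_{h5} \quad (h \in \{3, 4, b2\}), \\
w_{ih} &\leftarrow w_{ih} + \eta\,\delta_h\,x_i, &
w_{h5} &\leftarrow w_{h5} + \eta\,\delta_5\,o_h,
\end{aligned}
$$

where $x_i \in \{x_1, x_2, 1\}$ and $\eta = 0.2$. Each iteration draws one of the four samples at random, runs the forward pass, backpropagates $\delta_5$ to the hidden units, and adjusts every weight accordingly; once the mean squared error falls below 0.1, `test_output` prints the trained network's response to all four input pairs.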