A Simple Implementation of a Backpropagation Neural Network
Posted by 阿新 on 2019-01-23
Putting the code here first; I'll fill in the write-up later.
# coding=utf-8
# Backpropagation neural network
import numpy as np

np.random.seed(0)


def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))


def dsigmoid(y):
    # Derivative of the sigmoid, expressed in terms of its output y = sigmoid(x)
    return y * (1.0 - y)


class NN:
    def __init__(self, ni, nh, no):
        """
        ni: number of input neurons
        nh: number of hidden neurons
        no: number of output neurons
        """
        # Extra row on the input weights holds the bias term
        self.wi = np.random.uniform(-0.2, 0.2, size=(ni + 1, nh))
        self.wo = np.random.uniform(-2.0, 2.0, size=(nh, no))

    def run_nn(self, inputs):
        self.ai = inputs.copy()
        # Append a constant 1 to each sample for the bias weight
        input_with_b = np.column_stack((self.ai, np.ones(self.ai.shape[0])))
        # Hidden-layer output (sigmoid is vectorized, no map() needed)
        self.ah = sigmoid(np.dot(input_with_b, self.wi))
        # Output-layer output
        self.ao = sigmoid(np.dot(self.ah, self.wo))
        return self.ao

    def back_propagate(self, y, learn_rate):
        # Output-layer delta
        err = y - self.ao
        output_delta = err * dsigmoid(self.ao)
        # Hidden-layer delta (must use wo before it is updated)
        err = np.dot(output_delta, self.wo.T)
        hidden_delta = err * dsigmoid(self.ah)
        # Update hidden-to-output weights
        change = np.dot(self.ah.T, output_delta)
        self.wo += change * learn_rate
        # Update input-to-hidden weights
        input_with_b = np.column_stack((self.ai, np.ones(self.ai.shape[0])))
        change = np.dot(input_with_b.T, hidden_delta)
        self.wi += change * learn_rate
        # Sum of squared errors
        err = 0.5 * (y - self.ao) ** 2
        return err

    def test(self, inputs, outputs):
        for i in range(inputs.shape[0]):
            print(self.run_nn(inputs[i:i + 1]), outputs[i:i + 1])

    def train(self, inputs, outputs, max_iterations=1000, learn_rate=0.5):
        for n in range(max_iterations):
            for i in range(inputs.shape[0]):
                self.run_nn(inputs[i:i + 1])
                err = self.back_propagate(outputs[i:i + 1], learn_rate)
            if n % 50 == 0:
                print('err: ', err)
        self.test(inputs, outputs)


def main():
    # NAND truth table as training data
    x = np.array([
        [0, 0],
        [0, 1],
        [1, 0],
        [1, 1],
    ])
    y = np.array([
        [1],
        [1],
        [1],
        [0],
    ])
    nn = NN(2, 2, 1)
    nn.train(x, y)


if __name__ == '__main__':
    main()
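Until the full write-up arrives, here is a compact summary (my notation, not the original author's) of the updates back_propagate performs. With squared error E = ½(y − a_o)², sigmoid activations, learning rate η, and [a_i, 1] denoting the input extended with the bias column, the code computes

$$
\begin{aligned}
\delta_o &= (y - a_o)\odot a_o(1 - a_o), &\qquad W_o &\leftarrow W_o + \eta\, a_h^{\top}\delta_o,\\
\delta_h &= (\delta_o W_o^{\top})\odot a_h(1 - a_h), &\qquad W_i &\leftarrow W_i + \eta\, [a_i,\,1]^{\top}\delta_h.
\end{aligned}
$$

The additions (rather than subtractions) descend E because (y − a_o) is already the negative of ∂E/∂a_o.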
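A quick way to convince yourself the deltas above are correct is a finite-difference gradient check on the hidden-to-output weights. This is a minimal sketch, not part of the original post: the helpers loss and grad_check are hypothetical names, and it assumes the NN class defined above.

import numpy as np

def loss(nn, x, y):
    # Squared-error loss for one batch, matching the 0.5 * (y - ao)**2 in the code
    return float(0.5 * np.sum((y - nn.run_nn(x)) ** 2))

def grad_check(nn, x, y, eps=1e-5):
    nn.run_nn(x)
    # Analytic gradient of E w.r.t. wo: dE/dwo = -ah^T [(y - ao) * ao * (1 - ao)]
    delta = (y - nn.ao) * nn.ao * (1.0 - nn.ao)
    analytic = -np.dot(nn.ah.T, delta)
    numeric = np.zeros_like(nn.wo)
    for idx in np.ndindex(*nn.wo.shape):
        old = nn.wo[idx]
        nn.wo[idx] = old + eps   # central difference around the current weight
        hi = loss(nn, x, y)
        nn.wo[idx] = old - eps
        lo = loss(nn, x, y)
        nn.wo[idx] = old
        numeric[idx] = (hi - lo) / (2 * eps)
    return np.max(np.abs(analytic - numeric))

With the NAND data from main(), grad_check(NN(2, 2, 1), x[0:1], y[0:1]) should come out near zero (on the order of 1e-8 or smaller) if the analytic deltas match the numerical gradient.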