Keras Learning (6): Regression with an RNN (Recurrent Neural Network)
By 阿新 · Published 2019-01-23
This post shows how to use an RNN for regression: the network reads a sine curve and learns to predict the corresponding cosine curve. Since cos(x) = sin(x + π/2), the target is simply the input shifted by a quarter period, which the recurrent layer can infer from context.
Example code:
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, TimeDistributed, LSTM
from keras.optimizers import Adam
import matplotlib.pyplot as plt

# Fix the seed so the generated random numbers are the same across runs
np.random.seed(1337)

# Hyperparameters
BATCH_START = 0
TIME_STEPS = 20
BATCH_SIZE = 50
INPUT_SIZE = 1
OUTPUT_SIZE = 1
CELL_SIZE = 20
LR = 0.006

# Generate data
def get_batch():
    global BATCH_START, TIME_STEPS
    # xs shape (50batch, 20steps)
    xs = np.arange(BATCH_START, BATCH_START + TIME_STEPS * BATCH_SIZE).reshape((BATCH_SIZE, TIME_STEPS)) / (10 * np.pi)
    seq = np.sin(xs)
    res = np.cos(xs)
    BATCH_START += TIME_STEPS
    # plt.plot(xs[0, :], res[0, :], 'r', xs[0, :], seq[0, :], 'b--')
    # plt.show()
    return [seq[:, :, np.newaxis], res[:, :, np.newaxis], xs]

# Inspect the data
# get_batch()
# exit()

# Build the network
model = Sequential()

# Add the LSTM layer
model.add(LSTM(
    batch_input_shape=(BATCH_SIZE, TIME_STEPS, INPUT_SIZE),
    units=CELL_SIZE,        # called output_dim in older Keras versions
    return_sequences=True,  # True: emit an output at every time step; False: only the final output
    stateful=True,          # carry the cell state over from one batch to the next
))

# Add the output layer
# TimeDistributed applies the same Dense layer to the output at every time step
model.add(TimeDistributed(Dense(OUTPUT_SIZE)))

# Optimizer (the original left LR unused; it belongs here)
adam = Adam(LR)
model.compile(
    optimizer=adam,
    loss='mse',
)

# Train
print('Training ------------')
for step in range(501):
    # data shape = (batch_num, steps, inputs/outputs)
    X_batch, Y_batch, xs = get_batch()
    cost = model.train_on_batch(X_batch, Y_batch)
    pred = model.predict(X_batch, BATCH_SIZE)
    plt.plot(xs[0, :], Y_batch[0].flatten(), 'r', xs[0, :], pred.flatten()[:TIME_STEPS], 'b--')
    plt.ylim((-1.2, 1.2))
    plt.draw()
    plt.pause(0.1)
    if step % 10 == 0:
        print('train cost: ', cost)
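Two things are worth verifying interactively: the input/output shapes, and the effect of stateful=True. Below is a minimal sketch (assuming the model and get_batch() defined above) that prints the shapes and clears the carried-over LSTM state, which you would want before feeding a sequence unrelated to the previous batches:

X_batch, Y_batch, xs = get_batch()
print(X_batch.shape)   # (50, 20, 1): (batch, time steps, input features)
pred = model.predict(X_batch, BATCH_SIZE)
print(pred.shape)      # (50, 20, 1): one output per time step, because
                       # return_sequences=True feeds TimeDistributed(Dense)
# A stateful layer keeps its cell state across calls; reset it before
# moving on to an unrelated sequence.
model.reset_states()

stateful=True fits this example because get_batch() returns consecutive segments of one long sine wave, so each new batch really is the continuation of the previous one.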
Data example — to look at the data, enable the plotting lines in get_batch() and call it once. Note the divisor is changed to 1*np.pi here instead of 10*np.pi, so each 20-step window spans roughly a full period and the sine input (blue dashed) and cosine target (red) are easy to see:
def get_batch():
    global BATCH_START, TIME_STEPS
    # xs shape (50batch, 20steps)
    xs = np.arange(BATCH_START, BATCH_START + TIME_STEPS * BATCH_SIZE).reshape((BATCH_SIZE, TIME_STEPS)) / (1 * np.pi)
    seq = np.sin(xs)
    res = np.cos(xs)
    BATCH_START += TIME_STEPS
    plt.plot(xs[0, :], res[0, :], 'r', xs[0, :], seq[0, :], 'b--')
    plt.show()
    return [seq[:, :, np.newaxis], res[:, :, np.newaxis], xs]

# Inspect the data
get_batch()
exit()
Result — the training cost is printed every 10 steps; it falls from about 0.51 at the start to around 0.05 by the end, with a temporary rise mid-training:
train cost: 0.50940645
train cost: 0.4966624
train cost: 0.48060146
train cost: 0.45672885
train cost: 0.4108651
train cost: 0.31347314
train cost: 0.12554297
train cost: 0.07388962
train cost: 0.10137392
train cost: 0.046597198
train cost: 0.05946522
train cost: 0.040294208
train cost: 0.053411756
train cost: 0.15622795
train cost: 0.17914045
train cost: 0.16356382
train cost: 0.21077277
train cost: 0.20014948
train cost: 0.18070495
train cost: 0.16142645
train cost: 0.19912449
train cost: 0.16934186
train cost: 0.16477375
train cost: 0.17521137
train cost: 0.20553884
train cost: 0.15104571
train cost: 0.16296455
train cost: 0.16819069
train cost: 0.11465822
train cost: 0.14150377
train cost: 0.13508156
train cost: 0.13755415
train cost: 0.13000277
train cost: 0.11969448
train cost: 0.09293661
train cost: 0.0819223
train cost: 0.06903682
train cost: 0.07125411
train cost: 0.08032415
train cost: 0.07321488
train cost: 0.096763514
train cost: 0.078285255
train cost: 0.07236056
train cost: 0.065320924
train cost: 0.057717755
train cost: 0.063192114
train cost: 0.047402352
train cost: 0.05537389
train cost: 0.051893406
train cost: 0.052938405
train cost: 0.05649735
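As a final check (a small sketch, not part of the original post), draw one more batch after training and compare the prediction against the cosine target; by this point the two curves should nearly overlap:

X_batch, Y_batch, xs = get_batch()
pred = model.predict(X_batch, BATCH_SIZE)
plt.plot(xs[0, :], Y_batch[0].flatten(), 'r',   # target: cos
         xs[0, :], pred[0].flatten(), 'b--')    # prediction
plt.ylim((-1.2, 1.2))
plt.show()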