tf.nn.bidirectional_dynamic_rnn和MultiRNNCell構建雙向多層RNN(LSTM)
作者:阿新 · 發佈:2018-12-10
# Demo: build a bidirectional, multi-layer RNN (LSTM) with
# tf.nn.bidirectional_dynamic_rnn + MultiRNNCell (TensorFlow 1.x / contrib API),
# then print the static shapes of the outputs and final states.
import tensorflow as tf
import numpy as np

# Input batch: (batch=10, max_time=5, input_dim=5).
X = np.random.randn(10, 5, 5)
# Zero out time steps 2.. of the second sample, simulating a shorter sequence
# (effective length 2).  NOTE(review): the original comment claimed this sets
# the sample's "dimension" to 3, which is not what the indexing does.
X[1, 2:] = 0

# Three stacked LSTM layers per direction.  Forward cells use 3 hidden units
# and backward cells use 4, so the two directions are easy to tell apart in
# the shapes printed below.
fw_cells = []
bw_cells = []
for _ in range(3):
    fw_cells.append(tf.contrib.rnn.BasicLSTMCell(3))
    bw_cells.append(tf.contrib.rnn.BasicLSTMCell(4))

mcell = tf.contrib.rnn.MultiRNNCell(fw_cells)
mcell_bw = tf.contrib.rnn.MultiRNNCell(bw_cells)

# Alternative API (per-layer bidirectional stacking), kept for reference:
# bioutputs, output_state_fw, output_state_bw = \
#     tf.contrib.rnn.stack_bidirectional_dynamic_rnn([mcell], [mcell_bw], X,
#                                                    dtype=tf.float64)

# bidirectional_dynamic_rnn returns (outputs, output_states), each a
# (forward, backward) pair.  The second element holds BOTH directions' final
# states, so it is named output_states here (the original misleadingly called
# it output_state_fw while still indexing it for both directions).
bioutputs, output_states = tf.nn.bidirectional_dynamic_rnn(
    mcell, mcell_bw, X, dtype=tf.float64)

print(bioutputs[0].shape)           # (10, 5, 3): forward RNN outputs
print(bioutputs[1].shape)           # (10, 5, 4): backward RNN outputs
print(len(output_states))           # 2: (forward states, backward states)
print(len(output_states[0]))        # 3: forward stack has three LSTM layers
print(len(output_states[1]))        # 3: backward stack has three LSTM layers
print(output_states[0][0].h.shape)  # (10, 3): h state of forward layer 1
print(output_states[0][1].h.shape)  # (10, 3): h state of forward layer 2
print(output_states[0][0].c.shape)  # (10, 3): c state of forward layer 1
print(output_states[1][0].h.shape)  # (10, 4): h state of backward layer 1