TensorFlow Logistic Regression Example Notes
阿新 · Published 2021-10-31
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
%matplotlib inline
data = pd.read_csv("C:\\Users\\94823\\Desktop\\tensorflow學習需要的資料集\\credit-a.csv", header=None)  # header=None: the CSV file has no header row
data
  | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
0 | 0 | 30.83 | 0.000 | 0 | 0 | 9 | 0 | 1.25 | 0 | 0 | 1 | 1 | 0 | 202 | 0.0 | -1
1 | 1 | 58.67 | 4.460 | 0 | 0 | 8 | 1 | 3.04 | 0 | 0 | 6 | 1 | 0 | 43 | 560.0 | -1
2 | 1 | 24.50 | 0.500 | 0 | 0 | 8 | 1 | 1.50 | 0 | 1 | 0 | 1 | 0 | 280 | 824.0 | -1
3 | 0 | 27.83 | 1.540 | 0 | 0 | 9 | 0 | 3.75 | 0 | 0 | 5 | 0 | 0 | 100 | 3.0 | -1
4 | 0 | 20.17 | 5.625 | 0 | 0 | 9 | 0 | 1.71 | 0 | 1 | 0 | 1 | 2 | 120 | 0.0 | -1
... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ...
648 | 0 | 21.08 | 10.085 | 1 | 1 | 11 | 1 | 1.25 | 1 | 1 | 0 | 1 | 0 | 260 | 0.0 | 1
649 | 1 | 22.67 | 0.750 | 0 | 0 | 0 | 0 | 2.00 | 1 | 0 | 2 | 0 | 0 | 200 | 394.0 | 1
650 | 1 | 25.25 | 13.500 | 1 | 1 | 13 | 7 | 2.00 | 1 | 0 | 1 | 0 | 0 | 200 | 1.0 | 1
651 | 0 | 17.92 | 0.205 | 0 | 0 | 12 | 0 | 0.04 | 1 | 1 | 0 | 1 | 0 | 280 | 750.0 | 1
652 | 0 | 35.00 | 3.375 | 0 | 0 | 0 | 1 | 8.29 | 1 | 1 | 0 | 0 | 0 | 0 | 0.0 | 1
653 rows × 16 columns
data.iloc[:,-1].value_counts()  # count how many of each value appear in the last column (the label)
1 357
-1 296
Name: 15, dtype: int64
x = data.iloc[:,:-1]  # the first 15 columns are the features
y = data.iloc[:,-1].replace(-1,0)  # replace -1 with 0 so the labels are 0/1, matching the sigmoid output
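As a quick sanity check (a minimal sketch), the remapped labels should now be 1 and 0 with the same counts as before:
y.value_counts()
1    357
0    296
Name: 15, dtype: int64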
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(4, input_shape=(15,), activation='relu'))
model.add(tf.keras.layers.Dense(4, activation='relu'))  # input_shape is inferred automatically from the previous layer
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))  # output layer
model.summary()
Model: "sequential"_________________________________________________________________Layer (type) Output Shape Param # =================================================================dense (Dense) (None, 4) 64 _________________________________________________________________dense_1 (Dense) (None, 4) 20 _________________________________________________________________dense_2 (Dense) (None, 1) 5 =================================================================Total params: 89Trainable params: 89Non-trainable params: 0_________________________________________________________________
model.compile(optimizer='adam',
              loss='binary_crossentropy',  # binary cross-entropy as the loss function
              metrics=['acc'])  # track accuracy during training
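For a single sample with label \(y \in \{0,1\}\) and predicted probability \(\hat{y}\), the binary cross-entropy loss is:

\(L = -\left[\,y\log\hat{y} + (1-y)\log(1-\hat{y})\,\right]\)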
Accuracy is built from the four confusion-matrix counts:

- TP: number of positive examples predicted correctly
- FP: number of negative examples predicted incorrectly (classified as positive)
- TN: number of negative examples predicted correctly
- FN: number of positive examples predicted incorrectly (classified as negative)
\(acc = \frac{TP+TN}{TP+TN+FP+FN}\) is the evaluation metric: the fraction of all predictions that are correct.
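A minimal sketch of this metric in plain Python (the counts below are hypothetical, not from the notebook):
def accuracy(tp, tn, fp, fn):
    # fraction of all predictions that are correct
    return (tp + tn) / (tp + tn + fp + fn)

accuracy(300, 250, 60, 43)  # hypothetical confusion counts -> ≈ 0.842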
history = model.fit(x,y,epochs=100)
...
Epoch 95/100
21/21 [==============================] - 0s 1ms/step - loss: 0.5220 - acc: 0.7458
Epoch 96/100
21/21 [==============================] - 0s 1ms/step - loss: 0.4917 - acc: 0.7565
Epoch 97/100
21/21 [==============================] - 0s 2ms/step - loss: 0.4887 - acc: 0.7580
Epoch 98/100
21/21 [==============================] - 0s 1ms/step - loss: 0.5376 - acc: 0.7534
Epoch 99/100
21/21 [==============================] - 0s 1ms/step - loss: 0.5540 - acc: 0.7427
Epoch 100/100
21/21 [==============================] - 0s 2ms/step - loss: 0.5571 - acc: 0.7519
history.history.keys()
dict_keys(['loss', 'acc'])
plt.plot(history.epoch, history.history.get('loss'))  # how the loss changes over the epochs
[<matplotlib.lines.Line2D at 0x2a2e4891250>]
plt.plot(history.epoch, history.history.get('acc'))  # how the accuracy changes over the epochs
[<matplotlib.lines.Line2D at 0x2a2e4979d60>]
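Finally, a minimal sketch of using the trained model for prediction; the 0.5 threshold and the [:5] slice are illustrative choices, not from the original notes:
pred = model.predict(x)            # sigmoid probabilities, shape (653, 1)
labels = (pred > 0.5).astype(int)  # threshold at 0.5 to get 0/1 class labels
labels[:5]                         # first five predicted labels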