迴歸問題常用的損失函式總結
阿新 • • 發佈:2020-07-15
1. 均方誤差MSE
2. 平均絕對誤差MAE
# true: array of ground-truth target values
# pred: array of predicted values
def mse(true, pred):
    """Return the SUM of squared errors (caller divides by n for the mean)."""
    return np.sum((true - pred) ** 2)


def mae(true, pred):
    """Return the SUM of absolute errors (caller divides by n for the mean)."""
    return np.sum(np.abs(true - pred))


# sklearn provides ready-made mean versions of both metrics
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_absolute_error
3. Huber損失函式
4. Log-Cosh損失函式
def huber(true, pred, delta):
    """Huber loss, summed: quadratic for residuals below *delta*, linear above.

    true, pred : scalars or arrays of targets / predictions
    delta      : threshold between the quadratic and linear regimes
    """
    residual = np.abs(true - pred)
    quadratic = 0.5 * ((true - pred) ** 2)
    linear = delta * residual - 0.5 * (delta ** 2)
    return np.sum(np.where(residual < delta, quadratic, linear))


def logcosh(true, pred):
    """Log-Cosh loss, summed: log(cosh(pred - true)) per element."""
    return np.sum(np.log(np.cosh(pred - true)))
5. 例項
import numpy as np
import math

true = [0, 1, 2, 3, 4]
pred = [0, 0, 1, 5, -11]

# MSE / RMSE via sklearn.
# BUG FIX: the original assigned the result to ``mse``, rebinding the name of
# the hand-written mse() helper to a float; the loop below then crashed with
# "'numpy.float64' object is not callable". Same for ``mae``. Renamed both.
mse_sk = mean_squared_error(true, pred)
print("RMSE: ", math.sqrt(mse_sk))
# RMSE via the hand-written mse() (it returns the squared-error SUM per pair,
# so we divide by n here before taking the square root)
loss = 0
for i, j in zip(true, pred):
    loss += mse(i, j)
mseloss = math.sqrt(loss / len(true))
print("RMSE: ", mseloss)

# MAE (original also had a missing statement break before this print)
mae_sk = mean_absolute_error(true, pred)
print("MAE: ", mae_sk)
loss = 0
for i, j in zip(true, pred):
    loss += mae(i, j)
maeloss = loss / len(true)
print("MAE: ", maeloss)

# Huber with delta = 1
loss = 0
for i, j in zip(true, pred):
    loss += huber(i, j, 1)
loss = loss / len(true)
print("Huber: ", loss)

# Log-Cosh
loss = 0
for i, j in zip(true, pred):
    loss += logcosh(i, j)
loss = loss / len(true)
print("Log-Cosh: ", loss)
6. tanh
Python中直接呼叫np.tanh() 即可計算。