Machine Learning: GBDT in Gradient Tree Boosting -- GradientBoostingClassifier
阿新 · Published 2018-12-23
```python
# -*- coding: utf-8 -*-
"""
Created on Mon Dec  3 22:24:34 2018

@author: muli
"""

import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets, ensemble
# sklearn.cross_validation was removed in scikit-learn 0.20; use model_selection instead
from sklearn.model_selection import train_test_split


def load_data_classification():
    '''
    Load a dataset for the classification problem.

    :return: a tuple of training samples, test samples, training labels and test labels
    '''
    digits = datasets.load_digits()  # use the digits dataset bundled with scikit-learn
    # stratified split into training and test sets; the test set is 1/4 of the original data
    return train_test_split(digits.data, digits.target,
                            test_size=0.25, random_state=0, stratify=digits.target)


def test_GradientBoostingClassifier(*data):
    '''
    Test the basic usage of GradientBoostingClassifier.

    :param data: variadic argument; a tuple of training samples, test samples,
                 training labels and test labels
    :return: None
    '''
    X_train, X_test, y_train, y_test = data
    clf = ensemble.GradientBoostingClassifier()
    clf.fit(X_train, y_train)
    print("Training Score:%f" % clf.score(X_train, y_train))
    print("Testing Score:%f" % clf.score(X_test, y_test))


def test_GradientBoostingClassifier_num(*data):
    '''
    Test how the predictive performance of GradientBoostingClassifier changes with n_estimators.

    :param data: variadic argument; a tuple of training samples, test samples,
                 training labels and test labels
    :return: None
    '''
    X_train, X_test, y_train, y_test = data
    nums = np.arange(1, 100, step=2)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    testing_scores = []
    training_scores = []
    for num in nums:
        clf = ensemble.GradientBoostingClassifier(n_estimators=num)
        clf.fit(X_train, y_train)
        training_scores.append(clf.score(X_train, y_train))
        testing_scores.append(clf.score(X_test, y_test))
    ax.plot(nums, training_scores, label="Training Score")
    ax.plot(nums, testing_scores, label="Testing Score")
    ax.set_xlabel("estimator num")
    ax.set_ylabel("score")
    ax.legend(loc="lower right")
    ax.set_ylim(0, 1.05)
    plt.suptitle("GradientBoostingClassifier")
    # grid lines on the x axis, dash-dot style
    plt.grid(axis='x', linestyle='-.')
    plt.show()


def test_GradientBoostingClassifier_maxdepth(*data):
    '''
    Test how the predictive performance of GradientBoostingClassifier changes with max_depth.

    :param data: variadic argument; a tuple of training samples, test samples,
                 training labels and test labels
    :return: None
    '''
    X_train, X_test, y_train, y_test = data
    maxdepths = np.arange(1, 20)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    testing_scores = []
    training_scores = []
    for maxdepth in maxdepths:
        clf = ensemble.GradientBoostingClassifier(max_depth=maxdepth, max_leaf_nodes=None)
        clf.fit(X_train, y_train)
        training_scores.append(clf.score(X_train, y_train))
        testing_scores.append(clf.score(X_test, y_test))
    ax.plot(maxdepths, training_scores, label="Training Score")
    ax.plot(maxdepths, testing_scores, label="Testing Score")
    ax.set_xlabel("max_depth")
    ax.set_ylabel("score")
    ax.legend(loc="lower right")
    ax.set_ylim(0, 1.05)
    plt.suptitle("GradientBoostingClassifier")
    # grid lines on the x axis, dash-dot style
    plt.grid(axis='x', linestyle='-.')
    plt.show()


def test_GradientBoostingClassifier_learning(*data):
    '''
    Test how the predictive performance of GradientBoostingClassifier changes with learning_rate.

    :param data: variadic argument; a tuple of training samples, test samples,
                 training labels and test labels
    :return: None
    '''
    X_train, X_test, y_train, y_test = data
    learnings = np.linspace(0.01, 1.0)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    testing_scores = []
    training_scores = []
    for learning in learnings:
        clf = ensemble.GradientBoostingClassifier(learning_rate=learning)
        clf.fit(X_train, y_train)
        training_scores.append(clf.score(X_train, y_train))
        testing_scores.append(clf.score(X_test, y_test))
    ax.plot(learnings, training_scores, label="Training Score")
    ax.plot(learnings, testing_scores, label="Testing Score")
    ax.set_xlabel("learning_rate")
    ax.set_ylabel("score")
    ax.legend(loc="lower right")
    ax.set_ylim(0, 1.05)
    plt.suptitle("GradientBoostingClassifier")
    # grid lines on the x axis, dash-dot style
    plt.grid(axis='x', linestyle='-.')
    plt.show()


def test_GradientBoostingClassifier_subsample(*data):
    '''
    Test how the predictive performance of GradientBoostingClassifier changes with subsample.

    :param data: variadic argument; a tuple of training samples, test samples,
                 training labels and test labels
    :return: None
    '''
    X_train, X_test, y_train, y_test = data
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    subsamples = np.linspace(0.01, 1.0)
    testing_scores = []
    training_scores = []
    for subsample in subsamples:
        clf = ensemble.GradientBoostingClassifier(subsample=subsample)
        clf.fit(X_train, y_train)
        training_scores.append(clf.score(X_train, y_train))
        testing_scores.append(clf.score(X_test, y_test))
    ax.plot(subsamples, training_scores, label="Training Score")
    ax.plot(subsamples, testing_scores, label="Testing Score")
    ax.set_xlabel("subsample")
    ax.set_ylabel("score")
    ax.legend(loc="lower right")
    ax.set_ylim(0, 1.05)
    plt.suptitle("GradientBoostingClassifier")
    # grid lines on the x axis, dash-dot style
    plt.grid(axis='x', linestyle='-.')
    plt.show()


def test_GradientBoostingClassifier_max_features(*data):
    '''
    Test how the predictive performance of GradientBoostingClassifier changes with max_features.

    :param data: variadic argument; a tuple of training samples, test samples,
                 training labels and test labels
    :return: None
    '''
    X_train, X_test, y_train, y_test = data
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    max_features = np.linspace(0.01, 1.0)
    testing_scores = []
    training_scores = []
    for features in max_features:
        clf = ensemble.GradientBoostingClassifier(max_features=features)
        clf.fit(X_train, y_train)
        training_scores.append(clf.score(X_train, y_train))
        testing_scores.append(clf.score(X_test, y_test))
    ax.plot(max_features, training_scores, label="Training Score")
    ax.plot(max_features, testing_scores, label="Testing Score")
    ax.set_xlabel("max_features")
    ax.set_ylabel("score")
    ax.legend(loc="lower right")
    ax.set_ylim(0, 1.05)
    plt.suptitle("GradientBoostingClassifier")
    plt.show()


if __name__ == '__main__':
    X_train, X_test, y_train, y_test = load_data_classification()  # load the classification data
    # test_GradientBoostingClassifier(X_train, X_test, y_train, y_test)
    # test_GradientBoostingClassifier_num(X_train, X_test, y_train, y_test)
    # test_GradientBoostingClassifier_maxdepth(X_train, X_test, y_train, y_test)
    # test_GradientBoostingClassifier_learning(X_train, X_test, y_train, y_test)
    # test_GradientBoostingClassifier_subsample(X_train, X_test, y_train, y_test)
    test_GradientBoostingClassifier_max_features(X_train, X_test, y_train, y_test)
```
The resulting figures show the training and testing score curves for each parameter sweep (n_estimators, max_depth, learning_rate, subsample, max_features).
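The script above varies each hyperparameter in isolation. As a complement, the following minimal sketch (not part of the original post) shows how the same parameters could be tuned jointly with scikit-learn's GridSearchCV; the grid values are illustrative assumptions, not results from the post.

```python
# Minimal sketch (assumed grid values): jointly tune the hyperparameters
# that the script above sweeps one at a time.
from sklearn import datasets, ensemble
from sklearn.model_selection import GridSearchCV, train_test_split

digits = datasets.load_digits()
X_train, X_test, y_train, y_test = train_test_split(
    digits.data, digits.target, test_size=0.25, random_state=0, stratify=digits.target)

# Illustrative parameter grid; the value ranges are assumptions.
param_grid = {
    "n_estimators": [50, 100],
    "learning_rate": [0.1, 0.5],
    "max_depth": [2, 3],
    "subsample": [0.5, 1.0],
}
search = GridSearchCV(
    ensemble.GradientBoostingClassifier(random_state=0),
    param_grid, cv=3, n_jobs=-1)
search.fit(X_train, y_train)

print("Best parameters:", search.best_params_)
print("Cross-validated score: %f" % search.best_score_)
print("Testing Score: %f" % search.score(X_test, y_test))
```

Unlike the one-parameter-at-a-time plots, a grid search evaluates interactions (for example, a smaller learning_rate typically needs more estimators), at the cost of longer training time.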