Implementing feature selection with sklearn: recursive feature elimination (RFE)
阿新 · Published 2018-12-22
import numpy as np
from sklearn.feature_selection import VarianceThreshold, SelectKBest
from sklearn.feature_selection import f_regression
from sklearn.feature_selection import chi2
from sklearn.feature_selection import RFE
from sklearn.feature_selection import SelectFromModel
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVR
from sklearn.ensemble import GradientBoostingClassifier
# RFE with a linear SVR as the base estimator
X = np.array([
    [0, 2, 0, 3],
    [0, 1, 4, 3],
    [0.1, 1, 1, 3],
], dtype=np.float64)
Y = np.array([0, 0, 1])  # one label per row of X (3 samples)
estimator = SVR(kernel='linear')
# keep the 2 best features, eliminating 1 feature per iteration
selector = RFE(estimator, n_features_to_select=2, step=1)
selector = selector.fit(X, Y)
print(selector.support_)      # boolean mask of the selected features
print(selector.n_features_)   # number of features selected
print(selector.ranking_)      # ranking of all features (1 = selected)
print(selector.transform(X))  # X reduced to the selected columns
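As a quick check of what RFE kept, the selected column indices can be read back with get_support; the feature names below are made up purely for illustration:

feature_names = np.array(["f0", "f1", "f2", "f3"])  # hypothetical names for the 4 columns
selected = selector.get_support(indices=True)       # indices of the surviving features
print(feature_names[selected])                      # names of the 2 features RFE kept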
# SelectFromModel with an L1-regularized logistic regression
estimator_2 = LogisticRegression(penalty="l1", C=0.1, solver="liblinear")  # l1 needs liblinear or saga
sfm = SelectFromModel(estimator_2)
sfm.fit(X, Y)
print(sfm.transform(X))  # keeps the features whose coefficients pass the threshold
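To see why a feature was dropped, the fitted model behind SelectFromModel can be inspected via its standard estimator_ and get_support attributes; a minimal sketch (note that with so few samples and C=0.1, most L1 coefficients may well be zero):

print(sfm.estimator_.coef_)  # L1-sparsified coefficients; zero weight means the feature is dropped
print(sfm.get_support())     # boolean mask of the retained features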
# SelectFromModel with a gradient boosted tree classifier (GBDT)
estimator_3 = GradientBoostingClassifier()
sfm = SelectFromModel(estimator_3)
sfm.fit(X, Y)
print(sfm.transform(X))  # keeps the features whose importances pass the threshold
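SelectFromModel also accepts an explicit threshold; as a sketch reusing the same toy X and Y, requiring importances at or above the median keeps roughly half of the features:

sfm_median = SelectFromModel(GradientBoostingClassifier(), threshold="median")
sfm_median.fit(X, Y)
print(sfm_median.get_support())  # mask of features with importance >= the median importance
print(sfm_median.transform(X))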