1. 程式人生 > 其它 >5-9.scikit-learn中的線性迴歸問題

5-9.scikit-learn中的線性迴歸問題

import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets

# NOTE(review): load_boston was deprecated in scikit-learn 1.0 and REMOVED in
# 1.2, so the original call raises AttributeError on current versions. Fall
# back to the replacement recipe published in the deprecation notice.
try:
    boston_data = datasets.load_boston()
    X = boston_data.data
    y = boston_data.target
except AttributeError:
    import pandas as pd
    # Recipe from the scikit-learn deprecation message: the raw file stores
    # each sample on two physical lines (11 + 3 columns).
    raw = pd.read_csv("http://lib.stat.cmu.edu/datasets/boston",
                      sep=r"\s+", skiprows=22, header=None)
    X = np.hstack([raw.values[::2, :], raw.values[1::2, :2]])
    y = raw.values[1::2, 2]

# Drop samples whose target sits at the censored ceiling (prices clipped at 50).
X = X[y < 50]
y = y[y < 50]
print(X.shape)  # (490, 13)
print(y.shape)  # (490,)

(490, 13)
(490,)

from sklearn.model_selection import train_test_split

# Hold out 25% of the samples (the library default, made explicit) for
# evaluation; the fixed seed makes the split reproducible.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=666
)

線性迴歸

from sklearn.linear_model import LinearRegression

# Ordinary least squares on the training split. fit() returns the estimator
# itself, so construction and training can be chained into one statement.
lin_reg = LinearRegression().fit(X_train, y_train)

LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None, normalize=False)

# Learned weight for each feature (13 coefficients, one per column of X).
lin_reg.coef_

array([-1.15625837e-01, 3.13179564e-02, -4.35662825e-02, -9.73281610e-02,
-1.09500653e+01, 3.49898935e+00, -1.41780625e-02, -1.06249020e+00,
2.46031503e-01, -1.23291876e-02, -8.79440522e-01, 8.31653623e-03,
-3.98593455e-01])

# Learned bias (intercept) term of the fitted linear model.
lin_reg.intercept_

32.59756158869991

# R^2 score of the linear model on the held-out test set.
lin_reg.score(X_test, y_test)

0.8009390227581037

kNN演算法的思路解決迴歸問題

kNN Regressor

from sklearn.neighbors import KNeighborsRegressor

# k-nearest-neighbours regression with all default hyperparameters; chain
# construction and training since fit() returns the estimator.
knn_reg = KNeighborsRegressor().fit(X_train, y_train)

KNeighborsRegressor(algorithm='auto', leaf_size=30, metric='minkowski',
metric_params=None, n_jobs=None, n_neighbors=5, p=2,
weights='uniform')

knn_reg.score(X_test, y_test)
# The default kNN model scores worse than linear regression on this test set.

0.602674505080953

關於網格搜尋請檢視18.網格搜尋

from sklearn.model_selection import GridSearchCV

# Two candidate families: uniform-weight kNN (tune k only) and
# distance-weighted kNN (tune both k and the Minkowski exponent p).
# list(range(...)) replaces the redundant [i for i in range(...)] idiom.
param_grid = [
    {
        'weights': ['uniform'],
        'n_neighbors': list(range(1, 11)),
    },
    {
        'weights': ['distance'],
        'n_neighbors': list(range(1, 11)),
        'p': list(range(1, 6)),
    },
]

knn_reg = KNeighborsRegressor()
# Exhaustive cross-validated search over all 60 candidates, on every core.
grid_search = GridSearchCV(knn_reg, param_grid, n_jobs=-1, verbose=1)
grid_search.fit(X_train, y_train)

[Parallel(n_jobs=-1)]: Using backend LokyBackend with 12 concurrent workers.
Fitting 3 folds for each of 60 candidates, totalling 180 fits
[Parallel(n_jobs=-1)]: Done 26 tasks | elapsed: 1.3s
[Parallel(n_jobs=-1)]: Done 180 out of 180 | elapsed: 1.5s finished
GridSearchCV(cv='warn', error_score='raise-deprecating',
estimator=KNeighborsRegressor(algorithm='auto', leaf_size=30,
metric='minkowski',
metric_params=None, n_jobs=None,
n_neighbors=5, p=2,
weights='uniform'),
iid='warn', n_jobs=-1,
param_grid=[{'n_neighbors': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
'weights': ['uniform']},
{'n_neighbors': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
'p': [1, 2, 3, 4, 5], 'weights': ['distance']}],
pre_dispatch='2*n_jobs', refit=True, return_train_score=False,
scoring=None, verbose=1)

grid_search.best_params_
# The best hyperparameter combination found by the search.

{'n_neighbors': 6, 'p': 1, 'weights': 'distance'}

grid_search.best_score_
# Not directly comparable to the scores above: grid search evaluates
# candidates by cross-validation on the training folds, not on the test split.

0.6060528490355778

# Score the refit best estimator on the held-out test set, so the result is
# measured on the same yardstick as linear regression above.
grid_search.best_estimator_.score(X_test, y_test)
# Still below linear regression's accuracy on this split.

0.7353138117643773