Classic Machine Learning Algorithms 7 - Linear Regression
阿新 • Published: 2019-02-14
1. Brief Introduction
Classification deals with discrete predictions, while continuous values can be predicted with regression. This post gives a preliminary introduction to the main linear regression methods. As with classification, the data is split into a training set and a test set.

2. Solving the Parameters of Single-Variable Linear Regression
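As a standard sketch (the notation here may differ from the original post's): for a single input variable the model is ŷ = w_0 + w_1 x, and the parameters are chosen to minimize the squared error over the m training samples,

$$J(w_0, w_1) = \frac{1}{2} \sum_{i=1}^{m} \left( y^{(i)} - w_0 - w_1 x^{(i)} \right)^2 .$$

One way to do this is batch gradient descent, which updates each parameter with a fixed learning rate α using all m samples at once,

$$w_j \leftarrow w_j + \alpha \sum_{i=1}^{m} \left( y^{(i)} - \hat{y}^{(i)} \right) x_j^{(i)} ,$$

where x_0^{(i)} = 1 for the intercept term. This is the update used by standBaGradReg in section 7; the alternative is the closed-form matrix solution of section 4.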
3. Multivariate Linear Regression
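With n input variables the same idea applies: the model becomes ŷ = w_0 + w_1 x_1 + … + w_n x_n = w^T x (taking x_0 = 1), the criterion is still the summed squared error, and the gradient update above simply runs over every w_j.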
4. Solving the Parameters with Matrices
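In matrix form the fit has a closed-form solution. Collecting the training inputs as the rows of a matrix X and the targets in a column vector y, the squared error is minimized by the normal equation, valid only when X^T X is invertible (this is exactly the check standMaReg performs in section 7):

$$\hat{\mathbf{w}} = (X^{\top} X)^{-1} X^{\top} \mathbf{y} .$$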
5. Locally Weighted Linear Regression
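Locally weighted linear regression fits a separate model for each query point x: every training sample is weighted by a Gaussian-style kernel centred on x, with the bandwidth k controlling how local the fit is. This is the form that the lwlr function in section 7 implements:

$$W_{ii} = \exp\!\left( -\frac{\lVert \mathbf{x}^{(i)} - \mathbf{x} \rVert^{2}}{2k^{2}} \right), \qquad \hat{\mathbf{w}} = (X^{\top} W X)^{-1} X^{\top} W \mathbf{y} .$$

A small k fits only the nearest points (and can overfit), while a large k approaches ordinary linear regression.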
6. Ridge Regression
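Ridge regression adds an L2 penalty controlled by λ, which shrinks the coefficients and keeps the matrix being inverted non-singular even when features are highly correlated or outnumber the samples. The ridgeRegres function in section 7 computes this, with λ defaulting to 0.2:

$$\hat{\mathbf{w}} = (X^{\top} X + \lambda I)^{-1} X^{\top} \mathbf{y} .$$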
7. Implementation in Code
Here standMaReg implements the most basic linear regression fit by solving the normal equation in matrix form. standBaGradReg solves for the parameters with batch gradient descent: iter_num sets the number of iterations, and every update is computed over all m training samples; note that the update is not divided by m, and the learning rate alpha is a fixed value rather than a decaying one. The lwlr function implements locally weighted linear regression, again solved in matrix form. ridgeRegres implements ridge regression, with lam defaulting to 0.2.

[python]
from numpy import *
import matplotlib.pyplot as plt

def loadDataSet(filename):
    # each line: tab-separated feature values followed by the target value
    numFeat = len(open(filename).readline().split('\t')) - 1
    dataMat = []
    labelMat = []
    fr = open(filename)
    for line in fr.readlines():
        lineArr = []
        curLine = line.strip('\n').split('\t')
        for i in range(numFeat):
            lineArr.append(float(curLine[i]))
        dataMat.append(lineArr)
        labelMat.append(float(curLine[-1]))
    return dataMat, labelMat

def standMaReg(xArr, yArr):
    # ordinary least squares via the normal equation
    xMat = mat(xArr)
    yMat = mat(yArr).T
    xTx = xMat.T * xMat
    if linalg.det(xTx) == 0.0:
        print 'This matrix is singular, cannot do inverse'
        return
    ws = xTx.I * (xMat.T * yMat)
    return ws

def standBaGradReg(xArr, yArr, alpha=0.001, iter_num=15):
    xMat = mat(xArr)
    yMat = mat(yArr).T
    m, n = shape(xMat)
    weights = mat(ones((n, 1)))
    for i in range(iter_num):
        yPredict = xMat * weights
        tmp = mat(zeros((n, 1)))
        for j in range(n):
            tmp[j, :] += alpha * sum(multiply((yMat - yPredict), xMat[:, j]))
        weights = weights + tmp
    return weights

def lwlr(testPoint, xArr, yArr, k=1.0):
    xMat = mat(xArr)
    yMat = mat(yArr).T
    m = shape(xMat)[0]
    weights = mat(eye(m))
    for j in range(m):
        diffMat = testPoint - xMat[j, :]
        weights[j, j] = exp(diffMat * diffMat.T / (-2.0 * k ** 2))
    xTx = xMat.T * (weights * xMat)
    if linalg.det(xTx) == 0.0:
        print "This matrix is singular, cannot do inverse"
        return
    ws = xTx.I * (xMat.T * (weights * yMat))
    return testPoint * ws

def lwlrTest(testArr, xArr, yArr, k=1.0):
    m = shape(testArr)[0]
    yPre = zeros(m)
    for i in range(m):
        yPre[i] = lwlr(testArr[i], xArr, yArr, k)
    return yPre

def ridgeRegres(xMat, yMat, lam=0.2):
    xTx = xMat.T * xMat
    denom = xTx + eye(shape(xMat)[1]) * lam
    if linalg.det(denom) == 0.0:
        print "This matrix is singular, cannot do inverse"
        return
    ws = denom.I * (xMat.T * yMat)
    return ws

def ridgeTest(xArr, yArr, numIter=30):
    xMat = mat(xArr)
    yMat = mat(yArr).T
    yMean = mean(yMat, 0)
    yMat = yMat - yMean
    xMeans = mean(xMat, 0)
    xVar = var(xMat, 0)
    xMat = (xMat - xMeans) / xVar
    wMat = zeros((numIter, shape(xMat)[1]))
    lamList = []
    for i in range(numIter):
        lamList.append(exp(i - 10))
        ws = ridgeRegres(xMat, yMat, exp(i - 10))
        wMat[i, :] = ws.T
    return wMat, lamList

def plotReg(weights, xArr, yArr, xIndex=0):
    xMat = mat(xArr)
    yMat = mat(yArr)
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(xMat[:, xIndex].flatten().A[0], yMat.T[:, 0].flatten().A[0])
    yPredict = xMat * weights
    ax.plot(xMat[:, xIndex], yPredict)
    plt.show()

xArr, yArr = loadDataSet("ex0.txt")
'''
ws1 = standMaReg(xArr, yArr)
print "ws1", ws1
plotReg(ws1, xArr, yArr, 1)

ws2 = standBaGradReg(xArr, yArr, 0.001, 1000)
print "ws2", ws2

yPre = lwlrTest(xArr, xArr, yArr, 0.01)
xMat = mat(xArr)
srtInde = xMat[:, 1].argsort(0)
xSort = xMat[srtInde][:, 0, :]
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(xSort[:, 1], yPre[srtInde])
ax.scatter(xMat[:, 1].flatten().A[0], mat(yArr).T.flatten().A[0], s=2, c='red')
plt.show()
'''
abX, abY = loadDataSet('abalone.txt')
weights, lam = ridgeTest(abX, abY)
plt.plot(weights)
plt.show()
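A side note on the final plot: ridgeTest also returns the list of λ values it tried, but the plot above ignores it. A minimal optional sketch (reusing the weights and lam just returned; this extra plot is not part of the original listing) that puts log(λ) on the x-axis instead could look like this:

[python]
from numpy import log
import matplotlib.pyplot as plt

# hypothetical extra plot: trace of each ridge coefficient against log(lambda)
plt.plot(log(lam), weights)
plt.xlabel('log(lambda)')
plt.ylabel('ridge coefficients')
plt.show()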