
ML: Implementing Linear Regression in MATLAB

I have recently been working through ML (machine learning) systematically.

This post collects part of the code needed for the exercises.

Notes

Environment: MATLAB R2014b, macOS

Code

Note: linear regression is about as simple as introductory exercises get, so the derivation is not repeated here; it is covered in plenty of references. In addition, the least-squares problem has a closed-form solution, and an implementation of that is given below as well.
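For reference, the code below minimizes the usual squared-error cost; the batch gradient-descent update and the closed-form (normal-equation) solution it implements are the standard ones:

J(\theta) = \frac{1}{2m}\sum_{i=1}^{m}\bigl(\theta^\top x^{(i)} - y^{(i)}\bigr)^2,
\qquad
\theta \leftarrow \theta - \frac{\alpha}{m}\,X^\top\bigl(X\theta - y\bigr),
\qquad
\theta = \bigl(X^\top X\bigr)^{-1} X^\top y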

Univariate linear regression

clc,clear
close all
x = load('ex1_1x.dat');
y = load('ex1_1y.dat');
figure % open a new figure window
plot(x, y, 'o');
title('The data');
ylabel('Height in meters')
xlabel('Age in years')
learning_rate = 0.07;
iteration = 1500;

m = length(y);
x = [ones(m,1), x];               % add a column of ones to x (intercept term)
theta1 = 0;
theta2 = 0;
Jvals = zeros(100, 100);          % initialize the grid used to visualize the cost surface
theta0vals = linspace(-3, 3, 100);
theta1vals = linspace(-1, 1, 100);
for i = 1:length(theta0vals)
    for j = 1:length(theta1vals)
        t = [theta0vals(i), theta1vals(j)];
        Jvals(i,j) = 0.5*(x*t' - y)'*(x*t' - y)/m;   % cost J at (theta_0, theta_1)
    end
end
Jvals = Jvals';                   % transpose so the surf axes match theta0vals/theta1vals
figure;
surf(theta0vals, theta1vals, Jvals);
title('the search space')
xlabel('\theta_0'); ylabel('\theta_1');
for step = 1:iteration
    theta = [theta1, theta2];
    temp = x*theta' - y;
    sums = 0;
    sums2 = 0;
    sums3 = 0;
    for i = 1:m
        sums  = sums  + temp(i);            % gradient term for the intercept
        sums2 = sums2 + temp(i)*x(i,2);     % gradient term for the slope
        sums3 = sums3 + temp(i)*temp(i);    % squared error, for the cost curve
    end
    sums  = sums/m;
    sums2 = sums2/m;
    sums3 = sums3/m;

    theta1 = theta1 - learning_rate*sums;
    theta2 = theta2 - learning_rate*sums2;

    Jcost(step) = 0.5*sums3;                % J = (1/(2m)) * sum of squared errors
end
theta = [theta1, theta2];                   % refresh theta so the plots use the final update
figure;
plot(Jcost)
title('The relation between J and iteration');
ylabel('J')
xlabel('iteration')
legend('\alpha = 0.07')
figure
plot(x(:,2), y, 'o');
hold on
plot(x(:,2), x*theta', '-');
hold on
plot(3.5, [1,3.5]*theta', 'x', 'Color', 'r')    % predicted height at age 3.5
plot(7, [1,7]*theta', 'x', 'Color', 'r')        % predicted height at age 7
xlabel('Age in years')
ylabel('Height in meters')
legend('Training Data','Linear Regression','Prediction1&2')
title('Training Result')
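The element-wise loop above spells out the update term by term. As a sanity check, the same batch update can be written in vectorized form; this is a minimal sketch that reuses x, y, m, learning_rate, and iteration from the script above (it is not part of the original exercise code):

% Vectorized batch gradient descent -- same data, learning rate, and iteration count.
theta_v = zeros(2,1);                        % [intercept; slope]
for step = 1:iteration
    grad    = (x' * (x*theta_v - y)) / m;    % (1/m) * X' * (X*theta - y)
    theta_v = theta_v - learning_rate*grad;
end
theta_v'                                     % should match [theta1, theta2] from the loop version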

Univariate linear regression: direct (closed-form) method

clc,clear
close all
x = load('ex1_1x.dat');
y = load('ex1_1y.dat');
m = length(y);
x = [ones(m,1), x];      % add a column of ones to x (intercept term)
theta = (x'*x) \ (x'*y)  % normal equation; the backslash solve is numerically preferable to inv()
figure
plot(x(:,2), y, 'o');
hold on;
plot(x(:,2), x*theta, '-b', 'Linewidth', 2);
hold on;
theta = [0.75015, 0.063883]';    % parameters from the gradient-descent run above, for comparison
plot(x(:,2), x*theta, '-', 'Linewidth', 1);
xlabel('Age in years')
ylabel('Height in meters')
legend('Training Data','Least squares','Linear Regression')
title('Least squares Method')
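As an additional cross-check (not in the original post), MATLAB's built-in polyfit fits the same first-degree polynomial by least squares, so its coefficients should agree with the normal-equation result up to rounding:

p = polyfit(x(:,2), y, 1);                                        % polyfit returns [slope, intercept]
fprintf('polyfit: intercept = %.5f, slope = %.5f\n', p(2), p(1));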

Multivariate linear regression

clc,clear
close all
x = load('ex1_2x.dat');
y = load('ex1_2y.dat');
figure
plot3(x(:,1), x(:,2), y, 'o');
title('Data Before Normalization')
xlabel('Living Area'); ylabel('Number Of Bedrooms');
grid on;
learning_rate = 0.05;    % learning rate
x_number = 2;            % number of features
m = length(y);           % number of training examples
x = [ones(m,1), x];      % add a constant column of ones to x
iteration = 200;         % number of iterations
sigma = std(x);
mu = mean(x);
x(:,2) = (x(:,2) - mu(2)) ./ sigma(2);   % standardize living area
x(:,3) = (x(:,3) - mu(3)) ./ sigma(3);   % standardize number of bedrooms
sigma2 = std(y);
mu2 = mean(y);
y = (y - mu2) ./ sigma2;                 % standardize the target as well
figure
plot3(x(:,2), x(:,3), y, 'o');
title('Data After Normalization')
xlabel('Living Area'); ylabel('Number Of Bedrooms');
grid on;
theta = zeros(1, x_number+1);
for step = 1:iteration
    temp = x*theta' - y;
    costs = temp'*temp;
    Jcost(step) = 0.5*costs/m;
    sums = zeros(1, x_number+1);
    for j = 1:x_number+1
        for i = 1:m
            sums(j) = sums(j) + temp(i)*x(i,j);   % partial derivative term for theta_j
        end
    end
    sums = learning_rate*sums/m;
    theta = theta - sums;
end
figure
plot(Jcost)
title('The relation between J and iteration');
ylabel('J')
xlabel('iteration')
legend('\alpha = 0.05')
figure
x1 = linspace(-4, 4, 100);
x2 = linspace(-4, 4, 100);
[X1, X2] = meshgrid(x1, x2);
Z = theta(1) + theta(2)*X1 + theta(3)*X2;    % fitted plane in normalized coordinates
mesh(X1, X2, Z)
hold on;
plot3(x(:,2), x(:,3), y, 'o', 'Color', 'r')
hold on;
x = [1, 1650, 3];                            % predict for a 1650 sq ft, 3-bedroom house
x(2) = (x(2) - mu(2)) ./ sigma(2);           % apply the same normalization as the training data
x(3) = (x(3) - mu(3)) ./ sigma(3);

plot3(x(2), x(3), x*theta', 'o', 'Color', 'b')
xlabel('Living Area');
ylabel('Number Of Bedrooms');
title('Training Result')
legend('linear regression','data','prediction')
y = x*theta'              % prediction in normalized units
y = y*sigma2 + mu2        % undo the normalization to get the predicted price
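For the multivariate case the closed-form solution works just as well and needs no feature scaling. The following is a minimal sketch that reloads the raw data so it does not depend on the variables mutated above; its predicted price for a 1650 sq ft, 3-bedroom house should roughly match the de-normalized y printed at the end of the script.

% Normal equation on the raw (un-normalized) data -- no iteration required.
x_raw = load('ex1_2x.dat');
y_raw = load('ex1_2y.dat');
X = [ones(length(y_raw),1), x_raw];
theta_ne = (X'*X) \ (X'*y_raw);     % closed-form least-squares solution
price = [1, 1650, 3] * theta_ne     % prediction for a 1650 sq ft, 3-bedroom house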