作业网址：http://openclassroom.stanford.edu/MainFolder/DocumentPage.php?course=MachineLearning&doc=exercises/ex5/ex5.html
所编写的代码如下：
% Exercise 5 -- Regularized Linear Regression (polynomial fit)
close all;clear;clc;
x = load('ex5Linx.dat');
y = load('ex5Liny.dat');
figure;
plot(x, y, 'o', 'MarkerFaceColor', 'r', 'MarkerSize', 8);
m = length(y);
degree = 5;                        % order of the polynomial model
% Design matrix: intercept column followed by x, x.^2, ..., x.^degree
x = [ones(m, 1), bsxfun(@power, x, 1:degree)];
lambda = 1;                        % regularization (penalty) coefficient
% Do not penalize the intercept term (first diagonal entry is 0)
reg = diag([0, ones(1, degree)]);
% Closed-form regularized normal equation: (X'X + lambda*R) * theta = X'y
theta = (x'*x + lambda*reg) \ (x'*y);
hold on
% Evaluate the fitted polynomial on a dense grid for a smooth curve
plot_x = (min(x(:,2)):0.05:max(x(:,2)) + 0.2)';
plot_y = [ones(length(plot_x), 1), bsxfun(@power, plot_x, 1:degree)] * theta;
plot(plot_x, plot_y, '--');
legend('Training data', '5th order fit')
% Exercise 5 -- Logistic Regression using Newton's method
close all;clear;clc;
x = load('ex5Logx.dat');
y = load('ex5Logy.dat');
figure;
% Indices of the positive (y == 1) and negative (y == 0) examples
pos = find(y);
neg = find(y == 0);
% Scatter the two classes with distinct markers
plot(x(pos, 1), x(pos, 2), '+')
hold on
plot(x(neg, 1), x(neg, 2), 'o', 'MarkerFaceColor', 'y', 'MarkerSize', 8)
legend('y=1', 'y=0');
% Expand the two raw inputs into the full polynomial feature set
u = x(:, 1);
v = x(:, 2);
x = map_feature(u, v);
m = length(y);
% Sigmoid as an anonymous function (the `inline` constructor is deprecated)
g = @(z) 1.0 ./ (1.0 + exp(-z));
n = size(x, 2);                  % number of features produced by map_feature
theta = zeros(n, 1);
MAX_ITR = 15;                    % Newton's method converges in few iterations
lambda = 1;                      % regularization (penalty) coefficient
% Do not penalize the intercept term (first diagonal entry is 0)
reg = diag([0, ones(1, n - 1)]);
J = zeros(MAX_ITR, 1);
for i = 1:MAX_ITR
    h = g(x * theta);            % hypothesis under the current theta
    % Regularized gradient and Hessian of the logistic cost
    grad = (1/m) .* x' * (h - y) + (lambda/m) .* reg * theta;
    H = (1/m) .* x' * diag(h) * diag(1 - h) * x + (lambda/m) .* reg;
    % Regularized cost at the pre-update theta (the regularization term
    % was previously omitted, making J inconsistent with grad and H)
    J(i) = (1/m) * sum(-y .* log(h) - (1 - y) .* log(1 - h)) ...
           + (lambda/(2*m)) * sum(theta(2:end).^2);
    % One Newton step: theta := theta - H^{-1} * grad
    theta = theta - H \ grad;
end
% Grid over which the decision boundary is evaluated
u = linspace(-1, 1.5, 200);
v = linspace(-1, 1.5, 200);
% z(j, i) holds the model score theta' * features at (u(i), v(j));
% contour expects rows to vary with v and columns with u, hence the order.
z = zeros(length(u), length(v));
for j = 1:length(v)
    for i = 1:length(u)
        z(j, i) = map_feature(u(i), v(j)) * theta;
    end
end
% The decision boundary is the zero level set of the score
contour(u, v, z, [0, 0], 'LineWidth', 2)
% Convergence of the cost over the Newton iterations
figure
plot(0:MAX_ITR-1, J, 'o--', 'MarkerFaceColor', 'r', 'MarkerSize', 8)
xlabel('Iteration'); ylabel('J')