function g = sigmoid(z)
% SIGMOID Compute the logistic sigmoid of z, element-wise.
%   g = SIGMOID(z) returns 1 ./ (1 + exp(-z)) for scalar, vector, or
%   matrix z; the output has the same size as the input.
%
% Bug fix: original used `exp.^(-z)`, which is invalid — `exp` is a
% function, so the exponential must be written as exp(-z).
g = 1 ./ (1 + exp(-z));
end
% Cost function and gradient descent
function [J, grad] = costFunction(theta, X, y)
% COSTFUNCTION Logistic-regression cost and gradient.
%   [J, grad] = COSTFUNCTION(theta, X, y) computes the cross-entropy cost
%   J and its gradient grad for parameters theta (n x 1), design matrix
%   X (m x n), and labels y (m x 1) with entries in {0, 1}.
%
% Bug fix: the original cost used matrix products `(-y)*log(h)` on two
% m x 1 column vectors, which is a dimension mismatch; element-wise `.*`
% is required. The per-column gradient loop is replaced by the
% equivalent vectorized form X' * (h - y). A closing `end` is added:
% MATLAB forbids mixing end-terminated and non-terminated functions in
% one file, and the sibling functions here use `end`.
m = length(y);                       % number of training examples
h = sigmoid(X * theta);              % hypothesis h(x) in (0, 1), m x 1
% Cross-entropy cost, averaged over the m examples.
J = (1/m) * sum((-y) .* log(h) - (1 - y) .* log(1 - h));
% Gradient of J w.r.t. theta; same shape as theta (n x 1).
grad = (1/m) * (X' * (h - y));
end
% Learn the parameters using fminunc
% Set options for fminunc:
%   'GradObj','on'  -> the objective (costFunction) also returns its
%                      gradient as a second output, so fminunc uses it
%                      instead of numerical differentiation.
%   'MaxIter', 400  -> cap the optimizer at 400 iterations.
options = optimset('GradObj', 'on', 'MaxIter', 400);
% Run fminunc to obtain the optimal theta
% This function will return theta and the cost
% NOTE(review): assumes X, y, and initial_theta already exist in the
% workspace (defined elsewhere in the script) — confirm before running.
[theta, cost] = fminunc(@(t)(costFunction(t, X, y)), initial_theta, options);
% Evaluation
function p = predict(theta, X)
% PREDICT Binary 0/1 predictions from learned logistic-regression weights.
%   p = PREDICT(theta, X) evaluates the logistic hypothesis on every row
%   of X and labels an example 1 exactly when sigmoid(X*theta) >= 0.5,
%   i.e. when X*theta >= 0.
m = size(X, 1);                 % number of examples (rows of X)
p = zeros(m, 1);                % preallocate the prediction vector
probs = sigmoid(X * theta);     % hypothesis values, one per example
p = probs >= 0.5;               % threshold at the decision boundary
end