Andrew Ng Machine Learning ex5

linearRegCostFunction

h = X * theta; % Linear regression does not apply the sigmoid, unlike logistic regression.

theta(1,:) = 0; % Same as costFunctionReg.m from ex2 except for this line.
                % The bias term is never regularized, so we zero it out before the
                % regularization terms are computed. Zeroing the whole first row
                % (rather than just the first element) also covers the multi-class
                % case, where theta is a matrix with one column per class.
grad = (X' * (h - y) + lambda * theta) / m;                   % regularized gradient (bias row of theta already zeroed)
J = (sum((h - y) .^ 2) + lambda * sum(theta .^ 2)) / (2 * m); % regularized cost
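
Putting the pieces together, a minimal sketch of the complete function (the signature matches the stub ex5 provides; m = length(y) and the grad(:) unrolling are assumptions based on the usual stub boilerplate):

function [J, grad] = linearRegCostFunction(X, y, theta, lambda)
  m = length(y);   % number of training examples
  h = X * theta;   % linear hypothesis, no sigmoid
  theta(1,:) = 0;  % drop the bias term from regularization
  J = (sum((h - y) .^ 2) + lambda * sum(theta .^ 2)) / (2 * m);
  grad = (X' * (h - y) + lambda * theta) / m;
  grad = grad(:);  % unroll to a column vector, as the stub expects
end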

learningCurve

%To trace J as a function of the training-set size on both the training and
%validation sets, we train m times, once for each subset of the first i examples.
for i = 1:m
  x = X(1:i, :);                                              % first i training examples
  y_ = y(1:i);
  theta = trainLinearReg(x, y_, lambda);                      % fit on the subset
  error_train(i) = linearRegCostFunction(x, y_, theta, 0);    % errors are measured
  error_val(i) = linearRegCostFunction(Xval, yval, theta, 0); % without regularization
end
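
With error_train and error_val filled in, the learning curve can be plotted along these lines (a sketch; the labels and legend are assumptions, not part of the stub):

plot(1:m, error_train, 1:m, error_val);
xlabel('Number of training examples');
ylabel('Error');
legend('Train', 'Cross Validation');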

validationCurve

%To trace J as a function of lambda on the training and validation sets, we
%train once per candidate lambda and evaluate both errors without regularization.
for i = 1:length(lambda_vec)
  theta = trainLinearReg(X, y, lambda_vec(i));                % fit with this lambda
  error_train(i) = linearRegCostFunction(X, y, theta, 0);     % errors again measured
  error_val(i) = linearRegCostFunction(Xval, yval, theta, 0); % without regularization
end
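
As a follow-up, the best lambda is the one with the smallest validation error; a sketch of how it might be read off (best_lambda is a hypothetical name, not from the stub):

[~, idx] = min(error_val);     % index of the smallest validation error
best_lambda = lambda_vec(idx); % lambda that generalizes best on the CV set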

polyFeatures

for i = 1:p
  X_poly(:, i) = X .^ i; % the i-th column holds the i-th power of x
end
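
Usage sketch: the polynomial features are then normalized and given an intercept column before training (featureNormalize is the helper ex5 ships; the exact calls here are assumptions):

X_poly = polyFeatures(X, p);                    % columns are x, x.^2, ..., x.^p
[X_poly, mu, sigma] = featureNormalize(X_poly); % zero mean, unit variance per column
X_poly = [ones(size(X_poly, 1), 1), X_poly];    % prepend the bias column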


Reposted from blog.csdn.net/ti_an_di/article/details/81121341