warmUpExercise.m
function A = warmUpExercise()
%WARMUPEXERCISE Example function in Octave
%   A = WARMUPEXERCISE() is an example function that returns the 5x5 identity matrix

A = [];

% ====================== YOUR CODE HERE ======================
% Instructions: Return the 5x5 identity matrix
%               In Octave, we return values by defining which variables
%               represent the return values (at the top of the file)
%               and then set them accordingly.

A = eye(5);

% ============================================================

end
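As a quick sanity check (not part of the graded file, just an illustration), calling the function from the Octave prompt without a semicolon prints the result:

A = warmUpExercise()
% prints the 5x5 identity matrix: ones on the diagonal, zeros elsewhere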
plotData.m
function plotData(X, y)
%PLOTDATA Plots the data points x and y into a new figure
%   PLOTDATA(x, y) plots the data points and gives the figure axes labels of
%   population and profit.

figure; % open a new figure window

% ====================== YOUR CODE HERE ======================
% Instructions: Plot the training data into a figure using the
%               "figure" and "plot" commands. Set the axes labels using
%               the "xlabel" and "ylabel" commands. Assume the
%               population and revenue data have been passed in
%               as the x and y arguments of this function.
%
% Hint: You can use the 'rx' option with plot to have the markers
%       appear as red crosses. Furthermore, you can make the
%       markers larger by using plot(..., 'rx', 'MarkerSize', 10);

plot(X, y, 'rx', 'MarkerSize', 10);      % Plot the data
ylabel('Profit in $10,000s');            % Set the y-axis label
xlabel('Population of City in 10,000s'); % Set the x-axis label

% ============================================================

end
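A minimal sketch of calling plotData on its own, using made-up population and profit values rather than the course's ex1data1.txt:

X = [6.1; 7.2; 8.5; 9.3];  % hypothetical populations (in 10,000s)
y = [3.0; 4.1; 6.0; 6.8];  % hypothetical profits (in $10,000s)
plotData(X, y);            % opens a figure with red-cross markers and axis labels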
computeCost.m
function J = computeCost(X, y, theta)
%COMPUTECOST Compute cost for linear regression
%   J = COMPUTECOST(X, y, theta) computes the cost of using theta as the
%   parameter for linear regression to fit the data points in X and y

% Initialize some useful values
m = length(y); % number of training examples

% You need to return the following variables correctly
J = 0;

% ====================== YOUR CODE HERE ======================
% Instructions: Compute the cost of a particular choice of theta
%               You should set J to the cost.

sumsi = sum((X * theta - y) .^ 2);
J = sumsi / (2 * m);

% =========================================================================

end
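This implements the squared-error cost J(theta) = (1/(2m)) * sum((X*theta - y).^2). A tiny worked check with illustrative values (not from the assignment data):

X = [1 1; 1 2; 1 3];  % three examples, first column of ones for the intercept term
y = [1; 2; 3];
J = computeCost(X, y, [0; 0])  % (1^2 + 2^2 + 3^2) / (2*3) = 14/6, about 2.3333
J = computeCost(X, y, [0; 1])  % a perfect fit, so J = 0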
gradientDescent.m
function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)
%GRADIENTDESCENT Performs gradient descent to learn theta
%   theta = GRADIENTDESCENT(X, y, theta, alpha, num_iters) updates theta by
%   taking num_iters gradient steps with learning rate alpha

% Initialize some useful values
m = length(y); % number of training examples
J_history = zeros(num_iters, 1);

for iter = 1:num_iters

    % ====================== YOUR CODE HERE ======================
    % Instructions: Perform a single gradient step on the parameter vector
    %               theta.
    %
    % Hint: While debugging, it can be useful to print out the values
    %       of the cost function (computeCost) and gradient here.
    %

    theta = theta - alpha / m * X' * (X * theta - y);

    % ============================================================

    % Save the cost J in every iteration
    J_history(iter) = computeCost(X, y, theta);

end

end
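A sketch of a typical call; alpha and num_iters here are illustrative choices, not the exercise's settings:

X = [ones(3, 1), (1:3)'];  % toy design matrix: intercept column plus one feature
y = [1; 2; 3];
[theta, J_history] = gradientDescent(X, y, zeros(2, 1), 0.1, 1000);
% J_history should be non-increasing; theta approaches [0; 1] on this data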
computeCostMulti.m
function J = computeCostMulti(X, y, theta)
%COMPUTECOSTMULTI Compute cost for linear regression with multiple variables
%   J = COMPUTECOSTMULTI(X, y, theta) computes the cost of using theta as the
%   parameter for linear regression to fit the data points in X and y

% Initialize some useful values
m = length(y); % number of training examples

% You need to return the following variables correctly
J = 0;

% ====================== YOUR CODE HERE ======================
% Instructions: Compute the cost of a particular choice of theta
%               You should set J to the cost.

sumsi = sum((X * theta - y) .^ 2);
J = sumsi / (2 * m);

% =========================================================================

end
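Because the cost is computed with vectorized matrix operations, this version is identical to computeCost and already handles any number of features. An illustrative check with made-up numbers:

X = [1 2 3; 1 4 5; 1 6 7];  % toy data: intercept plus two features
y = [10; 20; 30];
J = computeCostMulti(X, y, zeros(3, 1))  % (10^2 + 20^2 + 30^2) / (2*3), about 233.33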
gradientDescentMulti.m
function [theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iters)
%GRADIENTDESCENTMULTI Performs gradient descent to learn theta
%   theta = GRADIENTDESCENTMULTI(X, y, theta, alpha, num_iters) updates theta by
%   taking num_iters gradient steps with learning rate alpha

% Initialize some useful values
m = length(y); % number of training examples
J_history = zeros(num_iters, 1);

for iter = 1:num_iters

    % ====================== YOUR CODE HERE ======================
    % Instructions: Perform a single gradient step on the parameter vector
    %               theta.
    %
    % Hint: While debugging, it can be useful to print out the values
    %       of the cost function (computeCostMulti) and gradient here.
    %

    theta = theta - alpha / m * X' * (X * theta - y);

    % ============================================================

    % Save the cost J in every iteration
    J_history(iter) = computeCostMulti(X, y, theta);

end

end
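With several features, gradient descent is sensitive to feature scale, so a typical pipeline normalizes first (featureNormalize is listed next). A sketch with invented values:

X_raw = [2104 3; 1600 3; 2400 3; 1416 2];  % hypothetical house sizes and bedroom counts
y = [400; 330; 369; 232];                  % hypothetical prices (in $1,000s)
[X_norm, mu, sigma] = featureNormalize(X_raw);
X = [ones(size(X_norm, 1), 1), X_norm];    % add the intercept column after normalizing
[theta, J_history] = gradientDescentMulti(X, y, zeros(3, 1), 0.1, 400);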
featureNormalize.m
function [X_norm, mu, sigma] = featureNormalize(X)
%FEATURENORMALIZE Normalizes the features in X
%   FEATURENORMALIZE(X) returns a normalized version of X where
%   the mean value of each feature is 0 and the standard deviation
%   is 1. This is often a good preprocessing step to do when
%   working with learning algorithms.

% You need to set these values correctly
X_norm = X;
mu = zeros(1, size(X, 2));
sigma = zeros(1, size(X, 2));

% ====================== YOUR CODE HERE ======================
% Instructions: First, for each feature dimension, compute the mean
%               of the feature and subtract it from the dataset,
%               storing the mean value in mu. Next, compute the
%               standard deviation of each feature and divide
%               each feature by its standard deviation, storing
%               the standard deviation in sigma.
%
%               Note that X is a matrix where each column is a
%               feature and each row is an example. You need
%               to perform the normalization separately for
%               each feature.
%
% Hint: You might find the 'mean' and 'std' functions useful.
%

mu = mean(X);
sigma = std(X);
X_norm = (X - repmat(mu, size(X, 1), 1)) ./ repmat(sigma, size(X, 1), 1);

% ============================================================

end
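A quick worked check on a small matrix, with values chosen so the result is easy to verify by hand:

[X_norm, mu, sigma] = featureNormalize([1 10; 2 20; 3 30])
% mu = [2 20], sigma = [1 10], and each column of X_norm becomes [-1; 0; 1]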
normalEqn.m
function [theta] = normalEqn(X, y)
%NORMALEQN Computes the closed-form solution to linear regression
%   NORMALEQN(X, y) computes the closed-form solution to linear
%   regression using the normal equations.

theta = zeros(size(X, 2), 1);

% ====================== YOUR CODE HERE ======================
% Instructions: Complete the code to compute the closed form solution
%               to linear regression and put the result in theta.
%

% ---------------------- Sample Solution ----------------------

theta = pinv(X' * X) * X' * y;

% -------------------------------------------------------------

% ============================================================

end
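The normal equation computes theta = pinv(X' * X) * X' * y in one step, with pinv guarding against a singular X' * X, so no feature scaling or iteration is needed. A quick check on data with an exact linear fit (illustrative values):

X = [1 1; 1 2; 1 3];  % intercept column plus one feature
y = [1; 2; 3];
theta = normalEqn(X, y)  % recovers [0; 1] exactly, i.e. y = 0 + 1*x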