% Finished in one morning
% For reference only
function J = computeCost(X, y, theta)
%COMPUTECOST Compute cost for linear regression.
%   J = COMPUTECOST(X, y, theta) computes the cost of using theta as the
%   parameter vector for linear regression to fit the data points in X and y.
%
%   Inputs:
%     X      - m x (n+1) design matrix; first column assumed to be all ones
%              (the original loop hard-coded theta(1) + theta(2)*X(i,2),
%              which is equivalent to X*theta only when X(:,1) == 1)
%     y      - m x 1 vector of target values
%     theta  - (n+1) x 1 parameter vector
%   Output:
%     J      - scalar cost: (1/(2m)) * sum of squared residuals
%
%   Vectorized so it works for any number of features, not just the
%   single-feature case the original element-wise while-loop supported.

m = length(y);                  % number of training examples

residuals = X * theta - y;      % m x 1 vector of prediction errors
J = (residuals' * residuals) / (2 * m);

end
function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)
%GRADIENTDESCENT Performs batch gradient descent to learn theta.
%   theta = GRADIENTDESCENT(X, y, theta, alpha, num_iters) updates theta by
%   taking num_iters gradient steps with learning rate alpha.
%
%   Inputs:
%     X         - m x (n+1) design matrix
%     y         - m x 1 vector of target values
%     theta     - (n+1) x 1 initial parameter vector
%     alpha     - scalar learning rate
%     num_iters - number of gradient-descent iterations
%   Outputs:
%     theta     - (n+1) x 1 learned parameter vector
%     J_history - num_iters x 1 vector of the cost after each iteration
%
%   Vectorized update: the original hand-coded exactly two parameters
%   (theta(1), theta(2)); the matrix form below performs the identical
%   simultaneous update and generalizes to any number of features.

m = length(y);                      % number of training examples
J_history = zeros(num_iters, 1);

for iter = 1:num_iters
    % Simultaneous update of all parameters:
    % theta_j := theta_j - (alpha/m) * sum_i (h(x_i) - y_i) * x_ij
    theta = theta - (alpha / m) * (X' * (X * theta - y));

    % Save the cost J in every iteration (for debugging/convergence plots)
    J_history(iter) = computeCost(X, y, theta);
end

end