data=load ('ex1data1.txt');
x=data(:,1);y=data(:,2);
m = length(y);
X = [ones(m, 1), data(:,1)];
theta = zeros(2, 1);
iterations = 1500;
alpha = 0.01;
fprintf('\nTesting the cost function ...\n')
J = computeCost(X, y, theta);
fprintf('With theta = [0 ; 0]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 32.07\n');
J = computeCost(X, y, [-1 ; 2]);
fprintf('\nWith theta = [-1 ; 2]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 54.24\n');
fprintf('Program paused. Press enter to continue.\n');
pause;
fprintf('\nRunning Gradient Descent ...\n')
theta = gradientDescent(X, y, theta, alpha, iterations);
fprintf('Theta found by gradient descent:\n');
fprintf('%f\n', theta);
fprintf('Expected theta values (approx)\n');
fprintf(' -3.6303\n 1.1664\n\n');
其中compute cost部分为:
function J=computeCost(X,y,theta)
m=length(y)
J=0
predictions=X*theta;
sqrErrors=(predictions-y).^2;
J=sum(sqrErrors)/2*m;
end
gradientdescent部分为:
function [theta, J_history, theta_history] = gradientDescent(X, y, theta, alpha, num_iters)
J_history = zeros(num_iters, 1);
theta_history=zeros(2,num_iters)
for iter = 1:num_iters
theta=theta-(alpha/m)*X'*(X*theta-y)
theta_history(:,num_iters)=theta
J_history(iter) = computeCost(X, y, theta);
end
end
最后输出的theta还是最开始定义的【0;0】,拟合结果为:
只在学校开课的时候学过半吊子vb,不知道从何改起,求高人指点一二 TAT