warmUpExercise.m
A = eye(5);
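A quick way to verify the function (a minimal sketch; assumes warmUpExercise.m is on the Octave/MATLAB path):

% Hypothetical check at the Octave/MATLAB prompt:
A = warmUpExercise();   % should return the 5x5 identity matrix, i.e. eye(5)
disp(A)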
plotData.m
plot(x, y, 'rx', 'MarkerSize', 10);
ylabel('Profit in $10,000s');
xlabel('Population of City in 10,000s');
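A possible driver for this plotting routine (a sketch assuming the exercise's ex1data1.txt, a comma-separated file whose two columns are city population and food-truck profit):

% Hypothetical usage -- file name and column layout are assumptions:
data = load('ex1data1.txt');
x = data(:, 1);        % population of a city (in 10,000s)
y = data(:, 2);        % profit of a food truck (in $10,000s)
plotData(x, y);        % red-cross scatter plot with the labels above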
gradientDescent.m
theta_temp = theta;                             % keep a copy of the current parameters
for j = 1:length(theta)                         % update every theta(j)
    theta_temp(j) = theta(j) - alpha / m * (X * theta - y)' * X(:, j);
end
theta = theta_temp;                             % commit the simultaneous update
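For reference, the loop above is a direct implementation of the batch gradient descent update rule (stated here for clarity, not taken from the original notes):

\theta_j := \theta_j - \frac{\alpha}{m} \sum_{i=1}^{m} \left( \theta^{T} x^{(i)} - y^{(i)} \right) x_j^{(i)}

All components of \theta must be updated simultaneously, which is why the code writes into theta_temp first and only copies it back into theta after the loop.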
computeCost.m
predictions = X * theta;
sqrErrors = (predictions - y) .^ 2;   % squared error of each training example
J = 1 / (2 * m) * sum(sqrErrors);     % average (and halve) to get the cost
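These lines compute the standard linear-regression cost function:

J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \left( \theta^{T} x^{(i)} - y^{(i)} \right)^{2}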
gradientDescentMulti.m
S = (1 / m) * (X' * (X * theta - y));   % gradient of the cost with respect to theta
theta = theta - alpha .* S;             % gradient descent step
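This is the fully vectorized form of the same update performed element by element in gradientDescent.m:

\theta := \theta - \frac{\alpha}{m} X^{T} \left( X\theta - y \right)

Because nothing here depends on the number of features, the same two lines also work for the univariate case.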
computeCostMulti.m
J = 1 / (2 * m) * sum( (X * theta - y) .^ 2);
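The element-wise sum above is equivalent to the fully vectorized inner-product form:

J(\theta) = \frac{1}{2m} \left( X\theta - y \right)^{T} \left( X\theta - y \right)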
featureNormalize.m
mu = mean(X);                                        % column-wise means
sigma = std(X, 1, 1);                                % column-wise standard deviations (normalized by m)
for i = 1:size(X, 2)                                 % loop over every feature column
    X_norm(:, i) = (X(:, i) - mu(i)) ./ sigma(i);    % subtract the mean, divide by the std
end
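A possible way to use the normalization routine (a sketch assuming the usual signature [X_norm, mu, sigma] = featureNormalize(X) and that X holds the raw features before the intercept column is added):

% Hypothetical usage:
[X_norm, mu, sigma] = featureNormalize(X);
X_norm = [ones(size(X_norm, 1), 1), X_norm];   % append the intercept column afterwards
% New examples must be scaled with the SAME mu and sigma before predicting:
% x_new_norm = (x_new - mu) ./ sigma;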
normalEqn.m
theta = pinv((X'*X))*X'*y;
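This is the closed-form normal equation,

\theta = \left( X^{T} X \right)^{-1} X^{T} y

implemented with pinv rather than inv, so a result is still produced when X^{T}X is singular (for example, when features are linearly dependent).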