linearRegCostFunction.m
J = 1/(2*m) * sum((X*theta - y).^2) + lambda/(2*m) * (sum(theta.^2) - theta(1).^2);  % bias term is not regularized
grad = 1/m * X' * (X*theta - y) + lambda/m * theta;
grad(1) = grad(1) - lambda/m * theta(1);  % remove the regularization applied to theta(1)
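Written out, this is the standard regularized linear regression cost and gradient; the bias term theta_0 is excluded from the penalty, which is why the code subtracts theta(1) from the penalty and corrects grad(1) afterwards:

J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \left( h_\theta(x^{(i)}) - y^{(i)} \right)^2 + \frac{\lambda}{2m} \sum_{j=1}^{n} \theta_j^2

\frac{\partial J}{\partial \theta_j} = \frac{1}{m} \sum_{i=1}^{m} \left( h_\theta(x^{(i)}) - y^{(i)} \right) x_j^{(i)} + \frac{\lambda}{m} \theta_j \quad (j \ge 1;\ \text{the } \lambda \text{ term is dropped for } j = 0)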
learningCurve.m
for i = 1:m
    % train on the first i examples, then measure both errors without regularization (lambda = 0)
    theta = trainLinearReg(X(1:i, :), y(1:i, :), lambda);
    error_train(i) = linearRegCostFunction(X(1:i, :), y(1:i, :), theta, 0);
    error_val(i)   = linearRegCostFunction(Xval, yval, theta, 0);
end
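Once error_train and error_val are filled in, the learning curve is plotted against the training-set size; a minimal sketch using the variables above (the labels are only illustrative):

plot(1:m, error_train, 1:m, error_val);  % training error vs. cross-validation error
xlabel('Number of training examples');
ylabel('Error');
legend('Train', 'Cross Validation');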
validationCurve.m
for i = 1:length(lambda_vec)
    lambda = lambda_vec(i);
    % train with the current lambda, but evaluate both errors without regularization
    theta = trainLinearReg(X, y, lambda);
    error_train(i) = linearRegCostFunction(X, y, theta, 0);
    error_val(i)   = linearRegCostFunction(Xval, yval, theta, 0);
end
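A minimal sketch of how the validation curve might be driven and plotted, assuming validationCurve follows the course skeleton and returns lambda_vec along with the two error vectors (these calling conventions and variable names are assumptions, not shown in the snippet above):

% lambda_vec is assumed to be defined inside validationCurve.m, as in the skeleton,
% e.g. lambda_vec = [0 0.001 0.003 0.01 0.03 0.1 0.3 1 3 10]';
[lambda_vec, error_train, error_val] = validationCurve(X, y, Xval, yval);
plot(lambda_vec, error_train, lambda_vec, error_val);  % error as a function of lambda
xlabel('lambda');
ylabel('Error');
legend('Train', 'Cross Validation');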
Original post: https://www.cnblogs.com/xingkongyihao/p/8436093.html