function [lambda_vec, error_train, error_val] = validationCurve(X, y, Xval, yval)
%VALIDATIONCURVE Generate the errors needed to plot a validation curve.
%   [lambda_vec, error_train, error_val] = VALIDATIONCURVE(X, y, Xval, yval)
%   trains a regularized linear regression model for each candidate lambda
%   and returns, per lambda, the training-set and cross-validation-set
%   errors. Both errors are evaluated WITHOUT regularization (lambda = 0)
%   so the reported cost reflects fit quality only.

% Candidate regularization strengths (fixed by the exercise; do not change).
lambda_vec = [0 0.001 0.003 0.01 0.03 0.1 0.3 1 3 10]';

n_lambdas = numel(lambda_vec);
error_train = zeros(n_lambdas, 1);
error_val   = zeros(n_lambdas, 1);

for idx = 1:n_lambdas
    % Fit theta on the training set using the current lambda.
    theta = trainLinearReg(X, y, lambda_vec(idx));

    % Measure errors with lambda = 0 so the regularization term does not
    % inflate the reported cost on either set.
    error_train(idx) = linearRegCostFunction(X, y, theta, 0);
    error_val(idx)   = linearRegCostFunction(Xval, yval, theta, 0);
end

end
Plot the training and cross validation errors as functions of lambda:
% Compute the validation curve; Xvalidation/yvalidation hold the CV set.
[lambda_vec, error_train, error_val] = validationCurve(X, y, Xvalidation, yvalidation);
% Draw both error curves on the same axes against the candidate lambdas.
plot(lambda_vec, error_train, lambda_vec, error_val);
legend('Train', 'Cross Validation');
xlabel('lambda');
ylabel('Error');
Lambda = 3 gives the smallest cross validation error, so among the candidate values it is the best choice of regularization strength.
No comments:
Post a Comment