path: root/R_LogR/mlclass-ex2/ex2_reg.m
author    leshe4ka46 <alex9102naid1@ya.ru>    2025-12-13 19:41:40 +0300
committer leshe4ka46 <alex9102naid1@ya.ru>    2025-12-13 19:41:40 +0300
commit    175ac10904d0f31c3ffeeeed507c8914f13d0b15 (patch)
tree      671c68a03354c5084470c5cfcfd4fe87aae2aff8 /R_LogR/mlclass-ex2/ex2_reg.m
parent    72b4edeadeafc9c54b3db9b0961a45da3d07b77c (diff)
linr, logr
Diffstat (limited to 'R_LogR/mlclass-ex2/ex2_reg.m')
-rw-r--r--    R_LogR/mlclass-ex2/ex2_reg.m    117
1 file changed, 117 insertions, 0 deletions
diff --git a/R_LogR/mlclass-ex2/ex2_reg.m b/R_LogR/mlclass-ex2/ex2_reg.m
new file mode 100644
index 0000000..a7b95c2
--- /dev/null
+++ b/R_LogR/mlclass-ex2/ex2_reg.m
@@ -0,0 +1,117 @@
+%% Machine Learning Online Class - Exercise 2: Logistic Regression
+%
+% Instructions
+% ------------
+%
+% This file contains code that helps you get started on the second part
+% of the exercise which covers regularization with logistic regression.
+%
+% You will need to complete the following functions in this exercise:
+%
+% sigmoid.m
+% costFunction.m
+% predict.m
+% costFunctionReg.m
+%
+% For this exercise, you will not need to change any code in this file,
+% or any other files other than those mentioned above.
+%
+
+%% Initialization
+clear ; close all; clc
+
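+% For reference, a minimal sketch of the element-wise logistic function that
+% sigmoid.m is expected to implement (an assumption based on the exercise text;
+% the graded code belongs in sigmoid.m, not in this script):
+sigmoid_sketch = @(z) 1 ./ (1 + exp(-z));   % e.g. sigmoid_sketch(0) returns 0.5
+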
+%% Load Data
+% The first two columns contain the microchip test scores and the third column
+% contains the label.
+
+data = load('ex2data2.txt');
+X = data(:, [1, 2]); y = data(:, 3);
+
+plotData(X, y);
+
+% Put some labels
+hold on;
+
+% Labels and Legend
+xlabel('Microchip Test 1')
+ylabel('Microchip Test 2')
+
+% Specified in plot order
+legend('y = 1', 'y = 0')
+hold off;
+
+
+%% =========== Part 1: Regularized Logistic Regression ============
+% In this part, you are given a dataset with data points that are not
+% linearly separable. However, you would still like to use logistic
+% regression to classify the data points.
+%
+% To do so, you introduce more features -- in particular, you add
+% polynomial features to the data matrix (similar to polynomial
+% regression).
+%
+
+% Add Polynomial Features
+
+% Note that mapFeature also adds a column of ones for us, so the intercept
+% term is handled
+X = mapFeature(X(:,1), X(:,2));
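+
+% A minimal sketch of the degree-6 polynomial mapping described above (an
+% assumption: the provided mapFeature.m maps the two test scores to every
+% monomial x1^(i-j) * x2^j with 1 <= i <= 6, 0 <= j <= i, plus an intercept column):
+u = data(:, 1); v = data(:, 2);
+Xpoly = ones(size(u));                       % intercept column of ones
+for i = 1:6
+    for j = 0:i
+        Xpoly(:, end + 1) = (u .^ (i - j)) .* (v .^ j);
+    end
+end
+% size(Xpoly, 2) is 28 and should match size(X, 2) returned by mapFeature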
+
+% Initialize fitting parameters
+initial_theta = zeros(size(X, 2), 1);
+
+% Set regularization parameter lambda to 1
+lambda = 1;
+
+% Compute and display initial cost and gradient for regularized logistic
+% regression
+[cost, grad] = costFunctionReg(initial_theta, X, y, lambda);
+
+fprintf('Cost at initial theta (zeros): %f\n', cost);
+
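+% A minimal sketch of the regularized cost that costFunctionReg is expected to
+% return (an assumption from the exercise text: the usual logistic loss plus an
+% L2 penalty that excludes theta(1)); sigmoid_sketch from the top of this script
+% stands in for sigmoid.m so this check runs even before that file is finished:
+m = length(y);
+h = sigmoid_sketch(X * initial_theta);
+J_sketch = (1/m) * sum(-y .* log(h) - (1 - y) .* log(1 - h)) ...
+    + (lambda / (2*m)) * sum(initial_theta(2:end) .^ 2);
+% with theta all zeros, h is 0.5 everywhere and J_sketch is log(2), about 0.6931
+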
+fprintf('\nProgram paused. Press enter to continue.\n');
+% pause;
+
+%% ============= Part 2: Regularization and Accuracies =============
+% Optional Exercise:
+% In this part, you will get to try different values of lambda and
+% see how regularization affects the decision boundary. (An optional sketch that
+% automates this sweep is included, commented out, at the end of this script.)
+%
+% Try the following values of lambda (0, 1, 10, 100).
+%
+% How does the decision boundary change when you vary lambda? How does
+% the training set accuracy vary?
+%
+
+% Initialize fitting parameters
+initial_theta = zeros(size(X, 2), 1);
+
+% Set regularization parameter lambda to 0.1 (you should vary this)
+lambda = 0.1;
+
+% Set Options
+options = optimset('GradObj', 'on', 'MaxIter', 400);
+
+% Optimize
+[theta, J, exit_flag] = ...
+ fminunc(@(t)(costFunctionReg(t, X, y, lambda)), initial_theta, options);
+
+% Plot Boundary
+plotDecisionBoundary(theta, X, y);
+hold on;
+title(sprintf('lambda = %g', lambda))
+
+% Labels and Legend
+xlabel('Microchip Test 1')
+ylabel('Microchip Test 2')
+
+legend('y = 1', 'y = 0', 'Decision boundary')
+hold off;
+
+% Compute accuracy on our training set
+p = predict(theta, X);
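+
+% A minimal sketch of what predict.m is expected to compute (an assumption:
+% predictions are thresholded at 0.5), using the sigmoid_sketch handle defined
+% near the top of this script; p_sketch should agree with p once predict.m is done:
+p_sketch = sigmoid_sketch(X * theta) >= 0.5;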
+
+fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);
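+
+% Optional sketch (not part of the original assignment) answering the Part 2
+% questions: sweep the suggested lambda values and report training accuracy for
+% each. It assumes costFunctionReg.m and predict.m are completed; it is left
+% commented out so that running this script is unchanged.
+% for lam = [0, 1, 10, 100]
+%     th = fminunc(@(t)(costFunctionReg(t, X, y, lam)), initial_theta, options);
+%     fprintf('lambda = %g: train accuracy = %f\n', lam, ...
+%             mean(double(predict(th, X) == y)) * 100);
+% end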
+
+
+