aboutsummaryrefslogtreecommitdiff
path: root/R_LogR/mlclass-ex2/ex2.m
diff options
context:
space:
mode:
authorleshe4ka46 <alex9102naid1@ya.ru>2025-12-13 19:41:40 +0300
committerleshe4ka46 <alex9102naid1@ya.ru>2025-12-13 19:41:40 +0300
commit175ac10904d0f31c3ffeeeed507c8914f13d0b15 (patch)
tree671c68a03354c5084470c5cfcfd4fe87aae2aff8 /R_LogR/mlclass-ex2/ex2.m
parent72b4edeadeafc9c54b3db9b0961a45da3d07b77c (diff)
linr, logr
Diffstat (limited to 'R_LogR/mlclass-ex2/ex2.m')
-rw-r--r--R_LogR/mlclass-ex2/ex2.m135
1 files changed, 135 insertions, 0 deletions
diff --git a/R_LogR/mlclass-ex2/ex2.m b/R_LogR/mlclass-ex2/ex2.m
new file mode 100644
index 0000000..c0a7774
--- /dev/null
+++ b/R_LogR/mlclass-ex2/ex2.m
@@ -0,0 +1,135 @@
+%% Machine Learning Online Class - Exercise 2: Logistic Regression
+%
+% Instructions
+% ------------
+%
+% This file contains code that helps you get started on the logistic
+% regression exercise. You will need to complete the following functions
+% in this exercise:
+%
+% sigmoid.m
+% costFunction.m
+% predict.m
+% costFunctionReg.m
+%
+% For this exercise, you will not need to change any code in this file,
+% or any other files other than those mentioned above.
+%
+
+%% Initialization
+clear ; close all; clc
+
+%% Load Data
+% The first two columns contain the exam scores and the third column
+% contains the label.
+
+data = load('ex2data1.txt');
+X = data(:, [1, 2]); y = data(:, 3);
+
+%% ==================== Part 1: Plotting ====================
+% We start the exercise by first plotting the data to understand the
+% problem we are working with.
+
+fprintf(['Plotting data with + indicating (y = 1) examples and o ' ...
+ 'indicating (y = 0) examples.\n']);
+
+plotData(X, y);
+
+% Put some labels
+hold on;
+% Labels and Legend
+xlabel('Exam 1 score')
+ylabel('Exam 2 score')
+
+% Specified in plot order
+% legend('Admitted', 'Not admitted')  % NOTE(review): legend disabled here but enabled in Part 3 -- confirm intent
+hold off;
+
+fprintf('\nProgram paused. Press enter to continue.\n');
+pause;
+
+
+%% ============ Part 2: Compute Cost and Gradient ============
+% In this part of the exercise, you will implement the cost and gradient
+% for logistic regression. You need to complete the code in
+% costFunction.m
+
+% Setup the data matrix appropriately, and add ones for the intercept term
+[m, n] = size(X);
+
+% Add intercept term to x and X_test
+X = [ones(m, 1) X];
+
+% Initialize fitting parameters
+initial_theta = zeros(n + 1, 1);
+
+% Compute and display initial cost and gradient
+[cost, grad] = costFunction(initial_theta, X, y);
+
+fprintf('Cost at initial theta (zeros): %f\n', cost);
+fprintf('Gradient at initial theta (zeros): \n');
+fprintf(' %f \n', grad);
+
+fprintf('\nProgram paused. Press enter to continue.\n');
+%pause;  % NOTE(review): pause is disabled but the message above still says "Press enter to continue" -- confirm
+
+%% ============= Part 3: Optimizing using fminunc =============
+% In this exercise, you will use a built-in function (fminunc) to find the
+% optimal parameters theta.
+
+% Set options for fminunc
+options = optimset('GradObj', 'on', 'MaxIter', 400);
+
+% Run fminunc to obtain the optimal theta
+% This function will return theta and the cost
+[theta, cost] = ...
+ fminunc(@(t)(costFunction(t, X, y)), initial_theta, options);
+
+% Print theta to screen
+fprintf('Cost at theta found by fminunc: %f\n', cost);
+fprintf('theta: \n');
+fprintf(' %f \n', theta);
+
+% Plot Boundary
+plotDecisionBoundary(theta, X, y);
+
+% Put some labels
+hold on;
+% Labels and Legend
+xlabel('Exam 1 score')
+ylabel('Exam 2 score')
+
+% Specified in plot order
+legend('Admitted', 'Not admitted')
+hold off;
+
+fprintf('\nProgram paused. Press enter to continue.\n');
+%pause;  % NOTE(review): pause is disabled but the message above still says "Press enter to continue" -- confirm
+
+%% ============== Part 4: Predict and Accuracies ==============
+% After learning the parameters, you'd like to use it to predict the outcomes
+% on unseen data. In this part, you will use the logistic regression model
+% to predict the probability that a student with score 45 on exam 1 and
+% score 85 on exam 2 will be admitted.
+%
+% Furthermore, you will compute the training and test set accuracies of
+% our model.
+%
+% Your task is to complete the code in predict.m
+
+% Predict probability for a student with score 45 on exam 1
+% and score 85 on exam 2
+
+prob = sigmoid([1 45 85] * theta);
+fprintf(['For a student with scores 45 and 85, we predict an admission ' ...
+ 'probability of %f\n\n'], prob);
+
+% Compute accuracy on our training set
+p = predict(theta, X);
+
+fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);
+
+fprintf('\nProgram paused. Press enter to continue.\n');
+%pause;  % NOTE(review): pause is disabled but the message above still says "Press enter to continue" -- confirm