author    leshe4ka46 <alex9102naid1@ya.ru>    2025-12-13 19:41:40 +0300
committer leshe4ka46 <alex9102naid1@ya.ru>    2025-12-13 19:41:40 +0300
commit  175ac10904d0f31c3ffeeeed507c8914f13d0b15 (patch)
tree    671c68a03354c5084470c5cfcfd4fe87aae2aff8 /R_LogR/mlclass-ex2/costFunctionReg.m
parent  72b4edeadeafc9c54b3db9b0961a45da3d07b77c (diff)
linr, logr
Diffstat (limited to 'R_LogR/mlclass-ex2/costFunctionReg.m')
-rw-r--r--  R_LogR/mlclass-ex2/costFunctionReg.m  |  36
1 file changed, 36 insertions(+), 0 deletions(-)
diff --git a/R_LogR/mlclass-ex2/costFunctionReg.m b/R_LogR/mlclass-ex2/costFunctionReg.m
new file mode 100644
index 0000000..cc73386
--- /dev/null
+++ b/R_LogR/mlclass-ex2/costFunctionReg.m
@@ -0,0 +1,36 @@
+function [J, grad] = costFunctionReg(theta, X, y, lambda)
+%COSTFUNCTIONREG Compute cost and gradient for logistic regression with regularization
+% J = COSTFUNCTIONREG(theta, X, y, lambda) computes the cost of using
+% theta as the parameter for regularized logistic regression and the
+% gradient of the cost w.r.t. the parameters.
+
+% Initialize some useful values
+m = length(y); % number of training examples
+
+% You need to return the following variables correctly
+% J = 0;
+% grad = zeros(size(theta));
+
+% ====================== YOUR CODE HERE ======================
+% Instructions: Compute the cost of a particular choice of theta.
+% You should set J to the cost.
+% Compute the partial derivatives and set grad to the partial
+% derivatives of the cost w.r.t. each parameter in theta
+
+
+pred = sigmoid(X*theta);   % hypothesis h(x) for all m examples at once
+
+J = (1/m) * sum(-y .* log(pred) - (1-y) .* log(1-pred)) + lambda/(2*m) * sum(theta(2:end).^2);
+
+
+reg = (lambda/m) .* theta;   % regularization term of the gradient
+reg(1) = 0;                  % the bias parameter theta(1) is not regularized
+grad = (1/m) .* (X' * (pred - y)) + reg;
+
+
+
+
+
+% =============================================================
+
+end
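
For reference, the file above implements the regularized logistic regression cost
J = (1/m) * sum(-y.*log(h) - (1-y).*log(1-h)) + (lambda/(2*m)) * sum(theta(2:end).^2),
where h = sigmoid(X*theta) and, by convention, the bias parameter theta(1) is excluded from the penalty. Below is a minimal usage sketch, not part of this commit: it assumes the exercise's sigmoid.m is on the Octave path and uses a made-up toy dataset.

% Toy dataset: 3 examples, 2 features, plus an intercept column of ones.
X = [ones(3,1), [1 2; 2 3; 3 4]];
y = [0; 1; 1];
initial_theta = zeros(3, 1);
lambda = 1;

[J, grad] = costFunctionReg(initial_theta, X, y, lambda);
fprintf('Cost at theta = 0: %f\n', J);   % -log(0.5) ~= 0.693; the penalty term is zero here

% The course exercise drives this through fminunc, roughly like so:
options = optimset('GradObj', 'on', 'MaxIter', 400);
[theta, J] = fminunc(@(t) costFunctionReg(t, X, y, lambda), initial_theta, options);

Keeping reg(1) = 0 in the gradient (and summing over theta(2:end) in the cost) matters: regularizing the intercept would pull the decision boundary toward the origin without reducing variance.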