From 175ac10904d0f31c3ffeeeed507c8914f13d0b15 Mon Sep 17 00:00:00 2001
From: leshe4ka46
Date: Sat, 13 Dec 2025 19:41:40 +0300
Subject: linr, logr

---
 R_LogR/mlclass-ex2/costFunction.m | 35 +++++++++++++++++++++++++++++++++++
 1 file changed, 35 insertions(+)
 create mode 100644 R_LogR/mlclass-ex2/costFunction.m

(limited to 'R_LogR/mlclass-ex2/costFunction.m')

diff --git a/R_LogR/mlclass-ex2/costFunction.m b/R_LogR/mlclass-ex2/costFunction.m
new file mode 100644
index 0000000..6e6118c
--- /dev/null
+++ b/R_LogR/mlclass-ex2/costFunction.m
@@ -0,0 +1,35 @@
+function [J, grad] = costFunction(theta, X, y)
+%COSTFUNCTION Compute cost and gradient for logistic regression
+%   J = COSTFUNCTION(theta, X, y) computes the cost of using theta as the
+%   parameter for logistic regression and the gradient of the cost
+%   w.r.t. the parameters.
+
+% Initialize some useful values
+m = length(y); % number of training examples
+
+% You need to return the following variables correctly
+% J = 0;
+% grad = zeros(size(theta));
+
+% ====================== YOUR CODE HERE ======================
+% Instructions: Compute the cost of a particular choice of theta.
+%               You should set J to the cost.
+%               Compute the partial derivatives and set grad to the partial
+%               derivatives of the cost w.r.t. each parameter in theta
+%
+% Note: grad should have the same dimensions as theta
+%
+
+pred = sigmoid(X*theta); % hypothesis h_theta(x) for every training example
+
+% Cross-entropy cost for logistic regression, averaged over the m examples:
+%   J = (1/m) * sum(-y.*log(h) - (1-y).*log(1-h))
+J = (1/m) * sum(-y .* log(pred) - (1-y) .* log(1-pred));
+
+% Gradient of the cost w.r.t. theta; a column vector with the
+% same dimensions as theta
+grad = (1/m) * (X' * (pred - y));
+
+% =============================================================
+
+end
--
cgit v1.2.3
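
Note that costFunction depends on sigmoid(z), which ships as a separate sigmoid.m in the same exercise directory and is not part of this patch. Below is a minimal sketch of that helper, plus the way the exercise's driver script typically fits theta by handing costFunction to fminunc; the variable names X, y, and initial_theta describe the assumed caller and are not part of this commit.

    % sigmoid.m (sketch, assuming the standard mlclass-ex2 helper):
    % elementwise logistic function; works on scalars, vectors, and matrices
    function g = sigmoid(z)
    g = 1 ./ (1 + exp(-z));
    end

    % Typical caller (sketch): minimize J over theta with fminunc,
    % letting the optimizer use the gradient that costFunction returns.
    options = optimset('GradObj', 'on', 'MaxIter', 400);
    initial_theta = zeros(size(X, 2), 1);   % X assumed to include a leading column of ones
    [theta, cost] = fminunc(@(t) costFunction(t, X, y), initial_theta, options);

Passing 'GradObj', 'on' tells fminunc to trust the analytic gradient returned as the second output, so no numerical differentiation is needed.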