diff --git a/R_LogR/mlclass-ex2/costFunction.m b/R_LogR/mlclass-ex2/costFunction.m
new file mode 100644
index 0000000..6e6118c
--- /dev/null
+++ b/R_LogR/mlclass-ex2/costFunction.m
@@ -0,0 +1,34 @@
+function [J, grad] = costFunction(theta, X, y)
+%COSTFUNCTION Compute cost and gradient for logistic regression
+% J = COSTFUNCTION(theta, X, y) computes the cost of using theta as the
+% parameter for logistic regression and the gradient of the cost
+%   w.r.t. the parameters.
+
+% Initialize some useful values
+m = length(y); % number of training examples
+
+% You need to return the following variables correctly
+% J = 0;
+% grad = zeros(size(theta));
+
+% ====================== YOUR CODE HERE ======================
+% Instructions: Compute the cost of a particular choice of theta.
+% You should set J to the cost.
+% Compute the partial derivatives and set grad to the partial
+% derivatives of the cost w.r.t. each parameter in theta
+%
+% Note: grad should have the same dimensions as theta
+%
+
+pred = sigmoid(X * theta);   % hypothesis h(x) = g(X*theta) for all examples
+
+% Cross-entropy cost for logistic regression (not the squared-error cost,
+% which is only appropriate for linear regression)
+J = (1 / m) * sum(-y .* log(pred) - (1 - y) .* log(1 - pred));
+
+% Gradient of the cost w.r.t. each parameter; same dimensions as theta
+grad = (1 / m) * (X' * (pred - y));
+
+% =============================================================
+
+end
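The call to sigmoid above is resolved by the exercise's companion sigmoid.m, which is not part of this commit. A minimal sketch of that helper, assuming the standard logistic function:

function g = sigmoid(z)
%SIGMOID Compute the logistic function element-wise on z
g = 1 ./ (1 + exp(-z));
end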
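A sketch of how this cost function might be minimized, assuming a toy design matrix X with an intercept column, labels y in {0, 1}, and Octave's fminunc; the data and variable names here are illustrative, not from the commit:

% Toy data: intercept column plus one feature per example
X = [ones(4, 1), [1; 2; 4; 5]];
y = [0; 0; 1; 1];
initial_theta = zeros(size(X, 2), 1);

% Ask fminunc to use the gradient returned by costFunction
options = optimset('GradObj', 'on', 'MaxIter', 400);
[theta, cost] = fminunc(@(t) costFunction(t, X, y), initial_theta, options);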