about summary refs log tree commit diff
path: root/NN/mlclass-ex3/mlclass-ex3/predict.m
diff options
context:
space:
mode:
Diffstat (limited to 'NN/mlclass-ex3/mlclass-ex3/predict.m')
-rw-r--r--    NN/mlclass-ex3/mlclass-ex3/predict.m    47
1 file changed, 47 insertions(+), 0 deletions(-)
diff --git a/NN/mlclass-ex3/mlclass-ex3/predict.m b/NN/mlclass-ex3/mlclass-ex3/predict.m
new file mode 100644
index 0000000..14c148b
--- /dev/null
+++ b/NN/mlclass-ex3/mlclass-ex3/predict.m
@@ -0,0 +1,47 @@
function p = predict(Theta1, Theta2, X)
%PREDICT Predict the label of an input given a trained neural network
%   p = PREDICT(Theta1, Theta2, X) outputs the predicted label of X given the
%   trained weights of a neural network (Theta1, Theta2).
%
%   Inputs:
%     Theta1 - weight matrix mapping the input layer to the hidden layer
%              (one row per hidden unit, first column is the bias weight)
%     Theta2 - weight matrix mapping the hidden layer to the output layer
%              (one row per output class, first column is the bias weight)
%     X      - matrix of examples, one example per row
%   Output:
%     p      - column vector of predicted labels in 1..num_labels,
%              one per row of X

% Number of examples and number of output classes.
m = size(X, 1);
num_labels = size(Theta2, 1);

% Predicted labels, one per example.
p = zeros(m, 1);

% Forward propagation, one layer at a time.

% Input layer: prepend a bias unit (column of ones) to every example.
a1 = [ones(m, 1), X];

% Hidden layer: affine map through Theta1, sigmoid activation, then
% prepend the bias unit again for the next layer.
a2 = [ones(m, 1), sigmoid(a1 * Theta1')];

% Output layer: one sigmoid activation per class, per example.
a3 = sigmoid(a2 * Theta2');

% The predicted label is the index of the most activated output unit
% in each row (max along dimension 2).
[~, p] = max(a3, [], 2);

% =========================================================================

end