path: root/NN/mlclass-ex4/mlclass-ex4/randInitializeWeights.m
Diffstat (limited to 'NN/mlclass-ex4/mlclass-ex4/randInitializeWeights.m')
-rw-r--r--  NN/mlclass-ex4/mlclass-ex4/randInitializeWeights.m | 31
1 file changed, 31 insertions(+), 0 deletions(-)
diff --git a/NN/mlclass-ex4/mlclass-ex4/randInitializeWeights.m b/NN/mlclass-ex4/mlclass-ex4/randInitializeWeights.m
new file mode 100644
index 0000000..7d4c36e
--- /dev/null
+++ b/NN/mlclass-ex4/mlclass-ex4/randInitializeWeights.m
@@ -0,0 +1,31 @@
+function W = randInitializeWeights(L_in, L_out)
+%RANDINITIALIZEWEIGHTS Randomly initialize the weights of a layer with L_in
+%incoming connections and L_out outgoing connections
+% W = RANDINITIALIZEWEIGHTS(L_in, L_out) randomly initializes the weights
+% of a layer with L_in incoming connections and L_out outgoing
+% connections.
+%
+% Note that W should be set to a matrix of size(L_out, 1 + L_in) as
+% the first column of W handles the "bias" terms
+%
+
+% You need to return the following variables correctly
+W = zeros(L_out, 1 + L_in);
+
+% ====================== YOUR CODE HERE ======================
+% Instructions: Initialize W randomly so that we break the symmetry while
+% training the neural network.
+%
+% Note: The first column of W corresponds to the parameters for the bias units
+%
+
+epsilon_init = 0.12;   % named epsilon_init to avoid shadowing MATLAB's built-in eps
+
+W = rand(L_out, 1 + L_in) * 2 * epsilon_init - epsilon_init;   % values uniform in [-epsilon_init, epsilon_init)
+
+
+
+
+% =========================================================================
+
+end
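For context, a minimal usage sketch showing how this helper is typically called when setting up the network. The layer sizes below are the ones used elsewhere in ex4 (400 input units for the 20x20 pixel images, 25 hidden units, 10 output labels); the variable names are illustrative, following the exercise's convention of unrolling the weight matrices into a single parameter vector.

% Assumed ex4 network sizes (400 -> 25 -> 10).
input_layer_size  = 400;   % 20x20 pixel input images
hidden_layer_size = 25;
num_labels        = 10;

initial_Theta1 = randInitializeWeights(input_layer_size, hidden_layer_size);
initial_Theta2 = randInitializeWeights(hidden_layer_size, num_labels);

% Unroll both matrices into one parameter vector, the form the
% exercise's cost function and fmincg expect.
initial_nn_params = [initial_Theta1(:) ; initial_Theta2(:)];

On the choice of the constant: the ex4 handout suggests picking epsilon_init based on the layer sizes, epsilon_init = sqrt(6) / sqrt(L_in + L_out), which keeps the initial pre-activations small. For the 400-to-25 layer here that formula gives roughly 0.12, which is where the hard-coded value comes from.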