From 2faf06ae30d0d40df3d76330e2bc73a3dd7f6d04 Mon Sep 17 00:00:00 2001 From: vasnake Date: Wed, 9 Apr 2014 17:35:58 +0400 Subject: [PATCH] added: Ex4 tests --- ex4/test_ex4.m | 20 +++++++++++++ ex4/test_nnCostFunction_costNoReg.m | 33 ++++++++++++++++++++++ ex4/test_nnCostFunction_costReg.m | 9 ++++++ ex4/test_nnCostFunction_gradCheck.m | 41 +++++++++++++++++++++++++++ ex4/test_nnCostFunction_gradNoReg.m | 41 +++++++++++++++++++++++++++ ex4/test_nnCostFunction_gradReg.m | 41 +++++++++++++++++++++++++++ ex4/test_sanity.m | 4 +++ ex4/test_sigmoidGradient.m | 44 +++++++++++++++++++++++++++++ 8 files changed, 233 insertions(+) create mode 100644 ex4/test_ex4.m create mode 100644 ex4/test_nnCostFunction_costNoReg.m create mode 100644 ex4/test_nnCostFunction_costReg.m create mode 100644 ex4/test_nnCostFunction_gradCheck.m create mode 100644 ex4/test_nnCostFunction_gradNoReg.m create mode 100644 ex4/test_nnCostFunction_gradReg.m create mode 100644 ex4/test_sanity.m create mode 100644 ex4/test_sigmoidGradient.m diff --git a/ex4/test_ex4.m b/ex4/test_ex4.m new file mode 100644 index 0000000..4006718 --- /dev/null +++ b/ex4/test_ex4.m @@ -0,0 +1,20 @@ +%!test test_sanity() + +% test nnCostFunction, cost w/o regularization +%!test test_nnCostFunction_costNoReg() + +% test nnCostFunction, cost with regularization +%!test test_nnCostFunction_costReg() + +% test gradient for sigmoid +%!test test_sigmoidGradient() + +% test nnCostFunction, gradient (backprop) w/o regularization +%!test test_nnCostFunction_gradNoReg() + +%!test test_nnCostFunction_gradCheck(0) + +% test nnCostFunction, gradient (backprop) with regularization +%!test test_nnCostFunction_gradReg() + +%!test test_nnCostFunction_gradCheck(3) diff --git a/ex4/test_nnCostFunction_costNoReg.m b/ex4/test_nnCostFunction_costNoReg.m new file mode 100644 index 0000000..f5c052b --- /dev/null +++ b/ex4/test_nnCostFunction_costNoReg.m @@ -0,0 +1,33 @@ +function test_nnCostFunction_costNoReg() + epsilon = 1e-4; + + % input 
descriptions + input_layer_size = 400; % 20x20 Input Images of Digits + hidden_layer_size = 25; % 25 hidden units + num_labels = 10; % 10 labels, from 1 to 10 + m = 5000; % number of examples in training set + + Theta1 = rand(hidden_layer_size, input_layer_size+1); + Theta2 = rand(num_labels, hidden_layer_size+1); + nn_params = [Theta1(:) ; Theta2(:)]; % Unroll parameters + X = rand(m, input_layer_size); % training set features + y = mod(1:m, num_labels)'; % training set results, labels {1 ... 10} + y(y == 0) = 10; + + % Weight regularization parameter (we set this to 0 here). + lambda = 0; + + [J grad] = nnCostFunction(nn_params, input_layer_size, hidden_layer_size, num_labels, X, y, lambda); + % J is a real number + % grad is an "unrolled" vector of the partial derivatives of the neural network + % grad size is (hidden_layer_size*(input_layer_size+1) + num_labels*(hidden_layer_size+1), 1) + %~ assert(J, 1); + %~ assert(grad, rand(hidden_layer_size*(input_layer_size+1) + num_labels*(hidden_layer_size+1), 1)); + + % tests + % https://class.coursera.org/ml-005/forum/thread?thread_id=1783 + % Assignment #1, NN cost function w/o regularization + [J] = nnCostFunction(sec(1:1:32)', 2, 4, 4, reshape(tan(1:32), 16, 2) / 5, 1 + mod(1:16,4)', 0); + assert(J, 10.931, epsilon); + +endfunction diff --git a/ex4/test_nnCostFunction_costReg.m b/ex4/test_nnCostFunction_costReg.m new file mode 100644 index 0000000..a88f683 --- /dev/null +++ b/ex4/test_nnCostFunction_costReg.m @@ -0,0 +1,9 @@ +function test_nnCostFunction_costReg() + epsilon = 1e-4; + + % https://class.coursera.org/ml-005/forum/thread?thread_id=1783 + % Assignment #2, NN cost function with regularization + [J] = nnCostFunction(sec(1:1:32)', 2, 4, 4, reshape(tan(1:32), 16, 2) / 5, 1 + mod(1:16,4)', 0.1); + assert(J, 170.9933, epsilon); + +endfunction diff --git a/ex4/test_nnCostFunction_gradCheck.m b/ex4/test_nnCostFunction_gradCheck.m new file mode 100644 index 0000000..dca2eab --- /dev/null +++ 
b/ex4/test_nnCostFunction_gradCheck.m @@ -0,0 +1,41 @@ +function test_nnCostFunction_gradCheck(lambda) +% this test borrows code from ML class Ex4 checkNNGradients.m +% and uses Ex4 modules +% computeNumericalGradient.m +% debugInitializeWeights.m + + input_layer_size = 3; + hidden_layer_size = 5; + num_labels = 3; + m = 5; + + % We generate some 'random' test data + Theta1 = debugInitializeWeights(hidden_layer_size, input_layer_size); % 5x4 + Theta2 = debugInitializeWeights(num_labels, hidden_layer_size); % 3x6 + + % Reusing debugInitializeWeights to generate X + X = debugInitializeWeights(m, input_layer_size - 1); + y = 1 + mod(1:m, num_labels)'; + + % Unroll parameters + nn_params = [Theta1(:) ; Theta2(:)]; + + % Short hand for cost function + costFunc = @(p) nnCostFunction(p, input_layer_size, hidden_layer_size, num_labels, X, y, lambda); + + [cost, grad] = costFunc(nn_params); + numgrad = computeNumericalGradient(costFunc, nn_params); + if lambda == 0 + assert(cost, 2.1010, 1e-4); + elseif lambda == 3 + assert(cost, 2.1464, 1e-4); + end + assert(size(grad), size(numgrad)); + + % Evaluate the norm of the difference between two solutions. 
+ % If you have a correct implementation, and assuming you used EPSILON = 0.0001 + % in computeNumericalGradient.m, then diff below should be less than 1e-9 + diff = norm(numgrad-grad)/norm(numgrad+grad); + assert(diff, 2.3e-11, 1e-9); + +endfunction diff --git a/ex4/test_nnCostFunction_gradNoReg.m b/ex4/test_nnCostFunction_gradNoReg.m new file mode 100644 index 0000000..41d4ed7 --- /dev/null +++ b/ex4/test_nnCostFunction_gradNoReg.m @@ -0,0 +1,41 @@ +function test_nnCostFunction_gradNoReg() + epsilon = 1e-4; + + % https://class.coursera.org/ml-005/forum/thread?thread_id=1783 + % Assignment #4, NN cost function, backprop, gradient w/o regularization + [J grad] = nnCostFunction(sec(1:1:32)', 2, 4, 4, reshape(tan(1:32), 16, 2) / 5, 1 + mod(1:16,4)', 0); + a = [ 3.0518e-001 + 7.1044e-002 + 5.1307e-002 + 6.2115e-001 + -7.4310e-002 + 5.2173e-002 + -2.9711e-003 + -5.5435e-002 + -9.5647e-003 + -4.6995e-002 + 1.0499e-004 + 9.0452e-003 + -7.4506e-002 + 7.4997e-001 + -1.7991e-002 + 4.4328e-001 + -5.9840e-002 + 5.3455e-001 + -7.8995e-002 + 3.5278e-001 + -5.3284e-003 + 8.4440e-002 + -3.4384e-002 + 6.6441e-002 + -3.4314e-002 + 3.3322e-001 + -7.0455e-002 + 1.5063e-001 + -1.7708e-002 + 2.7170e-001 + 7.1129e-002 + 1.4488e-001]; + assert(grad, a, epsilon); + +endfunction diff --git a/ex4/test_nnCostFunction_gradReg.m b/ex4/test_nnCostFunction_gradReg.m new file mode 100644 index 0000000..41cc505 --- /dev/null +++ b/ex4/test_nnCostFunction_gradReg.m @@ -0,0 +1,41 @@ +function test_nnCostFunction_gradReg() + epsilon = 1e-4; + + % https://class.coursera.org/ml-005/forum/thread?thread_id=1783 + % Assignment #5, NN cost function, backprop, gradient with regularization + [J grad] = nnCostFunction(sec(1:1:32)', 2, 4, 4, reshape(tan(1:32), 16, 2) / 5, 1 + mod(1:16,4)', 0.1); + a = [ 0.3051843 + 0.0710438 + 0.0513066 + 0.6211486 + -0.0522766 + 0.0586827 + 0.0053191 + -0.0983900 + -0.0164243 + -0.0544438 + 1.4123116 + 0.0164517 + -0.0745060 + 0.7499671 + -0.0179905 + 0.4432801 + -0.0825542 + 
0.5440175 + -0.0726739 + 0.3680935 + -0.0167392 + 0.0781902 + -0.0461142 + 0.0811755 + -0.0280090 + 0.3428785 + -0.0918487 + 0.1441408 + -0.0260627 + 0.3122174 + 0.0779614 + 0.1523740]; + assert(grad, a, epsilon); + +endfunction diff --git a/ex4/test_sanity.m b/ex4/test_sanity.m new file mode 100644 index 0000000..6b10a83 --- /dev/null +++ b/ex4/test_sanity.m @@ -0,0 +1,4 @@ +function test_sanity () + % make sure tests are running correctly + assert(1,1); +endfunction diff --git a/ex4/test_sigmoidGradient.m b/ex4/test_sigmoidGradient.m new file mode 100644 index 0000000..dcb25d7 --- /dev/null +++ b/ex4/test_sigmoidGradient.m @@ -0,0 +1,44 @@ +function test_sigmoidGradient() + epsilon = 1e-4; + + x = sigmoidGradient(0); + a = 0.25000; + assert(x, a, epsilon); + + x = sigmoidGradient(1); + a = 0.19661; + assert(x, a, epsilon); + + x = sigmoidGradient(5); + a = 0.0066481; + assert(x, a, epsilon); + + x = sigmoidGradient([0 1 2; 3 4 5]); + a = [ 0.2500000 0.1966119 0.1049936 + 0.0451767 0.0176627 0.0066481 ]; + assert(x, a, epsilon); + + x = sigmoidGradient([0 1 2]); + a = [0.25000 0.19661 0.10499]; + assert(x, a, epsilon); + + x = sigmoidGradient([0; 1; 2]); + a = [ 0.25000 + 0.19661 + 0.10499]; + assert(x, a, epsilon); + + x = sigmoidGradient([-6 -7]); + a = [2.4665e-03 9.1022e-04]; + assert(x, a, epsilon); + + % https://class.coursera.org/ml-005/forum/thread?thread_id=1783 + x = sigmoidGradient(sec(1:1:5)'); + a = [ 0.117342 + 0.076065 + 0.195692 + 0.146323 + 0.027782]; + assert(x, a, epsilon); + +endfunction