author     jeanne <jeanne@localhost.localdomain>    2022-05-11 09:54:38 -0700
committer  jeanne <jeanne@localhost.localdomain>    2022-05-11 09:54:38 -0700
commit     411f66a2540fa17c736116d865e0ceb0cfe5623b (patch)
tree       fa92c69ec627642c8452f928798ff6eccd24ddd6 /src/lib/test
parent     7705b07456dfd4b89c272613e98eda36cc787254 (diff)

Initial commit.

Diffstat (limited to 'src/lib/test')

 src/lib/test/matrix_test.c                              | 350
 src/lib/test/neuralnet_test.c                           |  92
 src/lib/test/test.h                                     | 185
 src/lib/test/test_main.c                                |   3
 src/lib/test/test_util.h                                |  22
 src/lib/test/train_linear_perceptron_non_origin_test.c  |  67
 src/lib/test/train_linear_perceptron_test.c             |  62
 src/lib/test/train_sigmoid_test.c                       |  66
 src/lib/test/train_xor_test.c                           |  66

 9 files changed, 913 insertions(+), 0 deletions(-)

diff --git a/src/lib/test/matrix_test.c b/src/lib/test/matrix_test.c
new file mode 100644
index 0000000..8191c97
--- /dev/null
+++ b/src/lib/test/matrix_test.c
@@ -0,0 +1,350 @@
#include <neuralnet/matrix.h>

#include "test.h"
#include "test_util.h"

#include <assert.h>
#include <stdlib.h>

// static void PrintMatrix(const nnMatrix* matrix) {
//   assert(matrix);

//   for (int i = 0; i < matrix->rows; ++i) {
//     for (int j = 0; j < matrix->cols; ++j) {
//       printf("%f ", nnMatrixAt(matrix, i, j));
//     }
//     printf("\n");
//   }
// }

TEST_CASE(nnMatrixMake_1x1) {
  nnMatrix A = nnMatrixMake(1, 1);
  TEST_EQUAL(A.rows, 1);
  TEST_EQUAL(A.cols, 1);
}

TEST_CASE(nnMatrixMake_3x1) {
  nnMatrix A = nnMatrixMake(3, 1);
  TEST_EQUAL(A.rows, 3);
  TEST_EQUAL(A.cols, 1);
}

TEST_CASE(nnMatrixInit_3x1) {
  nnMatrix A = nnMatrixMake(3, 1);
  nnMatrixInit(&A, (R[]) { 1, 2, 3 });
  TEST_EQUAL(A.values[0], 1);
  TEST_EQUAL(A.values[1], 2);
  TEST_EQUAL(A.values[2], 3);
}

TEST_CASE(nnMatrixCopyCol_test) {
  nnMatrix A = nnMatrixMake(3, 2);
  nnMatrix B = nnMatrixMake(3, 1);

  nnMatrixInit(&A, (R[]) {
    1, 2,
    3, 4,
    5, 6,
  });

  nnMatrixCopyCol(&A, &B, 1, 0);

  TEST_EQUAL(nnMatrixAt(&B, 0, 0), 2);
  TEST_EQUAL(nnMatrixAt(&B, 1, 0), 4);
  TEST_EQUAL(nnMatrixAt(&B, 2, 0), 6);

  nnMatrixDel(&A);
  nnMatrixDel(&B);
}

TEST_CASE(nnMatrixMul_square_3x3) {
  nnMatrix A = nnMatrixMake(3, 3);
  nnMatrix B = nnMatrixMake(3, 3);
  nnMatrix O = nnMatrixMake(3, 3);

  nnMatrixInit(&A, (const R[]){
    1, 2, 3,
    4, 5, 6,
    7, 8, 9,
  });
  nnMatrixInit(&B, (const R[]){
    2, 4, 3,
    6, 8, 5,
    1, 7, 9,
  });
  nnMatrixMul(&A, &B, &O);

  const R expected[3][3] = {
    { 17, 41, 40 },
    { 44, 98, 91 },
    { 71, 155, 142 },
  };
  for (int i = 0; i < O.rows; ++i) {
    for (int j = 0; j < O.cols; ++j) {
      TEST_TRUE(double_eq(nnMatrixAt(&O, i, j), expected[i][j], EPS));
    }
  }

  nnMatrixDel(&A);
  nnMatrixDel(&B);
  nnMatrixDel(&O);
}

TEST_CASE(nnMatrixMul_non_square_2x3_3x1) {
  nnMatrix A = nnMatrixMake(2, 3);
  nnMatrix B = nnMatrixMake(3, 1);
  nnMatrix O = nnMatrixMake(2, 1);

  nnMatrixInit(&A, (const R[]){
    1, 2, 3,
    4, 5, 6,
  });
  nnMatrixInit(&B, (const R[]){
    2,
    6,
    1,
  });
  nnMatrixMul(&A, &B, &O);

  const R expected[2][1] = {
    { 17 },
    { 44 },
  };
  for (int i = 0; i < O.rows; ++i) {
    for (int j = 0; j < O.cols; ++j) {
      TEST_TRUE(double_eq(nnMatrixAt(&O, i, j), expected[i][j], EPS));
    }
  }

  nnMatrixDel(&A);
  nnMatrixDel(&B);
  nnMatrixDel(&O);
}

TEST_CASE(nnMatrixMulAdd_test) {
  nnMatrix A = nnMatrixMake(2, 3);
  nnMatrix B = nnMatrixMake(2, 3);
  nnMatrix O = nnMatrixMake(2, 3);
  const R scale = 2;

  nnMatrixInit(&A, (const R[]){
    1, 2, 3,
    4, 5, 6,
  });
  nnMatrixInit(&B, (const R[]){
    2, 3, 1,
    7, 4, 3
  });
  nnMatrixMulAdd(&A, &B, scale, &O); // O = A + B * scale

  const R expected[2][3] = {
    { 5, 8, 5 },
    { 18, 13, 12 },
  };
  for (int i = 0; i < O.rows; ++i) {
    for (int j = 0; j < O.cols; ++j) {
      TEST_TRUE(double_eq(nnMatrixAt(&O, i, j), expected[i][j], EPS));
    }
  }

  nnMatrixDel(&A);
  nnMatrixDel(&B);
  nnMatrixDel(&O);
}

TEST_CASE(nnMatrixMulSub_test) {
  nnMatrix A = nnMatrixMake(2, 3);
  nnMatrix B = nnMatrixMake(2, 3);
  nnMatrix O = nnMatrixMake(2, 3);
  const R scale = 2;

  nnMatrixInit(&A, (const R[]){
    1, 2, 3,
    4, 5, 6,
  });
  nnMatrixInit(&B, (const R[]){
    2, 3, 1,
    7, 4, 3
  });
  nnMatrixMulSub(&A, &B, scale, &O); // O = A - B * scale

  const R expected[2][3] = {
    { -3, -4, 1 },
    { -10, -3, 0 },
  };
  for (int i = 0; i < O.rows; ++i) {
    for (int j = 0; j < O.cols; ++j) {
      TEST_TRUE(double_eq(nnMatrixAt(&O, i, j), expected[i][j], EPS));
    }
  }

  nnMatrixDel(&A);
  nnMatrixDel(&B);
  nnMatrixDel(&O);
}

TEST_CASE(nnMatrixMulPairs_2x3) {
  nnMatrix A = nnMatrixMake(2, 3);
  nnMatrix B = nnMatrixMake(2, 3);
  nnMatrix O = nnMatrixMake(2, 3);

  nnMatrixInit(&A, (const R[]){
    1, 2, 3,
    4, 5, 6,
  });
  nnMatrixInit(&B, (const R[]){
    2, 3, 1,
    7, 4, 3
  });
  nnMatrixMulPairs(&A, &B, &O);

  const R expected[2][3] = {
    { 2, 6, 3 },
    { 28, 20, 18 },
  };
  for (int i = 0; i < O.rows; ++i) {
    for (int j = 0; j < O.cols; ++j) {
      TEST_TRUE(double_eq(nnMatrixAt(&O, i, j), expected[i][j], EPS));
    }
  }

  nnMatrixDel(&A);
  nnMatrixDel(&B);
  nnMatrixDel(&O);
}

TEST_CASE(nnMatrixAdd_square_2x2) {
  nnMatrix A = nnMatrixMake(2, 2);
  nnMatrix B = nnMatrixMake(2, 2);
  nnMatrix C = nnMatrixMake(2, 2);

  nnMatrixInit(&A, (R[]) {
    1, 2,
    3, 4,
  });
  nnMatrixInit(&B, (R[]) {
    2, 1,
    5, 3,
  });

  nnMatrixAdd(&A, &B, &C);

  TEST_TRUE(double_eq(nnMatrixAt(&C, 0, 0), 3, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 0, 1), 3, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 1, 0), 8, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 1, 1), 7, EPS));

  nnMatrixDel(&A);
  nnMatrixDel(&B);
  nnMatrixDel(&C);
}

TEST_CASE(nnMatrixSub_square_2x2) {
  nnMatrix A = nnMatrixMake(2, 2);
  nnMatrix B = nnMatrixMake(2, 2);
  nnMatrix C = nnMatrixMake(2, 2);

  nnMatrixInit(&A, (R[]) {
    1, 2,
    3, 4,
  });
  nnMatrixInit(&B, (R[]) {
    2, 1,
    5, 3,
  });

  nnMatrixSub(&A, &B, &C);

  TEST_TRUE(double_eq(nnMatrixAt(&C, 0, 0), -1, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 0, 1), +1, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 1, 0), -2, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 1, 1), +1, EPS));

  nnMatrixDel(&A);
  nnMatrixDel(&B);
  nnMatrixDel(&C);
}

TEST_CASE(nnMatrixAddRow_test) {
  nnMatrix A = nnMatrixMake(2, 3);
  nnMatrix B = nnMatrixMake(1, 3);
  nnMatrix C = nnMatrixMake(2, 3);

  nnMatrixInit(&A, (R[]) {
    1, 2, 3,
    4, 5, 6,
  });
  nnMatrixInit(&B, (R[]) {
    2, 1, 3,
  });

  nnMatrixAddRow(&A, &B, &C);

  TEST_TRUE(double_eq(nnMatrixAt(&C, 0, 0), 3, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 0, 1), 3, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 0, 2), 6, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 1, 0), 6, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 1, 1), 6, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&C, 1, 2), 9, EPS));

  nnMatrixDel(&A);
  nnMatrixDel(&B);
  nnMatrixDel(&C);
}

TEST_CASE(nnMatrixTranspose_square_2x2) {
  nnMatrix A = nnMatrixMake(2, 2);
  nnMatrix B = nnMatrixMake(2, 2);

  nnMatrixInit(&A, (R[]) {
    1, 2,
    3, 4
  });

  nnMatrixTranspose(&A, &B);
  TEST_TRUE(double_eq(nnMatrixAt(&B, 0, 0), 1, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&B, 0, 1), 3, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&B, 1, 0), 2, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&B, 1, 1), 4, EPS));

  nnMatrixDel(&A);
  nnMatrixDel(&B);
}

TEST_CASE(nnMatrixTranspose_non_square_2x1) {
  nnMatrix A = nnMatrixMake(2, 1);
  nnMatrix B = nnMatrixMake(1, 2);

  nnMatrixInit(&A, (R[]) {
    1,
    3,
  });

  nnMatrixTranspose(&A, &B);
  TEST_TRUE(double_eq(nnMatrixAt(&B, 0, 0), 1, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&B, 0, 1), 3, EPS));

  nnMatrixDel(&A);
  nnMatrixDel(&B);
}

TEST_CASE(nnMatrixGt_test) {
  nnMatrix A = nnMatrixMake(2, 3);
  nnMatrix B = nnMatrixMake(2, 3);

  nnMatrixInit(&A, (R[]) {
    -3, 2, 0,
    4, -1, 5
  });

  nnMatrixGt(&A, 0, &B);
  TEST_TRUE(double_eq(nnMatrixAt(&B, 0, 0), 0, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&B, 0, 1), 1, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&B, 0, 2), 0, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&B, 1, 0), 1, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&B, 1, 1), 0, EPS));
  TEST_TRUE(double_eq(nnMatrixAt(&B, 1, 2), 1, EPS));

  nnMatrixDel(&A);
  nnMatrixDel(&B);
}
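
The expected matrices above are ordinary row-by-column products and can be checked by hand; for example, in nnMatrixMul_square_3x3 the top-left entry is 1*2 + 2*6 + 3*1 = 17, and the same calculation gives the 2x1 result { 17, 44 } in nnMatrixMul_non_square_2x3_3x1.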

diff --git a/src/lib/test/neuralnet_test.c b/src/lib/test/neuralnet_test.c
new file mode 100644
index 0000000..14d9438
--- /dev/null
+++ b/src/lib/test/neuralnet_test.c
@@ -0,0 +1,92 @@
#include <neuralnet/neuralnet.h>

#include <neuralnet/matrix.h>
#include "activation.h"
#include "neuralnet_impl.h"

#include "test.h"
#include "test_util.h"

#include <assert.h>

TEST_CASE(neuralnet_perceptron_test) {
  const int num_layers = 1;
  const int layer_sizes[] = { 1, 1 };
  const nnActivation layer_activations[] = { nnSigmoid };
  const R weights[] = { 0.3 };

  nnNeuralNetwork* net = nnMakeNet(num_layers, layer_sizes, layer_activations);
  assert(net);
  nnSetWeights(net, weights);

  nnQueryObject* query = nnMakeQueryObject(net, /*num_inputs=*/1);

  const R input[] = { 0.9 };
  R output[1];
  nnQueryArray(net, query, input, output);

  const R expected_output = sigmoid(input[0] * weights[0]);
  printf("\nOutput: %f, Expected: %f\n", output[0], expected_output);
  TEST_TRUE(double_eq(output[0], expected_output, EPS));

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}

TEST_CASE(neuralnet_xor_test) {
  const int num_layers = 2;
  const int layer_sizes[] = { 2, 2, 1 };
  const nnActivation layer_activations[] = { nnRelu, nnIdentity };
  const R weights[] = {
    1, 1, 1, 1, // First (hidden) layer.
    1, -2       // Second (output) layer.
  };
  const R biases[] = {
    0, -1, // First (hidden) layer.
    0      // Second (output) layer.
  };

  nnNeuralNetwork* net = nnMakeNet(num_layers, layer_sizes, layer_activations);
  assert(net);
  nnSetWeights(net, weights);
  nnSetBiases(net, biases);

  // First layer weights.
  TEST_EQUAL(nnMatrixAt(&net->weights[0], 0, 0), 1);
  TEST_EQUAL(nnMatrixAt(&net->weights[0], 0, 1), 1);
  TEST_EQUAL(nnMatrixAt(&net->weights[0], 0, 2), 1);
  TEST_EQUAL(nnMatrixAt(&net->weights[0], 0, 3), 1);
  // Second layer weights.
  TEST_EQUAL(nnMatrixAt(&net->weights[1], 0, 0), 1);
  TEST_EQUAL(nnMatrixAt(&net->weights[1], 0, 1), -2);
  // First layer biases.
  TEST_EQUAL(nnMatrixAt(&net->biases[0], 0, 0), 0);
  TEST_EQUAL(nnMatrixAt(&net->biases[0], 0, 1), -1);
  // Second layer biases.
  TEST_EQUAL(nnMatrixAt(&net->biases[1], 0, 0), 0);

  // Test.

#define M 4

  nnQueryObject* query = nnMakeQueryObject(net, /*num_inputs=*/M);

  const R test_inputs[M][2] = { { 0., 0. }, { 1., 0. }, { 0., 1. }, { 1., 1. } };
  nnMatrix test_inputs_matrix = nnMatrixMake(M, 2);
  nnMatrixInit(&test_inputs_matrix, (const R*)test_inputs);
  nnQuery(net, query, &test_inputs_matrix);

  const R expected_outputs[M] = { 0., 1., 1., 0. };
  for (int i = 0; i < M; ++i) {
    const R test_output = nnMatrixAt(nnNetOutputs(query), i, 0);
    printf("\nInput: (%f, %f), Output: %f, Expected: %f\n",
           test_inputs[i][0], test_inputs[i][1], test_output, expected_outputs[i]);
  }
  for (int i = 0; i < M; ++i) {
    const R test_output = nnMatrixAt(nnNetOutputs(query), i, 0);
    TEST_TRUE(double_eq(test_output, expected_outputs[i], OUTPUT_EPS));
  }

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}
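
Reading the hand-set parameters of neuralnet_xor_test in index order, the hidden pre-activations are x1 + x2 and x1 + x2 - 1, and the output is relu(x1 + x2) - 2 * relu(x1 + x2 - 1). This is the standard two-ReLU realization of XOR: it yields 0, 1, 1, 0 for the four test inputs (for example, (1, 1) gives 2 - 2 * 1 = 0), which is exactly what the loop at the end asserts.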

diff --git a/src/lib/test/test.h b/src/lib/test/test.h
new file mode 100644
index 0000000..fd8dc22
--- /dev/null
+++ b/src/lib/test/test.h
@@ -0,0 +1,185 @@
// SPDX-License-Identifier: MIT
#pragma once

#ifdef UNIT_TEST

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#if defined(__DragonFly__) || defined(__FreeBSD__) || defined(__FreeBSD_kernel__) || \
    defined(__NetBSD__) || defined(__OpenBSD__)
#define USE_SYSCTL_FOR_ARGS 1
// clang-format off
#include <sys/types.h>
#include <sys/sysctl.h>
// clang-format on
#include <unistd.h> // getpid
#endif

struct test_file_metadata;

struct test_failure {
  bool present;
  const char *message;
  const char *file;
  int line;
};

struct test_case_metadata {
  void (*fn)(struct test_case_metadata *, struct test_file_metadata *);
  struct test_failure failure;
  const char *name;
  struct test_case_metadata *next;
};

struct test_file_metadata {
  bool registered;
  const char *name;
  struct test_file_metadata *next;
  struct test_case_metadata *tests;
};

struct test_file_metadata __attribute__((weak)) * test_file_head;

#define SET_FAILURE(_message) \
  metadata->failure = (struct test_failure) { \
    .message = _message, .file = __FILE__, .line = __LINE__, .present = true, \
  }

#define TEST_EQUAL(a, b) \
  do { \
    if ((a) != (b)) { \
      SET_FAILURE(#a " != " #b); \
      return; \
    } \
  } while (0)

#define TEST_TRUE(a) \
  do { \
    if (!(a)) { \
      SET_FAILURE(#a " is not true"); \
      return; \
    } \
  } while (0)

#define TEST_STREQUAL(a, b) \
  do { \
    if (strcmp(a, b) != 0) { \
      SET_FAILURE(#a " != " #b); \
      return; \
    } \
  } while (0)

#define TEST_CASE(_name) \
  static void __test_h_##_name(struct test_case_metadata *, \
                               struct test_file_metadata *); \
  static struct test_file_metadata __test_h_file; \
  static struct test_case_metadata __test_h_meta_##_name = { \
    .name = #_name, \
    .fn = __test_h_##_name, \
  }; \
  static void __attribute__((constructor(101))) __test_h_##_name##_register(void) { \
    __test_h_meta_##_name.next = __test_h_file.tests; \
    __test_h_file.tests = &__test_h_meta_##_name; \
    if (!__test_h_file.registered) { \
      __test_h_file.name = __FILE__; \
      __test_h_file.next = test_file_head; \
      test_file_head = &__test_h_file; \
      __test_h_file.registered = true; \
    } \
  } \
  static void __test_h_##_name( \
      struct test_case_metadata *metadata __attribute__((unused)), \
      struct test_file_metadata *file_metadata __attribute__((unused)))

extern void __attribute__((weak)) (*test_h_unittest_setup)(void);
/// Runs all registered tests when the binary is invoked with --unittest:
/// prints a per-test report to stderr and exits with EXIT_SUCCESS if every
/// test passed, EXIT_FAILURE otherwise. Without --unittest it does nothing.
static inline void __attribute__((constructor(102))) run_tests(void) {
  bool should_run = false;
#ifdef USE_SYSCTL_FOR_ARGS
  int mib[] = {
    CTL_KERN,
#if defined(__NetBSD__) || defined(__OpenBSD__)
    KERN_PROC_ARGS,
    getpid(),
    KERN_PROC_ARGV,
#else
    KERN_PROC,
    KERN_PROC_ARGS,
    getpid(),
#endif
  };
  char *arg = NULL;
  size_t arglen;
  sysctl(mib, sizeof(mib) / sizeof(mib[0]), NULL, &arglen, NULL, 0);
  arg = malloc(arglen);
  sysctl(mib, sizeof(mib) / sizeof(mib[0]), arg, &arglen, NULL, 0);
#else
  FILE *cmdlinef = fopen("/proc/self/cmdline", "r");
  char *arg = NULL;
  int arglen;
  fscanf(cmdlinef, "%ms%n", &arg, &arglen);
  fclose(cmdlinef);
#endif
  for (char *pos = arg; pos < arg + arglen; pos += strlen(pos) + 1) {
    if (strcmp(pos, "--unittest") == 0) {
      should_run = true;
      break;
    }
  }
  free(arg);

  if (!should_run) {
    return;
  }

  if (&test_h_unittest_setup) {
    test_h_unittest_setup();
  }

  struct test_file_metadata *i = test_file_head;
  int failed = 0, success = 0;
  while (i) {
    fprintf(stderr, "Running tests from %s:\n", i->name);
    struct test_case_metadata *j = i->tests;
    while (j) {
      fprintf(stderr, "\t%s ... ", j->name);
      j->failure.present = false;
      j->fn(j, i);
      if (j->failure.present) {
        fprintf(stderr, "failed (%s at %s:%d)\n", j->failure.message,
                j->failure.file, j->failure.line);
        failed++;
      } else {
        fprintf(stderr, "passed\n");
        success++;
      }
      j = j->next;
    }
    fprintf(stderr, "\n");
    i = i->next;
  }
  int total = failed + success;
  fprintf(stderr, "Test results: passed %d/%d, failed %d/%d\n", success, total,
          failed, total);
  exit(failed == 0 ? EXIT_SUCCESS : EXIT_FAILURE);
}

#else

#include <stdbool.h>

#define TEST_CASE(name) static void __attribute__((unused)) __test_h_##name(void)

#define TEST_EQUAL(a, b) \
  (void)(a); \
  (void)(b)
#define TEST_TRUE(a) (void)(a)
#define TEST_STREQUAL(a, b) \
  (void)(a); \
  (void)(b)

#endif

diff --git a/src/lib/test/test_main.c b/src/lib/test/test_main.c
new file mode 100644
index 0000000..4cce7f6
--- /dev/null
+++ b/src/lib/test/test_main.c
@@ -0,0 +1,3 @@
int main() {
  return 0;
}

diff --git a/src/lib/test/test_util.h b/src/lib/test/test_util.h
new file mode 100644
index 0000000..8abb99a
--- /dev/null
+++ b/src/lib/test/test_util.h
@@ -0,0 +1,22 @@
#pragma once

#include <neuralnet/types.h>

#include <math.h>

// General epsilon for comparing values.
static const R EPS = 1e-10;

// Epsilon for comparing network weights after training.
static const R WEIGHT_EPS = 0.01;

// Epsilon for comparing network outputs after training.
static const R OUTPUT_EPS = 0.01;

static inline bool double_eq(double a, double b, double eps) {
  return fabs(a - b) <= eps;
}

static inline R lerp(R a, R b, R t) {
  return a + t*(b-a);
}
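
Taken together, test.h and test_util.h form a small self-registering harness: each TEST_CASE defines a constructor(101) function that links the case into a per-file list, and run_tests, a constructor(102), scans the command line before main() runs. If the binary was started with --unittest it executes every registered case, reports to stderr, and exits with the overall result; otherwise it returns and the program behaves normally, and without UNIT_TEST the macros compile to no-ops. A minimal usage sketch, assuming a file compiled with -DUNIT_TEST and linked against test_main.c; the case names below are illustrative and not part of this commit:

#include "test.h"
#include "test_util.h"

// Each body runs only when the binary is invoked with --unittest.
TEST_CASE(lerp_endpoints) {
  TEST_TRUE(double_eq(lerp(2, 4, 0), 2, EPS)); // t = 0 returns a.
  TEST_TRUE(double_eq(lerp(2, 4, 1), 4, EPS)); // t = 1 returns b.
}

TEST_CASE(lerp_midpoint) {
  TEST_TRUE(double_eq(lerp(0, 10, 0.5), 5, EPS)); // Halfway between a and b.
}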

diff --git a/src/lib/test/train_linear_perceptron_non_origin_test.c b/src/lib/test/train_linear_perceptron_non_origin_test.c
new file mode 100644
index 0000000..5a320ac
--- /dev/null
+++ b/src/lib/test/train_linear_perceptron_non_origin_test.c
@@ -0,0 +1,67 @@
#include <neuralnet/train.h>

#include <neuralnet/matrix.h>
#include <neuralnet/neuralnet.h>
#include "activation.h"
#include "neuralnet_impl.h"

#include "test.h"
#include "test_util.h"

#include <assert.h>

TEST_CASE(neuralnet_train_linear_perceptron_non_origin_test) {
  const int num_layers = 1;
  const int layer_sizes[] = { 1, 1 };
  const nnActivation layer_activations[] = { nnIdentity };

  nnNeuralNetwork* net = nnMakeNet(num_layers, layer_sizes, layer_activations);
  assert(net);

  // Train.

  // Try to learn the Y = 2X + 1 line.
#define N 2
  const R inputs[N] = { 0., 1. };
  const R targets[N] = { 1., 3. };

  nnMatrix inputs_matrix = nnMatrixMake(N, 1);
  nnMatrix targets_matrix = nnMatrixMake(N, 1);
  nnMatrixInit(&inputs_matrix, inputs);
  nnMatrixInit(&targets_matrix, targets);

  nnTrainingParams params = {
    .learning_rate = 0.7,
    .max_iterations = 20,
    .seed = 0,
    .weight_init = nnWeightInit01,
    .debug = false,
  };

  nnTrain(net, &inputs_matrix, &targets_matrix, &params);

  const R weight = nnMatrixAt(&net->weights[0], 0, 0);
  const R expected_weight = 2.0;
  printf("\nTrained network weight: %f, Expected: %f\n", weight, expected_weight);
  TEST_TRUE(double_eq(weight, expected_weight, WEIGHT_EPS));

  const R bias = nnMatrixAt(&net->biases[0], 0, 0);
  const R expected_bias = 1.0;
  printf("Trained network bias: %f, Expected: %f\n", bias, expected_bias);
  TEST_TRUE(double_eq(bias, expected_bias, WEIGHT_EPS));

  // Test.

  nnQueryObject* query = nnMakeQueryObject(net, /*num_inputs=*/1);

  const R test_input[] = { 2.3 };
  R test_output[1];
  nnQueryArray(net, query, test_input, test_output);

  const R expected_output = test_input[0] * expected_weight + expected_bias;
  printf("Output: %f, Expected: %f\n", test_output[0], expected_output);
  TEST_TRUE(double_eq(test_output[0], expected_output, OUTPUT_EPS));

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}
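
The two training pairs (0, 1) and (1, 3) are fitted exactly by weight 2 and bias 1 (2*0 + 1 = 1, 2*1 + 1 = 3), so the single identity-activation neuron is expected to recover those values; the final query then checks 2.3 * 2 + 1 = 5.6 within OUTPUT_EPS.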

diff --git a/src/lib/test/train_linear_perceptron_test.c b/src/lib/test/train_linear_perceptron_test.c
new file mode 100644
index 0000000..2b1336d
--- /dev/null
+++ b/src/lib/test/train_linear_perceptron_test.c
@@ -0,0 +1,62 @@
#include <neuralnet/train.h>

#include <neuralnet/matrix.h>
#include <neuralnet/neuralnet.h>
#include "activation.h"
#include "neuralnet_impl.h"

#include "test.h"
#include "test_util.h"

#include <assert.h>

TEST_CASE(neuralnet_train_linear_perceptron_test) {
  const int num_layers = 1;
  const int layer_sizes[] = { 1, 1 };
  const nnActivation layer_activations[] = { nnIdentity };

  nnNeuralNetwork* net = nnMakeNet(num_layers, layer_sizes, layer_activations);
  assert(net);

  // Train.

  // Try to learn the Y = X line.
#define N 2
  const R inputs[N] = { 0., 1. };
  const R targets[N] = { 0., 1. };

  nnMatrix inputs_matrix = nnMatrixMake(N, 1);
  nnMatrix targets_matrix = nnMatrixMake(N, 1);
  nnMatrixInit(&inputs_matrix, inputs);
  nnMatrixInit(&targets_matrix, targets);

  nnTrainingParams params = {
    .learning_rate = 0.7,
    .max_iterations = 10,
    .seed = 0,
    .weight_init = nnWeightInit01,
    .debug = false,
  };

  nnTrain(net, &inputs_matrix, &targets_matrix, &params);

  const R weight = nnMatrixAt(&net->weights[0], 0, 0);
  const R expected_weight = 1.0;
  printf("\nTrained network weight: %f, Expected: %f\n", weight, expected_weight);
  TEST_TRUE(double_eq(weight, expected_weight, WEIGHT_EPS));

  // Test.

  nnQueryObject* query = nnMakeQueryObject(net, /*num_inputs=*/1);

  const R test_input[] = { 2.3 };
  R test_output[1];
  nnQueryArray(net, query, test_input, test_output);

  const R expected_output = test_input[0];
  printf("Output: %f, Expected: %f\n", test_output[0], expected_output);
  TEST_TRUE(double_eq(test_output[0], expected_output, OUTPUT_EPS));

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}

diff --git a/src/lib/test/train_sigmoid_test.c b/src/lib/test/train_sigmoid_test.c
new file mode 100644
index 0000000..588e7ca
--- /dev/null
+++ b/src/lib/test/train_sigmoid_test.c
@@ -0,0 +1,66 @@
#include <neuralnet/train.h>

#include <neuralnet/matrix.h>
#include <neuralnet/neuralnet.h>
#include "activation.h"
#include "neuralnet_impl.h"

#include "test.h"
#include "test_util.h"

#include <assert.h>

TEST_CASE(neuralnet_train_sigmoid_test) {
  const int num_layers = 1;
  const int layer_sizes[] = { 1, 1 };
  const nnActivation layer_activations[] = { nnSigmoid };

  nnNeuralNetwork* net = nnMakeNet(num_layers, layer_sizes, layer_activations);
  assert(net);

  // Train.

  // Try to learn the sigmoid function.
#define N 3
  R inputs[N];
  R targets[N];
  for (int i = 0; i < N; ++i) {
    inputs[i] = lerp(-1, +1, (R)i / (R)(N-1));
    targets[i] = sigmoid(inputs[i]);
  }

  nnMatrix inputs_matrix = nnMatrixMake(N, 1);
  nnMatrix targets_matrix = nnMatrixMake(N, 1);
  nnMatrixInit(&inputs_matrix, inputs);
  nnMatrixInit(&targets_matrix, targets);

  nnTrainingParams params = {
    .learning_rate = 0.9,
    .max_iterations = 100,
    .seed = 0,
    .weight_init = nnWeightInit01,
    .debug = false,
  };

  nnTrain(net, &inputs_matrix, &targets_matrix, &params);

  const R weight = nnMatrixAt(&net->weights[0], 0, 0);
  const R expected_weight = 1.0;
  printf("\nTrained network weight: %f, Expected: %f\n", weight, expected_weight);
  TEST_TRUE(double_eq(weight, expected_weight, WEIGHT_EPS));

  // Test.

  nnQueryObject* query = nnMakeQueryObject(net, /*num_inputs=*/1);

  const R test_input[] = { 0.3 };
  R test_output[1];
  nnQueryArray(net, query, test_input, test_output);

  const R expected_output = 0.574442516811659; // sigmoid(0.3)
  printf("Output: %f, Expected: %f\n", test_output[0], expected_output);
  TEST_TRUE(double_eq(test_output[0], expected_output, OUTPUT_EPS));

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}
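
Because the targets here are sigmoid(x) and the model itself computes sigmoid(w*x + b), the exact solution is w = 1 and b = 0, which is why expected_weight is 1.0; the hard-coded expected output is sigmoid(0.3) = 1 / (1 + e^(-0.3)) ≈ 0.574443.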

diff --git a/src/lib/test/train_xor_test.c b/src/lib/test/train_xor_test.c
new file mode 100644
index 0000000..6ddc6e0
--- /dev/null
+++ b/src/lib/test/train_xor_test.c
@@ -0,0 +1,66 @@
#include <neuralnet/train.h>

#include <neuralnet/matrix.h>
#include <neuralnet/neuralnet.h>
#include "activation.h"
#include "neuralnet_impl.h"

#include "test.h"
#include "test_util.h"

#include <assert.h>

TEST_CASE(neuralnet_train_xor_test) {
  const int num_layers = 2;
  const int layer_sizes[] = { 2, 2, 1 };
  const nnActivation layer_activations[] = { nnRelu, nnIdentity };

  nnNeuralNetwork* net = nnMakeNet(num_layers, layer_sizes, layer_activations);
  assert(net);

  // Train.

#define N 4
  const R inputs[N][2] = { { 0., 0. }, { 0., 1. }, { 1., 0. }, { 1., 1. } };
  const R targets[N] = { 0., 1., 1., 0. };

  nnMatrix inputs_matrix = nnMatrixMake(N, 2);
  nnMatrix targets_matrix = nnMatrixMake(N, 1);
  nnMatrixInit(&inputs_matrix, (const R*)inputs);
  nnMatrixInit(&targets_matrix, targets);

  nnTrainingParams params = {
    .learning_rate = 0.1,
    .max_iterations = 500,
    .seed = 0,
    .weight_init = nnWeightInit01,
    .debug = false,
  };

  nnTrain(net, &inputs_matrix, &targets_matrix, &params);

  // Test.

#define M 4

  nnQueryObject* query = nnMakeQueryObject(net, /*num_inputs=*/M);

  const R test_inputs[M][2] = { { 0., 0. }, { 1., 0. }, { 0., 1. }, { 1., 1. } };
  nnMatrix test_inputs_matrix = nnMatrixMake(M, 2);
  nnMatrixInit(&test_inputs_matrix, (const R*)test_inputs);
  nnQuery(net, query, &test_inputs_matrix);

  const R expected_outputs[M] = { 0., 1., 1., 0. };
  for (int i = 0; i < M; ++i) {
    const R test_output = nnMatrixAt(nnNetOutputs(query), i, 0);
    printf("\nInput: (%f, %f), Output: %f, Expected: %f\n",
           test_inputs[i][0], test_inputs[i][1], test_output, expected_outputs[i]);
  }
  for (int i = 0; i < M; ++i) {
    const R test_output = nnMatrixAt(nnNetOutputs(query), i, 0);
    TEST_TRUE(double_eq(test_output, expected_outputs[i], OUTPUT_EPS));
  }

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}