#include <neuralnet/neuralnet.h>
#include "activation.h"
#include "neuralnet_impl.h"
#include <neuralnet/matrix.h>
#include "test.h"
#include "test_util.h"
#include <assert.h>
// Verifies a single-neuron network (one 1x1 linear layer followed by a
// sigmoid) against the closed-form expression sigmoid(x*w + b).
TEST_CASE(neuralnet_perceptron_test) {
  const int num_layers = 2;
  const int input_size = 1;

  // Single 1x1 weight and bias.
  const R weights[] = {0.3};
  const R biases[] = {0.0};

  const nnLayer layers[] = {
      {.type = nnLinear,
       .linear =
           {.weights = nnMatrixFromArray(1, 1, weights),
            .biases = nnMatrixFromArray(1, 1, biases)}},
      {.type = nnSigmoid},
  };

  nnNeuralNetwork* net = nnMakeNet(layers, num_layers, input_size);
  assert(net);

  nnQueryObject* query = nnMakeQueryObject(net, 1);

  const R input[] = {0.9};
  R output[1];
  nnQueryArray(net, query, input, output);

  // Include the bias term in the expected value. The previous expression,
  // sigmoid(input[0] * weights[0]), only matched because the bias happens to
  // be 0; this form stays correct if the test data changes.
  const R expected_output = sigmoid(input[0] * weights[0] + biases[0]);
  printf(
      "\n[neuralnet_perceptron_test] Output: %f, Expected: %f\n", output[0],
      expected_output);
  TEST_TRUE(double_eq(output[0], expected_output, EPS));

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}
// End-to-end check: a hand-constructed 2-2-1 network (linear -> ReLU ->
// linear) that computes XOR over the four boolean input pairs.
TEST_CASE(neuralnet_xor_test) {
  // Hidden layer parameters: y = relu(W0*x + b0).
  const R weights0[] = {1, 1, 1, 1};
  const R biases0[] = {0, -1};
  // Output layer parameters: z = W1*y + b1.
  const R weights1[] = {1, -2};
  const R biases1[] = {0};

  // Assemble the 3-layer network over 2 inputs.
  const int num_layers = 3;
  const int input_size = 2;
  const nnLayer layers[] = {
      {.type = nnLinear,
       .linear =
           {.weights = nnMatrixFromArray(2, 2, weights0),
            .biases = nnMatrixFromArray(1, 2, biases0)}},
      {.type = nnRelu},
      {.type = nnLinear,
       .linear =
           {.weights = nnMatrixFromArray(2, 1, weights1),
            .biases = nnMatrixFromArray(1, 1, biases1)}},
  };

  nnNeuralNetwork* net = nnMakeNet(layers, num_layers, input_size);
  assert(net);

  // Sanity-check that construction copied the parameters verbatim.
  // Hidden-layer weights are all 1.
  for (int k = 0; k < 4; ++k) {
    TEST_EQUAL(nnMatrixAt(&net->layers[0].linear.weights, 0, k), 1);
  }
  // Output-layer (third layer) weights.
  TEST_EQUAL(nnMatrixAt(&net->layers[2].linear.weights, 0, 0), 1);
  TEST_EQUAL(nnMatrixAt(&net->layers[2].linear.weights, 0, 1), -2);
  // Hidden-layer biases.
  TEST_EQUAL(nnMatrixAt(&net->layers[0].linear.biases, 0, 0), 0);
  TEST_EQUAL(nnMatrixAt(&net->layers[0].linear.biases, 0, 1), -1);
  // Output-layer (third layer) bias.
  TEST_EQUAL(nnMatrixAt(&net->layers[2].linear.biases, 0, 0), 0);

  // Query the network with all four XOR input pairs in one batch.
#define M 4
  nnQueryObject* q = nnMakeQueryObject(net, M);

  const R test_inputs[M][2] = {
      {0., 0.},
      {1., 0.},
      {0., 1.},
      {1., 1.}
  };
  nnMatrix batch = nnMatrixMake(M, 2);
  // NOTE(review): `batch` is never released here; if matrix.h exposes a
  // deallocator, consider calling it before returning — TODO confirm.
  nnMatrixInit(&batch, (const R*)test_inputs);
  nnQuery(net, q, &batch);

  const R expected_outputs[M] = {0., 1., 1., 0.};
  // Print all rows first, then assert, so the full table is visible even if
  // an assertion aborts the test.
  for (int row = 0; row < M; ++row) {
    const R got = nnMatrixAt(nnNetOutputs(q), row, 0);
    printf(
        "\nInput: (%f, %f), Output: %f, Expected: %f\n", test_inputs[row][0],
        test_inputs[row][1], got, expected_outputs[row]);
  }
  for (int row = 0; row < M; ++row) {
    const R got = nnMatrixAt(nnNetOutputs(q), row, 0);
    TEST_TRUE(double_eq(got, expected_outputs[row], OUTPUT_EPS));
  }

  nnDeleteQueryObject(&q);
  nnDeleteNet(&net);
}