// src/lib/test/neuralnet_test.c
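//
// Unit tests for the neural network forward-pass (query) API: a single-neuron
// sigmoid perceptron and a hand-constructed two-layer XOR network.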
#include <neuralnet/neuralnet.h>

#include <neuralnet/matrix.h>
#include "activation.h"
#include "neuralnet_impl.h"

#include "test.h"
#include "test_util.h"

#include <assert.h>
#include <stdio.h>

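// A single-layer, single-neuron network (1 input -> 1 output, sigmoid
// activation, no bias set), so the expected output is sigmoid(weight * input).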
TEST_CASE(neuralnet_perceptron_test) {
  const int num_layers = 1;
  const int layer_sizes[] = { 1, 1 };
  const nnActivation layer_activations[] = { nnSigmoid };
  const R weights[] = { 0.3 };

  nnNeuralNetwork* net = nnMakeNet(num_layers, layer_sizes, layer_activations);
  assert(net);
  nnSetWeights(net, weights);

  nnQueryObject* query = nnMakeQueryObject(net, /*num_inputs=*/1);

  const R input[] = { 0.9 };
  R output[1];
  nnQueryArray(net, query, input, output);

  const R expected_output = sigmoid(input[0] * weights[0]);
  printf("\nOutput: %f, Expected: %f\n", output[0], expected_output);
  TEST_TRUE(double_eq(output[0], expected_output, EPS));

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}

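// A 2-2-1 network hand-wired to compute XOR on binary inputs:
//   h0 = relu(x + y), h1 = relu(x + y - 1), output = h0 - 2*h1
// which yields 0, 1, 1, 0 for the inputs (0,0), (1,0), (0,1), (1,1).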
TEST_CASE(neuralnet_xor_test) {
  const int num_layers = 2;
  const int layer_sizes[] = { 2, 2, 1 };
  const nnActivation layer_activations[] = { nnRelu, nnIdentity };
  const R weights[] = {
    1, 1, 1, 1,  // First (hidden) layer.
    1, -2        // Second (output) layer.
  };
  const R biases[] = {
    0, -1,  // First (hidden) layer.
    0       // Second (output) layer.
  };

  nnNeuralNetwork* net = nnMakeNet(num_layers, layer_sizes, layer_activations);
  assert(net);
  nnSetWeights(net, weights);
  nnSetBiases(net, biases);

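  // Verify that nnSetWeights() and nnSetBiases() stored the parameters in the
  // order they were given.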
  // First layer weights.
  TEST_EQUAL(nnMatrixAt(&net->weights[0], 0, 0), 1);
  TEST_EQUAL(nnMatrixAt(&net->weights[0], 0, 1), 1);
  TEST_EQUAL(nnMatrixAt(&net->weights[0], 0, 2), 1);
  TEST_EQUAL(nnMatrixAt(&net->weights[0], 0, 3), 1);
  // Second layer weights.
  TEST_EQUAL(nnMatrixAt(&net->weights[1], 0, 0), 1);
  TEST_EQUAL(nnMatrixAt(&net->weights[1], 0, 1), -2);
  // First layer biases.
  TEST_EQUAL(nnMatrixAt(&net->biases[0], 0, 0), 0);
  TEST_EQUAL(nnMatrixAt(&net->biases[0], 0, 1), -1);
  // Second layer biases.
  TEST_EQUAL(nnMatrixAt(&net->biases[1], 0, 0), 0);

  // Query the network with all four XOR input combinations in a single batch.

  #define M 4

  nnQueryObject* query = nnMakeQueryObject(net, /*num_inputs=*/M);

  const R test_inputs[M][2] = { { 0., 0. }, { 1., 0. }, { 0., 1. }, { 1., 1. } };
  nnMatrix test_inputs_matrix = nnMatrixMake(M, 2);
  nnMatrixInit(&test_inputs_matrix, (const R*)test_inputs);
  nnQuery(net, query, &test_inputs_matrix);

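  // nnNetOutputs(query) is read below as one row per input sample, with the
  // network's single output neuron in column 0.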
  const R expected_outputs[M] = { 0., 1., 1., 0. };
  for (int i = 0; i < M; ++i) {
    const R test_output = nnMatrixAt(nnNetOutputs(query), i, 0);
    printf("\nInput: (%f, %f), Output: %f, Expected: %f\n",
      test_inputs[i][0], test_inputs[i][1], test_output, expected_outputs[i]);
    TEST_TRUE(double_eq(test_output, expected_outputs[i], OUTPUT_EPS));
  }

  nnDeleteQueryObject(&query);
  nnDeleteNet(&net);
}