From 7710efc305682f35cbc8d69d9b1e5739dbb89f0d Mon Sep 17 00:00:00 2001
From: jvech
Date: Mon, 24 Jul 2023 20:06:05 -0500
Subject: doc: backpropagation notes added

---
 src/nn.c | 22 +++++++++++++++++-----
 1 file changed, 17 insertions(+), 5 deletions(-)

(limited to 'src/nn.c')

diff --git a/src/nn.c b/src/nn.c
index 87f74ce..d312c29 100644
--- a/src/nn.c
+++ b/src/nn.c
@@ -2,11 +2,12 @@
 
 static void fill_random_weights(double *weights, double *bias, size_t rows, size_t cols);
 
-double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
+void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2])
 {
-    double *out = calloc(input_shape[0] * layer.neurons, sizeof(double));
-    if (out == NULL) {
-        perror("nn_layer_forward() Error");
+    if (out_shape[0] != input_shape[0] || out_shape[1] != layer.neurons) {
+        fprintf(stderr,
+                "nn_layer_forward() Error: out must have (%zu x %zu) dimensions not (%zu x %zu)\n",
+                input_shape[0], layer.neurons, out_shape[0], out_shape[1]);
         exit(1);
     }
 
@@ -22,7 +23,13 @@ double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
                 1.0, input, input_shape[1], //alpha X
                 layer.weights, layer.neurons, // W
                 1.0, out, layer.neurons); // beta B
-    return out;
+
+    for (size_t i = 0; i < input_shape[0]; i++) {
+        for (size_t j = 0; j < layer.neurons; j ++) {
+            size_t index = layer.neurons * i + j;
+            out[index] = layer.activation(out[index]);
+        }
+    }
 }
 
 void nn_layer_init_weights(Layer layers[], size_t nmemb, size_t n_inputs)
@@ -58,6 +65,11 @@ void nn_layer_free_weights(Layer *layer, size_t nmemb)
     }
 }
 
+double identity(double x)
+{
+    return x;
+}
+
 double sigmoid(double x)
 {
     return 1 / (1 + exp(-x));
-- 
cgit v1.2.3-70-g09d2
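
The hunks above change nn_layer_forward() from allocating and returning its output to writing into a caller-provided buffer whose shape is checked against (input rows x layer.neurons), and they apply the layer's activation element-wise after the cblas_dgemm call, so the layer now computes out = activation(input * W + b). Below is a minimal caller-side sketch of that contract, assuming a plausible Layer layout; the field names (weights, bias, neurons, activation) are inferred from the code shown, and the plain-loop forward pass is only a stand-in for the BLAS version to keep the example self-contained.

/*
 * Minimal sketch of the new nn_layer_forward() contract: the caller owns the
 * output buffer, whose shape must be (input rows x layer.neurons).  The Layer
 * definition and the plain-loop body are assumptions; the real code uses
 * cblas_dgemm for the matrix product.
 */
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

typedef struct {                       /* hypothetical Layer layout */
    double *weights;                   /* (n_inputs x neurons), row-major */
    double *bias;                      /* (neurons) */
    size_t neurons;
    double (*activation)(double);
} Layer;

static double sigmoid(double x) { return 1 / (1 + exp(-x)); }

/* Simplified stand-in for the patched function: out = activation(X*W + b). */
static void nn_layer_forward(Layer layer, double *out, size_t out_shape[2],
                             double *input, size_t input_shape[2])
{
    if (out_shape[0] != input_shape[0] || out_shape[1] != layer.neurons) {
        fprintf(stderr, "nn_layer_forward() Error: shape mismatch\n");
        exit(1);
    }
    for (size_t i = 0; i < input_shape[0]; i++) {
        for (size_t j = 0; j < layer.neurons; j++) {
            double acc = layer.bias[j];
            for (size_t k = 0; k < input_shape[1]; k++)
                acc += input[i * input_shape[1] + k]
                     * layer.weights[k * layer.neurons + j];
            out[i * layer.neurons + j] = layer.activation(acc);
        }
    }
}

int main(void)
{
    size_t batch = 2, n_inputs = 3, neurons = 2;   /* illustrative sizes */
    double input[]   = {1, 2, 3,  4, 5, 6};        /* (batch x n_inputs) */
    double weights[] = {.1, .2,  .3, .4,  .5, .6}; /* (n_inputs x neurons) */
    double bias[]    = {0.0, 0.0};

    Layer layer = {weights, bias, neurons, sigmoid};

    size_t input_shape[2] = {batch, n_inputs};
    size_t out_shape[2]   = {batch, neurons};
    double *out = calloc(batch * neurons, sizeof(double)); /* caller allocates out now */
    if (out == NULL) { perror("calloc"); exit(1); }

    nn_layer_forward(layer, out, out_shape, input, input_shape);

    for (size_t i = 0; i < batch; i++)
        printf("%g %g\n", out[i * neurons], out[i * neurons + 1]);

    free(out);
    return 0;
}

Moving the allocation to the caller makes the shape contract explicit and lets output buffers be allocated once and reused across forward passes, which fits the backpropagation work the commit message points at.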