author     jvech <jmvalenciae@unal.edu.co>  2023-07-24 20:06:05 -0500
committer  jvech <jmvalenciae@unal.edu.co>  2023-07-24 20:06:05 -0500
commit     7710efc305682f35cbc8d69d9b1e5739dbb89f0d (patch)
tree       73cb5939eb3fbdc2f232176e6e61df4bf21c0aca /src
parent     f2cf742719445a6ac7cea17043d3adbcbc247883 (diff)
doc: backpropagation notes added
Diffstat (limited to 'src')
-rw-r--r--  src/nn.c  22
-rw-r--r--  src/nn.h   7
2 files changed, 21 insertions, 8 deletions
diff --git a/src/nn.c b/src/nn.c
--- a/src/nn.c
+++ b/src/nn.c
@@ -2,11 +2,12 @@
 
 static void fill_random_weights(double *weights, double *bias, size_t rows, size_t cols);
 
-double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
+void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2])
 {
-    double *out = calloc(input_shape[0] * layer.neurons, sizeof(double));
-    if (out == NULL) {
-        perror("nn_layer_forward() Error");
+    if (out_shape[0] != input_shape[0] || out_shape[1] != layer.neurons) {
+        fprintf(stderr,
+                "nn_layer_forward() Error: out must have (%zu x %zu) dimensions not (%zu x %zu)\n",
+                input_shape[0], layer.neurons, out_shape[0], out_shape[1]);
         exit(1);
     }
 
@@ -22,7 +23,13 @@ double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
                 1.0, input, input_shape[1], //alpha X
                 layer.weights, layer.neurons, // W
                 1.0, out, layer.neurons); // beta B
-    return out;
+
+    for (size_t i = 0; i < input_shape[0]; i++) {
+        for (size_t j = 0; j < layer.neurons; j ++) {
+            size_t index = layer.neurons * i + j;
+            out[index] = layer.activation(out[index]);
+        }
+    }
 }
 
 void nn_layer_init_weights(Layer layers[], size_t nmemb, size_t n_inputs)
@@ -58,6 +65,11 @@ void nn_layer_free_weights(Layer *layer, size_t nmemb)
     }
 }
 
+double identity(double x)
+{
+    return x;
+}
+
 double sigmoid(double x)
 {
     return 1 / (1 + exp(-x));
diff --git a/src/nn.h b/src/nn.h
--- a/src/nn.h
+++ b/src/nn.h
@@ -15,12 +15,13 @@ typedef struct Layer {
     size_t neurons, input_nodes;
 } Layer;
 
-void nn_layer_init_weights(Layer *layer, size_t nmemb, size_t input_cols);
+void nn_layer_init_weights(Layer *layers, size_t nmemb, size_t input_cols);
 void nn_layer_free_weights(Layer *layer, size_t nmemb);
 
-double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2]); //TODO
-double * nn_layer_backward(Layer layer, double *output, size_t out_shape[2]); //TODO
+void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2]); //TODO
+void nn_layer_backward(Layer *layer, double *out, size_t out_shape[2]); //TODO
 
 double sigmoid(double x);
 double relu(double x);
+double identity(double x);
 #endif
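
For context, the change above moves ownership of the output buffer to the caller: nn_layer_forward() no longer allocates, it only checks that the supplied buffer has shape (input rows x layer neurons) and exits otherwise. The sketch below shows how a caller might use the new signature. It is a minimal, hedged example, not code from the repository: the exact Layer field set, the fields filled in by nn_layer_init_weights(), and the sample sizes are assumptions; only .neurons, .activation, sigmoid(), and the function prototypes appear in the diff.

/* usage_sketch.c -- illustrative only; assumes nn.h/nn.c from this commit
 * and a Layer initializable with .neurons and .activation */
#include <stdlib.h>
#include "nn.h"

int main(void)
{
    /* 4 samples with 3 features each, row-major; values are placeholders */
    size_t input_shape[2] = {4, 3};
    double input[4 * 3] = {0};

    /* one layer: 3 inputs -> 5 neurons, sigmoid activation (assumed fields) */
    Layer layers[] = {
        {.neurons = 5, .activation = sigmoid},
    };
    nn_layer_init_weights(layers, 1, input_shape[1]);

    /* the caller now owns the output buffer; its shape must be
     * (input rows x layer neurons) or nn_layer_forward() exits */
    size_t out_shape[2] = {input_shape[0], layers[0].neurons};
    double *out = calloc(out_shape[0] * out_shape[1], sizeof(*out));
    if (out == NULL) return 1;

    nn_layer_forward(layers[0], out, out_shape, input, input_shape);

    free(out);
    nn_layer_free_weights(layers, 1);
    return 0;
}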