diff options
author | jvech <jmvalenciae@unal.edu.co> | 2023-07-12 10:50:32 -0500 |
---|---|---|
committer | jvech <jmvalenciae@unal.edu.co> | 2023-07-12 10:50:32 -0500 |
commit | 46f8c9bed801355ecd85c14ea7a7f11d38f1e5ba (patch) | |
tree | 4037623fabea06bda36ba7193293ea605bb88aca /src/nn.h | |
parent | 29f97864e1b0ef142249898b39332b26a5fb4906 (diff) |
add: weights initialization and deallocation done
Diffstat (limited to 'src/nn.h')
-rw-r--r-- | src/nn.h | 24 |
1 file changed, 24 insertions, 0 deletions
/*
 * nn.h — public interface for a minimal fully-connected neural network layer:
 * weight allocation/deallocation, forward/backward passes, and activations.
 */
#ifndef NN_H
#define NN_H
/* NOTE(review): guard renamed from __NN__ — identifiers containing a double
 * underscore are reserved for the implementation (C11 §7.1.3 / CERT DCL37-C). */

#include <stdlib.h>
#include <stdio.h>
#include <stdint.h>
#include <math.h>
#include <unistd.h>

/* One dense layer: a weight matrix, a bias vector, and an activation applied
 * element-wise. Sizes of the weight/bias buffers are implied by `neurons` and
 * `input_size` — presumably weights is neurons*input_size doubles and bias is
 * neurons doubles; confirm against the implementation in nn.c. */
typedef struct Layer {
    double *weights, *bias;           /* owned heap buffers (see init/free below) */
    double (*activation)(double x);   /* element-wise activation, e.g. sigmoid/relu */
    size_t neurons, input_size;       /* output and input dimensions of the layer */
} Layer;

/* Allocate and initialize the weight/bias buffers of `nmemb` layers.
 * NOTE(review): assumes `layer` points to an array of `nmemb` Layer structs
 * whose `neurons` fields are already set — TODO confirm against nn.c. */
void nn_layer_init_weights(Layer *layer, size_t nmemb, size_t input_size);

/* Release the weight/bias buffers of `nmemb` layers allocated by
 * nn_layer_init_weights(). Does not free the Layer structs themselves. */
void nn_layer_free_weights(Layer *layer, size_t nmemb);

/* Forward pass: returns the layer output for `input` with shape
 * input_shape[2] (rows, cols). Ownership of the returned buffer is with the
 * caller — presumably heap-allocated; verify in nn.c. */
double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2]);

/* Backward pass counterpart of nn_layer_forward; `out_shape[2]` describes
 * the `output` buffer. Semantics defined in nn.c. */
double * nn_layer_backward(Layer layer, double *output, size_t out_shape[2]);

/* Activation functions usable as Layer.activation. */
double sigmoid(double x);
double relu(double x);

#endif /* NN_H */