| author    | jvech <jmvalenciae@unal.edu.co> | 2023-07-13 20:54:27 -0500 |
| committer | jvech <jmvalenciae@unal.edu.co> | 2023-07-13 20:54:27 -0500 |
| commit    | bd74d6e1842eca74ddc2c268ae9f83e43822fc1b (patch) | |
| tree      | 9c36d2eadf59bd3deaefb58339e064951bff8a80 /src/nn.h | |
| parent    | 46f8c9bed801355ecd85c14ea7a7f11d38f1e5ba (diff) | |
add: forward pass with openblas in progress
Diffstat (limited to 'src/nn.h')
| -rw-r--r-- | src/nn.h | 8 |
1 file changed, 5 insertions(+), 3 deletions(-)
```diff
@@ -4,20 +4,22 @@
 #include <stdlib.h>
 #include <stdio.h>
 #include <stdint.h>
+#include <string.h>
 #include <math.h>
 #include <unistd.h>
+#include <openblas/cblas.h>
 
 typedef struct Layer {
     double *weights, *bias;
     double (*activation)(double x);
-    size_t neurons, input_size;
+    size_t neurons, input_nodes;
 } Layer;
 
 void nn_layer_init_weights(Layer *layer, size_t nmemb, size_t input_size);
 void nn_layer_free_weights(Layer *layer, size_t nmemb);
-double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2]);
-double * nn_layer_backward(Layer layer, double *output, size_t out_shape[2]);
+double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2]); //TODO
+double * nn_layer_backward(Layer layer, double *output, size_t out_shape[2]); //TODO
 
 double sigmoid(double x);
 double relu(double x);
```
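The commit message says the OpenBLAS forward pass is still in progress, and the header marks `nn_layer_forward` as `//TODO`. Below is a minimal sketch of how that function might be built on `cblas_dgemm`, assuming row-major storage with `input` shaped `input_shape[0] x input_shape[1]` (batch x `input_nodes`), `layer.weights` shaped `input_nodes x neurons`, and `layer.bias` holding `neurons` entries; none of these layout choices are confirmed by the header itself.

```c
/* Sketch (not the repo's actual implementation): forward pass via cblas_dgemm.
 * Assumes the Layer typedef from src/nn.h above and row-major data. */
#include <stdlib.h>
#include <openblas/cblas.h>
#include "nn.h"

double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
{
    size_t batch = input_shape[0];
    double *out = calloc(batch * layer.neurons, sizeof(double));
    if (out == NULL) return NULL;

    /* out = input (batch x input_nodes) . weights (input_nodes x neurons) */
    cblas_dgemm(CblasRowMajor, CblasNoTrans, CblasNoTrans,
                (int)batch, (int)layer.neurons, (int)layer.input_nodes,
                1.0, input, (int)layer.input_nodes,
                layer.weights, (int)layer.neurons,
                0.0, out, (int)layer.neurons);

    /* broadcast the bias over each row, then apply the activation element-wise */
    for (size_t i = 0; i < batch; i++)
        for (size_t j = 0; j < layer.neurons; j++)
            out[layer.neurons * i + j] =
                layer.activation(out[layer.neurons * i + j] + layer.bias[j]);

    return out;  /* caller owns the buffer and must free() it */
}
```

A translation unit using this would link against OpenBLAS (e.g. `cc ... -lopenblas`); the `beta = 0.0` argument tells `dgemm` to overwrite `out` rather than accumulate into it.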