aboutsummaryrefslogtreecommitdiff
path: root/src/nn.c
diff options
context:
space:
mode:
authorjvech <jmvalenciae@unal.edu.co>2023-07-13 20:54:27 -0500
committerjvech <jmvalenciae@unal.edu.co>2023-07-13 20:54:27 -0500
commitbd74d6e1842eca74ddc2c268ae9f83e43822fc1b (patch)
tree9c36d2eadf59bd3deaefb58339e064951bff8a80 /src/nn.c
parent46f8c9bed801355ecd85c14ea7a7f11d38f1e5ba (diff)
add: forward pass with openblas in progress
Diffstat (limited to 'src/nn.c')
-rw-r--r--src/nn.c49
1 file changed, 34 insertions(+), 15 deletions(-)
diff --git a/src/nn.c b/src/nn.c
index 99ae1a3..1993614 100644
--- a/src/nn.c
+++ b/src/nn.c
@@ -2,27 +2,51 @@
static void fill_random_weights(double *weights, double *bias, size_t rows, size_t cols);
-void nn_layer_init_weights(Layer layer[], size_t nmemb, size_t n_inputs)
+double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
+{
+ double *out = calloc(input_shape[0] * layer.neurons, sizeof(double));
+ if (out == NULL) {
+ perror("nn_layer_forward() Error");
+ exit(1);
+ }
+
+ for (size_t i = 0; i < input_shape[0]; i++) {
+ for (size_t j = 0; j < layer.neurons; j++) {
+ size_t index = layer.neurons * i + j;
+ out[index] = layer.bias[j];
+ }
+ }
+
+ cblas_dgemm(CblasRowMajor, CblasNoTrans, CblasNoTrans,
+ input_shape[0], layer.neurons, layer.input_nodes,
+ 1.0, input, input_shape[1], //alpha A
+ layer.weights, layer.neurons, // B
+ 1.0, out, layer.neurons);
+ return out;
+}
+
+void nn_layer_init_weights(Layer layers[], size_t nmemb, size_t n_inputs)
{
int i;
size_t prev_size = n_inputs;
for (i = 0; i < nmemb; i++) {
- layer[i].weights = calloc(prev_size * layer[i].neurons, sizeof(Layer));
- layer[i].bias = calloc(prev_size, sizeof(Layer));
+ layers[i].weights = calloc(prev_size * layers[i].neurons, sizeof(Layer));
+ layers[i].bias = calloc(layers[i].neurons, sizeof(Layer));
- if (layer[i].weights == NULL || layer[i].bias == NULL) {
- goto nn_layer_calloc_weights_error;
+ if (layers[i].weights == NULL || layers[i].bias == NULL) {
+ goto nn_layers_calloc_weights_error;
}
- fill_random_weights(layer[i].weights, layer[i].bias, prev_size, layer[i].neurons);
- prev_size = layer[i].neurons;
+ fill_random_weights(layers[i].weights, layers[i].bias, prev_size, layers[i].neurons);
+ layers[i].input_nodes = prev_size;
+ prev_size = layers[i].neurons;
}
return;
-nn_layer_calloc_weights_error:
- perror("nn_layer_calloc_weights() Error");
+nn_layers_calloc_weights_error:
+ perror("nn_layers_calloc_weights() Error");
exit(1);
}
@@ -30,15 +54,10 @@ void nn_layer_free_weights(Layer *layer, size_t nmemb)
{
for (int i = 0; i < nmemb; i++) {
free(layer[i].weights);
+ free(layer[i].bias);
}
}
-double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
-{
- double *out = NULL;
- return out;
-}
-
double sigmoid(double x)
{
return 1 / (1 + exp(-x));
Feel free to download, copy and edit any repo