aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorjvech <jmvalenciae@unal.edu.co>2023-07-12 10:50:32 -0500
committerjvech <jmvalenciae@unal.edu.co>2023-07-12 10:50:32 -0500
commit46f8c9bed801355ecd85c14ea7a7f11d38f1e5ba (patch)
tree4037623fabea06bda36ba7193293ea605bb88aca /src
parent29f97864e1b0ef142249898b39332b26a5fb4906 (diff)
add: weights initialization and deallocation done
Diffstat (limited to 'src')
-rw-r--r--src/main.c22
-rw-r--r--src/neural.h12
-rw-r--r--src/nn.c82
-rw-r--r--src/nn.h24
4 files changed, 119 insertions, 21 deletions
diff --git a/src/main.c b/src/main.c
index 8c82260..4141df0 100644
--- a/src/main.c
+++ b/src/main.c
@@ -3,13 +3,19 @@
#include <string.h>
#include <json-c/json.h>
-#include "neural.h"
+#include "nn.h"
+const size_t MAX_FILE_SIZE = 1<<29; // 0.5 GiB
// 2-D numeric buffer produced by json_read(): `data` holds
// shape[0]*shape[1] doubles, apparently row-major with shape[0] as the
// row count (that is how main() indexed it) — TODO confirm once
// json_read's fill loop is in view. Owner frees `data` (main does).
typedef struct Array {
double *data;
size_t shape[2];
} Array;
+Layer neural[] = {
+ [0] = {.neurons = 3, .activation = relu},
+ [1] = {.neurons = 1, .activation = sigmoid},
+};
+
static Array json_read(const char *filepath);
Array json_read(const char *filepath)
@@ -28,6 +34,9 @@ Array json_read(const char *filepath)
fp_size = ftell(fp);
if (fp_size == -1) goto json_read_error;
+ if (fp_size >= MAX_FILE_SIZE) {
+ fprintf(stderr, "ftell Error(): '%s' size greater than '%zu'\n", filepath, MAX_FILE_SIZE);
+ }
rewind(fp);
fp_buffer = calloc(sizeof(char), fp_size);
@@ -67,14 +76,9 @@ json_read_error:
int main(void) {
Array json_data = json_read("data/housing_rent.json");
- printf("area\tlong\tlat\tprice\n");
- for (int i = 0; i < json_data.shape[0]; i++) {
- printf("%3.1lf\t%3.2lf\t%3.2lf\t%lf\n",
- json_data.data[4*i],
- json_data.data[4*i + 1],
- json_data.data[4*i + 2],
- json_data.data[4*i + 3]);
- }
+ nn_layer_init_weights(neural, 2, 3);
+ printf("%lf\n", neural[0].weights[0]);
+ nn_layer_free_weights(neural, 2);
free(json_data.data);
return 0;
}
diff --git a/src/neural.h b/src/neural.h
deleted file mode 100644
index 4456907..0000000
--- a/src/neural.h
+++ /dev/null
@@ -1,12 +0,0 @@
-#ifndef __NEURAL__
-#define __NEURAL__
-
-#include <stdlib.h>
-#include <stdint.h>
-
-typedef struct Layer {
- double *weights;
- double (*activation)(double x);
- size_t neurons;
-} Layer;
-#endif
diff --git a/src/nn.c b/src/nn.c
new file mode 100644
index 0000000..99ae1a3
--- /dev/null
+++ b/src/nn.c
@@ -0,0 +1,82 @@
+#include "nn.h"
+
+static void fill_random_weights(double *weights, double *bias, size_t rows, size_t cols);
+
+void nn_layer_init_weights(Layer layer[], size_t nmemb, size_t n_inputs)
+{
+ int i;
+ size_t prev_size = n_inputs;
+
+
+ for (i = 0; i < nmemb; i++) {
+ layer[i].weights = calloc(prev_size * layer[i].neurons, sizeof(Layer));
+ layer[i].bias = calloc(prev_size, sizeof(Layer));
+
+ if (layer[i].weights == NULL || layer[i].bias == NULL) {
+ goto nn_layer_calloc_weights_error;
+ }
+ fill_random_weights(layer[i].weights, layer[i].bias, prev_size, layer[i].neurons);
+ prev_size = layer[i].neurons;
+ }
+
+ return;
+
+nn_layer_calloc_weights_error:
+ perror("nn_layer_calloc_weights() Error");
+ exit(1);
+}
+
+void nn_layer_free_weights(Layer *layer, size_t nmemb)
+{
+ for (int i = 0; i < nmemb; i++) {
+ free(layer[i].weights);
+ }
+}
+
+double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
+{
+ double *out = NULL;
+ return out;
+}
+
+double sigmoid(double x)
+{
+ return 1 / (1 + exp(-x));
+}
+
/* Rectified linear unit: identity for positive x, zero otherwise. */
double relu(double x)
{
    if (x > 0) {
        return x;
    }
    return 0;
}
+
/*
 * Fill a (rows x cols) weight matrix and a cols-length bias vector with
 * uniform-ish random doubles in roughly [-2, 2], derived from raw
 * int64_t samples read from the OS entropy device.
 *
 * Fixes over the original:
 *  - the bias loop ran to rows*cols, writing far past the end of both
 *    `bias` and `random_bias` (heap overflow);
 *  - allocation results were checked only AFTER fread() had already
 *    written through them;
 *  - fread() return values were ignored;
 *  - fread() item size said sizeof(double) while filling int64_t buffers;
 *  - /dev/random may block indefinitely; /dev/urandom does not and is
 *    sufficient for weight initialization;
 *  - the error path leaked the temporary buffers and the FILE handle
 *    (moot today because it exit()s, but safe if that ever changes).
 */
void fill_random_weights(double *weights, double *bias, size_t rows, size_t cols)
{
    int64_t *random_weights = NULL;
    int64_t *random_bias = NULL;
    size_t weights_size = rows * cols;

    FILE *fp = fopen("/dev/urandom", "rb");
    if (fp == NULL) goto nn_fill_random_weights_error;

    random_weights = calloc(weights_size, sizeof(int64_t));
    random_bias = calloc(cols, sizeof(int64_t));
    if (!random_weights || !random_bias) goto nn_fill_random_weights_error;

    if (fread(random_weights, sizeof(int64_t), weights_size, fp) != weights_size)
        goto nn_fill_random_weights_error;
    if (fread(random_bias, sizeof(int64_t), cols, fp) != cols)
        goto nn_fill_random_weights_error;

    for (size_t i = 0; i < weights_size; i++) {
        weights[i] = (double)random_weights[i] / (double)INT64_MAX * 2;
    }

    /* bias has exactly `cols` elements — one per output neuron. */
    for (size_t i = 0; i < cols; i++) {
        bias[i] = (double)random_bias[i] / (double)INT64_MAX * 2;
    }

    free(random_weights);
    free(random_bias);
    fclose(fp);
    return;

nn_fill_random_weights_error:
    perror("nn_fill_random_weights() Error");
    free(random_weights);
    free(random_bias);
    if (fp != NULL) fclose(fp);
    exit(1);
}
diff --git a/src/nn.h b/src/nn.h
new file mode 100644
index 0000000..fb495bd
--- /dev/null
+++ b/src/nn.h
@@ -0,0 +1,24 @@
/* nn.h — public interface for the neural-network layer module.
 *
 * The include guard was renamed: identifiers beginning with a double
 * underscore (such as the old __NN__) are reserved for the
 * implementation by the C standard (C11 7.1.3). */
#ifndef NN_H
#define NN_H

#include <stdlib.h>
#include <stdio.h>
#include <stdint.h>
#include <math.h>
#include <unistd.h>

typedef struct Layer {
    double *weights, *bias;           /* owned; see nn_layer_init_weights */
    double (*activation)(double x);   /* e.g. relu, sigmoid */
    size_t neurons, input_size;
} Layer;

/* Allocate + randomize weights/bias for nmemb layers; exits(1) on OOM. */
void nn_layer_init_weights(Layer *layer, size_t nmemb, size_t input_size);
/* Release buffers created by nn_layer_init_weights. */
void nn_layer_free_weights(Layer *layer, size_t nmemb);

double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2]);
double * nn_layer_backward(Layer layer, double *output, size_t out_shape[2]);

double sigmoid(double x);
double relu(double x);
#endif /* NN_H */
Feel free to download, copy and edit any repo