about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--src/main.c45
-rw-r--r--src/nn.c18
-rw-r--r--src/nn.h3
3 files changed, 40 insertions, 26 deletions
diff --git a/src/main.c b/src/main.c
index c348b76..a65591d 100644
--- a/src/main.c
+++ b/src/main.c
@@ -13,10 +13,6 @@ typedef struct Array {
} Array;
#define ARRAY_SIZE(x, type) sizeof(x) / sizeof(type)
-Layer neural[] = {
- {.neurons = 5, .activation = relu},
- {.neurons = 1, .activation = sigmoid},
-};
static void json_read(const char *filepath,
Array *input, Array *out,
@@ -30,8 +26,8 @@ void json_read(const char *filepath,
char *in_keys[],
size_t n_input_keys)
{
- FILE *fp;
- char *fp_buffer;
+ FILE *fp = NULL;
+ char *fp_buffer = NULL;
size_t ret;
int64_t fp_size;
@@ -90,34 +86,31 @@ json_read_error:
}
int main(void) {
+ Layer network[] = {
+ {.neurons = 5, .activation = relu},
+ {.neurons = 1, .activation = sigmoid},
+ };
Array X, y;
char *in_keys[] = {"area", "longitude", "latitude"};
json_read("data/test.json", &X, &y, "price", in_keys, ARRAY_SIZE(in_keys, char *));
- nn_layer_init_weights(neural, ARRAY_SIZE(neural, Layer), X.shape[1]);
- double *out = nn_layer_forward(neural[0], X.data, X.shape);
+ size_t network_size = ARRAY_SIZE(network, Layer);
+ nn_network_init_weights(network, network_size, 3);
+ double **outputs = calloc(network_size, sizeof(double *));
- printf("area\tlat\tlong\t| price\n");
- for (size_t i = 0; i < X.shape[0]; i++) {
- for (size_t j = 0; j < X.shape[1]; j++) {
- size_t index = X.shape[1] * i + j;
- printf("%.2lf\t", X.data[index]);
- }
- printf("| %.2lf\n", y.data[i]);
- }
+	size_t out_rows = X.shape[0];
+	for (size_t l = 0; l < network_size; l++) {
+		outputs[l] = calloc(network[l].neurons * out_rows, sizeof(double));
+	}
- printf("---\n");
- for (size_t i = 0; i < X.shape[0]; i++) {
- for (size_t j = 0; j < neural[0].neurons; j++) {
- size_t index = neural[0].neurons * i + j;
- printf("%.2lf\t", out[index]);
- }
- printf("\n");
- }
+ nn_forward(outputs, X.data, X.shape, network, network_size);
+ for (size_t l = 0; l < network_size; l++) free(outputs[l]);
+ free(outputs);
- nn_layer_free_weights(neural, ARRAY_SIZE(neural, Layer));
- free(out);
+ nn_network_free_weights(network, network_size);
free(X.data);
free(y.data);
+
+
}
diff --git a/src/nn.c b/src/nn.c
index d773324..e47b56d 100644
--- a/src/nn.c
+++ b/src/nn.c
@@ -2,6 +2,24 @@
static void fill_random_weights(double *weights, double *bias, size_t rows, size_t cols);
+void nn_forward(
+ double **out,
+ double *X, size_t X_shape[2],
+ Layer network[], size_t network_size)
+{
+ size_t in_shape[2] = {X_shape[0], X_shape[1]};
+ size_t out_shape[2];
+ out_shape[0] = X_shape[0];
+ double *input = X;
+
+ for (size_t l = 0; l < network_size; l++) {
+ out_shape[1] = network[l].neurons;
+ nn_layer_forward(network[l], out[l], out_shape, input, in_shape);
+ in_shape[1] = out_shape[1];
+ input = out[l];
+ }
+}
+
void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2])
{
if (out_shape[0] != input_shape[0] || out_shape[1] != layer.neurons) {
diff --git a/src/nn.h b/src/nn.h
index d66fabc..a339dfc 100644
--- a/src/nn.h
+++ b/src/nn.h
@@ -24,4 +24,7 @@ void nn_layer_backward(Layer *layer, double *out, size_t out_shape[2]); //TODO
double sigmoid(double x);
double relu(double x);
double identity(double x);
+
+
+void nn_forward(double **out, double *input, size_t input_shape[2], Layer network[], size_t network_size);
#endif
Feel free to download, copy and edit any repo