diff options
author | jvech <jmvalenciae@unal.edu.co> | 2023-08-02 20:49:21 -0500 |
---|---|---|
committer | jvech <jmvalenciae@unal.edu.co> | 2023-08-02 20:49:21 -0500 |
commit | 7796b9e4dc1fd138108b0262ab131e51453d8e66 (patch) | |
tree | 42c5b7a8103ba83e0df73f3d84cc2bfa4ce58283 /src/nn.c | |
parent | 2d84b6e5a34fd8fbc62a96c4842665701ab4e9bd (diff) |
add: layer backward done
Diffstat (limited to 'src/nn.c')
-rw-r--r-- | src/nn.c | 18 |
1 file changed, 18 insertions, 0 deletions
@@ -2,6 +2,24 @@ static void fill_random_weights(double *weights, double *bias, size_t rows, size_t cols); +void nn_layer_backward( + double *weights, size_t weigths_shape[2], + double *delta, size_t delta_cols, + double *out_prev, size_t out_cols, + Layer layer, double alpha) +{ + assert(out_cols == weigths_shape[0] && "out_cols does not match with weight rows"); + assert(delta_cols == weigths_shape[1] && "delta_cols does not match with weight cols"); + + for (size_t i = 0; i < weigths_shape[0]; i++) { + for (size_t j = 0; j < weigths_shape[0]; j++) { + size_t index = weigths_shape[1] * i + j; + double dcost_w = delta[j] * out_prev[i]; + weights[index] = layer.weights[index] + alpha * dcost_w; + } + } +} + void nn_layer_hidden_delta( double *delta, size_t delta_cols, double *delta_next, size_t delta_next_cols, |