diff options
author | jvech <jmvalenciae@unal.edu.co> | 2023-08-02 20:49:21 -0500 |
---|---|---|
committer | jvech <jmvalenciae@unal.edu.co> | 2023-08-02 20:49:21 -0500 |
commit | 7796b9e4dc1fd138108b0262ab131e51453d8e66 (patch) | |
tree | 42c5b7a8103ba83e0df73f3d84cc2bfa4ce58283 /src/nn.h | |
parent | 2d84b6e5a34fd8fbc62a96c4842665701ab4e9bd (diff) |
add: layer backward done
Diffstat (limited to 'src/nn.h')
-rw-r--r-- | src/nn.h | 22 |
1 file changed, 15 insertions, 7 deletions
@@ -26,19 +26,27 @@ void nn_layer_map_activation(
 void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2]);

 void nn_layer_backward(
-        Layer layer,
-        double *weights,
-        double *cost_derivative, size_t dcost_shape[2],
-        double *out, size_t out_shape[2],
-        double *labels, size_t labels_shape[2],
-        double *local_gradient); //TODO
+        double *weights, size_t weigths_shape[2],
+        double *delta, size_t dcost_cols,
+        double *out_prev, size_t out_cols,
+        Layer layer, double alpha);

 double sigmoid(double x);
 double relu(double x);
 double identity(double x);

-void nn_forward(double **aout, double **zout, double *input, size_t input_shape[2], Layer network[], size_t network_size);
+void nn_forward(
+        double **aout, double **zout,
+        double *input, size_t input_shape[2],
+        Layer network[], size_t network_size);
+
+void nn_backwad(
+        double **weights,
+        double **zout, double **outs, size_t n_rows,
+        Layer network[], size_t network_size,
+        double (cost_derivative)(double, double));
+
 void nn_layer_out_delta(
         double *delta, size_t delta_cols,
         double *error, size_t error_cols,