| author    | jvech <jmvalenciae@unal.edu.co>                    | 2023-08-04 18:40:41 -0500 |
|-----------|----------------------------------------------------|---------------------------|
| committer | jvech <jmvalenciae@unal.edu.co>                    | 2023-08-04 18:40:41 -0500 |
| commit    | 21a570b6d98497835785eccf28fc7f16e57ab197 (patch)   |                           |
| tree      | d9106f88ea04b1ce175b7d8382966a39f7dd652a /src/nn.h |                           |
| parent    | 7796b9e4dc1fd138108b0262ab131e51453d8e66 (diff)    |                           |
add: nn_backward implemented
It still needs to be tested, and some backward layer functions were redefined
to improve readability.
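As a minimal sketch of how a caller might use the new `nn_backward()` prototype introduced by this commit: the `train_step` wrapper, the `square_dcost` cost derivative, and the learning-rate value below are illustrative assumptions, not part of the repository.

```c
/* Hypothetical caller of the new nn_backward() prototype (see diff below).
 * train_step, square_dcost, and the alpha value are illustrative only. */
#include <stddef.h>
#include "nn.h"

/* assumed squared-error derivative: dC/da = a - y */
static double square_dcost(double out, double label)
{
    return out - label;
}

static void train_step(
        Layer network[], size_t network_size,
        double **weights,             /* per-layer weight matrices */
        double **zout, double **outs, /* filled by a prior nn_forward() call */
        double *input, size_t input_shape[2],
        double *labels, size_t labels_shape[2])
{
    nn_backward(weights, zout, outs,
                input, input_shape,
                labels, labels_shape,
                network, network_size,
                square_dcost,
                1e-3);                /* alpha: learning rate */
}
```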
Diffstat (limited to 'src/nn.h')
| -rw-r--r-- | src/nn.h | 36 |
1 file changed, 20 insertions, 16 deletions
```diff
@@ -13,6 +13,7 @@ typedef struct Layer {
     double *weights, *bias;
     double (*activation)(double x);
+    double (*activation_derivative)(double x);
     size_t neurons, input_nodes;
 } Layer;
 
 
@@ -24,12 +25,6 @@ void nn_layer_map_activation(
     double *aout, size_t aout_shape[2],
     double *zout, size_t zout_shape[2]);
 
-void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2]);
-void nn_layer_backward(
-    double *weights, size_t weigths_shape[2],
-    double *delta, size_t dcost_cols,
-    double *out_prev, size_t out_cols,
-    Layer layer, double alpha);
 
 double sigmoid(double x);
 double relu(double x);
@@ -41,22 +36,31 @@ void nn_forward(
     double *input, size_t input_shape[2],
     Layer network[], size_t network_size);
 
-void nn_backwad(
+void nn_backward(
     double **weights,
-    double **zout, double **outs, size_t n_rows,
+    double **zout, double **outs,
+    double *input, size_t input_shape[2],
+    double *labels, size_t labels_shape[2],
     Layer network[], size_t network_size,
-    double (cost_derivative)(double, double));
+    double (cost_derivative)(double, double),
+    double alpha);
+
+void nn_layer_forward(
+    Layer layer,
+    double *out, size_t out_shape[2],
+    double *input, size_t input_shape[2]);
+
+void nn_layer_backward(
+    double *weights, size_t weigths_shape[2],
+    double *delta, double *out_prev,
+    Layer layer, double alpha);
 
 void nn_layer_out_delta(
-    double *delta, size_t delta_cols,
-    double *error, size_t error_cols,
-    double *zout, size_t zout_cols,
+    double *delta, double *dcost_out, double *zout, size_t cols,
     double (*activation_derivative)(double));//TODO
 
 void nn_layer_hidden_delta(
-    double *delta, size_t delta_cols,
-    double *delta_next, size_t delta_next_cols,
-    double *weigths_next, size_t weigths_shape[2],
-    double *zout, size_t zout_cols,
+    double *delta, double *delta_next, double *zout,
+    double *weights_next, size_t weights_next_shape[2],
     double (*activation_derivative)(double));//TODO
 #endif
```
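The two delta routines are still marked `//TODO` in the header. A minimal sketch of what they would presumably compute, using the standard backpropagation recurrences; the row-major layout of `weights_next` (rows indexed by current-layer units, columns by next-layer units) is an assumption, not something this commit specifies.

```c
/* Sketch of the //TODO delta routines declared in nn.h, implementing the
 * usual backprop recurrences. The weights_next memory layout is assumed. */
#include <stddef.h>
#include "nn.h"

/* output layer: delta_j = dC/da_j * f'(z_j) */
void nn_layer_out_delta(
        double *delta, double *dcost_out, double *zout, size_t cols,
        double (*activation_derivative)(double))
{
    for (size_t j = 0; j < cols; j++)
        delta[j] = dcost_out[j] * activation_derivative(zout[j]);
}

/* hidden layer: delta_j = (sum_k W_next[j][k] * delta_next_k) * f'(z_j),
 * assuming W_next is row-major with shape {current units, next units} */
void nn_layer_hidden_delta(
        double *delta, double *delta_next, double *zout,
        double *weights_next, size_t weights_next_shape[2],
        double (*activation_derivative)(double))
{
    for (size_t j = 0; j < weights_next_shape[0]; j++) {
        double sum = 0;
        for (size_t k = 0; k < weights_next_shape[1]; k++)
            sum += weights_next[j * weights_next_shape[1] + k] * delta_next[k];
        delta[j] = sum * activation_derivative(zout[j]);
    }
}
```

Both routines take the derivative through the new `activation_derivative` field added to `Layer` in this commit; a sigmoid layer, for instance, would typically pair `sigmoid` with its standard derivative s * (1 - s).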