author    jvech <jmvalenciae@unal.edu.co>  2023-07-29 10:34:20 -0500
committer jvech <jmvalenciae@unal.edu.co>  2023-07-29 10:34:20 -0500
commit    525f8398c58cc2ca7f92c416df880068c62abbd5
tree      419a7c3e1aa77d9e4ac72389f61d878a4161a499 /src/nn.h
parent    1503fc83991237fa0cf6eb42b0ca1a4904cf8a01
add: nn_layer_forward split
The function nn_layer_forward was changed to compute only the affine
product X*W instead of f(X*W); the new function nn_layer_map_activation
maps the activation function f over the output of nn_layer_forward.
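
The implementation lives in src/nn.c and is not part of this diff, so the
following is only a sketch of what the new mapping step could look like,
assuming row-major matrices and matching aout/zout shapes; only the
prototype comes from the header:

    #include <stddef.h>

    /* Sketch (assumption): walk the affine output Z elementwise and
     * store f(z) in A. aout receives the activations, zout holds the
     * result of nn_layer_forward; shapes are {rows, cols}. */
    void nn_layer_map_activation(
            double (*activation)(double),
            double *aout, size_t aout_shape[2],
            double *zout, size_t zout_shape[2])
    {
        for (size_t i = 0; i < zout_shape[0]; i++) {
            for (size_t j = 0; j < zout_shape[1]; j++) {
                aout[i * aout_shape[1] + j] =
                    activation(zout[i * zout_shape[1] + j]);
            }
        }
    }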
Diffstat (limited to 'src/nn.h')
-rw-r--r--  src/nn.h  13 ++++++++++---
1 file changed, 10 insertions(+), 3 deletions(-)
@@ -18,13 +18,20 @@ typedef struct Layer {
 
 void nn_network_init_weights(Layer *network, size_t nmemb, size_t input_cols);
 void nn_network_free_weights(Layer *network, size_t nmemb);
 
-void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2]); //TODO
-void nn_layer_backward(Layer *layer, double *out, size_t out_shape[2]); //TODO
+void nn_layer_map_activation(double (*activation)(double), double *aout, size_t aout_shape[2], double *zout, size_t zout_shape[2]);
+void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2]);
+void nn_layer_backward(
+        Layer *layer,
+        double *weights,
+        double *out, size_t out_shape[2],
+        double *labels, size_t labels_shape[2],
+        double *local_gradient); //TODO
 
 double sigmoid(double x);
 double relu(double x);
 double identity(double x);
 
-void nn_forward(double **out, double *input, size_t input_shape[2], Layer network[], size_t network_size);
+void nn_forward(double **aout, double **zout, double *input, size_t input_shape[2], Layer network[], size_t network_size);
+double nn_layer_out_delta(double error, double (*activation_derivative)(double));
 #endif
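
As a usage note, a hypothetical call site under the new prototypes might
look like the helper below; the explicit activation function pointer
parameter is an assumption, since this diff does not show what the Layer
struct stores:

    #include "nn.h"  /* Layer, nn_layer_forward, nn_layer_map_activation */

    /* Hypothetical helper: the two-step forward pass after the split.
     * Z = X*W via nn_layer_forward, then A = f(Z) via
     * nn_layer_map_activation. */
    void forward_one_layer(
            Layer layer, double (*activation)(double),
            double *aout, double *zout, size_t out_shape[2],
            double *input, size_t input_shape[2])
    {
        nn_layer_forward(layer, zout, out_shape, input, input_shape);  /* Z = X*W */
        nn_layer_map_activation(activation, aout, out_shape, zout, out_shape);  /* A = f(Z) */
    }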