author     jvech <jmvalenciae@unal.edu.co>   2023-08-08 17:04:04 -0500
committer  jvech <jmvalenciae@unal.edu.co>   2023-08-08 17:04:04 -0500
commit     4b2fec340e01d09902ca790621b476a05f4b8c2a (patch)
tree       542e59795357b9a94c873f6cd44371e3bd48ea38 /src/nn.c
parent     fff35116eed83c9368e8bd07d02c9e95c447e018 (diff)
add: pass activations to a separate file
The point is to have a separate file for new activation functions without
bloating the nn.c file.
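The commit only shows the deletions from src/nn.c, not the receiving file. As a
minimal sketch of what the refactor implies, assuming the new header is named
src/activations.h (the file name, the include guard, and the placement of
struct Activation here rather than in nn.h are assumptions, not taken from this
commit), its public surface would be the four activation objects:

/* Hypothetical src/activations.h -- sketched from this commit's deletions.
 * File name, include guard, and the location of struct Activation are
 * assumptions, not confirmed by the commit. */
#ifndef ACTIVATIONS_H
#define ACTIVATIONS_H

struct Activation {
    double (*func)(double);  /* f(x)  */
    double (*dfunc)(double); /* f'(x) */
};

extern struct Activation NN_RELU;
extern struct Activation NN_LEAKY_RELU;
extern struct Activation NN_SIGMOID;
extern struct Activation NN_SOFTPLUS;

#endif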
Diffstat (limited to 'src/nn.c')
-rw-r--r--  src/nn.c  70
1 file changed, 0 insertions, 70 deletions
@@ -9,41 +9,11 @@ static double get_avg_loss(
 double square_loss(double labels[], double net_outs[], size_t shape);
 double square_dloss_out(double labels, double net_out);
 
-double leaky_relu(double x);
-double dleaky_relu(double x);
-double relu(double x);
-double drelu(double x);
-double sigmoid(double x);
-double dsigmoid(double x);
-double softplus(double x);
-double dsoftplus(double x);
-
 struct Cost NN_SQUARE = {
     .func = square_loss,
     .dfunc_out = square_dloss_out
 };
 
-struct Activation NN_SOFTPLUS = {
-    .func = softplus,
-    .dfunc = dsoftplus,
-};
-
-struct Activation NN_LEAKY_RELU = {
-    .func = leaky_relu,
-    .dfunc = dleaky_relu
-};
-
-struct Activation NN_RELU = {
-    .func = relu,
-    .dfunc = drelu
-};
-
-struct Activation NN_SIGMOID = {
-    .func = sigmoid,
-    .dfunc = dsigmoid
-};
-
-
 void nn_network_train(
         Layer network[], size_t network_size,
         double *input, size_t input_shape[2],
@@ -338,46 +308,6 @@ nn_fill_random_weights_error:
     exit(1);
 }
 
-double sigmoid(double x)
-{
-    return 1 / (1 + exp(-x));
-}
-
-double dsigmoid(double x)
-{
-    return sigmoid(x) * (1 - sigmoid(x));
-}
-
-double relu(double x)
-{
-    return (x > 0) ? x : 0;
-}
-
-double drelu(double x)
-{
-    return (x > 0) ? 1 : 0;
-}
-
-double leaky_relu(double x)
-{
-    return (x > 0) ? x : 0.01 * x;
-}
-
-double dleaky_relu(double x)
-{
-    return (x > 0) ? 1 : 0.01;
-}
-
-double softplus(double x)
-{
-    return log1p(exp(x));
-}
-
-double dsoftplus(double x)
-{
-    return sigmoid(x);
-}
-
 double square_loss(double labels[], double net_out[], size_t shape)
 {
     double sum = 0;
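For context, the deleted definitions above would be re-homed roughly as
follows; this is a sketch assuming a companion src/activations.c, with the
function bodies copied verbatim from the removed nn.c code (the file name and
the static qualifiers are assumptions):

/* Hypothetical src/activations.c -- bodies copied from the deleted nn.c code.
 * Marking the raw functions static is an assumption; only the struct
 * Activation objects need external linkage. Link with -lm. */
#include <math.h>
#include "activations.h"  /* assumed header name */

static double sigmoid(double x)  { return 1 / (1 + exp(-x)); }
static double dsigmoid(double x) { return sigmoid(x) * (1 - sigmoid(x)); }

static double relu(double x)  { return (x > 0) ? x : 0; }
static double drelu(double x) { return (x > 0) ? 1 : 0; }

static double leaky_relu(double x)  { return (x > 0) ? x : 0.01 * x; }
static double dleaky_relu(double x) { return (x > 0) ? 1 : 0.01; }

static double softplus(double x)  { return log1p(exp(x)); }
static double dsoftplus(double x) { return sigmoid(x); }

struct Activation NN_SIGMOID    = { .func = sigmoid,    .dfunc = dsigmoid };
struct Activation NN_RELU       = { .func = relu,       .dfunc = drelu };
struct Activation NN_LEAKY_RELU = { .func = leaky_relu, .dfunc = dleaky_relu };
struct Activation NN_SOFTPLUS   = { .func = softplus,   .dfunc = dsoftplus };

With this split, adding a new activation means appending one func/dfunc pair
and one extern declaration; nn.c stays untouched, which is exactly the bloat
the commit message aims to avoid.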