Diffstat (limited to 'src/nn.c')
-rw-r--r--  src/nn.c | 70
1 file changed, 0 insertions(+), 70 deletions(-)
diff --git a/src/nn.c b/src/nn.c
index 0bcc28b..63dd643 100644
--- a/src/nn.c
+++ b/src/nn.c
@@ -9,41 +9,11 @@ static double get_avg_loss(
double square_loss(double labels[], double net_outs[], size_t shape);
double square_dloss_out(double labels, double net_out);
-double leaky_relu(double x);
-double dleaky_relu(double x);
-double relu(double x);
-double drelu(double x);
-double sigmoid(double x);
-double dsigmoid(double x);
-double softplus(double x);
-double dsoftplus(double x);
-
struct Cost NN_SQUARE = {
.func = square_loss,
.dfunc_out = square_dloss_out
};
-struct Activation NN_SOFTPLUS = {
- .func = softplus,
- .dfunc = dsoftplus,
-};
-
-struct Activation NN_LEAKY_RELU = {
- .func = leaky_relu,
- .dfunc = dleaky_relu
-};
-
-struct Activation NN_RELU = {
- .func = relu,
- .dfunc = drelu
-};
-
-struct Activation NN_SIGMOID = {
- .func = sigmoid,
- .dfunc = dsigmoid
-};
-
-
void nn_network_train(
Layer network[], size_t network_size,
double *input, size_t input_shape[2],
@@ -338,46 +308,6 @@ nn_fill_random_weights_error:
exit(1);
}
-double sigmoid(double x)
-{
- return 1 / (1 + exp(-x));
-}
-
-double dsigmoid(double x)
-{
- return sigmoid(x) * (1 - sigmoid(x));
-}
-
-double relu(double x)
-{
- return (x > 0) ? x : 0;
-}
-
-double drelu(double x)
-{
- return (x > 0) ? 1 : 0;
-}
-
-double leaky_relu(double x)
-{
- return (x > 0) ? x : 0.01 * x;
-}
-
-double dleaky_relu(double x)
-{
- return (x > 0) ? 1 : 0.01;
-}
-
-double softplus(double x)
-{
- return log1p(exp(x));
-}
-
-double dsoftplus(double x)
-{
- return sigmoid(x);
-}
-
double square_loss(double labels[], double net_out[], size_t shape)
{
double sum = 0;