-rw-r--r--   src/activations.c   8 ++++++++
-rw-r--r--   src/main.c          2 ++
2 files changed, 10 insertions(+), 0 deletions(-)
diff --git a/src/activations.c b/src/activations.c
index 064482d..7a8ea24 100644
--- a/src/activations.c
+++ b/src/activations.c
@@ -30,6 +30,7 @@ double softplus(double x);
 double dsoftplus(double x);
 double linear(double x);
 double dlinear(double x);
+double dtanh(double x);
 
 struct Activation NN_LEAKY_RELU = {
 	.func = leaky_relu,
@@ -56,6 +57,11 @@ struct Activation NN_LINEAR = {
 	.dfunc = dlinear,
 };
 
+struct Activation NN_TANH = {
+	.func = tanh,
+	.dfunc = dtanh,
+};
+
 double linear(double x) {return x;}
 double dlinear(double x) {return 1.0;}
 
@@ -70,3 +76,5 @@ double dleaky_relu(double x) { return (x > 0) ? 1 : 0.01; }
 
 double softplus(double x) { return log1p(exp(x)); }
 double dsoftplus(double x) { return sigmoid(x); }
+
+double dtanh(double x) {return 1 - tanh(x) * tanh(x);}
diff --git a/src/main.c b/src/main.c
--- a/src/main.c
+++ b/src/main.c
@@ -171,6 +171,7 @@ Layer * load_network(struct Configs cfg)
 	extern struct Activation NN_SIGMOID;
 	extern struct Activation NN_LEAKY_RELU;
 	extern struct Activation NN_LINEAR;
+	extern struct Activation NN_TANH;
 
 	Layer *network = ecalloc(cfg.network_size, sizeof(Layer));
 
@@ -180,6 +181,7 @@ Layer * load_network(struct Configs cfg)
 		else if (!strcmp("softplus", cfg.activations[i])) network[i].activation = NN_SOFTPLUS;
 		else if (!strcmp("leaky_relu", cfg.activations[i])) network[i].activation = NN_LEAKY_RELU;
 		else if (!strcmp("linear", cfg.activations[i])) network[i].activation = NN_LINEAR;
+		else if (!strcmp("tanh", cfg.activations[i])) network[i].activation = NN_TANH;
 		else die("load_network() Error: Unknown '%s' activation", cfg.activations[i]);
 
 		network[i].neurons = cfg.neurons[i];
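For reference, the new dtanh is the analytic derivative of tanh, d/dx tanh(x) = 1 - tanh(x)^2. A minimal standalone sketch (not part of this commit; the file name check_dtanh.c and the helper dtanh_analytic are hypothetical) that checks the formula against a central finite difference:

/* check_dtanh.c -- standalone sanity check, not part of this commit.
 * Compares the analytic derivative added above, 1 - tanh(x)^2,
 * against a central finite difference.  Build: cc check_dtanh.c -lm */
#include <math.h>
#include <stdio.h>

static double dtanh_analytic(double x) { return 1 - tanh(x) * tanh(x); }

int main(void)
{
	const double h = 1e-6;
	for (double x = -2.0; x <= 2.0; x += 0.5) {
		/* central difference: (f(x+h) - f(x-h)) / 2h */
		double numeric = (tanh(x + h) - tanh(x - h)) / (2 * h);
		printf("x=% .2f  analytic=% .8f  numeric=% .8f\n",
		       x, dtanh_analytic(x), numeric);
	}
	return 0;
}

The two columns should agree to roughly 1e-9, which is a quick way to catch sign or scaling mistakes in hand-written activation derivatives.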