-rw-r--r--  Makefile          |  2
-rw-r--r--  commands.gdb      |  4
-rw-r--r--  src/activations.c | 37
-rw-r--r--  src/nn.c          | 70
4 files changed, 42 insertions(+), 71 deletions(-)
diff --git a/Makefile b/Makefile
@@ -25,7 +25,7 @@ run: build
 	./${BIN}
 
 debug: build
-	gdb ${BIN} -x breaks.txt --tui
+	gdb ${BIN} -x commands.gdb --tui
 
 clean:
 	@rm $(OBJS) $(OBJDIR) -rv
diff --git a/commands.gdb b/commands.gdb
new file mode 100644
index 0000000..9161580
--- /dev/null
+++ b/commands.gdb
@@ -0,0 +1,4 @@
+break nn.c:68
+commands
+p *net_out@labels_shape[1]*labels_shape[0]
+end
diff --git a/src/activations.c b/src/activations.c
new file mode 100644
index 0000000..0117340
--- /dev/null
+++ b/src/activations.c
@@ -0,0 +1,37 @@
+#include "nn.h"
+
+double leaky_relu(double x);
+double dleaky_relu(double x);
+double relu(double x);
+double drelu(double x);
+double sigmoid(double x);
+double dsigmoid(double x);
+double softplus(double x);
+double dsoftplus(double x);
+
+struct Activation NN_LEAKY_RELU = {
+	.func = leaky_relu,
+	.dfunc = dleaky_relu
+};
+
+struct Activation NN_RELU = {
+	.func = relu,
+	.dfunc = drelu
+};
+
+struct Activation NN_SIGMOID = {
+	.func = sigmoid,
+	.dfunc = dsigmoid
+};
+
+double sigmoid(double x) { return 1 / (1 + exp(-x)); }
+double dsigmoid(double x) { return sigmoid(x) * (1 - sigmoid(x)); }
+
+double relu(double x) { return (x > 0) ? x : 0; }
+double drelu(double x) { return (x > 0) ? 1 : 0; }
+
+double leaky_relu(double x) { return (x > 0) ? x : 0.01 * x; }
+double dleaky_relu(double x) { return (x > 0) ? 1 : 0.01; }
+
+double softplus(double x) { return log1p(exp(x)); }
+double dsoftplus(double x) { return sigmoid(x); }
diff --git a/src/nn.c b/src/nn.c
@@ -9,41 +9,11 @@ static double get_avg_loss(
 
 double square_loss(double labels[], double net_outs[], size_t shape);
 double square_dloss_out(double labels, double net_out);
-double leaky_relu(double x);
-double dleaky_relu(double x);
-double relu(double x);
-double drelu(double x);
-double sigmoid(double x);
-double dsigmoid(double x);
-double softplus(double x);
-double dsoftplus(double x);
-
 struct Cost NN_SQUARE = {
 	.func = square_loss,
 	.dfunc_out = square_dloss_out
 };
 
-struct Activation NN_SOFTPLUS = {
-	.func = softplus,
-	.dfunc = dsoftplus,
-};
-
-struct Activation NN_LEAKY_RELU = {
-	.func = leaky_relu,
-	.dfunc = dleaky_relu
-};
-
-struct Activation NN_RELU = {
-	.func = relu,
-	.dfunc = drelu
-};
-
-struct Activation NN_SIGMOID = {
-	.func = sigmoid,
-	.dfunc = dsigmoid
-};
-
-
 void nn_network_train(
 	Layer network[], size_t network_size,
 	double *input, size_t input_shape[2],
@@ -338,46 +308,6 @@ nn_fill_random_weights_error:
 	exit(1);
 }
 
-double sigmoid(double x)
-{
-	return 1 / (1 + exp(-x));
-}
-
-double dsigmoid(double x)
-{
-	return sigmoid(x) * (1 - sigmoid(x));
-}
-
-double relu(double x)
-{
-	return (x > 0) ? x : 0;
-}
-
-double drelu(double x)
-{
-	return (x > 0) ? 1 : 0;
-}
-
-double leaky_relu(double x)
-{
-	return (x > 0) ? x : 0.01 * x;
-}
-
-double dleaky_relu(double x)
-{
-	return (x > 0) ? 1 : 0.01;
-}
-
-double softplus(double x)
-{
-	return log1p(exp(x));
-}
-
-double dsoftplus(double x)
-{
-	return sigmoid(x);
-}
-
 double square_loss(double labels[], double net_out[], size_t shape)
 {
 	double sum = 0;
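
Side note (not part of the commit): after this refactor src/activations.c owns both the activation functions and the exported struct Activation tables (NN_LEAKY_RELU, NN_RELU, NN_SIGMOID), each pairing a forward function (.func) with its derivative (.dfunc). The sketch below shows how a caller would go through such a table; it is hypothetical, and assumes nn.h declares struct Activation with double (*func)(double) / double (*dfunc)(double) members and exposes the tables as extern globals.

/* Hypothetical driver, not in this repository.
 * Assumes nn.h provides:
 *   struct Activation { double (*func)(double); double (*dfunc)(double); };
 *   extern struct Activation NN_SIGMOID;   (defined in src/activations.c)
 */
#include <stdio.h>
#include "nn.h"

int main(void)
{
	double z[] = { -2.0, -0.5, 0.0, 0.5, 2.0 };

	for (size_t i = 0; i < sizeof z / sizeof z[0]; i++) {
		/* Forward value and derivative come from the same table,
		 * which is how a layer would call its configured activation. */
		printf("z=%+.1f  sigmoid=%.4f  dsigmoid=%.4f\n",
		       z[i], NN_SIGMOID.func(z[i]), NN_SIGMOID.dfunc(z[i]));
	}
	return 0;
}

Building such a driver would also need src/activations.c on the compile line and -lm for exp()/log1p().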