-rw-r--r--  doc/main.pdf  bin 0 -> 530497 bytes
-rw-r--r--  doc/main.tex  67
-rw-r--r--  doc/net.dot   17
-rw-r--r--  doc/net.pdf   bin 0 -> 11751 bytes
-rw-r--r--  src/nn.c      22
-rw-r--r--  src/nn.h      7
6 files changed, 105 insertions, 8 deletions
diff --git a/doc/main.pdf b/doc/main.pdf
new file mode 100644
index 0000000..1a2404e
--- /dev/null
+++ b/doc/main.pdf
Binary files differ
diff --git a/doc/main.tex b/doc/main.tex
new file mode 100644
index 0000000..00028f9
--- /dev/null
+++ b/doc/main.tex
@@ -0,0 +1,67 @@
+%acmart
+%IEEEtran
+%rbt-mathnotes-formula-sheet
+\documentclass{rbt-mathnotes-formula-sheet}
+\usepackage[utf8]{inputenc}
+\usepackage[pdf]{graphviz}
+\usepackage{derivative}
+\title{Deep learning notes}
+
+\begin{document}
+
+\section{Observations}
+\begin{eqnarray}
+ i,j,k,l,L,m,M,n,N,o \in & \mathcal{N} \\
+ X \in & \mathcal{R}^{n \times o} \\
+ Y \in & \mathcal{R}^{n \times m}
+\end{eqnarray}
+
+\section{Neural Network}
+
+\includegraphics[width=0.3\textwidth]{net.pdf}
+
+\begin{eqnarray}
+  a^0 = & x_{1 \times o}(n) \\
+ a^L = & d_{1 \times m}(n) \\
+ a^l = & \varphi (z^l) \\
+ z^l = & a^{l - 1} W^l
+\end{eqnarray}
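+
+Row-stacking the $n$ samples gives the batched form $Z^l = A^{l-1} W^l$;
+this is the single \texttt{cblas\_dgemm} call in \texttt{src/nn.c}.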
+
+\section{Gradient Descent}
+
+\begin{eqnarray}
+  e(n) = & y(n) - d(n) \\
+  \xi(n) = & \frac{1}{2} e(n) e(n)^{\top}\\
+  \xi(n) = & \frac{1}{2} \sum_{j=1}^{M} (e_j(n))^2 \\
+  W_{(k + 1)} = & W_{(k)} - \eta \nabla_{W} \xi(d,y) \\
+  \xi_{avg} = & \frac{1}{2N} \sum_{n=1}^N \sum_{j=1}^{M} (e_j(n))^2
+\end{eqnarray}
+
+with $\eta$ the learning rate.
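+
+Toy example: a single weight $w_{(k)} = 0.5$ with $\eta = 0.1$ and
+$\pdv{\xi}{w} = -0.2$ updates to $w_{(k+1)} = 0.5 - 0.1 \cdot (-0.2) = 0.52$.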
+
+\section{Backpropagation}
+
+\begin{eqnarray}
+  \pdv{\xi}{\omega^l_{ij}} = & \delta_j^l \pdv{z_j^l}{\omega^l_{ij}} \\
+  \delta_j^l = & \pdv{\xi}{z_j^l} \\
+  \pdv{z_j^l}{\omega^l_{ij}} = & a_i^{l-1}
+\end{eqnarray}
+
+Output Layer
+
+\begin{eqnarray}
+  \delta_j^L =& \pdv{\xi}{z_j^L} = \pdv{\xi}{a_j^L} \pdv{a_j^L}{z_j^L}\\
+  \pdv{\xi}{a_j^L} =& -e_j \qquad (e_j = y_j - a_j^L)\\
+  \delta_j^L =& \pdv{\xi}{a_j^L} \dot{\varphi}(z_j^L)\\
+   =& - e_j \dot{\varphi}(z_j^L)
+\end{eqnarray}
+
+Hidden Layer
+
+\begin{eqnarray}
+  \delta_j^l = & \pdv{\xi}{z_j^l} = \sum_k \pdv{\xi}{z_k^{l+1}} \pdv{z_k^{l+1}}{z_j^l}\\
+  \delta_j^l = & \sum_k \delta_k^{l+1} \pdv{z_k^{l+1}}{z_j^l}\\
+  \pdv{z_k^{l+1}}{z_j^l} = &
+  \frac{\partial}{\partial z_j^l} \left( \sum_i \omega_{ik}^{l+1} \varphi(z_i^l) \right)\\
+  \pdv{z_k^{l+1}}{z_j^l} = & \omega_{jk}^{l+1} \dot{\varphi}(z_j^l)\\
+  \delta_j^l = & \sum_k \delta_k^{l+1} \omega_{jk}^{l+1} \dot{\varphi}(z_j^l)
+\end{eqnarray}
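+
+Equivalently, in matrix form (row vectors, matching the forward pass), with
+$\odot$ the elementwise product:
+
+\begin{eqnarray}
+  \delta^l = & \left( \delta^{l+1} (W^{l+1})^{\top} \right) \odot \dot{\varphi}(z^l)
+\end{eqnarray}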
+
+\end{document}
diff --git a/doc/net.dot b/doc/net.dot
new file mode 100644
index 0000000..d7a1bf3
--- /dev/null
+++ b/doc/net.dot
@@ -0,0 +1,17 @@
+digraph abc {
+    node [shape=circle, width=0.2];
+ rankdir=LR;
+
+ x₁ -> a₁[label = w¹];
+ x₁ -> {a₂ a₃};
+ x₂ -> {a₁ a₂ a₃};
+
+ a₁ -> d₁[label = w²];
+ a₁ -> d₂;
+ a₂ -> {d₁ d₂};
+ a₃ -> {d₁ d₂};
+
+ subgraph {rank=source; x₁; x₂};
+ subgraph {rank=same; a₁; a₂; a₃};
+ subgraph {rank=sink; d₁; d₂};
+}
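
The committed net.pdf can presumably be regenerated from net.dot with the standard Graphviz CLI (a build step, not part of this diff):

    dot -Tpdf doc/net.dot -o doc/net.pdf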
diff --git a/doc/net.pdf b/doc/net.pdf
new file mode 100644
index 0000000..1ecc6cc
--- /dev/null
+++ b/doc/net.pdf
Binary files differ
diff --git a/src/nn.c b/src/nn.c
index 87f74ce..d312c29 100644
--- a/src/nn.c
+++ b/src/nn.c
@@ -2,11 +2,12 @@
static void fill_random_weights(double *weights, double *bias, size_t rows, size_t cols);
-double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
+void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2])
{
- double *out = calloc(input_shape[0] * layer.neurons, sizeof(double));
- if (out == NULL) {
- perror("nn_layer_forward() Error");
+ if (out_shape[0] != input_shape[0] || out_shape[1] != layer.neurons) {
+ fprintf(stderr,
+ "nn_layer_forward() Error: out must have (%zu x %zu) dimensions not (%zu x %zu)\n",
+ input_shape[0], layer.neurons, out_shape[0], out_shape[1]);
exit(1);
}
@@ -22,7 +23,13 @@ double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2])
1.0, input, input_shape[1], //alpha X
layer.weights, layer.neurons, // W
1.0, out, layer.neurons); // beta B
- return out;
+
+ for (size_t i = 0; i < input_shape[0]; i++) {
+    for (size_t j = 0; j < layer.neurons; j++) {
+ size_t index = layer.neurons * i + j;
+ out[index] = layer.activation(out[index]);
+ }
+ }
}
void nn_layer_init_weights(Layer layers[], size_t nmemb, size_t n_inputs)
@@ -58,6 +65,11 @@ void nn_layer_free_weights(Layer *layer, size_t nmemb)
}
}
+double identity(double x)
+{
+ return x;
+}
+
double sigmoid(double x)
{
return 1 / (1 + exp(-x));
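
The backward pass (nn_layer_backward is still a TODO) will need the activation derivatives that appear in the delta equations of doc/main.tex. A minimal sketch of what those could look like, mirroring the double (*)(double) shape of the existing activations; the d_* names are placeholders, not part of this commit:

    /* derivative helpers for a future backward pass (hypothetical names) */
    double d_sigmoid(double x)
    {
        double s = sigmoid(x);      /* reuse the existing sigmoid() */
        return s * (1.0 - s);       /* ds/dx = s(x) * (1 - s(x)) */
    }

    double d_relu(double x)
    {
        return x > 0.0 ? 1.0 : 0.0; /* convention: derivative 0 at x = 0 */
    }

    double d_identity(double x)
    {
        (void) x;                   /* identity has constant slope 1 */
        return 1.0;
    }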
diff --git a/src/nn.h b/src/nn.h
index 88b4462..05ddd28 100644
--- a/src/nn.h
+++ b/src/nn.h
@@ -15,12 +15,13 @@ typedef struct Layer {
size_t neurons, input_nodes;
} Layer;
-void nn_layer_init_weights(Layer *layer, size_t nmemb, size_t input_cols);
+void nn_layer_init_weights(Layer *layers, size_t nmemb, size_t input_cols);
void nn_layer_free_weights(Layer *layer, size_t nmemb);
-double * nn_layer_forward(Layer layer, double *input, size_t input_shape[2]); //TODO
-double * nn_layer_backward(Layer layer, double *output, size_t out_shape[2]); //TODO
+void nn_layer_forward(Layer layer, double *out, size_t out_shape[2], double *input, size_t input_shape[2]); //TODO
+void nn_layer_backward(Layer *layer, double *out, size_t out_shape[2]); //TODO
double sigmoid(double x);
double relu(double x);
+double identity(double x);
#endif
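
For reference, a minimal caller sketch for the new nn_layer_forward signature (the out buffer is now owned by the caller; the field names neurons and activation come from nn.h, everything else here is made up for illustration):

    #include "nn.h"

    int main(void)
    {
        double input[4 * 2] = {0};  /* 4 samples, 2 features, row-major */
        size_t input_shape[2] = {4, 2};

        Layer layer = {.neurons = 3, .activation = sigmoid};
        nn_layer_init_weights(&layer, 1, input_shape[1]);

        /* out must be zeroed and shaped (input rows x layer.neurons) */
        double out[4 * 3] = {0};
        size_t out_shape[2] = {4, 3};
        nn_layer_forward(layer, out, out_shape, input, input_shape);

        nn_layer_free_weights(&layer, 1);
        return 0;
    }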