; Neural-network training configuration (INI)
; Sections: [net] global settings, one [layer] per hidden layer, [outlayer] output layer.
[net]
; loss options: square
loss = square
epochs = 1000
alpha = 1e-6
weights_path = data/big_nn.bin
inputs = x,y
labels = z
; activation options (relu, sigmoid, softplus, leaky_relu)
; NOTE(review): repeated [layer] sections (one per hidden layer) rely on a
; parser that accumulates duplicate sections in order — confirm the consumer
; supports this (strict configparser would reject it).
[layer]
neurons = 20
activation = relu

[layer]
neurons = 20
activation = relu

[outlayer]
activation = sigmoid