; Neural-network training configuration (INI).
; NOTE(review): repeated [layer] sections appear to define the hidden
; layers in order — the consuming parser must support duplicate sections.
[net]
; loss options: square
loss = square
epochs = 1000
alpha = 1e-4
weights_path = data/sine.bin
inputs = x
labels = y
; activation options (relu, sigmoid, softplus, leaky_relu)
[layer]
neurons = 20
activation = leaky_relu

[layer]
neurons = 20
activation = sigmoid
[outlayer]
activation = linear