blob: 877e301567033bee42208a87569b3682c481b595 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
|
[net]
; loss options: square
loss = square
; number of training epochs
epochs = 500
batch = 32
alpha = 1
weights_path = utils/weights.bin
inputs = x, y
labels = z
; activation options (relu, sigmoid, softplus, leaky_relu, linear, tanh)
[layer]
neurons = 10
activation = sigmoid
[outlayer]
activation = sigmoid
|