[net]
; options (square)
loss = square
; comment
epochs = 200
alpha = 1e-2
weights_path = utils/weights.bin
inputs = x
labels = y

; activation options (relu, sigmoid, softplus, leaky_relu)
[layer]
neurons = 20
activation = sigmoid

[outlayer]
activation = sigmoid