Hello everybody,
I am running into an error with BNNS.NormalizationLayer. It appears to work only with .vector shapes: with a matrix shape, training throws layerApplyFail, and inference doesn't throw but the output is never written. The failing code is below, followed by the .vector variant that does work for me, for comparison.
How do I correctly use BNNS.NormalizationLayer with matrix shapes, and how can I debug the layerApplyFail error?
Thanks
import Accelerate

let array: [Float32] = [
01, 02, 03, 04, 05, 06,
07, 08, 09, 10, 11, 12,
13, 14, 15, 16, 17, 18,
]
// let inputShape: BNNS.Shape = .vector(6 * 3) // works
let inputShape: BNNS.Shape = .matrixColumnMajor(6, 3)
// Descriptors that define the layer's input/output shapes and its learnable parameters.
let input = BNNSNDArrayDescriptor.allocateUninitialized(scalarType: Float32.self, shape: inputShape)
let output = BNNSNDArrayDescriptor.allocateUninitialized(scalarType: Float32.self, shape: inputShape)
let beta = BNNSNDArrayDescriptor.allocate(repeating: Float32(0), shape: inputShape, batchSize: 1)
let gamma = BNNSNDArrayDescriptor.allocate(repeating: Float32(1), shape: inputShape, batchSize: 1)
let activation: BNNS.ActivationFunction = .identity
let layer = BNNS.NormalizationLayer(type: .layer(normalizationAxis: 0),
                                    input: input,
                                    output: output,
                                    beta: beta,
                                    gamma: gamma,
                                    epsilon: 1e-12,
                                    activation: activation)!
// Descriptors holding the actual data passed to apply(batchSize:input:output:for:).
let layerInput = BNNSNDArrayDescriptor.allocate(initializingFrom: array, shape: inputShape)
let layerOutput = BNNSNDArrayDescriptor.allocateUninitialized(scalarType: Float32.self, shape: inputShape)
// try layer.apply(batchSize: 1, input: layerInput, output: layerOutput, for: .inference) // No throw
try layer.apply(batchSize: 1, input: layerInput, output: layerOutput, for: .training) // Throws layerApplyFail with the matrix shape
_ = layerOutput.makeArray(of: Float32.self) // All zeros when .inference
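
For comparison, this is the .vector variant from the commented-out shape above. It is the same code with only the shape changed, and .training runs without throwing for me:

import Accelerate

// Same data and layer setup as above, but with a flat vector shape instead of a matrix.
let array: [Float32] = [
01, 02, 03, 04, 05, 06,
07, 08, 09, 10, 11, 12,
13, 14, 15, 16, 17, 18,
]
let inputShape: BNNS.Shape = .vector(6 * 3)
let input = BNNSNDArrayDescriptor.allocateUninitialized(scalarType: Float32.self, shape: inputShape)
let output = BNNSNDArrayDescriptor.allocateUninitialized(scalarType: Float32.self, shape: inputShape)
let beta = BNNSNDArrayDescriptor.allocate(repeating: Float32(0), shape: inputShape, batchSize: 1)
let gamma = BNNSNDArrayDescriptor.allocate(repeating: Float32(1), shape: inputShape, batchSize: 1)
let layer = BNNS.NormalizationLayer(type: .layer(normalizationAxis: 0),
                                    input: input,
                                    output: output,
                                    beta: beta,
                                    gamma: gamma,
                                    epsilon: 1e-12,
                                    activation: .identity)!
let layerInput = BNNSNDArrayDescriptor.allocate(initializingFrom: array, shape: inputShape)
let layerOutput = BNNSNDArrayDescriptor.allocateUninitialized(scalarType: Float32.self, shape: inputShape)
try layer.apply(batchSize: 1, input: layerInput, output: layerOutput, for: .training) // No throw with .vector
print(layerOutput.makeArray(of: Float32.self) as Any) // Output is populated here, unlike the matrix case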