I am following the first examples in O'Reilly's book on Machine Learning with Go. I am still learning, and something is happening that confuses me and that I cannot explain.
When I run the code that trains a neural network, I get a panic saying that a matrix dimension has zero length. I don't understand why, because that zero-size matrix is exactly what I create on purpose for the output variable of the neural network.
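If it helps, here is a minimal snippet (my own reduction, not code from the book) that I believe triggers the same panic on gonum v0.9.2, the version shown in the stack trace below, since the constructor is asked for a matrix with zero rows and zero columns:

package main

import "gonum.org/v1/gonum/mat"

func main() {
    // Panics with "mat: zero length in matrix dimension"
    // because both dimensions are zero.
    _ = mat.NewDense(0, 0, nil)
}

And this is the full program I am running: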
package main

import (
    "errors"
    "fmt"
    "log"
    "math"
    "math/rand"
    "time"

    "gonum.org/v1/gonum/floats"
    "gonum.org/v1/gonum/mat"
)

// sumAlongAxis sums a matrix along a
// particular dimension, preserving the
// other dimension.
func sumAlongAxis(axis int, m *mat.Dense) (*mat.Dense, error) {
    numRows, numCols := m.Dims()

    var output *mat.Dense

    switch axis {
    case 0:
        data := make([]float64, numCols)
        for i := 0; i < numCols; i++ {
            col := mat.Col(nil, i, m)
            data[i] = floats.Sum(col)
        }
        output = mat.NewDense(1, numCols, data)
    case 1:
        data := make([]float64, numRows)
        for i := 0; i < numRows; i++ {
            row := mat.Row(nil, i, m)
            data[i] = floats.Sum(row)
        }
        output = mat.NewDense(numRows, 1, data)
    default:
        return nil, errors.New("invalid axis, must be 0 or 1")
    }

    return output, nil
}
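// To illustrate what I expect from this helper (my own example, not from the book):
// for the 2x2 matrix [[1 2], [3 4]],
//   sumAlongAxis(0, m) should give the 1x2 matrix [4 6] (column sums), and
//   sumAlongAxis(1, m) should give the 2x1 matrix [3; 7] (row sums).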
// sigmoid implements the sigmoid function
// for use in activation functions.
func sigmoid(x float64) float64 {
    return 1.0 / (1.0 + math.Exp(-x))
}

// sigmoidPrime implements the derivative
// of the sigmoid function for backpropagation.
func sigmoidPrime(x float64) float64 {
    return x * (1.0 - x)
}
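// Note: as I understand it, sigmoidPrime expects a value that has already been
// passed through sigmoid, since d/dz sigmoid(z) = a * (1 - a) with a = sigmoid(z).
// That is why it is applied to output and hiddenLayerActivations in train below.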
// neuralNet contains all of the information
// that defines a trained neural network.
type neuralNet struct {
    config  neuralNetConfig
    wHidden *mat.Dense
    bHidden *mat.Dense
    wOut    *mat.Dense
    bOut    *mat.Dense
}

// neuralNetConfig defines our neural network
// architecture and learning parameters.
type neuralNetConfig struct {
    inputNeurons  int
    outputNeurons int
    hiddenNeurons int
    numEpochs     int
    learningRate  float64
}

// NewNetwork initializes a new neural network.
func newNetwork(config neuralNetConfig) *neuralNet {
    return &neuralNet{config: config}
}

// Train trains a neural network using backpropagation.
func (nn *neuralNet) train(x, y *mat.Dense) error {

    // Initialize biases/weights.
    randSource := rand.NewSource(time.Now().UnixNano())
    randGen := rand.New(randSource)

    wHiddenRaw := make([]float64, nn.config.hiddenNeurons*nn.config.inputNeurons)
    bHiddenRaw := make([]float64, nn.config.hiddenNeurons)
    wOutRaw := make([]float64, nn.config.outputNeurons*nn.config.hiddenNeurons)
    bOutRaw := make([]float64, nn.config.outputNeurons)

    for _, param := range [][]float64{wHiddenRaw, bHiddenRaw, wOutRaw, bOutRaw} {
        for i := range param {
            param[i] = randGen.Float64()
        }
    }

    wHidden := mat.NewDense(nn.config.inputNeurons, nn.config.hiddenNeurons, wHiddenRaw)
    bHidden := mat.NewDense(1, nn.config.hiddenNeurons, bHiddenRaw)
    wOut := mat.NewDense(nn.config.hiddenNeurons, nn.config.outputNeurons, wOutRaw)
    bOut := mat.NewDense(1, nn.config.outputNeurons, bOutRaw)

    // Define the output of the neural network.
    output := mat.NewDense(0, 0, nil)
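    // As far as I can tell, this is the zero-size matrix the panic below
    // complains about; it is meant to be resized by the Apply call inside
    // the training loop.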
    // Loop over the number of epochs utilizing
    // backpropagation to train our model.
    for i := 0; i < nn.config.numEpochs; i++ {

        // Complete the feed forward process.
        hiddenLayerInput := mat.NewDense(0, 0, nil)
        hiddenLayerInput.Mul(x, wHidden)
        addBHidden := func(_, col int, v float64) float64 { return v + bHidden.At(0, col) }
        hiddenLayerInput.Apply(addBHidden, hiddenLayerInput)

        hiddenLayerActivations := mat.NewDense(0, 0, nil)
        applySigmoid := func(_, _ int, v float64) float64 { return sigmoid(v) }
        hiddenLayerActivations.Apply(applySigmoid, hiddenLayerInput)

        outputLayerInput := mat.NewDense(0, 0, nil)
        outputLayerInput.Mul(hiddenLayerActivations, wOut)
        addBOut := func(_, col int, v float64) float64 { return v + bOut.At(0, col) }
        outputLayerInput.Apply(addBOut, outputLayerInput)
        output.Apply(applySigmoid, outputLayerInput)

        // Complete the backpropagation.
        networkError := mat.NewDense(0, 0, nil)
        networkError.Sub(y, output)

        slopeOutputLayer := mat.NewDense(0, 0, nil)
        applySigmoidPrime := func(_, _ int, v float64) float64 { return sigmoidPrime(v) }
        slopeOutputLayer.Apply(applySigmoidPrime, output)
        slopeHiddenLayer := mat.NewDense(0, 0, nil)
        slopeHiddenLayer.Apply(applySigmoidPrime, hiddenLayerActivations)

        dOutput := mat.NewDense(0, 0, nil)
        dOutput.MulElem(networkError, slopeOutputLayer)
        errorAtHiddenLayer := mat.NewDense(0, 0, nil)
        errorAtHiddenLayer.Mul(dOutput, wOut.T())

        dHiddenLayer := mat.NewDense(0, 0, nil)
        dHiddenLayer.MulElem(errorAtHiddenLayer, slopeHiddenLayer)

        // Adjust the parameters.
        wOutAdj := mat.NewDense(0, 0, nil)
        wOutAdj.Mul(hiddenLayerActivations.T(), dOutput)
        wOutAdj.Scale(nn.config.learningRate, wOutAdj)
        wOut.Add(wOut, wOutAdj)

        bOutAdj, err := sumAlongAxis(0, dOutput)
        if err != nil {
            return err
        }
        bOutAdj.Scale(nn.config.learningRate, bOutAdj)
        bOut.Add(bOut, bOutAdj)

        wHiddenAdj := mat.NewDense(0, 0, nil)
        wHiddenAdj.Mul(x.T(), dHiddenLayer)
        wHiddenAdj.Scale(nn.config.learningRate, wHiddenAdj)
        wHidden.Add(wHidden, wHiddenAdj)

        bHiddenAdj, err := sumAlongAxis(0, dHiddenLayer)
        if err != nil {
            return err
        }
        bHiddenAdj.Scale(nn.config.learningRate, bHiddenAdj)
        bHidden.Add(bHidden, bHiddenAdj)
    }

    nn.wHidden = wHidden
    nn.bHidden = bHidden
    nn.wOut = wOut
    nn.bOut = bOut

    return nil
}
func main() {

    // Define our input attributes.
    input := mat.NewDense(3, 4, []float64{
        1.0, 0.0, 1.0, 0.0,
        1.0, 0.0, 1.0, 1.0,
        0.0, 1.0, 0.0, 1.0,
    })

    // Define our labels.
    labels := mat.NewDense(3, 1, []float64{1.0, 1.0, 0.0})

    // Define our network architecture and
    // learning parameters.
    config := neuralNetConfig{
        inputNeurons:  4,
        outputNeurons: 1,
        hiddenNeurons: 3,
        numEpochs:     5000,
        learningRate:  0.3,
    }

    // Train the neural network.
    network := newNetwork(config)
    if err := network.train(input, labels); err != nil {
        log.Fatal(err)
    }

    // Output the weights that define our network!
    f := mat.Formatted(network.wHidden, mat.Prefix(" "))
    fmt.Printf("\nwHidden = % v\n\n", f)

    f = mat.Formatted(network.bHidden, mat.Prefix(" "))
    fmt.Printf("\nbHidden = % v\n\n", f)

    f = mat.Formatted(network.wOut, mat.Prefix(" "))
    fmt.Printf("\nwOut = % v\n\n", f)

    f = mat.Formatted(network.bOut, mat.Prefix(" "))
    fmt.Printf("\nbOut = % v\n\n", f)
}
When I run the program, I receive this panic:
panic: mat: zero length in matrix dimension

goroutine 1 [running]:
gonum.org/v1/gonum/mat.NewDense(...)
    C:/Users/fabri/go/pkg/mod/gonum.org/v1/gonum@v0.9.2/mat/dense.go:50
main.(*neuralNet).train(0xc00010ff30, 0xc000024080, 0xc0000240c0, 0x8f3bc0, 0x952088)
    D:/Tech/go/src/go-text-classification/main.go:106 +0x870
main.main()
    D:/Tech/go/src/go-text-classification/main.go:204 +0x2a5
I would appreciate any help with this. Thanks in advance.