# autograd


- Automatic differentiation library for Go
- Pure Go implementation
- Using only the standard library

## Example

### Backward

```go
x := variable.New(1.0)
y := F.Sin(x)
y.Backward()

fmt.Println(y)
fmt.Println(x.Grad)

// Output:
// variable([0.8414709848078965])
// variable([0.5403023058681398])
```
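Since y = sin(x), backpropagation yields dy/dx = cos(x): at x = 1, y = sin(1) ≈ 0.8415 and x.Grad = cos(1) ≈ 0.5403, as printed above.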

### Composite function

```go
matyas := func(x, y *variable.Variable) *variable.Variable {
	// 0.26(x^2 + y^2) - 0.48xy
	z0 := F.MulC(0.26, F.Add(F.Pow(2.0)(x), F.Pow(2.0)(y)))
	z1 := F.MulC(0.48, F.Mul(x, y))
	return F.Sub(z0, z1)
}

x := variable.New(1.0)
y := variable.New(1.0)
z := matyas(x, y)
z.Backward()

fmt.Println(x.Grad, y.Grad)

// Output:
// variable([0.040000000000000036]) variable([0.040000000000000036])
```
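For the Matyas function z = 0.26(x^2 + y^2) - 0.48xy, the partial derivatives are ∂z/∂x = 0.52x - 0.48y and ∂z/∂y = 0.52y - 0.48x, so both gradients at (1, 1) are 0.04, matching the output up to floating-point rounding.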

### Gradient descent

```go
rosenbrock := func(x0, x1 *variable.Variable) *variable.Variable {
	// 100 * (x1 - x0^2)^2 + (x0 - 1)^2
	y0 := F.Pow(2.0)(F.Sub(x1, F.Pow(2.0)(x0)))
	y1 := F.Pow(2.0)(F.AddC(-1.0, x0))
	return F.Add(F.MulC(100, y0), y1)
}

update := func(lr float64, x ...*variable.Variable) {
	for _, v := range x {
		v.Data = matrix.F2(v.Data, v.Grad.Data, func(a, b float64) float64 {
			return a - lr*b
		})
	}
}

x0 := variable.New(0.0)
x1 := variable.New(2.0)

lr := 0.001
iters := 10000

for i := range iters + 1 {
	if i%1000 == 0 {
		fmt.Println(x0, x1)
	}

	x0.Cleargrad()
	x1.Cleargrad()
	y := rosenbrock(x0, x1)
	y.Backward()

	update(lr, x0, x1)
}

// Output:
// variable([0]) variable([2])
// variable([0.6837118569138317]) variable([0.4659526837427042])
// variable([0.8263177857050957]) variable([0.6820311873361097])
// variable([0.8947837494333546]) variable([0.8001896451930564])
// variable([0.9334871723401226]) variable([0.8711213202579401])
// variable([0.9569899983530249]) variable([0.9156532462021957])
// variable([0.9718168065095137]) variable([0.9443132014542008])
// variable([0.9813809710644894]) variable([0.9630332658658076])
// variable([0.9876355102559093]) variable([0.9753740541653942])
// variable([0.9917613994572028]) variable([0.9835575421346807])
// variable([0.9944984367782456]) variable([0.9890050527419593])
```
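The Rosenbrock function has its global minimum at (1, 1), where the gradient (∂f/∂x0, ∂f/∂x1) = (-400x0(x1 - x0^2) + 2(x0 - 1), 200(x1 - x0^2)) vanishes; the iterates printed every 1000 steps converge toward that point.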

### Deep Learning

```go
dataset := NewCurve(N, noise, math.Sin)
dataloader := &DataLoader{
	BatchSize: batchSize,
	N:         dataset.N,
	Data:      dataset.Data,
	Label:     dataset.Label,
}

m := model.NewLSTM(hiddenSize, 1)
o := optimizer.SGD{
	LearningRate: 0.01,
}

for range epochs {
	m.ResetState()

	loss, count := variable.New(0), 0
	for x, t := range dataloader.Seq2() {
		y := m.Forward(x)
		loss = F.Add(loss, F.MeanSquaredError(y, t))

		if count++; count%bpttLength == 0 || count == dataset.N {
			m.Cleargrads()
			loss.Backward()
			loss.UnchainBackward()
			o.Update(m)
		}
	}
}
```
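This snippet relies on helpers and hyperparameters defined elsewhere in the example program (NewCurve, DataLoader, N, noise, batchSize, hiddenSize, bpttLength, epochs). A minimal sketch of the missing values, chosen here only for illustration and not necessarily the ones the example uses:

```go
// Hypothetical hyperparameters for the snippet above; the actual
// example program may define different values.
const (
	N          = 1000 // points sampled from the noisy sine curve
	noise      = 0.05 // amplitude of the noise added to the curve
	batchSize  = 30   // sequences per mini-batch
	hiddenSize = 100  // LSTM hidden state size
	bpttLength = 30   // truncated BPTT length
	epochs     = 100  // training epochs
)
```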

### Double backpropagation

```go
x := variable.New(1.0)
y := F.Sin(x)
y.Backward(variable.Opts{CreateGraph: true})

fmt.Println(y)
fmt.Println(x.Grad)

for i := 0; i < 5; i++ {
	gx := x.Grad
	x.Cleargrad()
	gx.Backward(variable.Opts{CreateGraph: true})

	fmt.Println(x.Grad)
}

// Output:
// variable([0.8414709848078965])
// variable([0.5403023058681398])
// variable([-0.8414709848078965])
// variable([-0.5403023058681398])
// variable([0.8414709848078965])
// variable([0.5403023058681398])
// variable([-0.8414709848078965])
```
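Each Backward call with CreateGraph: true builds a graph for the gradient itself, so differentiating repeatedly walks through the higher-order derivatives of sin(x) at x = 1: cos, -sin, -cos, sin, and so on, which is exactly the cycle printed above.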

## Dot graph

```shell
$ brew install graphviz
$ go run cmd/dot/main.go -func tanh -order 2 -verbose > sample.dot
$ dot sample.dot -T png -o sample.png
```

## Links