Skip to content

Commit

Permalink
Update some files
Browse files Browse the repository at this point in the history
  • Loading branch information
itsubaki committed Oct 17, 2023
1 parent db62f20 commit d76ab52
Show file tree
Hide file tree
Showing 11 changed files with 238 additions and 137 deletions.
44 changes: 14 additions & 30 deletions layer/layer.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,46 +2,30 @@ package layer

import "github.com/itsubaki/autograd/variable"

type Forwarder interface {
type Layer interface {
First(x ...*variable.Variable) *variable.Variable
Forward(x ...*variable.Variable) []*variable.Variable
Params() []Parameter
Cleargrads()
}

type Layer struct {
Layers []*Layer
Input, Output []*variable.Variable
Forwarder
}

func (l *Layer) First(x ...*variable.Variable) *variable.Variable {
return l.Forward(x...)[0]
}
// Layers maps a layer name (e.g. "x2h", "h2h") to its Layer implementation.
type Layers map[string]Layer

func (l *Layer) Forward(x ...*variable.Variable) []*variable.Variable {
y := l.Forwarder.Forward(x...)
l.Input, l.Output = x, y
return y
}

func (l *Layer) Add(layer *Layer) {
l.Layers = append(l.Layers, layer)
}

func (l *Layer) Params() []Parameter {
func (l Layers) Params() []Parameter {
params := make([]Parameter, 0)
if l.Forwarder != nil {
params = l.Forwarder.Params()
}

for _, ll := range l.Layers {
params = append(params, ll.Params()...)
for k := range l {
params = append(params, l[k].Params()...)
}

return params
}

func (l *Layer) Cleargrads() {
for _, p := range l.Params() {
p.Cleargrad()
// Cleargrads resets the gradients of every parameter held by every
// layer in the collection.
func (l Layers) Cleargrads() {
	for _, layer := range l {
		layer.Cleargrads()
	}
}

// Add registers layer under the given name, replacing any layer
// previously stored under the same name.
func (l Layers) Add(name string, layer Layer) {
	l[name] = layer
}
36 changes: 0 additions & 36 deletions layer/layer_test.go
Original file line number Diff line number Diff line change
@@ -1,37 +1 @@
package layer_test

import (
"fmt"

L "github.com/itsubaki/autograd/layer"
)

func ExampleLayer() {
l := &L.Layer{}

l.Add(L.Linear(1))
l.Add(L.Linear(2))
l.Add(L.Linear(3))

for _, v := range l.Params() {
fmt.Println(v)
}

// Unordered output:
// b([0])
// b([0 0])
// b([0 0 0])
}

func ExampleLayer_forwarder() {
l := &L.Layer{
Forwarder: L.Linear(1),
}

for _, v := range l.Params() {
fmt.Println(v)
}

// Output:
// b([0])
}
16 changes: 9 additions & 7 deletions layer/linear.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ type LinearOpts struct {
Source rand.Source
}

func Linear(outSize int, opts ...LinearOpts) *Layer {
func Linear(outSize int, opts ...LinearOpts) *LinearT {
s := rand.NewSource(time.Now().UnixNano())
if len(opts) != 0 && opts[0].Source != nil {
s = opts[0].Source
Expand All @@ -26,12 +26,10 @@ func Linear(outSize int, opts ...LinearOpts) *Layer {
p.Add("b", variable.Zero(1, outSize))
}

return &Layer{
Forwarder: &LinearT{
outSize: outSize,
rand: rand.New(s),
Parameters: p,
},
return &LinearT{
outSize: outSize,
rand: rand.New(s),
Parameters: p,
}
}

Expand All @@ -41,6 +39,10 @@ type LinearT struct {
Parameters
}

// First applies Forward to x and returns only the first output variable.
// It is a convenience wrapper for callers that expect a single output.
func (l *LinearT) First(x ...*variable.Variable) *variable.Variable {
	return l.Forward(x...)[0]
}

func (l *LinearT) Forward(x ...*variable.Variable) []*variable.Variable {
if _, ok := l.Parameters["w"]; !ok {
l.inSize = variable.Shape(x[0])[1]
Expand Down
17 changes: 0 additions & 17 deletions layer/linear_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -67,20 +67,3 @@ func ExampleLinear_backward() {
// b variable([2 2 2 2 2])
// w variable([[2 2 2 2 2] [4 4 4 4 4] [6 6 6 6 6]])
}

func ExampleLinear_cleargrads() {
l := L.Linear(5)

x := variable.New(1, 2, 3)
y := l.First(x)
y.Backward()

l.Cleargrads()
for _, v := range l.Params() {
fmt.Println(v.Name, v.Grad)
}

// Unordered output:
// b <nil>
// w <nil>
}
6 changes: 6 additions & 0 deletions layer/parameter.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,12 @@ func (p Parameters) Params() []Parameter {
return params
}

// Cleargrads resets the gradient of every parameter in the set.
func (p Parameters) Cleargrads() {
	for _, param := range p {
		param.Cleargrad()
	}
}

func (p Parameters) Add(name string, param Parameter) {
param.Name = name
p[name] = param
Expand Down
16 changes: 16 additions & 0 deletions layer/parameter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,3 +20,19 @@ func ExampleParameter() {
// w([1 2])
// b([3 4])
}

func ExampleParameters_Cleargrads() {
	// A parameter whose gradient is set, then cleared.
	w := variable.New(1, 2)
	w.Grad = variable.New(3, 4)

	params := make(layer.Parameters)
	params.Add("w", w)
	params.Cleargrads()

	for _, p := range params {
		fmt.Println(p, p.Grad)
	}

	// Output:
	// w([1 2]) <nil>
}
57 changes: 57 additions & 0 deletions layer/rnn.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
package layer

import (
"math/rand"
"time"

F "github.com/itsubaki/autograd/function"
"github.com/itsubaki/autograd/variable"
)

// RNNOpts holds optional settings for the RNN constructor.
type RNNOpts struct {
	// Source seeds the random initialization of the internal linear
	// layers. If nil, the current time is used as the seed.
	Source rand.Source
}

// RNN returns a recurrent layer with hiddenSize hidden units. It is
// built from two linear sublayers: "x2h" (input to hidden, with bias)
// and "h2h" (hidden to hidden, without bias).
func RNN(hiddenSize int, opts ...RNNOpts) *RNNT {
	// Default to a time-based seed unless the caller supplies a source.
	source := rand.NewSource(time.Now().UnixNano())
	if len(opts) > 0 && opts[0].Source != nil {
		source = opts[0].Source
	}

	layers := Layers{}
	layers.Add("x2h", Linear(hiddenSize, LinearOpts{Source: source}))
	layers.Add("h2h", Linear(hiddenSize, LinearOpts{Source: source, NoBias: true}))

	return &RNNT{hiddenSize: hiddenSize, Layers: layers}
}

// RNNT is a simple recurrent layer that carries its hidden state h
// across calls to Forward.
type RNNT struct {
	hiddenSize int                // number of hidden units
	h          *variable.Variable // hidden state; nil until the first Forward call or after ResetState
	Layers                        // holds the "x2h" and "h2h" linear sublayers
}

// ResetState clears the hidden state so that the next Forward call
// starts a new sequence.
func (l *RNNT) ResetState() {
	l.h = nil
}

// First applies Forward to x and returns only the first output variable.
func (l *RNNT) First(x ...*variable.Variable) *variable.Variable {
	return l.Forward(x...)[0]
}

// Forward computes the next hidden state h = tanh(x2h(x) + h2h(h))
// and returns it as a single-element slice. On the first call (or
// after ResetState) there is no previous state, so the h2h term is
// omitted: h = tanh(x2h(x)).
func (l *RNNT) Forward(x ...*variable.Variable) []*variable.Variable {
	v := l.Layers["x2h"].First(x...)
	if l.h != nil {
		v = F.Add(v, l.Layers["h2h"].First(l.h))
	}

	l.h = F.Tanh(v)
	return []*variable.Variable{l.h}
}
92 changes: 92 additions & 0 deletions layer/rnn_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
package layer_test

import (
"fmt"
"math/rand"

L "github.com/itsubaki/autograd/layer"
"github.com/itsubaki/autograd/variable"
)

func ExampleRNN() {
	rnn := L.RNN(5, L.RNNOpts{
		Source: rand.NewSource(1),
	})

	in := variable.New(1)
	out := rnn.Forward(in)
	fmt.Printf("%.4f\n", out[0].Data)

	for _, p := range rnn.Params() {
		fmt.Println(p)
	}

	// Unordered output:
	// [[-0.8437 -0.1257 -0.4785 0.9795 0.3120]]
	// b([0 0 0 0 0])
	// w([-1.233758177597947 -0.12634751070237293 -0.5209945711531503 2.28571911769958 0.3228052526115799])
}

func ExampleRNN_backward() {
	rnn := L.RNN(3, L.RNNOpts{
		Source: rand.NewSource(1),
	})
	in := variable.New(1)

	// First step: no previous hidden state, so h2h is unused.
	out := rnn.First(in)
	out.Backward()

	for _, p := range rnn.Params() {
		fmt.Println(p.Name, p.Grad)
	}

	// Second step: gradients accumulate and h2h now participates.
	out = rnn.First(in)
	out.Backward()

	for _, p := range rnn.Params() {
		fmt.Println(p.Name, p.Grad)
	}

	// Unordered output:
	// b variable([0.28822771944106673 0.984204675359483 0.7710690810213434])
	// w variable([0.28822771944106673 0.984204675359483 0.7710690810213434])
	// b variable([0.39396398439363184 1.6894261397851316 1.764807089541906])
	// w variable([0.39396398439363184 1.6894261397851316 1.764807089541906])
	// w variable([[-0.020396585815502664 -0.4758483540449156 -0.3614345140035484] [-0.003038443883017692 -0.07088630095596118 -0.053842270374037576] [-0.01156749092376784 -0.269867298688821 -0.204979916643439]])
}

func ExampleRNN_cleargrads() {
	rnn := L.RNN(3, L.RNNOpts{
		Source: rand.NewSource(1),
	})

	in := variable.New(1)
	out := rnn.First(in)
	out.Backward()

	// After Cleargrads every parameter's gradient is nil again.
	rnn.Cleargrads()
	for _, p := range rnn.Params() {
		fmt.Println(p.Name, p.Grad)
	}

	// Unordered output:
	// b <nil>
	// w <nil>
}

func ExampleRNNT_ResetState() {
	rnn := L.RNN(3)
	in := variable.New(1)

	rnn.Forward(in)  // sets the hidden state
	rnn.ResetState() // clears the hidden state
	rnn.Forward(in)  // h2h is not used

	for _, p := range rnn.Params() {
		fmt.Println(p.Name)
	}

	// Output:
	// b
	// w
}
11 changes: 2 additions & 9 deletions model/mlp.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ package model
import (
"math/rand"

"github.com/itsubaki/autograd/dot"
F "github.com/itsubaki/autograd/function"
L "github.com/itsubaki/autograd/layer"
"github.com/itsubaki/autograd/variable"
Expand All @@ -30,7 +29,7 @@ func NewMLP(outSize []int, opts ...MLPOpts) *MLP {
s = opts[0].Source
}

layers := make([]*L.Layer, len(outSize))
layers := make([]Layer, len(outSize))
for i := 0; i < len(outSize); i++ {
layers[i] = L.Linear(outSize[i], L.LinearOpts{
Source: s,
Expand All @@ -40,9 +39,7 @@ func NewMLP(outSize []int, opts ...MLPOpts) *MLP {
return &MLP{
Activation: activation,
Model: Model{
Layer: L.Layer{
Layers: layers,
},
Layers: layers,
},
}
}
Expand All @@ -55,7 +52,3 @@ func (m *MLP) Forward(x *variable.Variable) *variable.Variable {

return m.Layers[last].First(x)
}

func (m *MLP) Graph(x *variable.Variable, opts ...dot.Opts) []string {
return m.graph(m.Forward(x), opts...)
}
Loading

0 comments on commit d76ab52

Please sign in to comment.