Skip to content

Commit

Permalink
Update some files
Browse files Browse the repository at this point in the history
  • Loading branch information
itsubaki committed Oct 15, 2023
1 parent b31fdd1 commit 775e25c
Show file tree
Hide file tree
Showing 4 changed files with 50 additions and 8 deletions.
21 changes: 15 additions & 6 deletions layer/layer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,22 +7,31 @@ import (
)

// ExampleLayer demonstrates building a Layer incrementally: start from the
// zero value, Add three Linear forwarders, print their parameters, and
// clear the gradients. Output is unordered because Params iterates a map.
func ExampleLayer() {
	// The zero value of Layer is usable directly; no fields need to be set
	// before calling Add. (The previous version pre-populated Forwarder and
	// Layers by hand — that duplicate declaration of l is removed here.)
	l := &L.Layer{}

	l.Add(L.Linear(1))
	l.Add(L.Linear(2))
	l.Add(L.Linear(3))

	for _, v := range l.Params() {
		fmt.Println(v)
	}

	l.Cleargrads()

	// Unordered output:
	// b([0])
	// b([0 0])
	// b([0 0 0])
}

// ExampleLayer_linear shows a Layer constructed with a single Linear
// forwarder via a struct literal, printing its one bias parameter.
func ExampleLayer_linear() {
	linear := &L.Layer{Forwarder: L.Linear(1)}

	for _, param := range linear.Params() {
		fmt.Println(param)
	}

	// Output:
	// b([0])
}
21 changes: 21 additions & 0 deletions layer/parameter_test.go
Original file line number Diff line number Diff line change
@@ -1 +1,22 @@
package layer_test

import (
"fmt"

"github.com/itsubaki/autograd/layer"
"github.com/itsubaki/autograd/variable"
)

// ExampleParameter registers two named parameters in a Parameters map and
// prints them. Map iteration order is random, hence the unordered output.
func ExampleParameter() {
	params := make(layer.Parameters)
	params.Add("w", variable.New(1, 2))
	params.Add("b", variable.New(3, 4))

	for _, param := range params {
		fmt.Println(param)
	}

	// Unordered output:
	// w([1 2])
	// b([3 4])
}
4 changes: 2 additions & 2 deletions model/mlp.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ import (
)

type MLP struct {
Model
Activation Activation
Model
}

type MLPOpts struct {
Expand All @@ -35,12 +35,12 @@ func NewMLP(outSize []int, opts ...MLPOpts) *MLP {
}

return &MLP{
Activation: activation,
Model: Model{
Layer: L.Layer{
Layers: layers,
},
},
Activation: activation,
}
}

Expand Down
12 changes: 12 additions & 0 deletions model/mlp_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,18 @@ import (
)

// ExampleMLP builds a two-layer MLP (widths 5 and 1) with default options
// and prints the dynamic type of each layer's forwarder.
func ExampleMLP() {
	mlp := model.NewMLP([]int{5, 1})

	for _, layer := range mlp.Layers {
		fmt.Printf("%T\n", layer.Forwarder)
	}

	// Output:
	// *layer.LinearT
	// *layer.LinearT
}

func ExampleMLP_opts() {
mlp := model.NewMLP([]int{5, 1}, model.MLPOpts{
Activation: F.ReLU,
Source: rand.NewSource(1),
Expand Down

0 comments on commit 775e25c

Please sign in to comment.