From a470e31598a6e8258f26338ebce1d8d7de9afb79 Mon Sep 17 00:00:00 2001
From: Cam Hutchison
Date: Thu, 18 Aug 2022 16:23:37 +1000
Subject: [PATCH] compiler: Add participle-based protobuf parser

Add a protobuf parser using participle, based on the example in Alec
Thomas's protobuf compiler. I have reworked the parser to be closer to
how I think it should look and to bring it closer to the BNF(ish)
grammar in the protobuf spec. I have also removed comments from the
grammar (they are ignored like whitespace), since I don't believe the
grammar is the best place to handle comments (I am thinking of
attaching comments to tokens in the lexer; we'll see).

At the moment, the `protog` command just prints out the Go struct for
the parsed form of the protobuf. This will grow into something more
useful as protog evolves into a proper compiler.

We bump the required Go version to 1.18 as there is a small use of
generics in the test code, but I also want to use generics wherever it
makes sense.

Link: https://github.com/alecthomas/participle
Link: https://github.com/alecthomas/protobuf
Link: https://developers.google.com/protocol-buffers/docs/reference/proto2-spec
Link: https://developers.google.com/protocol-buffers/docs/reference/proto3-spec
---
 Makefile                       |   2 +-
 cmd/protog/main.go             |  53 +++++
 compiler/parser/lexer.go       | 131 ++++++++++++
 compiler/parser/lexer_test.go  |  32 +++
 compiler/parser/parser.go      | 356 +++++++++++++++++++++++++++++++++
 compiler/parser/parser_test.go | 177 ++++++++++++++++
 go.mod                         |  14 +-
 go.sum                         |   7 +-
 8 files changed, 769 insertions(+), 3 deletions(-)
 create mode 100644 cmd/protog/main.go
 create mode 100644 compiler/parser/lexer.go
 create mode 100644 compiler/parser/lexer_test.go
 create mode 100644 compiler/parser/parser.go
 create mode 100644 compiler/parser/parser_test.go

diff --git a/Makefile b/Makefile
index 3da68ea..8006dea 100644
--- a/Makefile
+++ b/Makefile
@@ -18,7 +18,7 @@ clean:: ## Remove generated files
 .PHONY: all ci clean

 # --- Build --------------------------------------------------------------------
-CMDS = ./cmd/pb
+CMDS = ./cmd/pb ./cmd/protog

 build: | $(O) ## Build reflect binaries
 	go build -o $(O) $(CMDS)
diff --git a/cmd/protog/main.go b/cmd/protog/main.go
new file mode 100644
index 0000000..fe51c23
--- /dev/null
+++ b/cmd/protog/main.go
@@ -0,0 +1,53 @@
+package main
+
+import (
+	"fmt"
+	"os"
+
+	"foxygo.at/protog/compiler/parser"
+	"github.com/alecthomas/kong"
+)
+
+var (
+	// version vars set by goreleaser
+	version = "tip"
+	commit = "HEAD"
+	date = "now"
+
+	description = `protog compiles .proto files`
+
+	cli struct {
+		ProtogConfig
+		Version kong.VersionFlag `help:"Show version"`
+	}
+)
+
+type ProtogConfig struct {
+	Filename []string `arg:"" help:"filenames of .proto file to compile"`
+}
+
+func main() {
+	kctx := kong.Parse(&cli,
+		kong.Description(description),
+		kong.Vars{"version": fmt.Sprintf("%s (%s on %s)", version, commit, date)},
+	)
+	if err := kctx.Run(); err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+}
+
+func (cfg *ProtogConfig) Run() error {
+	for _, filename := range cfg.Filename {
+		f, err := os.Open(filename)
+		if err != nil {
+			return err
+		}
+		p, err := parser.Parse(filename, f)
+		if err != nil {
+			return err
+		}
+		fmt.Printf("%+v\n", p)
+	}
+	return nil
+}
diff --git a/compiler/parser/lexer.go b/compiler/parser/lexer.go
new file mode 100644
index 0000000..5512102
--- /dev/null
+++ b/compiler/parser/lexer.go
@@ -0,0 +1,131 @@
+// This code is derived from https://github.com/alecthomas/protobuf
+
+package parser
+
+import (
+	"fmt"
+	"strconv"
+	"strings"
+	"unicode"
+
+	"github.com/alecthomas/participle/v2/lexer"
+)
+
+var lex = lexer.MustSimple([]lexer.SimpleRule{
+	{Name: "String", Pattern: `"(\\"|[^"])*"|'(\\'|[^'])*'`},
+	{Name: "Ident", Pattern: `[a-zA-Z_]([a-zA-Z_0-9])*`},
+	{Name: "Float", Pattern: `[-+]?(\d*\.\d+([eE][-+]?\d+)?|\d+[eE][-+]?\d+|inf)`},
+	{Name: "Int", Pattern: `[-+]?(0[xX][0-9A-Fa-f]+)|([-+]?\d+)`},
+	{Name: "Whitespace", Pattern: `[ \t\n\r\s]+`},
+	{Name: "Comment", Pattern: `(/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/)|(//(.*)[^\n]*\n)`},
+	{Name: "Symbols", Pattern: `[/={}\[\]()<>.,;:]`},
+})
+
+var escapeTable = map[rune]rune{
+	'0': '\000',
+	'a': '\x07',
+	'b': '\x08',
+	'e': '\x1B',
+	'f': '\x0C',
+	'n': '\x0A',
+	'r': '\x0D',
+	't': '\x09',
+	'v': '\x0B',
+	'\\': '\x5C',
+	'\'': '\x27',
+	'"': '\x22',
+	'?': '\x3F',
+}
+
+var stateBase = map[unquoteState]int{
+	uqHex: 16, uqOctal: 8,
+}
+
+type unquoteState int
+
+const (
+	uqDefault unquoteState = iota
+	uqEscape
+	uqHex
+	uqOctal
+)
+
+// C-style unquoting - supports octal \DDD and hex \xDDD
+func unquote(token lexer.Token) (lexer.Token, error) {
+	kind := token.Value[0] // nolint: ifshort
+	token.Value = token.Value[1 : len(token.Value)-1]
+	// Single quoted, no escaping.
+	if kind == '\'' {
+		return token, nil
+	}
+	out := strings.Builder{}
+	state := uqDefault
+	// Digits being collected in hex/octal modes
+	var digits string
+	for _, rn := range token.Value {
+		switch state {
+		case uqEscape:
+			if rn == 'x' { // nolint: gocritic
+				state = uqHex
+			} else if unicode.Is(unicode.Digit, rn) {
+				state = uqOctal
+				digits = string(rn)
+			} else {
+				escaped, ok := escapeTable[rn]
+				if !ok {
+					return token, fmt.Errorf("%s: %q: unknown escape sequence \"\\%c\"", token.Pos, token.Value, rn)
+				}
+				out.WriteRune(escaped)
+				state = uqDefault
+			}
+			continue
+		case uqHex:
+			if unicode.Is(unicode.ASCII_Hex_Digit, rn) && len(digits) < 2 {
+				digits += string(rn)
+				continue
+			}
+			if err := flushDigits(digits, 16, &out); err != nil {
+				return token, fmt.Errorf("%s: %w", token.Pos, err)
+			}
+			state = uqDefault
+			digits = ""
+		case uqOctal:
+			if unicode.IsDigit(rn) && len(digits) < 3 {
+				digits += string(rn)
+				continue
+			}
+			if err := flushDigits(digits, 8, &out); err != nil {
+				return token, fmt.Errorf("%s: %w", token.Pos, err)
+			}
+			state = uqDefault
+			digits = ""
+		case uqDefault:
+		default:
+			panic(state)
+		}
+		if rn == '\\' {
+			state = uqEscape
+		} else {
+			out.WriteRune(rn)
+		}
+	}
+	if digits != "" {
+		if err := flushDigits(digits, stateBase[state], &out); err != nil {
+			return token, fmt.Errorf("%s: %w", token.Pos, err)
+		}
+	}
+	token.Value = out.String()
+	return token, nil
+}
+
+func flushDigits(digits string, base int, out *strings.Builder) error {
+	n, err := strconv.ParseUint(digits, base, 32)
+	if err != nil {
+		return fmt.Errorf("invalid base %d numeric value %s", base, digits)
+	}
+	if n > 255 {
+		return fmt.Errorf("base %d value %s larger than 255", base, digits)
+	}
+	out.WriteByte(byte(n))
+	return nil
+}
diff --git a/compiler/parser/lexer_test.go b/compiler/parser/lexer_test.go
new file mode 100644
index 0000000..5ca5226
--- /dev/null
+++ b/compiler/parser/lexer_test.go
@@ -0,0 +1,32 @@
+// This code is derived from https://github.com/alecthomas/protobuf
+
+package parser
+
+import (
+	"testing"
+
+	"github.com/alecthomas/participle/v2/lexer"
+	"github.com/stretchr/testify/require"
+)
+
+func TestUnquote(t *testing.T) {
+	tests := []struct {
+		input string
+		expected string
+	}{
+		{`"\n\027"`, "\n\027"},
+		{`"\?"`, "\x3f"},
+		{`'\n\027'`, `\n\027`},
+		{`"\n\x17"`, "\n\027"},
+		{`"hello\0world"`, "hello\000world"},
+		{`"hello\x0world"`, "hello\000world"},
+		{`"\0001"`, "\0001"},
+		{`"\x001"`, "\x001"},
+		{`"\341\210\264"`, "ሴ"},
+	}
+	for _, test := range tests {
+		actual, err := unquote(lexer.Token{Value: test.input})
+		require.NoError(t, err)
+		require.Equal(t, test.expected, actual.Value)
+	}
+}
diff --git a/compiler/parser/parser.go b/compiler/parser/parser.go
new file mode 100644
index 0000000..1935d1f
--- /dev/null
+++ b/compiler/parser/parser.go
@@ -0,0 +1,356 @@
+// This code is derived from https://github.com/alecthomas/protobuf
+
+// Package parser contains a protobuf parser.
+// nolint: govet, golint
+package parser
+
+import (
+	"io"
+	"math/big"
+	"strings"
+
+	"github.com/alecthomas/participle/v2"
+	"github.com/alecthomas/participle/v2/lexer"
+)
+
+var parser = participle.MustBuild(
+	&Proto{},
+	participle.UseLookahead(2),
+	participle.Map(unquote, "String"),
+	participle.Lexer(lex),
+	participle.Elide("Whitespace", "Comment"),
+)
+
+// Parse protobuf.
+func Parse(filename string, r io.Reader) (*Proto, error) {
+	p := &Proto{}
+	return p, parser.Parse(filename, r, p)
+}
+
+func ParseString(filename string, source string) (*Proto, error) {
+	p := &Proto{}
+	return p, parser.ParseString(filename, source, p)
+}
+
+type Proto struct {
+	Pos lexer.Position
+
+	Syntax string `( "syntax" "=" @String ";" )?`
+	Package []Package `( @@` // there can be only 1, but it can be anywhere in the file
+	Imports []Import `| @@`
+	Options []Option `| "option" @@ ";"`
+	Entries []Entry `| @@`
+	Empty Empty `| ";" )*`
+}
+
+type Package struct {
+	Pos lexer.Position
+
+	Name FullIdent `"package" @@ ";"`
+}
+
+type Import struct {
+	Pos lexer.Position
+
+	Public bool `"import" ( @"public" )?`
+	Name string `@String ";"`
+}
+
+type Entry struct {
+	Pos lexer.Position
+
+	Message *Message `( @@`
+	Enum *Enum `| @@`
+	Extend *Extend `| @@`
+	Service *Service `| @@ )`
+}
+
+type Message struct {
+	Pos lexer.Position
+
+	Name string `"message" @Ident`
+	Entries []MessageEntry `"{" @@* "}"`
+}
+
+type MessageEntry struct {
+	Pos lexer.Position
+
+	Enum *Enum `( @@`
+	Message *Message `| @@`
+	Oneof *OneOf `| @@`
+	Extend *Extend `| @@`
+	Reserved *Reserved `| @@`
+	Extensions *Extensions `| @@`
+	Option *Option `| "option" @@ ";"`
+	Field *Field `| @@` // must be after Option, or option parses as Field
+	Empty Empty `| ";" )`
+}
+
+type Field struct {
+	Pos lexer.Position
+
+	Optional bool `( @"optional"`
+	Required bool `| @"required"`
+	Repeated bool `| @"repeated" )?`
+
+	Group *Group `( @@`
+	Direct *Direct `| @@ ";" )`
+}
+
+type Direct struct {
+	Pos lexer.Position
+
+	Type *Type `@@`
+	Name string `@Ident`
+	Tag int `"=" @Int`
+
+	Options []*Option `( "[" @@ ( "," @@ )* "]" )?`
+}
+
+type Group struct {
+	Pos lexer.Position
+
+	Name string `"group" @Ident`
+	Tag int `"=" @Int`
+	Options []Option `( "[" @@ ( "," @@ )* "]" )?`
+	Entries []MessageEntry `"{" @@* "}"`
+}
+
+type OneOf struct {
+	Pos lexer.Position
+
+	Name string `"oneof" @Ident`
+	Entries []OneOfEntry `"{" @@* "}"`
+}
+
+type OneOfEntry struct {
+	Pos lexer.Position
+
+	Field *Field `( @@`
+	Option *Option `| "option" @@ ";"`
+	Empty Empty `| ";" )`
+}
+
+type Extend struct {
+	Pos lexer.Position
+
+	Reference FQIdent `"extend" @@`
+	Fields []Field `"{" @@* "}"`
+}
+
+type Reserved struct {
+	Pos lexer.Position
+
+	Ranges []Range `"reserved" ( @@ ( "," @@ )*`
+	FieldNames []string ` | @String ( "," @String )* ) ";"`
+}
+
+type Extensions struct {
+	Pos lexer.Position
+
+	Extensions []Range `"extensions" @@ ( "," @@ )*`
+	Options []Option `( "[" @@ ( "," @@ )* "]" )?`
+}
+
+type Range struct {
+	Start int `@Int`
+	End *int ` ( "to" ( @Int`
+	Max bool ` | @"max" ) )?`
+}
+
+type Option struct {
+	Pos lexer.Position
+
+	Name []OptionName `@@ ( "." @@ )*`
+	Value *Value `"=" @@`
+}
+
+type OptionName struct {
+	Pos lexer.Position
+
+	Name string `@Ident`
+	Extension *FQIdent `| "(" @@ ")"`
+}
+
+type Enum struct {
+	Pos lexer.Position
+
+	Name string `"enum" @Ident`
+	Values []EnumEntry `"{" @@* "}"`
+}
+
+type EnumEntry struct {
+	Pos lexer.Position
+
+	Value *EnumValue `( @@`
+	Reserved *Reserved `| @@`
+	Option *Option `| "option" @@ ";"`
+	Empty Empty `| ";" )`
+}
+
+type EnumValue struct {
+	Pos lexer.Position
+
+	Key string `@Ident`
+	Value int `"=" @( ( "-" )? Int )`
+
+	Options []Option `( "[" @@ ( "," @@ )* "]" )? ";"`
+}
+
+type Service struct {
+	Pos lexer.Position
+
+	Name string `"service" @Ident`
+	Entries []ServiceEntry `"{" @@* "}"`
+}
+
+type ServiceEntry struct {
+	Pos lexer.Position
+
+	Method *Method `( @@`
+	Option *Option `| "option" @@ ";"`
+	Empty Empty `| ";" )`
+}
+
+type Method struct {
+	Pos lexer.Position
+
+	Name string `"rpc" @Ident`
+	StreamingRequest bool `"(" ( @"stream" )?`
+	Request *Type ` @@ ")"`
+	StreamingResponse bool `"returns" "(" ( @"stream" )?`
+	Response *Type ` @@ ")"`
+	Options []Option `( ( "{" ( "option" @@ ";"+ )* "}" ) | ";")`
+}
+
+type FullIdent struct {
+	Pos lexer.Position
+
+	Parts []string `@Ident ( "." @Ident )*`
+}
+
+func (fi *FullIdent) String() string { return strings.Join(fi.Parts, ".") }
+
+type FQIdent struct {
+	Pos lexer.Position
+
+	FullyQualified bool `( @"." )?`
+	Parts []string `@Ident ( "." @Ident )*`
+}
+
+func NewFQIdentFromString(ident string) *FQIdent {
+	parts := strings.Split(ident, ".")
+	if len(parts) == 0 {
+		return nil
+	}
+	if parts[0] == "" {
+		return &FQIdent{FullyQualified: true, Parts: parts[1:]}
+	}
+	return &FQIdent{Parts: parts}
+}
+
+func (fqi *FQIdent) String() (result string) {
+	if fqi.FullyQualified {
+		result = "."
+	}
+	result += strings.Join(fqi.Parts, ".")
+	return result
+}
+
+type Type struct {
+	Pos lexer.Position
+
+	Scalar Scalar ` @@`
+	Map *MapType `| @@`
+	Reference *FQIdent `| @@`
+}
+
+type MapType struct {
+	Pos lexer.Position
+
+	Key Scalar `"map" "<" @@`
+	Value *Type `"," @@ ">"`
+}
+
+type Value struct {
+	Pos lexer.Position
+
+	String *string ` @String+`
+	Number *big.Float `| ("-" | "+")? (@Float | @Int)`
+	Bool *Boolean `| @("true"|"false")`
+	ProtoText *ProtoText `| "{" @@ "}"`
+	Array *Array `| @@`
+	Reference *FQIdent `| @@`
+}
+
+type Boolean bool
+
+func (b *Boolean) Capture(v []string) error { *b = v[0] == "true"; return nil }
+
+type ProtoText struct {
+	Pos lexer.Position
+
+	Fields []ProtoTextField `( @@ ( "," | ";" )? )*`
+}
+
+type ProtoTextField struct {
+	Pos lexer.Position
+
+	Name string `( @Ident`
+	Type string `| "[" @("."? Ident ( ("." | "/") Ident )* ) "]" )`
+	Value *Value `( ":"? @@ )`
+}
+
+type Array struct {
+	Pos lexer.Position
+
+	Elements []*Value `"[" ( @@ ( ","? @@ )* )? "]"`
+}
+
+type Empty struct{}
+
+type Scalar int
+
+const (
+	None Scalar = iota
+	Double
+	Float
+	Int32
+	Int64
+	Uint32
+	Uint64
+	Sint32
+	Sint64
+	Fixed32
+	Fixed64
+	SFixed32
+	SFixed64
+	Bool
+	String
+	Bytes
+)
+
+var scalarToString = map[Scalar]string{
+	None: "None", Double: "Double", Float: "Float", Int32: "Int32", Int64: "Int64", Uint32: "Uint32",
+	Uint64: "Uint64", Sint32: "Sint32", Sint64: "Sint64", Fixed32: "Fixed32", Fixed64: "Fixed64",
+	SFixed32: "SFixed32", SFixed64: "SFixed64", Bool: "Bool", String: "String", Bytes: "Bytes",
+}
+
+func (s Scalar) GoString() string { return scalarToString[s] }
+
+var stringToScalar = map[string]Scalar{
+	"double": Double, "float": Float, "int32": Int32, "int64": Int64, "uint32": Uint32, "uint64": Uint64,
+	"sint32": Sint32, "sint64": Sint64, "fixed32": Fixed32, "fixed64": Fixed64, "sfixed32": SFixed32,
+	"sfixed64": SFixed64, "bool": Bool, "string": String, "bytes": Bytes,
+}
+
+func (s *Scalar) Parse(lex *lexer.PeekingLexer) error {
+	token := lex.Peek()
+	scalar, ok := stringToScalar[token.Value]
+	if !ok {
+		return participle.NextMatch
+	}
+	lex.Next()
+	*s = scalar
+	return nil
+}
diff --git a/compiler/parser/parser_test.go b/compiler/parser/parser_test.go
new file mode 100644
index 0000000..13818e4
--- /dev/null
+++ b/compiler/parser/parser_test.go
@@ -0,0 +1,177 @@
+// This code is derived from https://github.com/alecthomas/protobuf
+
+package parser
+
+import (
+	"math/big"
+	"os"
+	"path/filepath"
+	"reflect"
+	"strconv"
+	"testing"
+
+	"github.com/alecthomas/participle/v2/lexer"
+	"github.com/alecthomas/repr"
+	"github.com/stretchr/testify/require"
+)
+
+func TestParserFromFixtures(t *testing.T) {
+	files, err := filepath.Glob("../testdata/*.proto")
+	require.NoError(t, err)
+	for _, file := range files {
+		t.Run(file, func(t *testing.T) {
+			r, err := os.Open(file)
+			require.NoError(t, err)
+			_, err = Parse(file, r)
+			require.NoError(t, err)
+		})
+	}
+}
+
+func TestParser(t *testing.T) {
+	tests := []struct {
+		name string
+		input string
+		expected *Proto
+	}{{
+		name: "MessageOptions",
+		input: `
+		message VariousComplexOptions {
+			option (complex_opt2).bar.(protobuf_unittest.corge).qux = 2008;
+			option (complex_opt2).(protobuf_unittest.garply).(corge).qux = 2121;
+			option (.ComplexOptionType2.ComplexOptionType4.complex_opt4).waldo = 1971;
+			option (complex_opt2).foo.bar.(baz).qux = 1980;
+			option (strings) = "1" "2";
+			option deprecated = true;
+			option deprecated = false;
+		}
+		`,
+		expected: &Proto{
+			Entries: []Entry{
+				{Message: &Message{
+					Name: "VariousComplexOptions",
+					Entries: []MessageEntry{
+						{Option: &Option{
+							Name: []OptionName{
+								{Extension: NewFQIdentFromString("complex_opt2")},
+								{Name: "bar"},
+								{Extension: NewFQIdentFromString("protobuf_unittest.corge")},
+								{Name: "qux"},
+							},
+							Value: &Value{Number: toBig(2008)},
+						}},
+						{Option: &Option{
+							Name: []OptionName{
+								{Extension: NewFQIdentFromString("complex_opt2")},
+								{Extension: NewFQIdentFromString("protobuf_unittest.garply")},
+								{Extension: NewFQIdentFromString("corge")},
+								{Name: "qux"},
+							},
+							Value: &Value{Number: toBig(2121)},
+						}},
+						{Option: &Option{
+							Name: []OptionName{
+								{Extension: NewFQIdentFromString(".ComplexOptionType2.ComplexOptionType4.complex_opt4")},
+								{Name: "waldo"},
+							},
+							Value: &Value{Number: toBig(1971)},
+						}},
+						{Option: &Option{
+							Name: []OptionName{
+								{Extension: NewFQIdentFromString("complex_opt2")},
+								{Name: "foo"},
+								{Name: "bar"},
+								{Extension: NewFQIdentFromString("baz")},
+								{Name: "qux"},
+							},
+							Value: &Value{Number: toBig(1980)},
+						}},
+						{Option: &Option{
+							Name: []OptionName{{Extension: NewFQIdentFromString("strings")}},
+							Value: &Value{String: ptr[string]("12")},
+						}},
+						{Option: &Option{
+							Name: []OptionName{{Name: "deprecated"}},
+							Value: &Value{Bool: ptr[Boolean](true)},
+						}},
+						{Option: &Option{
+							Name: []OptionName{{Name: "deprecated"}},
+							Value: &Value{Bool: ptr[Boolean](false)},
+						}},
+					},
+				}},
+			},
+		}},
+	}
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			actual, err := ParseString(test.name, test.input)
+			require.NoError(t, err)
+			clearPos(actual)
+			expectedStr := repr.String(test.expected, repr.Indent(" "))
+			actualStr := repr.String(actual, repr.Indent(" "))
+			require.Equal(t, expectedStr, actualStr, actualStr)
+		})
+	}
+}
+
+func TestImports(t *testing.T) {
+	tests := []struct {
+		name string
+		source string
+		want []Import
+	}{{
+		name: "parses a single import correctly",
+		source: `import 'foo/bar/test.proto';`,
+		want: []Import{{Name: "foo/bar/test.proto", Public: false}},
+	}, {
+		name: "parses public imports correctly",
+		source: `import public "foo/bar/test.proto";`,
+		want: []Import{{Name: "foo/bar/test.proto", Public: true}},
+	}}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := ParseString("test.proto", tt.source)
+			require.NoError(t, err)
+			clearPos(got)
+			require.Equal(t, tt.want, got.Imports)
+		})
+	}
+}
+
+var zeroPos = reflect.ValueOf(lexer.Position{})
+
+func clearPos(node any) {
+	clearPosFromValue(reflect.ValueOf(node))
+}
+
+func clearPosFromValue(node reflect.Value) {
+	node = reflect.Indirect(node)
+	switch node.Kind() {
+	case reflect.Array, reflect.Slice:
+		for i := 0; i < node.Len(); i++ {
+			clearPosFromValue(node.Index(i))
+		}
+	case reflect.Struct:
+		for i := 0; i < node.NumField(); i++ {
+			f := node.Field(i)
+			if node.Type().Field(i).Name == "Pos" {
+				f.Set(zeroPos)
+				continue
+			}
+			if f.CanInterface() {
+				clearPosFromValue(f)
+			}
+		}
+	}
+}
+
+func toBig(n int) *big.Float {
+	f, _, _ := big.ParseFloat(strconv.Itoa(n), 10, 64, 0)
+	return f
+}
+
+func ptr[T any](v T) *T {
+	vv := T(v)
+	return &vv
+}
diff --git a/go.mod b/go.mod
index 999e7b2..b72ed8f 100644
--- a/go.mod
+++ b/go.mod
@@ -1,12 +1,24 @@
 module foxygo.at/protog

-go 1.16
+go 1.18

 require (
 	github.com/alecthomas/kong v0.4.1
+	github.com/alecthomas/participle/v2 v2.0.0-alpha9
+	github.com/alecthomas/repr v0.1.0
 	github.com/stretchr/testify v1.7.0
 	golang.org/x/sys v0.0.0-20210510120138-977fb7262007
 	google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67
 	google.golang.org/grpc v1.39.1
 	google.golang.org/protobuf v1.27.1
 )
+
+require (
+	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/golang/protobuf v1.5.2 // indirect
+	github.com/pkg/errors v0.9.1 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
+	golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 // indirect
+	golang.org/x/text v0.3.5 // indirect
+	gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
+)
diff --git a/go.sum b/go.sum
index 0b1b920..aee82f2 100644
--- a/go.sum
+++ b/go.sum
@@ -3,8 +3,12 @@ cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/alecthomas/kong v0.4.1 h1:0sFnMts+ijOiFuSHsMB9MlDi3NGINBkx9KIw1/gcuDw=
 github.com/alecthomas/kong v0.4.1/go.mod h1:uzxf/HUh0tj43x1AyJROl3JT7SgsZ5m+icOv1csRhc0=
-github.com/alecthomas/repr v0.0.0-20210801044451-80ca428c5142 h1:8Uy0oSf5co/NZXje7U1z8Mpep++QJOldL2hs/sBQf48=
+github.com/alecthomas/participle/v2 v2.0.0-alpha9 h1:TnflwDbtf5/aG6JMbmdiA+YB3bLg0sc6yRtmAfedfN4=
+github.com/alecthomas/participle/v2 v2.0.0-alpha9/go.mod h1:NumScqsC42o9x+dGj8/YqsIfhrIQjFEOFovxotbBirA=
+github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
 github.com/alecthomas/repr v0.0.0-20210801044451-80ca428c5142/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
+github.com/alecthomas/repr v0.1.0 h1:ENn2e1+J3k09gyj2shc0dHr/yjaWSHRlrJ4DPMevDqE=
+github.com/alecthomas/repr v0.1.0/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
 github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
@@ -53,6 +57,7 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
 github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
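
Below is a usage sketch, not part of the patch, showing how the parser package added above can be driven. It assumes only the Parse/ParseString functions and the Proto, Entry and Message types introduced in compiler/parser/parser.go; the sample .proto source and the walk over Entries are illustrative, and protog itself just prints the parsed struct with %+v.

package main

import (
	"fmt"
	"log"

	"foxygo.at/protog/compiler/parser"
)

// Hypothetical input; any small .proto accepted by the grammar works.
const src = `
syntax = "proto3";
package example;

message Greeting {
  string text = 1;
}
`

func main() {
	// ParseString comes from compiler/parser/parser.go in this patch.
	p, err := parser.ParseString("example.proto", src)
	if err != nil {
		log.Fatal(err)
	}
	// String tokens are unquoted via participle.Map(unquote, "String"),
	// so this prints: syntax: proto3
	fmt.Println("syntax:", p.Syntax)
	// Walk the top-level entries and report message names.
	for _, e := range p.Entries {
		if e.Message != nil {
			fmt.Println("message:", e.Message.Name)
		}
	}
}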