From 4e3bb7e95fb72401119f4d095177fa6420e0e6e7 Mon Sep 17 00:00:00 2001
From: Nalin Dahyabhai
Date: Thu, 6 May 2021 11:41:43 -0400
Subject: [PATCH 1/2] Remove dependencies on ffjson

Signed-off-by: Nalin Dahyabhai
---
 Makefile | 22 +-
 containers_ffjson.go | 1413 ----------
 ffjson_deps.go | 10 -
 go.mod | 1 -
 go.sum | 2 -
 images_ffjson.go | 1418 ----------
 layers_ffjson.go | 2267 ---------------
 pkg/archive/archive_ffjson.go | 2465 -----------------
 tests/tools/go.mod | 1 -
 tests/tools/go.sum | 2 -
 tests/tools/tools.go | 1 -
 .../github.com/pquerna/ffjson/.gitignore | 5 -
 .../github.com/pquerna/ffjson/.travis.yml | 14 -
 .../vendor/github.com/pquerna/ffjson/LICENSE | 202 --
 .../vendor/github.com/pquerna/ffjson/Makefile | 49 -
 .../vendor/github.com/pquerna/ffjson/NOTICE | 8 -
 .../github.com/pquerna/ffjson/README.md | 232 --
 .../github.com/pquerna/ffjson/ffjson.go | 85 -
 .../pquerna/ffjson/fflib/v1/buffer.go | 421 ---
 .../pquerna/ffjson/fflib/v1/buffer_nopool.go | 11 -
 .../pquerna/ffjson/fflib/v1/buffer_pool.go | 105 -
 .../pquerna/ffjson/fflib/v1/bytenum.go | 88 -
 .../pquerna/ffjson/fflib/v1/decimal.go | 378 ---
 .../pquerna/ffjson/fflib/v1/extfloat.go | 668 -----
 .../pquerna/ffjson/fflib/v1/fold.go | 121 -
 .../pquerna/ffjson/fflib/v1/ftoa.go | 542 ----
 .../pquerna/ffjson/fflib/v1/internal/atof.go | 936 -------
 .../pquerna/ffjson/fflib/v1/internal/atoi.go | 213 --
 .../ffjson/fflib/v1/internal/extfloat.go | 668 -----
 .../pquerna/ffjson/fflib/v1/internal/ftoa.go | 475 ----
 .../pquerna/ffjson/fflib/v1/iota.go | 161 --
 .../pquerna/ffjson/fflib/v1/jsonstring.go | 512 ----
 .../pquerna/ffjson/fflib/v1/lexer.go | 937 -------
 .../pquerna/ffjson/fflib/v1/reader.go | 512 ----
 .../ffjson/fflib/v1/reader_scan_generic.go | 34 -
 .../pquerna/ffjson/generator/generator.go | 59 -
 .../pquerna/ffjson/generator/inceptionmain.go | 251 --
 .../pquerna/ffjson/generator/parser.go | 142 -
 .../pquerna/ffjson/generator/tags.go | 59 -
 .../pquerna/ffjson/generator/tempfile.go | 65 -
 .../pquerna/ffjson/inception/decoder.go | 323 ---
 .../pquerna/ffjson/inception/decoder_tpl.go | 773 ------
 .../pquerna/ffjson/inception/encoder.go | 544 ----
 .../pquerna/ffjson/inception/encoder_tpl.go | 73 -
 .../pquerna/ffjson/inception/inception.go | 160 --
 .../pquerna/ffjson/inception/reflect.go | 290 --
 .../pquerna/ffjson/inception/tags.go | 79 -
 .../pquerna/ffjson/inception/template.go | 60 -
 .../pquerna/ffjson/inception/writerstack.go | 65 -
 .../pquerna/ffjson/shared/options.go | 51 -
 tests/tools/vendor/modules.txt | 7 -
 vendor/github.com/pquerna/ffjson/LICENSE | 202 --
 vendor/github.com/pquerna/ffjson/NOTICE | 8 -
 .../pquerna/ffjson/fflib/v1/buffer.go | 421 ---
 .../pquerna/ffjson/fflib/v1/buffer_nopool.go | 11 -
 .../pquerna/ffjson/fflib/v1/buffer_pool.go | 105 -
 .../pquerna/ffjson/fflib/v1/bytenum.go | 88 -
 .../pquerna/ffjson/fflib/v1/decimal.go | 378 ---
 .../pquerna/ffjson/fflib/v1/extfloat.go | 668 -----
 .../pquerna/ffjson/fflib/v1/fold.go | 121 -
 .../pquerna/ffjson/fflib/v1/ftoa.go | 542 ----
 .../pquerna/ffjson/fflib/v1/internal/atof.go | 936 -------
 .../pquerna/ffjson/fflib/v1/internal/atoi.go | 213 --
 .../ffjson/fflib/v1/internal/extfloat.go | 668 -----
 .../pquerna/ffjson/fflib/v1/internal/ftoa.go | 475 ----
 .../pquerna/ffjson/fflib/v1/iota.go | 161 --
 .../pquerna/ffjson/fflib/v1/jsonstring.go | 512 ----
 .../pquerna/ffjson/fflib/v1/lexer.go | 937 -------
 .../pquerna/ffjson/fflib/v1/reader.go | 512 ----
 .../ffjson/fflib/v1/reader_scan_generic.go | 34 -
 .../pquerna/ffjson/inception/decoder.go | 323 ---
 .../pquerna/ffjson/inception/decoder_tpl.go | 773 ------
 .../pquerna/ffjson/inception/encoder.go | 544 ----
 .../pquerna/ffjson/inception/encoder_tpl.go | 73 -
 .../pquerna/ffjson/inception/inception.go | 160 --
 .../pquerna/ffjson/inception/reflect.go | 290 --
 .../pquerna/ffjson/inception/tags.go | 79 -
 .../pquerna/ffjson/inception/template.go | 60 -
 .../pquerna/ffjson/inception/writerstack.go | 65 -
 .../pquerna/ffjson/shared/options.go | 51 -
 vendor/modules.txt | 6 -
 81 files changed, 2 insertions(+), 27394 deletions(-)
 delete mode 100644 containers_ffjson.go
 delete mode 100644 ffjson_deps.go
 delete mode 100644 images_ffjson.go
 delete mode 100644 layers_ffjson.go
 delete mode 100644 pkg/archive/archive_ffjson.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/.gitignore
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/.travis.yml
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/LICENSE
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/Makefile
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/NOTICE
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/README.md
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/ffjson.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_nopool.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_pool.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/bytenum.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/decimal.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/extfloat.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/fold.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/ftoa.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atof.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atoi.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/extfloat.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/ftoa.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/iota.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/jsonstring.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/lexer.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/reader.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/reader_scan_generic.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/generator/generator.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/generator/inceptionmain.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/generator/parser.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/generator/tags.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/generator/tempfile.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/inception/decoder.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/inception/decoder_tpl.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/inception/encoder.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/inception/encoder_tpl.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/inception/inception.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/inception/reflect.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/inception/tags.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/inception/template.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/inception/writerstack.go
 delete mode 100644 tests/tools/vendor/github.com/pquerna/ffjson/shared/options.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/LICENSE
 delete mode 100644 vendor/github.com/pquerna/ffjson/NOTICE
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/buffer.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/buffer_nopool.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/buffer_pool.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/bytenum.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/decimal.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/extfloat.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/fold.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/ftoa.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/internal/atof.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/internal/atoi.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/internal/extfloat.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/internal/ftoa.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/iota.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/jsonstring.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/lexer.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/reader.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/fflib/v1/reader_scan_generic.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/inception/decoder.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/inception/decoder_tpl.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/inception/encoder.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/inception/encoder_tpl.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/inception/inception.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/inception/reflect.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/inception/tags.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/inception/template.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/inception/writerstack.go
 delete mode 100644 vendor/github.com/pquerna/ffjson/shared/options.go

diff --git a/Makefile b/Makefile
index 31fc7dff06..581961fed0 100644
--- a/Makefile
+++ b/Makefile
@@ -41,34 +41,16 @@ ifeq ($(shell $(GO) help mod >/dev/null 2>&1 && echo true), true)
 endif
 
 RUNINVM := vagrant/runinvm.sh
-FFJSON := tests/tools/build/ffjson
 
 default all: local-binary docs local-validate local-cross local-gccgo test-unit test-integration ## validate all checks, build and cross-build\nbinaries and docs, run tests in a VM
 
 clean: ## remove all built files
 	$(RM) -f containers-storage containers-storage.* docs/*.1 docs/*.5
 
-sources := $(wildcard *.go cmd/containers-storage/*.go drivers/*.go drivers/*/*.go pkg/*/*.go pkg/*/*/*.go) layers_ffjson.go images_ffjson.go containers_ffjson.go pkg/archive/archive_ffjson.go
-
+sources := $(wildcard *.go cmd/containers-storage/*.go drivers/*.go drivers/*/*.go pkg/*/*.go pkg/*/*/*.go)
 containers-storage: $(sources) ## build using gc on the host
 	$(GO) build $(MOD_VENDOR) -compiler
gc $(BUILDFLAGS) ./cmd/containers-storage -layers_ffjson.go: $(FFJSON) layers.go - $(RM) $@ - $(FFJSON) layers.go - -images_ffjson.go: $(FFJSON) images.go - $(RM) $@ - $(FFJSON) images.go - -containers_ffjson.go: $(FFJSON) containers.go - $(RM) $@ - $(FFJSON) containers.go - -pkg/archive/archive_ffjson.go: $(FFJSON) pkg/archive/archive.go - $(RM) $@ - $(FFJSON) pkg/archive/archive.go - binary local-binary: containers-storage local-gccgo: ## build using gccgo on the host @@ -118,7 +100,7 @@ install.tools: make -C tests/tools $(FFJSON): - make -C tests/tools build/ffjson + make -C tests/tools install.docs: docs make -C docs install diff --git a/containers_ffjson.go b/containers_ffjson.go deleted file mode 100644 index aef6becfe0..0000000000 --- a/containers_ffjson.go +++ /dev/null @@ -1,1413 +0,0 @@ -// Code generated by ffjson . DO NOT EDIT. -// source: containers.go - -package storage - -import ( - "bytes" - "encoding/json" - "fmt" - "github.com/containers/storage/pkg/idtools" - "github.com/opencontainers/go-digest" - fflib "github.com/pquerna/ffjson/fflib/v1" -) - -// MarshalJSON marshal bytes to json - template -func (j *Container) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *Container) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{ "id":`) - fflib.WriteJsonString(buf, string(j.ID)) - buf.WriteByte(',') - if len(j.Names) != 0 { - buf.WriteString(`"names":`) - if j.Names != nil { - buf.WriteString(`[`) - for i, v := range j.Names { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - buf.WriteString(`"image":`) - fflib.WriteJsonString(buf, string(j.ImageID)) - buf.WriteString(`,"layer":`) - fflib.WriteJsonString(buf, string(j.LayerID)) - buf.WriteByte(',') - if len(j.Metadata) != 0 { - buf.WriteString(`"metadata":`) - fflib.WriteJsonString(buf, string(j.Metadata)) - buf.WriteByte(',') - } - if len(j.BigDataNames) != 0 { - buf.WriteString(`"big-data-names":`) - if j.BigDataNames != nil { - buf.WriteString(`[`) - for i, v := range j.BigDataNames { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.BigDataSizes) != 0 { - if j.BigDataSizes == nil { - buf.WriteString(`"big-data-sizes":null`) - } else { - buf.WriteString(`"big-data-sizes":{ `) - for key, value := range j.BigDataSizes { - fflib.WriteJsonString(buf, key) - buf.WriteString(`:`) - fflib.FormatBits2(buf, uint64(value), 10, value < 0) - buf.WriteByte(',') - } - buf.Rewind(1) - buf.WriteByte('}') - } - buf.WriteByte(',') - } - if len(j.BigDataDigests) != 0 { - if j.BigDataDigests == nil { - buf.WriteString(`"big-data-digests":null`) - } else { - buf.WriteString(`"big-data-digests":{ `) - for key, value := range j.BigDataDigests { - fflib.WriteJsonString(buf, key) - buf.WriteString(`:`) - fflib.WriteJsonString(buf, string(value)) - buf.WriteByte(',') - } - buf.Rewind(1) - buf.WriteByte('}') - } - buf.WriteByte(',') - } - if true { - buf.WriteString(`"created":`) - - { - - obj, err = j.Created.MarshalJSON() - 
if err != nil { - return err - } - buf.Write(obj) - - } - buf.WriteByte(',') - } - if len(j.UIDMap) != 0 { - buf.WriteString(`"uidmap":`) - if j.UIDMap != nil { - buf.WriteString(`[`) - for i, v := range j.UIDMap { - if i != 0 { - buf.WriteString(`,`) - } - /* Struct fall back. type=idtools.IDMap kind=struct */ - err = buf.Encode(&v) - if err != nil { - return err - } - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.GIDMap) != 0 { - buf.WriteString(`"gidmap":`) - if j.GIDMap != nil { - buf.WriteString(`[`) - for i, v := range j.GIDMap { - if i != 0 { - buf.WriteString(`,`) - } - /* Struct fall back. type=idtools.IDMap kind=struct */ - err = buf.Encode(&v) - if err != nil { - return err - } - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.Flags) != 0 { - buf.WriteString(`"flags":`) - /* Falling back. type=map[string]interface {} kind=map */ - err = buf.Encode(j.Flags) - if err != nil { - return err - } - buf.WriteByte(',') - } - buf.Rewind(1) - buf.WriteByte('}') - return nil -} - -const ( - ffjtContainerbase = iota - ffjtContainernosuchkey - - ffjtContainerID - - ffjtContainerNames - - ffjtContainerImageID - - ffjtContainerLayerID - - ffjtContainerMetadata - - ffjtContainerBigDataNames - - ffjtContainerBigDataSizes - - ffjtContainerBigDataDigests - - ffjtContainerCreated - - ffjtContainerUIDMap - - ffjtContainerGIDMap - - ffjtContainerFlags -) - -var ffjKeyContainerID = []byte("id") - -var ffjKeyContainerNames = []byte("names") - -var ffjKeyContainerImageID = []byte("image") - -var ffjKeyContainerLayerID = []byte("layer") - -var ffjKeyContainerMetadata = []byte("metadata") - -var ffjKeyContainerBigDataNames = []byte("big-data-names") - -var ffjKeyContainerBigDataSizes = []byte("big-data-sizes") - -var ffjKeyContainerBigDataDigests = []byte("big-data-digests") - -var ffjKeyContainerCreated = []byte("created") - -var ffjKeyContainerUIDMap = []byte("uidmap") - -var ffjKeyContainerGIDMap = []byte("gidmap") - -var ffjKeyContainerFlags = []byte("flags") - -// UnmarshalJSON umarshall json - template of ffjson -func (j *Container) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *Container) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtContainerbase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjtContainernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - case 'b': - - if bytes.Equal(ffjKeyContainerBigDataNames, kn) { - currentKey = ffjtContainerBigDataNames - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyContainerBigDataSizes, kn) { - currentKey = ffjtContainerBigDataSizes - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyContainerBigDataDigests, kn) { - currentKey = ffjtContainerBigDataDigests - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'c': - - if bytes.Equal(ffjKeyContainerCreated, kn) { - currentKey = ffjtContainerCreated - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'f': - - if bytes.Equal(ffjKeyContainerFlags, kn) { - currentKey = ffjtContainerFlags - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'g': - - if bytes.Equal(ffjKeyContainerGIDMap, kn) { - currentKey = ffjtContainerGIDMap - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'i': - - if bytes.Equal(ffjKeyContainerID, kn) { - currentKey = ffjtContainerID - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyContainerImageID, kn) { - currentKey = ffjtContainerImageID - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'l': - - if bytes.Equal(ffjKeyContainerLayerID, kn) { - currentKey = ffjtContainerLayerID - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'm': - - if bytes.Equal(ffjKeyContainerMetadata, kn) { - currentKey = ffjtContainerMetadata - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'n': - - if bytes.Equal(ffjKeyContainerNames, kn) { - currentKey = ffjtContainerNames - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'u': - - if bytes.Equal(ffjKeyContainerUIDMap, kn) { - currentKey = ffjtContainerUIDMap - state = fflib.FFParse_want_colon - goto mainparse - } - - } - - if fflib.EqualFoldRight(ffjKeyContainerFlags, kn) { - currentKey = ffjtContainerFlags - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyContainerGIDMap, kn) { - currentKey = ffjtContainerGIDMap - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyContainerUIDMap, kn) { - currentKey = ffjtContainerUIDMap - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyContainerCreated, kn) { - currentKey = ffjtContainerCreated - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyContainerBigDataDigests, kn) { - currentKey = ffjtContainerBigDataDigests - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyContainerBigDataSizes, kn) { - currentKey = ffjtContainerBigDataSizes - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyContainerBigDataNames, kn) { - currentKey = ffjtContainerBigDataNames - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyContainerMetadata, kn) { - currentKey = ffjtContainerMetadata - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyContainerLayerID, kn) { - currentKey = ffjtContainerLayerID - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyContainerImageID, kn) { - currentKey = ffjtContainerImageID - state = fflib.FFParse_want_colon - goto mainparse - } - - if 
fflib.EqualFoldRight(ffjKeyContainerNames, kn) { - currentKey = ffjtContainerNames - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyContainerID, kn) { - currentKey = ffjtContainerID - state = fflib.FFParse_want_colon - goto mainparse - } - - currentKey = ffjtContainernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtContainerID: - goto handle_ID - - case ffjtContainerNames: - goto handle_Names - - case ffjtContainerImageID: - goto handle_ImageID - - case ffjtContainerLayerID: - goto handle_LayerID - - case ffjtContainerMetadata: - goto handle_Metadata - - case ffjtContainerBigDataNames: - goto handle_BigDataNames - - case ffjtContainerBigDataSizes: - goto handle_BigDataSizes - - case ffjtContainerBigDataDigests: - goto handle_BigDataDigests - - case ffjtContainerCreated: - goto handle_Created - - case ffjtContainerUIDMap: - goto handle_UIDMap - - case ffjtContainerGIDMap: - goto handle_GIDMap - - case ffjtContainerFlags: - goto handle_Flags - - case ffjtContainernosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -handle_ID: - - /* handler: j.ID type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.ID = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Names: - - /* handler: j.Names type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.Names = nil - } else { - - j.Names = []string{} - - wantVal := true - - for { - - var tmpJNames string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJNames type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJNames = string(string(outBuf)) - - } - } - - j.Names = append(j.Names, tmpJNames) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_ImageID: - - /* handler: j.ImageID type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.ImageID = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_LayerID: - - /* handler: j.LayerID type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.LayerID = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Metadata: - - /* handler: j.Metadata type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.Metadata = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_BigDataNames: - - /* handler: j.BigDataNames type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.BigDataNames = nil - } else { - - j.BigDataNames = []string{} - - wantVal := true - - for { - - var tmpJBigDataNames string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJBigDataNames type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJBigDataNames = string(string(outBuf)) - - } - } - - j.BigDataNames = append(j.BigDataNames, tmpJBigDataNames) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_BigDataSizes: - - /* handler: j.BigDataSizes type=map[string]int64 kind=map quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.BigDataSizes = nil - } else { - - j.BigDataSizes = make(map[string]int64, 0) - - wantVal := true - - for { - - var k string - - var tmpJBigDataSizes int64 - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: k type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - k = string(string(outBuf)) - - } - } - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - /* handler: tmpJBigDataSizes type=int64 kind=int64 quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int64", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - tmpJBigDataSizes = int64(tval) - - } - } - - j.BigDataSizes[k] = tmpJBigDataSizes - - wantVal = false - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_BigDataDigests: - - /* handler: j.BigDataDigests type=map[string]digest.Digest kind=map quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.BigDataDigests = nil - } else { - - j.BigDataDigests = make(map[string]digest.Digest, 0) - - wantVal := true - - for { - - var k string - - var tmpJBigDataDigests digest.Digest - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: k type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - k = string(string(outBuf)) - - } - } - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - /* handler: tmpJBigDataDigests type=digest.Digest kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for Digest", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJBigDataDigests = digest.Digest(string(outBuf)) - - } - } - - j.BigDataDigests[k] = tmpJBigDataDigests - - wantVal = false - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Created: - - /* handler: j.Created type=time.Time kind=struct quoted=false*/ - - { - if tok == fflib.FFTok_null { - - } else { - - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = j.Created.UnmarshalJSON(tbuf) - if err != nil { - return fs.WrapErr(err) - } - } - state = fflib.FFParse_after_value - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_UIDMap: - - /* handler: j.UIDMap type=[]idtools.IDMap kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.UIDMap = nil - } else { - - j.UIDMap = []idtools.IDMap{} - - wantVal := true - - for { - - var tmpJUIDMap idtools.IDMap - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJUIDMap type=idtools.IDMap kind=struct quoted=false*/ - - { - /* Falling back. type=idtools.IDMap kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &tmpJUIDMap) - if err != nil { - return fs.WrapErr(err) - } - } - - j.UIDMap = append(j.UIDMap, tmpJUIDMap) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_GIDMap: - - /* handler: j.GIDMap type=[]idtools.IDMap kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.GIDMap = nil - } else { - - j.GIDMap = []idtools.IDMap{} - - wantVal := true - - for { - - var tmpJGIDMap idtools.IDMap - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJGIDMap type=idtools.IDMap kind=struct quoted=false*/ - - { - /* Falling back. type=idtools.IDMap kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &tmpJGIDMap) - if err != nil { - return fs.WrapErr(err) - } - } - - j.GIDMap = append(j.GIDMap, tmpJGIDMap) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Flags: - - /* handler: j.Flags type=map[string]interface {} kind=map quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.Flags = nil - } else { - - j.Flags = make(map[string]interface{}, 0) - - wantVal := true - - for { - - var k string - - var tmpJFlags interface{} - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: k type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - k = string(string(outBuf)) - - } - } - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - /* handler: tmpJFlags type=interface {} kind=interface quoted=false*/ - - { - /* Falling back. 
type=interface {} kind=interface */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &tmpJFlags) - if err != nil { - return fs.WrapErr(err) - } - } - - j.Flags[k] = tmpJFlags - - wantVal = false - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} - -// MarshalJSON marshal bytes to json - template -func (j *containerStore) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *containerStore) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{}`) - return nil -} - -const ( - ffjtcontainerStorebase = iota - ffjtcontainerStorenosuchkey -) - -// UnmarshalJSON umarshall json - template of ffjson -func (j *containerStore) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *containerStore) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtcontainerStorebase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjtcontainerStorenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - } - - currentKey = ffjtcontainerStorenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtcontainerStorenosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} diff --git a/ffjson_deps.go b/ffjson_deps.go deleted file mode 100644 index 33e5340a5d..0000000000 --- a/ffjson_deps.go +++ /dev/null @@ -1,10 +0,0 @@ -package storage - -// NOTE: this is a hack to trick go modules into vendoring the below -// dependencies. Those are required during ffjson generation -// but do NOT end up in the final file. - -import ( - _ "github.com/pquerna/ffjson/inception" // nolint:typecheck - _ "github.com/pquerna/ffjson/shared" // nolint:typecheck -) diff --git a/go.mod b/go.mod index c035e29793..1600a03636 100644 --- a/go.mod +++ b/go.mod @@ -19,7 +19,6 @@ require ( github.com/opencontainers/runtime-spec v1.0.3-0.20200929063507-e6143ca7d51d github.com/opencontainers/selinux v1.8.0 github.com/pkg/errors v0.9.1 - github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7 github.com/sirupsen/logrus v1.8.1 github.com/stretchr/testify v1.7.0 github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635 diff --git a/go.sum b/go.sum index f73f225f23..825ab3ff85 100644 --- a/go.sum +++ b/go.sum @@ -430,8 +430,6 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= -github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7 h1:gGBSHPOU7g8YjTbhwn+lvFm2VDEhhA+PwDIlstkgSxE= -github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7/go.mod h1:YARuvh7BUWHNhzDq2OM5tzR2RiCcN2D7sapiKyCel/M= github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= diff --git a/images_ffjson.go b/images_ffjson.go deleted file mode 100644 index e1954ad041..0000000000 --- a/images_ffjson.go +++ /dev/null @@ -1,1418 +0,0 @@ -// Code generated by ffjson . DO NOT EDIT. 
-// source: images.go - -package storage - -import ( - "bytes" - "encoding/json" - "fmt" - "github.com/opencontainers/go-digest" - fflib "github.com/pquerna/ffjson/fflib/v1" -) - -// MarshalJSON marshal bytes to json - template -func (j *Image) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *Image) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{ "id":`) - fflib.WriteJsonString(buf, string(j.ID)) - buf.WriteByte(',') - if len(j.Digest) != 0 { - buf.WriteString(`"digest":`) - fflib.WriteJsonString(buf, string(j.Digest)) - buf.WriteByte(',') - } - if len(j.Names) != 0 { - buf.WriteString(`"names":`) - if j.Names != nil { - buf.WriteString(`[`) - for i, v := range j.Names { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.NamesHistory) != 0 { - buf.WriteString(`"names-history":`) - if j.NamesHistory != nil { - buf.WriteString(`[`) - for i, v := range j.NamesHistory { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.TopLayer) != 0 { - buf.WriteString(`"layer":`) - fflib.WriteJsonString(buf, string(j.TopLayer)) - buf.WriteByte(',') - } - if len(j.MappedTopLayers) != 0 { - buf.WriteString(`"mapped-layers":`) - if j.MappedTopLayers != nil { - buf.WriteString(`[`) - for i, v := range j.MappedTopLayers { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.Metadata) != 0 { - buf.WriteString(`"metadata":`) - fflib.WriteJsonString(buf, string(j.Metadata)) - buf.WriteByte(',') - } - if len(j.BigDataNames) != 0 { - buf.WriteString(`"big-data-names":`) - if j.BigDataNames != nil { - buf.WriteString(`[`) - for i, v := range j.BigDataNames { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.BigDataSizes) != 0 { - if j.BigDataSizes == nil { - buf.WriteString(`"big-data-sizes":null`) - } else { - buf.WriteString(`"big-data-sizes":{ `) - for key, value := range j.BigDataSizes { - fflib.WriteJsonString(buf, key) - buf.WriteString(`:`) - fflib.FormatBits2(buf, uint64(value), 10, value < 0) - buf.WriteByte(',') - } - buf.Rewind(1) - buf.WriteByte('}') - } - buf.WriteByte(',') - } - if len(j.BigDataDigests) != 0 { - if j.BigDataDigests == nil { - buf.WriteString(`"big-data-digests":null`) - } else { - buf.WriteString(`"big-data-digests":{ `) - for key, value := range j.BigDataDigests { - fflib.WriteJsonString(buf, key) - buf.WriteString(`:`) - fflib.WriteJsonString(buf, string(value)) - buf.WriteByte(',') - } - buf.Rewind(1) - buf.WriteByte('}') - } - buf.WriteByte(',') - } - if true { - buf.WriteString(`"created":`) - - { - - obj, err = j.Created.MarshalJSON() - if err != nil { - return err - } - buf.Write(obj) - - } - buf.WriteByte(',') - } - if len(j.Flags) != 0 { - buf.WriteString(`"flags":`) - /* Falling back. 
type=map[string]interface {} kind=map */ - err = buf.Encode(j.Flags) - if err != nil { - return err - } - buf.WriteByte(',') - } - buf.Rewind(1) - buf.WriteByte('}') - return nil -} - -const ( - ffjtImagebase = iota - ffjtImagenosuchkey - - ffjtImageID - - ffjtImageDigest - - ffjtImageNames - - ffjtImageNamesHistory - - ffjtImageTopLayer - - ffjtImageMappedTopLayers - - ffjtImageMetadata - - ffjtImageBigDataNames - - ffjtImageBigDataSizes - - ffjtImageBigDataDigests - - ffjtImageCreated - - ffjtImageFlags -) - -var ffjKeyImageID = []byte("id") - -var ffjKeyImageDigest = []byte("digest") - -var ffjKeyImageNames = []byte("names") - -var ffjKeyImageNamesHistory = []byte("names-history") - -var ffjKeyImageTopLayer = []byte("layer") - -var ffjKeyImageMappedTopLayers = []byte("mapped-layers") - -var ffjKeyImageMetadata = []byte("metadata") - -var ffjKeyImageBigDataNames = []byte("big-data-names") - -var ffjKeyImageBigDataSizes = []byte("big-data-sizes") - -var ffjKeyImageBigDataDigests = []byte("big-data-digests") - -var ffjKeyImageCreated = []byte("created") - -var ffjKeyImageFlags = []byte("flags") - -// UnmarshalJSON umarshall json - template of ffjson -func (j *Image) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *Image) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtImagebase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjtImagenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - case 'b': - - if bytes.Equal(ffjKeyImageBigDataNames, kn) { - currentKey = ffjtImageBigDataNames - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyImageBigDataSizes, kn) { - currentKey = ffjtImageBigDataSizes - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyImageBigDataDigests, kn) { - currentKey = ffjtImageBigDataDigests - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'c': - - if bytes.Equal(ffjKeyImageCreated, kn) { - currentKey = ffjtImageCreated - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'd': - - if bytes.Equal(ffjKeyImageDigest, kn) { - currentKey = ffjtImageDigest - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'f': - - if bytes.Equal(ffjKeyImageFlags, kn) { - currentKey = ffjtImageFlags - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'i': - - if bytes.Equal(ffjKeyImageID, kn) { - currentKey = ffjtImageID - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'l': - - if bytes.Equal(ffjKeyImageTopLayer, kn) { - currentKey = ffjtImageTopLayer - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'm': - - if bytes.Equal(ffjKeyImageMappedTopLayers, kn) { - currentKey = ffjtImageMappedTopLayers - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyImageMetadata, kn) { - currentKey = ffjtImageMetadata - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'n': - - if bytes.Equal(ffjKeyImageNames, kn) { - currentKey = ffjtImageNames - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyImageNamesHistory, kn) { - currentKey = ffjtImageNamesHistory - state = fflib.FFParse_want_colon - goto mainparse - } - - } - - if fflib.EqualFoldRight(ffjKeyImageFlags, kn) { - currentKey = ffjtImageFlags - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyImageCreated, kn) { - currentKey = ffjtImageCreated - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyImageBigDataDigests, kn) { - currentKey = ffjtImageBigDataDigests - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyImageBigDataSizes, kn) { - currentKey = ffjtImageBigDataSizes - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyImageBigDataNames, kn) { - currentKey = ffjtImageBigDataNames - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyImageMetadata, kn) { - currentKey = ffjtImageMetadata - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyImageMappedTopLayers, kn) { - currentKey = ffjtImageMappedTopLayers - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyImageTopLayer, kn) { - currentKey = ffjtImageTopLayer - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyImageNamesHistory, kn) { - currentKey = ffjtImageNamesHistory - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyImageNames, kn) { - currentKey = ffjtImageNames - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyImageDigest, kn) { - currentKey = ffjtImageDigest - state = fflib.FFParse_want_colon - goto mainparse - } - - if 
fflib.SimpleLetterEqualFold(ffjKeyImageID, kn) { - currentKey = ffjtImageID - state = fflib.FFParse_want_colon - goto mainparse - } - - currentKey = ffjtImagenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtImageID: - goto handle_ID - - case ffjtImageDigest: - goto handle_Digest - - case ffjtImageNames: - goto handle_Names - - case ffjtImageNamesHistory: - goto handle_NamesHistory - - case ffjtImageTopLayer: - goto handle_TopLayer - - case ffjtImageMappedTopLayers: - goto handle_MappedTopLayers - - case ffjtImageMetadata: - goto handle_Metadata - - case ffjtImageBigDataNames: - goto handle_BigDataNames - - case ffjtImageBigDataSizes: - goto handle_BigDataSizes - - case ffjtImageBigDataDigests: - goto handle_BigDataDigests - - case ffjtImageCreated: - goto handle_Created - - case ffjtImageFlags: - goto handle_Flags - - case ffjtImagenosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -handle_ID: - - /* handler: j.ID type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.ID = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Digest: - - /* handler: j.Digest type=digest.Digest kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for Digest", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.Digest = digest.Digest(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Names: - - /* handler: j.Names type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.Names = nil - } else { - - j.Names = []string{} - - wantVal := true - - for { - - var tmpJNames string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJNames type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJNames = string(string(outBuf)) - - } - } - - j.Names = append(j.Names, tmpJNames) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_NamesHistory: - - /* handler: j.NamesHistory type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.NamesHistory = nil - } else { - - j.NamesHistory = []string{} - - wantVal := true - - for { - - var tmpJNamesHistory string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJNamesHistory type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJNamesHistory = string(string(outBuf)) - - } - } - - j.NamesHistory = append(j.NamesHistory, tmpJNamesHistory) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_TopLayer: - - /* handler: j.TopLayer type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.TopLayer = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_MappedTopLayers: - - /* handler: j.MappedTopLayers type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.MappedTopLayers = nil - } else { - - j.MappedTopLayers = []string{} - - wantVal := true - - for { - - var tmpJMappedTopLayers string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJMappedTopLayers type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJMappedTopLayers = string(string(outBuf)) - - } - } - - j.MappedTopLayers = append(j.MappedTopLayers, tmpJMappedTopLayers) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Metadata: - - /* handler: j.Metadata type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.Metadata = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_BigDataNames: - - /* handler: j.BigDataNames type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.BigDataNames = nil - } else { - - j.BigDataNames = []string{} - - wantVal := true - - for { - - var tmpJBigDataNames string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJBigDataNames type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJBigDataNames = string(string(outBuf)) - - } - } - - j.BigDataNames = append(j.BigDataNames, tmpJBigDataNames) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_BigDataSizes: - - /* handler: j.BigDataSizes type=map[string]int64 kind=map quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.BigDataSizes = nil - } else { - - j.BigDataSizes = make(map[string]int64, 0) - - wantVal := true - - for { - - var k string - - var tmpJBigDataSizes int64 - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: k type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - k = string(string(outBuf)) - - } - } - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - /* handler: tmpJBigDataSizes type=int64 kind=int64 quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int64", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - tmpJBigDataSizes = int64(tval) - - } - } - - j.BigDataSizes[k] = tmpJBigDataSizes - - wantVal = false - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_BigDataDigests: - - /* handler: j.BigDataDigests type=map[string]digest.Digest kind=map quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.BigDataDigests = nil - } else { - - j.BigDataDigests = make(map[string]digest.Digest, 0) - - wantVal := true - - for { - - var k string - - var tmpJBigDataDigests digest.Digest - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: k type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - k = string(string(outBuf)) - - } - } - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - /* handler: tmpJBigDataDigests type=digest.Digest kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for Digest", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJBigDataDigests = digest.Digest(string(outBuf)) - - } - } - - j.BigDataDigests[k] = tmpJBigDataDigests - - wantVal = false - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Created: - - /* handler: j.Created type=time.Time kind=struct quoted=false*/ - - { - if tok == fflib.FFTok_null { - - } else { - - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = j.Created.UnmarshalJSON(tbuf) - if err != nil { - return fs.WrapErr(err) - } - } - state = fflib.FFParse_after_value - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Flags: - - /* handler: j.Flags type=map[string]interface {} kind=map quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.Flags = nil - } else { - - j.Flags = make(map[string]interface{}, 0) - - wantVal := true - - for { - - var k string - - var tmpJFlags interface{} - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: k type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - k = string(string(outBuf)) - - } - } - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - /* handler: tmpJFlags type=interface {} kind=interface quoted=false*/ - - { - /* Falling back. 
type=interface {} kind=interface */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &tmpJFlags) - if err != nil { - return fs.WrapErr(err) - } - } - - j.Flags[k] = tmpJFlags - - wantVal = false - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} - -// MarshalJSON marshal bytes to json - template -func (j *imageStore) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *imageStore) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{}`) - return nil -} - -const ( - ffjtimageStorebase = iota - ffjtimageStorenosuchkey -) - -// UnmarshalJSON umarshall json - template of ffjson -func (j *imageStore) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *imageStore) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtimageStorebase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjtimageStorenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - } - - currentKey = ffjtimageStorenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtimageStorenosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} diff --git a/layers_ffjson.go b/layers_ffjson.go deleted file mode 100644 index f61e68a21f..0000000000 --- a/layers_ffjson.go +++ /dev/null @@ -1,2267 +0,0 @@ -// Code generated by ffjson . DO NOT EDIT. -// source: layers.go - -package storage - -import ( - "bytes" - "encoding/json" - "fmt" - "github.com/containers/storage/pkg/archive" - "github.com/containers/storage/pkg/idtools" - "github.com/opencontainers/go-digest" - fflib "github.com/pquerna/ffjson/fflib/v1" -) - -// MarshalJSON marshal bytes to json - template -func (j *DiffOptions) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *DiffOptions) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - if j.Compression != nil { - buf.WriteString(`{"Compression":`) - fflib.FormatBits2(buf, uint64(*j.Compression), 10, *j.Compression < 0) - } else { - buf.WriteString(`{"Compression":null`) - } - buf.WriteByte('}') - return nil -} - -const ( - ffjtDiffOptionsbase = iota - ffjtDiffOptionsnosuchkey - - ffjtDiffOptionsCompression -) - -var ffjKeyDiffOptionsCompression = []byte("Compression") - -// UnmarshalJSON umarshall json - template of ffjson -func (j *DiffOptions) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *DiffOptions) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtDiffOptionsbase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto 
wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. - currentKey = ffjtDiffOptionsnosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - case 'C': - - if bytes.Equal(ffjKeyDiffOptionsCompression, kn) { - currentKey = ffjtDiffOptionsCompression - state = fflib.FFParse_want_colon - goto mainparse - } - - } - - if fflib.EqualFoldRight(ffjKeyDiffOptionsCompression, kn) { - currentKey = ffjtDiffOptionsCompression - state = fflib.FFParse_want_colon - goto mainparse - } - - currentKey = ffjtDiffOptionsnosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtDiffOptionsCompression: - goto handle_Compression - - case ffjtDiffOptionsnosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -handle_Compression: - - /* handler: j.Compression type=archive.Compression kind=int quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for Compression", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - j.Compression = nil - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - ttypval := archive.Compression(tval) - j.Compression = &ttypval - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} - -// MarshalJSON marshal bytes to json - template -func (j *Layer) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *Layer) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{ "id":`) - fflib.WriteJsonString(buf, string(j.ID)) - buf.WriteByte(',') - if len(j.Names) != 0 { - 
buf.WriteString(`"names":`) - if j.Names != nil { - buf.WriteString(`[`) - for i, v := range j.Names { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.Parent) != 0 { - buf.WriteString(`"parent":`) - fflib.WriteJsonString(buf, string(j.Parent)) - buf.WriteByte(',') - } - if len(j.Metadata) != 0 { - buf.WriteString(`"metadata":`) - fflib.WriteJsonString(buf, string(j.Metadata)) - buf.WriteByte(',') - } - if len(j.MountLabel) != 0 { - buf.WriteString(`"mountlabel":`) - fflib.WriteJsonString(buf, string(j.MountLabel)) - buf.WriteByte(',') - } - if true { - buf.WriteString(`"created":`) - - { - - obj, err = j.Created.MarshalJSON() - if err != nil { - return err - } - buf.Write(obj) - - } - buf.WriteByte(',') - } - if len(j.CompressedDigest) != 0 { - buf.WriteString(`"compressed-diff-digest":`) - fflib.WriteJsonString(buf, string(j.CompressedDigest)) - buf.WriteByte(',') - } - if j.CompressedSize != 0 { - buf.WriteString(`"compressed-size":`) - fflib.FormatBits2(buf, uint64(j.CompressedSize), 10, j.CompressedSize < 0) - buf.WriteByte(',') - } - if len(j.UncompressedDigest) != 0 { - buf.WriteString(`"diff-digest":`) - fflib.WriteJsonString(buf, string(j.UncompressedDigest)) - buf.WriteByte(',') - } - if j.UncompressedSize != 0 { - buf.WriteString(`"diff-size":`) - fflib.FormatBits2(buf, uint64(j.UncompressedSize), 10, j.UncompressedSize < 0) - buf.WriteByte(',') - } - if j.CompressionType != 0 { - buf.WriteString(`"compression":`) - fflib.FormatBits2(buf, uint64(j.CompressionType), 10, j.CompressionType < 0) - buf.WriteByte(',') - } - if len(j.UIDs) != 0 { - buf.WriteString(`"uidset":`) - if j.UIDs != nil { - buf.WriteString(`[`) - for i, v := range j.UIDs { - if i != 0 { - buf.WriteString(`,`) - } - fflib.FormatBits2(buf, uint64(v), 10, false) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.GIDs) != 0 { - buf.WriteString(`"gidset":`) - if j.GIDs != nil { - buf.WriteString(`[`) - for i, v := range j.GIDs { - if i != 0 { - buf.WriteString(`,`) - } - fflib.FormatBits2(buf, uint64(v), 10, false) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.Flags) != 0 { - buf.WriteString(`"flags":`) - /* Falling back. type=map[string]interface {} kind=map */ - err = buf.Encode(j.Flags) - if err != nil { - return err - } - buf.WriteByte(',') - } - if len(j.UIDMap) != 0 { - buf.WriteString(`"uidmap":`) - if j.UIDMap != nil { - buf.WriteString(`[`) - for i, v := range j.UIDMap { - if i != 0 { - buf.WriteString(`,`) - } - /* Struct fall back. type=idtools.IDMap kind=struct */ - err = buf.Encode(&v) - if err != nil { - return err - } - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.GIDMap) != 0 { - buf.WriteString(`"gidmap":`) - if j.GIDMap != nil { - buf.WriteString(`[`) - for i, v := range j.GIDMap { - if i != 0 { - buf.WriteString(`,`) - } - /* Struct fall back. 
type=idtools.IDMap kind=struct */ - err = buf.Encode(&v) - if err != nil { - return err - } - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - if len(j.BigDataNames) != 0 { - buf.WriteString(`"big-data-names":`) - if j.BigDataNames != nil { - buf.WriteString(`[`) - for i, v := range j.BigDataNames { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteByte(',') - } - buf.Rewind(1) - buf.WriteByte('}') - return nil -} - -const ( - ffjtLayerbase = iota - ffjtLayernosuchkey - - ffjtLayerID - - ffjtLayerNames - - ffjtLayerParent - - ffjtLayerMetadata - - ffjtLayerMountLabel - - ffjtLayerCreated - - ffjtLayerCompressedDigest - - ffjtLayerCompressedSize - - ffjtLayerUncompressedDigest - - ffjtLayerUncompressedSize - - ffjtLayerCompressionType - - ffjtLayerUIDs - - ffjtLayerGIDs - - ffjtLayerFlags - - ffjtLayerUIDMap - - ffjtLayerGIDMap - - ffjtLayerBigDataNames -) - -var ffjKeyLayerID = []byte("id") - -var ffjKeyLayerNames = []byte("names") - -var ffjKeyLayerParent = []byte("parent") - -var ffjKeyLayerMetadata = []byte("metadata") - -var ffjKeyLayerMountLabel = []byte("mountlabel") - -var ffjKeyLayerCreated = []byte("created") - -var ffjKeyLayerCompressedDigest = []byte("compressed-diff-digest") - -var ffjKeyLayerCompressedSize = []byte("compressed-size") - -var ffjKeyLayerUncompressedDigest = []byte("diff-digest") - -var ffjKeyLayerUncompressedSize = []byte("diff-size") - -var ffjKeyLayerCompressionType = []byte("compression") - -var ffjKeyLayerUIDs = []byte("uidset") - -var ffjKeyLayerGIDs = []byte("gidset") - -var ffjKeyLayerFlags = []byte("flags") - -var ffjKeyLayerUIDMap = []byte("uidmap") - -var ffjKeyLayerGIDMap = []byte("gidmap") - -var ffjKeyLayerBigDataNames = []byte("big-data-names") - -// UnmarshalJSON umarshall json - template of ffjson -func (j *Layer) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *Layer) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtLayerbase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjtLayernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - case 'b': - - if bytes.Equal(ffjKeyLayerBigDataNames, kn) { - currentKey = ffjtLayerBigDataNames - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'c': - - if bytes.Equal(ffjKeyLayerCreated, kn) { - currentKey = ffjtLayerCreated - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyLayerCompressedDigest, kn) { - currentKey = ffjtLayerCompressedDigest - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyLayerCompressedSize, kn) { - currentKey = ffjtLayerCompressedSize - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyLayerCompressionType, kn) { - currentKey = ffjtLayerCompressionType - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'd': - - if bytes.Equal(ffjKeyLayerUncompressedDigest, kn) { - currentKey = ffjtLayerUncompressedDigest - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyLayerUncompressedSize, kn) { - currentKey = ffjtLayerUncompressedSize - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'f': - - if bytes.Equal(ffjKeyLayerFlags, kn) { - currentKey = ffjtLayerFlags - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'g': - - if bytes.Equal(ffjKeyLayerGIDs, kn) { - currentKey = ffjtLayerGIDs - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyLayerGIDMap, kn) { - currentKey = ffjtLayerGIDMap - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'i': - - if bytes.Equal(ffjKeyLayerID, kn) { - currentKey = ffjtLayerID - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'm': - - if bytes.Equal(ffjKeyLayerMetadata, kn) { - currentKey = ffjtLayerMetadata - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyLayerMountLabel, kn) { - currentKey = ffjtLayerMountLabel - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'n': - - if bytes.Equal(ffjKeyLayerNames, kn) { - currentKey = ffjtLayerNames - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'p': - - if bytes.Equal(ffjKeyLayerParent, kn) { - currentKey = ffjtLayerParent - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'u': - - if bytes.Equal(ffjKeyLayerUIDs, kn) { - currentKey = ffjtLayerUIDs - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyLayerUIDMap, kn) { - currentKey = ffjtLayerUIDMap - state = fflib.FFParse_want_colon - goto mainparse - } - - } - - if fflib.EqualFoldRight(ffjKeyLayerBigDataNames, kn) { - currentKey = ffjtLayerBigDataNames - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyLayerGIDMap, kn) { - currentKey = ffjtLayerGIDMap - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyLayerUIDMap, kn) { - currentKey = ffjtLayerUIDMap - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyLayerFlags, kn) { - currentKey = ffjtLayerFlags - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyLayerGIDs, kn) { - currentKey = ffjtLayerGIDs - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyLayerUIDs, kn) { - currentKey = ffjtLayerUIDs - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyLayerCompressionType, kn) { - currentKey = 
ffjtLayerCompressionType - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyLayerUncompressedSize, kn) { - currentKey = ffjtLayerUncompressedSize - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyLayerUncompressedDigest, kn) { - currentKey = ffjtLayerUncompressedDigest - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyLayerCompressedSize, kn) { - currentKey = ffjtLayerCompressedSize - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyLayerCompressedDigest, kn) { - currentKey = ffjtLayerCompressedDigest - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyLayerCreated, kn) { - currentKey = ffjtLayerCreated - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyLayerMountLabel, kn) { - currentKey = ffjtLayerMountLabel - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyLayerMetadata, kn) { - currentKey = ffjtLayerMetadata - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyLayerParent, kn) { - currentKey = ffjtLayerParent - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyLayerNames, kn) { - currentKey = ffjtLayerNames - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyLayerID, kn) { - currentKey = ffjtLayerID - state = fflib.FFParse_want_colon - goto mainparse - } - - currentKey = ffjtLayernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtLayerID: - goto handle_ID - - case ffjtLayerNames: - goto handle_Names - - case ffjtLayerParent: - goto handle_Parent - - case ffjtLayerMetadata: - goto handle_Metadata - - case ffjtLayerMountLabel: - goto handle_MountLabel - - case ffjtLayerCreated: - goto handle_Created - - case ffjtLayerCompressedDigest: - goto handle_CompressedDigest - - case ffjtLayerCompressedSize: - goto handle_CompressedSize - - case ffjtLayerUncompressedDigest: - goto handle_UncompressedDigest - - case ffjtLayerUncompressedSize: - goto handle_UncompressedSize - - case ffjtLayerCompressionType: - goto handle_CompressionType - - case ffjtLayerUIDs: - goto handle_UIDs - - case ffjtLayerGIDs: - goto handle_GIDs - - case ffjtLayerFlags: - goto handle_Flags - - case ffjtLayerUIDMap: - goto handle_UIDMap - - case ffjtLayerGIDMap: - goto handle_GIDMap - - case ffjtLayerBigDataNames: - goto handle_BigDataNames - - case ffjtLayernosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -handle_ID: - - /* handler: j.ID type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := 
fs.Output.Bytes() - - j.ID = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Names: - - /* handler: j.Names type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.Names = nil - } else { - - j.Names = []string{} - - wantVal := true - - for { - - var tmpJNames string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJNames type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJNames = string(string(outBuf)) - - } - } - - j.Names = append(j.Names, tmpJNames) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Parent: - - /* handler: j.Parent type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.Parent = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Metadata: - - /* handler: j.Metadata type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.Metadata = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_MountLabel: - - /* handler: j.MountLabel type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.MountLabel = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Created: - - /* handler: j.Created type=time.Time kind=struct quoted=false*/ - - { - if tok == fflib.FFTok_null { - - } else { - - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = j.Created.UnmarshalJSON(tbuf) - if err != nil { - return fs.WrapErr(err) - } - } - state = fflib.FFParse_after_value - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_CompressedDigest: - - /* handler: j.CompressedDigest type=digest.Digest kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for Digest", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.CompressedDigest = digest.Digest(string(outBuf)) - - } - } - - state = 
fflib.FFParse_after_value - goto mainparse - -handle_CompressedSize: - - /* handler: j.CompressedSize type=int64 kind=int64 quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int64", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - j.CompressedSize = int64(tval) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_UncompressedDigest: - - /* handler: j.UncompressedDigest type=digest.Digest kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for Digest", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.UncompressedDigest = digest.Digest(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_UncompressedSize: - - /* handler: j.UncompressedSize type=int64 kind=int64 quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int64", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - j.UncompressedSize = int64(tval) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_CompressionType: - - /* handler: j.CompressionType type=archive.Compression kind=int quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for Compression", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - j.CompressionType = archive.Compression(tval) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_UIDs: - - /* handler: j.UIDs type=[]uint32 kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.UIDs = nil - } else { - - j.UIDs = []uint32{} - - wantVal := true - - for { - - var tmpJUIDs uint32 - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJUIDs type=uint32 kind=uint32 quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for uint32", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseUint(fs.Output.Bytes(), 10, 32) - - if err != nil { - return fs.WrapErr(err) - } - - tmpJUIDs = uint32(tval) - - } - } - - j.UIDs = append(j.UIDs, tmpJUIDs) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_GIDs: - - /* handler: j.GIDs type=[]uint32 kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.GIDs = nil - } else { - - j.GIDs = []uint32{} - - wantVal := true - - for { - - var tmpJGIDs uint32 - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJGIDs type=uint32 kind=uint32 quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for uint32", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseUint(fs.Output.Bytes(), 10, 32) - - if err != nil { - return fs.WrapErr(err) - } - - tmpJGIDs = uint32(tval) - - } - } - - j.GIDs = append(j.GIDs, tmpJGIDs) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Flags: - - /* handler: j.Flags type=map[string]interface {} kind=map quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.Flags = nil - } else { - - j.Flags = make(map[string]interface{}, 0) - - wantVal := true - - for { - - var k string - - var tmpJFlags interface{} - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: k type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - k = string(string(outBuf)) - - } - } - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - /* handler: tmpJFlags type=interface {} kind=interface quoted=false*/ - - { - /* Falling back. 
type=interface {} kind=interface */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &tmpJFlags) - if err != nil { - return fs.WrapErr(err) - } - } - - j.Flags[k] = tmpJFlags - - wantVal = false - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_UIDMap: - - /* handler: j.UIDMap type=[]idtools.IDMap kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.UIDMap = nil - } else { - - j.UIDMap = []idtools.IDMap{} - - wantVal := true - - for { - - var tmpJUIDMap idtools.IDMap - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJUIDMap type=idtools.IDMap kind=struct quoted=false*/ - - { - /* Falling back. type=idtools.IDMap kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &tmpJUIDMap) - if err != nil { - return fs.WrapErr(err) - } - } - - j.UIDMap = append(j.UIDMap, tmpJUIDMap) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_GIDMap: - - /* handler: j.GIDMap type=[]idtools.IDMap kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.GIDMap = nil - } else { - - j.GIDMap = []idtools.IDMap{} - - wantVal := true - - for { - - var tmpJGIDMap idtools.IDMap - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJGIDMap type=idtools.IDMap kind=struct quoted=false*/ - - { - /* Falling back. 
type=idtools.IDMap kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &tmpJGIDMap) - if err != nil { - return fs.WrapErr(err) - } - } - - j.GIDMap = append(j.GIDMap, tmpJGIDMap) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_BigDataNames: - - /* handler: j.BigDataNames type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.BigDataNames = nil - } else { - - j.BigDataNames = []string{} - - wantVal := true - - for { - - var tmpJBigDataNames string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJBigDataNames type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJBigDataNames = string(string(outBuf)) - - } - } - - j.BigDataNames = append(j.BigDataNames, tmpJBigDataNames) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} - -// MarshalJSON marshal bytes to json - template -func (j *layerMountPoint) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *layerMountPoint) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{"id":`) - fflib.WriteJsonString(buf, string(j.ID)) - buf.WriteString(`,"path":`) - fflib.WriteJsonString(buf, string(j.MountPoint)) - buf.WriteString(`,"count":`) - fflib.FormatBits2(buf, uint64(j.MountCount), 10, j.MountCount < 0) - buf.WriteByte('}') - return nil -} - -const ( - ffjtlayerMountPointbase = iota - ffjtlayerMountPointnosuchkey - - ffjtlayerMountPointID - - ffjtlayerMountPointMountPoint - - ffjtlayerMountPointMountCount -) - -var ffjKeylayerMountPointID = []byte("id") - -var ffjKeylayerMountPointMountPoint = []byte("path") - -var ffjKeylayerMountPointMountCount = []byte("count") - -// UnmarshalJSON umarshall json - template of ffjson -func (j *layerMountPoint) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// 
UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *layerMountPoint) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtlayerMountPointbase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. - currentKey = ffjtlayerMountPointnosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - case 'c': - - if bytes.Equal(ffjKeylayerMountPointMountCount, kn) { - currentKey = ffjtlayerMountPointMountCount - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'i': - - if bytes.Equal(ffjKeylayerMountPointID, kn) { - currentKey = ffjtlayerMountPointID - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'p': - - if bytes.Equal(ffjKeylayerMountPointMountPoint, kn) { - currentKey = ffjtlayerMountPointMountPoint - state = fflib.FFParse_want_colon - goto mainparse - } - - } - - if fflib.SimpleLetterEqualFold(ffjKeylayerMountPointMountCount, kn) { - currentKey = ffjtlayerMountPointMountCount - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeylayerMountPointMountPoint, kn) { - currentKey = ffjtlayerMountPointMountPoint - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeylayerMountPointID, kn) { - currentKey = ffjtlayerMountPointID - state = fflib.FFParse_want_colon - goto mainparse - } - - currentKey = ffjtlayerMountPointnosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtlayerMountPointID: - goto handle_ID - - case ffjtlayerMountPointMountPoint: - goto handle_MountPoint - - case ffjtlayerMountPointMountCount: - goto handle_MountCount - - case ffjtlayerMountPointnosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -handle_ID: - - /* handler: j.ID type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := 
fs.Output.Bytes() - - j.ID = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_MountPoint: - - /* handler: j.MountPoint type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - j.MountPoint = string(string(outBuf)) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_MountCount: - - /* handler: j.MountCount type=int kind=int quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - j.MountCount = int(tval) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} - -// MarshalJSON marshal bytes to json - template -func (j *layerStore) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *layerStore) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{}`) - return nil -} - -const ( - ffjtlayerStorebase = iota - ffjtlayerStorenosuchkey -) - -// UnmarshalJSON umarshall json - template of ffjson -func (j *layerStore) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *layerStore) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtlayerStorebase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjtlayerStorenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - } - - currentKey = ffjtlayerStorenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtlayerStorenosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} - -// MarshalJSON marshal bytes to json - template -func (j *simpleGetCloser) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *simpleGetCloser) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{}`) - return nil -} - -const ( - ffjtsimpleGetCloserbase = iota - ffjtsimpleGetClosernosuchkey -) - -// UnmarshalJSON umarshall json - template of ffjson -func (j *simpleGetCloser) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *simpleGetCloser) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtsimpleGetCloserbase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjtsimpleGetClosernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - } - - currentKey = ffjtsimpleGetClosernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtsimpleGetClosernosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} diff --git a/pkg/archive/archive_ffjson.go b/pkg/archive/archive_ffjson.go deleted file mode 100644 index c420ca3587..0000000000 --- a/pkg/archive/archive_ffjson.go +++ /dev/null @@ -1,2465 +0,0 @@ -// Code generated by ffjson . DO NOT EDIT. -// source: pkg/archive/archive.go - -package archive - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "github.com/containers/storage/pkg/idtools" - fflib "github.com/pquerna/ffjson/fflib/v1" - "os" -) - -// MarshalJSON marshal bytes to json - template -func (j *Archiver) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *Archiver) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{"Untar":`) - /* Falling back. type=func(io.Reader, string, *archive.TarOptions) error kind=func */ - err = buf.Encode(j.Untar) - if err != nil { - return err - } - if j.TarIDMappings != nil { - /* Struct fall back. type=idtools.IDMappings kind=struct */ - buf.WriteString(`,"TarIDMappings":`) - err = buf.Encode(j.TarIDMappings) - if err != nil { - return err - } - } else { - buf.WriteString(`,"TarIDMappings":null`) - } - if j.ChownOpts != nil { - /* Struct fall back. type=idtools.IDPair kind=struct */ - buf.WriteString(`,"ChownOpts":`) - err = buf.Encode(j.ChownOpts) - if err != nil { - return err - } - } else { - buf.WriteString(`,"ChownOpts":null`) - } - if j.UntarIDMappings != nil { - /* Struct fall back. 
type=idtools.IDMappings kind=struct */ - buf.WriteString(`,"UntarIDMappings":`) - err = buf.Encode(j.UntarIDMappings) - if err != nil { - return err - } - } else { - buf.WriteString(`,"UntarIDMappings":null`) - } - buf.WriteByte('}') - return nil -} - -const ( - ffjtArchiverbase = iota - ffjtArchivernosuchkey - - ffjtArchiverUntar - - ffjtArchiverTarIDMappings - - ffjtArchiverChownOpts - - ffjtArchiverUntarIDMappings -) - -var ffjKeyArchiverUntar = []byte("Untar") - -var ffjKeyArchiverTarIDMappings = []byte("TarIDMappings") - -var ffjKeyArchiverChownOpts = []byte("ChownOpts") - -var ffjKeyArchiverUntarIDMappings = []byte("UntarIDMappings") - -// UnmarshalJSON umarshall json - template of ffjson -func (j *Archiver) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *Archiver) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtArchiverbase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjtArchivernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - case 'C': - - if bytes.Equal(ffjKeyArchiverChownOpts, kn) { - currentKey = ffjtArchiverChownOpts - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'T': - - if bytes.Equal(ffjKeyArchiverTarIDMappings, kn) { - currentKey = ffjtArchiverTarIDMappings - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'U': - - if bytes.Equal(ffjKeyArchiverUntar, kn) { - currentKey = ffjtArchiverUntar - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyArchiverUntarIDMappings, kn) { - currentKey = ffjtArchiverUntarIDMappings - state = fflib.FFParse_want_colon - goto mainparse - } - - } - - if fflib.EqualFoldRight(ffjKeyArchiverUntarIDMappings, kn) { - currentKey = ffjtArchiverUntarIDMappings - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyArchiverChownOpts, kn) { - currentKey = ffjtArchiverChownOpts - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyArchiverTarIDMappings, kn) { - currentKey = ffjtArchiverTarIDMappings - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyArchiverUntar, kn) { - currentKey = ffjtArchiverUntar - state = fflib.FFParse_want_colon - goto mainparse - } - - currentKey = ffjtArchivernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtArchiverUntar: - goto handle_Untar - - case ffjtArchiverTarIDMappings: - goto handle_TarIDMappings - - case ffjtArchiverChownOpts: - goto handle_ChownOpts - - case ffjtArchiverUntarIDMappings: - goto handle_UntarIDMappings - - case ffjtArchivernosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -handle_Untar: - - /* handler: j.Untar type=func(io.Reader, string, *archive.TarOptions) error kind=func quoted=false*/ - - { - /* Falling back. type=func(io.Reader, string, *archive.TarOptions) error kind=func */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.Untar) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_TarIDMappings: - - /* handler: j.TarIDMappings type=idtools.IDMappings kind=struct quoted=false*/ - - { - /* Falling back. type=idtools.IDMappings kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.TarIDMappings) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_ChownOpts: - - /* handler: j.ChownOpts type=idtools.IDPair kind=struct quoted=false*/ - - { - /* Falling back. 
type=idtools.IDPair kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.ChownOpts) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_UntarIDMappings: - - /* handler: j.UntarIDMappings type=idtools.IDMappings kind=struct quoted=false*/ - - { - /* Falling back. type=idtools.IDMappings kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.UntarIDMappings) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} - -// MarshalJSON marshal bytes to json - template -func (j *TarOptions) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *TarOptions) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{"IncludeFiles":`) - if j.IncludeFiles != nil { - buf.WriteString(`[`) - for i, v := range j.IncludeFiles { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteString(`,"ExcludePatterns":`) - if j.ExcludePatterns != nil { - buf.WriteString(`[`) - for i, v := range j.ExcludePatterns { - if i != 0 { - buf.WriteString(`,`) - } - fflib.WriteJsonString(buf, string(v)) - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteString(`,"Compression":`) - fflib.FormatBits2(buf, uint64(j.Compression), 10, j.Compression < 0) - if j.NoLchown { - buf.WriteString(`,"NoLchown":true`) - } else { - buf.WriteString(`,"NoLchown":false`) - } - buf.WriteString(`,"UIDMaps":`) - if j.UIDMaps != nil { - buf.WriteString(`[`) - for i, v := range j.UIDMaps { - if i != 0 { - buf.WriteString(`,`) - } - /* Struct fall back. type=idtools.IDMap kind=struct */ - err = buf.Encode(&v) - if err != nil { - return err - } - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - buf.WriteString(`,"GIDMaps":`) - if j.GIDMaps != nil { - buf.WriteString(`[`) - for i, v := range j.GIDMaps { - if i != 0 { - buf.WriteString(`,`) - } - /* Struct fall back. type=idtools.IDMap kind=struct */ - err = buf.Encode(&v) - if err != nil { - return err - } - } - buf.WriteString(`]`) - } else { - buf.WriteString(`null`) - } - if j.IgnoreChownErrors { - buf.WriteString(`,"IgnoreChownErrors":true`) - } else { - buf.WriteString(`,"IgnoreChownErrors":false`) - } - if j.ChownOpts != nil { - /* Struct fall back. 
type=idtools.IDPair kind=struct */ - buf.WriteString(`,"ChownOpts":`) - err = buf.Encode(j.ChownOpts) - if err != nil { - return err - } - } else { - buf.WriteString(`,"ChownOpts":null`) - } - if j.IncludeSourceDir { - buf.WriteString(`,"IncludeSourceDir":true`) - } else { - buf.WriteString(`,"IncludeSourceDir":false`) - } - buf.WriteString(`,"WhiteoutFormat":`) - fflib.FormatBits2(buf, uint64(j.WhiteoutFormat), 10, j.WhiteoutFormat < 0) - buf.WriteString(`,"WhiteoutData":`) - /* Interface types must use runtime reflection. type=interface {} kind=interface */ - err = buf.Encode(j.WhiteoutData) - if err != nil { - return err - } - if j.NoOverwriteDirNonDir { - buf.WriteString(`,"NoOverwriteDirNonDir":true`) - } else { - buf.WriteString(`,"NoOverwriteDirNonDir":false`) - } - if j.RebaseNames == nil { - buf.WriteString(`,"RebaseNames":null`) - } else { - buf.WriteString(`,"RebaseNames":{ `) - for key, value := range j.RebaseNames { - fflib.WriteJsonString(buf, key) - buf.WriteString(`:`) - fflib.WriteJsonString(buf, string(value)) - buf.WriteByte(',') - } - buf.Rewind(1) - buf.WriteByte('}') - } - if j.InUserNS { - buf.WriteString(`,"InUserNS":true`) - } else { - buf.WriteString(`,"InUserNS":false`) - } - if j.CopyPass { - buf.WriteString(`,"CopyPass":true`) - } else { - buf.WriteString(`,"CopyPass":false`) - } - if j.ForceMask != nil { - buf.WriteString(`,"ForceMask":`) - fflib.FormatBits2(buf, uint64(*j.ForceMask), 10, false) - } else { - buf.WriteString(`,"ForceMask":null`) - } - buf.WriteByte('}') - return nil -} - -const ( - ffjtTarOptionsbase = iota - ffjtTarOptionsnosuchkey - - ffjtTarOptionsIncludeFiles - - ffjtTarOptionsExcludePatterns - - ffjtTarOptionsCompression - - ffjtTarOptionsNoLchown - - ffjtTarOptionsUIDMaps - - ffjtTarOptionsGIDMaps - - ffjtTarOptionsIgnoreChownErrors - - ffjtTarOptionsChownOpts - - ffjtTarOptionsIncludeSourceDir - - ffjtTarOptionsWhiteoutFormat - - ffjtTarOptionsWhiteoutData - - ffjtTarOptionsNoOverwriteDirNonDir - - ffjtTarOptionsRebaseNames - - ffjtTarOptionsInUserNS - - ffjtTarOptionsCopyPass - - ffjtTarOptionsForceMask -) - -var ffjKeyTarOptionsIncludeFiles = []byte("IncludeFiles") - -var ffjKeyTarOptionsExcludePatterns = []byte("ExcludePatterns") - -var ffjKeyTarOptionsCompression = []byte("Compression") - -var ffjKeyTarOptionsNoLchown = []byte("NoLchown") - -var ffjKeyTarOptionsUIDMaps = []byte("UIDMaps") - -var ffjKeyTarOptionsGIDMaps = []byte("GIDMaps") - -var ffjKeyTarOptionsIgnoreChownErrors = []byte("IgnoreChownErrors") - -var ffjKeyTarOptionsChownOpts = []byte("ChownOpts") - -var ffjKeyTarOptionsIncludeSourceDir = []byte("IncludeSourceDir") - -var ffjKeyTarOptionsWhiteoutFormat = []byte("WhiteoutFormat") - -var ffjKeyTarOptionsWhiteoutData = []byte("WhiteoutData") - -var ffjKeyTarOptionsNoOverwriteDirNonDir = []byte("NoOverwriteDirNonDir") - -var ffjKeyTarOptionsRebaseNames = []byte("RebaseNames") - -var ffjKeyTarOptionsInUserNS = []byte("InUserNS") - -var ffjKeyTarOptionsCopyPass = []byte("CopyPass") - -var ffjKeyTarOptionsForceMask = []byte("ForceMask") - -// UnmarshalJSON umarshall json - template of ffjson -func (j *TarOptions) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *TarOptions) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtTarOptionsbase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init 
- -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. - currentKey = ffjtTarOptionsnosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - case 'C': - - if bytes.Equal(ffjKeyTarOptionsCompression, kn) { - currentKey = ffjtTarOptionsCompression - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyTarOptionsChownOpts, kn) { - currentKey = ffjtTarOptionsChownOpts - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyTarOptionsCopyPass, kn) { - currentKey = ffjtTarOptionsCopyPass - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'E': - - if bytes.Equal(ffjKeyTarOptionsExcludePatterns, kn) { - currentKey = ffjtTarOptionsExcludePatterns - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'F': - - if bytes.Equal(ffjKeyTarOptionsForceMask, kn) { - currentKey = ffjtTarOptionsForceMask - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'G': - - if bytes.Equal(ffjKeyTarOptionsGIDMaps, kn) { - currentKey = ffjtTarOptionsGIDMaps - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'I': - - if bytes.Equal(ffjKeyTarOptionsIncludeFiles, kn) { - currentKey = ffjtTarOptionsIncludeFiles - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyTarOptionsIgnoreChownErrors, kn) { - currentKey = ffjtTarOptionsIgnoreChownErrors - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyTarOptionsIncludeSourceDir, kn) { - currentKey = ffjtTarOptionsIncludeSourceDir - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyTarOptionsInUserNS, kn) { - currentKey = ffjtTarOptionsInUserNS - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'N': - - if bytes.Equal(ffjKeyTarOptionsNoLchown, kn) { - currentKey = ffjtTarOptionsNoLchown - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyTarOptionsNoOverwriteDirNonDir, kn) { - currentKey = ffjtTarOptionsNoOverwriteDirNonDir - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'R': - - if bytes.Equal(ffjKeyTarOptionsRebaseNames, kn) { - currentKey = ffjtTarOptionsRebaseNames - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'U': - - if bytes.Equal(ffjKeyTarOptionsUIDMaps, kn) { - currentKey = ffjtTarOptionsUIDMaps - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'W': - - if bytes.Equal(ffjKeyTarOptionsWhiteoutFormat, kn) { - currentKey = ffjtTarOptionsWhiteoutFormat - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeyTarOptionsWhiteoutData, kn) { - currentKey = ffjtTarOptionsWhiteoutData - state = fflib.FFParse_want_colon - 
goto mainparse - } - - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsForceMask, kn) { - currentKey = ffjtTarOptionsForceMask - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsCopyPass, kn) { - currentKey = ffjtTarOptionsCopyPass - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsInUserNS, kn) { - currentKey = ffjtTarOptionsInUserNS - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsRebaseNames, kn) { - currentKey = ffjtTarOptionsRebaseNames - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyTarOptionsNoOverwriteDirNonDir, kn) { - currentKey = ffjtTarOptionsNoOverwriteDirNonDir - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyTarOptionsWhiteoutData, kn) { - currentKey = ffjtTarOptionsWhiteoutData - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyTarOptionsWhiteoutFormat, kn) { - currentKey = ffjtTarOptionsWhiteoutFormat - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsIncludeSourceDir, kn) { - currentKey = ffjtTarOptionsIncludeSourceDir - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsChownOpts, kn) { - currentKey = ffjtTarOptionsChownOpts - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsIgnoreChownErrors, kn) { - currentKey = ffjtTarOptionsIgnoreChownErrors - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsGIDMaps, kn) { - currentKey = ffjtTarOptionsGIDMaps - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsUIDMaps, kn) { - currentKey = ffjtTarOptionsUIDMaps - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeyTarOptionsNoLchown, kn) { - currentKey = ffjtTarOptionsNoLchown - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsCompression, kn) { - currentKey = ffjtTarOptionsCompression - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsExcludePatterns, kn) { - currentKey = ffjtTarOptionsExcludePatterns - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeyTarOptionsIncludeFiles, kn) { - currentKey = ffjtTarOptionsIncludeFiles - state = fflib.FFParse_want_colon - goto mainparse - } - - currentKey = ffjtTarOptionsnosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtTarOptionsIncludeFiles: - goto handle_IncludeFiles - - case ffjtTarOptionsExcludePatterns: - goto handle_ExcludePatterns - - case ffjtTarOptionsCompression: - goto handle_Compression - - case ffjtTarOptionsNoLchown: - goto handle_NoLchown - - case ffjtTarOptionsUIDMaps: - goto handle_UIDMaps - - case ffjtTarOptionsGIDMaps: - goto handle_GIDMaps - - case 
ffjtTarOptionsIgnoreChownErrors: - goto handle_IgnoreChownErrors - - case ffjtTarOptionsChownOpts: - goto handle_ChownOpts - - case ffjtTarOptionsIncludeSourceDir: - goto handle_IncludeSourceDir - - case ffjtTarOptionsWhiteoutFormat: - goto handle_WhiteoutFormat - - case ffjtTarOptionsWhiteoutData: - goto handle_WhiteoutData - - case ffjtTarOptionsNoOverwriteDirNonDir: - goto handle_NoOverwriteDirNonDir - - case ffjtTarOptionsRebaseNames: - goto handle_RebaseNames - - case ffjtTarOptionsInUserNS: - goto handle_InUserNS - - case ffjtTarOptionsCopyPass: - goto handle_CopyPass - - case ffjtTarOptionsForceMask: - goto handle_ForceMask - - case ffjtTarOptionsnosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -handle_IncludeFiles: - - /* handler: j.IncludeFiles type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.IncludeFiles = nil - } else { - - j.IncludeFiles = []string{} - - wantVal := true - - for { - - var tmpJIncludeFiles string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJIncludeFiles type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJIncludeFiles = string(string(outBuf)) - - } - } - - j.IncludeFiles = append(j.IncludeFiles, tmpJIncludeFiles) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_ExcludePatterns: - - /* handler: j.ExcludePatterns type=[]string kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.ExcludePatterns = nil - } else { - - j.ExcludePatterns = []string{} - - wantVal := true - - for { - - var tmpJExcludePatterns string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJExcludePatterns type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJExcludePatterns = string(string(outBuf)) - - } - } - - j.ExcludePatterns = append(j.ExcludePatterns, tmpJExcludePatterns) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Compression: - - /* handler: j.Compression type=archive.Compression kind=int quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for Compression", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - j.Compression = Compression(tval) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_NoLchown: - - /* handler: j.NoLchown type=bool kind=bool quoted=false*/ - - { - if tok != fflib.FFTok_bool && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for bool", tok)) - } - } - - { - if tok == fflib.FFTok_null { - - } else { - tmpb := fs.Output.Bytes() - - if bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 { - - j.NoLchown = true - - } else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 { - - j.NoLchown = false - - } else { - err = errors.New("unexpected bytes for true/false value") - return fs.WrapErr(err) - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_UIDMaps: - - /* handler: j.UIDMaps type=[]idtools.IDMap kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.UIDMaps = nil - } else { - - j.UIDMaps = []idtools.IDMap{} - - wantVal := true - - for { - - var tmpJUIDMaps idtools.IDMap - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJUIDMaps type=idtools.IDMap kind=struct quoted=false*/ - - { - /* Falling back. 
type=idtools.IDMap kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &tmpJUIDMaps) - if err != nil { - return fs.WrapErr(err) - } - } - - j.UIDMaps = append(j.UIDMaps, tmpJUIDMaps) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_GIDMaps: - - /* handler: j.GIDMaps type=[]idtools.IDMap kind=slice quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.GIDMaps = nil - } else { - - j.GIDMaps = []idtools.IDMap{} - - wantVal := true - - for { - - var tmpJGIDMaps idtools.IDMap - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: tmpJGIDMaps type=idtools.IDMap kind=struct quoted=false*/ - - { - /* Falling back. type=idtools.IDMap kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &tmpJGIDMaps) - if err != nil { - return fs.WrapErr(err) - } - } - - j.GIDMaps = append(j.GIDMaps, tmpJGIDMaps) - - wantVal = false - } - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_IgnoreChownErrors: - - /* handler: j.IgnoreChownErrors type=bool kind=bool quoted=false*/ - - { - if tok != fflib.FFTok_bool && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for bool", tok)) - } - } - - { - if tok == fflib.FFTok_null { - - } else { - tmpb := fs.Output.Bytes() - - if bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 { - - j.IgnoreChownErrors = true - - } else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 { - - j.IgnoreChownErrors = false - - } else { - err = errors.New("unexpected bytes for true/false value") - return fs.WrapErr(err) - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_ChownOpts: - - /* handler: j.ChownOpts type=idtools.IDPair kind=struct quoted=false*/ - - { - /* Falling back. 
type=idtools.IDPair kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.ChownOpts) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_IncludeSourceDir: - - /* handler: j.IncludeSourceDir type=bool kind=bool quoted=false*/ - - { - if tok != fflib.FFTok_bool && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for bool", tok)) - } - } - - { - if tok == fflib.FFTok_null { - - } else { - tmpb := fs.Output.Bytes() - - if bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 { - - j.IncludeSourceDir = true - - } else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 { - - j.IncludeSourceDir = false - - } else { - err = errors.New("unexpected bytes for true/false value") - return fs.WrapErr(err) - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_WhiteoutFormat: - - /* handler: j.WhiteoutFormat type=archive.WhiteoutFormat kind=int quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for WhiteoutFormat", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - j.WhiteoutFormat = WhiteoutFormat(tval) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_WhiteoutData: - - /* handler: j.WhiteoutData type=interface {} kind=interface quoted=false*/ - - { - /* Falling back. type=interface {} kind=interface */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.WhiteoutData) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_NoOverwriteDirNonDir: - - /* handler: j.NoOverwriteDirNonDir type=bool kind=bool quoted=false*/ - - { - if tok != fflib.FFTok_bool && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for bool", tok)) - } - } - - { - if tok == fflib.FFTok_null { - - } else { - tmpb := fs.Output.Bytes() - - if bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 { - - j.NoOverwriteDirNonDir = true - - } else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 { - - j.NoOverwriteDirNonDir = false - - } else { - err = errors.New("unexpected bytes for true/false value") - return fs.WrapErr(err) - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_RebaseNames: - - /* handler: j.RebaseNames type=map[string]string kind=map quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.RebaseNames = nil - } else { - - j.RebaseNames = make(map[string]string, 0) - - wantVal := true - - for { - - var k string - - var tmpJRebaseNames string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: k type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - k = string(string(outBuf)) - - } - } - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - /* handler: tmpJRebaseNames type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJRebaseNames = string(string(outBuf)) - - } - } - - j.RebaseNames[k] = tmpJRebaseNames - - wantVal = false - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_InUserNS: - - /* handler: j.InUserNS type=bool kind=bool quoted=false*/ - - { - if tok != fflib.FFTok_bool && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for bool", tok)) - } - } - - { - if tok == fflib.FFTok_null { - - } else { - tmpb := fs.Output.Bytes() - - if bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 { - - j.InUserNS = true - - } else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 { - - j.InUserNS = false - - } else { - err = errors.New("unexpected bytes for true/false value") - return fs.WrapErr(err) - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_CopyPass: - - /* handler: j.CopyPass type=bool kind=bool quoted=false*/ - - { - if tok != fflib.FFTok_bool && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for bool", tok)) - } - } - - { - if tok == fflib.FFTok_null { - - } else { - tmpb := fs.Output.Bytes() - - if bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 { - - j.CopyPass = true - - } else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 { - - j.CopyPass = false - - } else { - err = errors.New("unexpected bytes for true/false value") - return fs.WrapErr(err) - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_ForceMask: - - /* handler: j.ForceMask type=os.FileMode kind=uint32 quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for FileMode", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - j.ForceMask = nil - - } else { - - tval, err := fflib.ParseUint(fs.Output.Bytes(), 10, 32) - - if err != nil { - return fs.WrapErr(err) - } - - ttypval := os.FileMode(tval) - j.ForceMask = &ttypval - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} - -// MarshalJSON marshal bytes to json - template 
-func (j *TempArchive) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *TempArchive) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - buf.WriteString(`{"Size":`) - fflib.FormatBits2(buf, uint64(j.Size), 10, j.Size < 0) - buf.WriteByte('}') - return nil -} - -const ( - ffjtTempArchivebase = iota - ffjtTempArchivenosuchkey - - ffjtTempArchiveSize -) - -var ffjKeyTempArchiveSize = []byte("Size") - -// UnmarshalJSON umarshall json - template of ffjson -func (j *TempArchive) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *TempArchive) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjtTempArchivebase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjtTempArchivenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - case 'S': - - if bytes.Equal(ffjKeyTempArchiveSize, kn) { - currentKey = ffjtTempArchiveSize - state = fflib.FFParse_want_colon - goto mainparse - } - - } - - if fflib.EqualFoldRight(ffjKeyTempArchiveSize, kn) { - currentKey = ffjtTempArchiveSize - state = fflib.FFParse_want_colon - goto mainparse - } - - currentKey = ffjtTempArchivenosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjtTempArchiveSize: - goto handle_Size - - case ffjtTempArchivenosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } - -handle_Size: - - /* handler: j.Size type=int64 kind=int64 quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int64", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - j.Size = int64(tval) - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} - -// MarshalJSON marshal bytes to json - template -func (j *tarAppender) MarshalJSON() ([]byte, error) { - var buf fflib.Buffer - if j == nil { - buf.WriteString("null") - return buf.Bytes(), nil - } - err := j.MarshalJSONBuf(&buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// MarshalJSONBuf marshal buff to json - template -func (j *tarAppender) MarshalJSONBuf(buf fflib.EncodingBuffer) error { - if j == nil { - buf.WriteString("null") - return nil - } - var err error - var obj []byte - _ = obj - _ = err - if j.TarWriter != nil { - /* Struct fall back. type=tar.Writer kind=struct */ - buf.WriteString(`{"TarWriter":`) - err = buf.Encode(j.TarWriter) - if err != nil { - return err - } - } else { - buf.WriteString(`{"TarWriter":null`) - } - if j.Buffer != nil { - /* Struct fall back. type=bufio.Writer kind=struct */ - buf.WriteString(`,"Buffer":`) - err = buf.Encode(j.Buffer) - if err != nil { - return err - } - } else { - buf.WriteString(`,"Buffer":null`) - } - /* Falling back. type=map[uint64]string kind=map */ - buf.WriteString(`,"SeenFiles":`) - err = buf.Encode(j.SeenFiles) - if err != nil { - return err - } - if j.IDMappings != nil { - /* Struct fall back. 
type=idtools.IDMappings kind=struct */ - buf.WriteString(`,"IDMappings":`) - err = buf.Encode(j.IDMappings) - if err != nil { - return err - } - } else { - buf.WriteString(`,"IDMappings":null`) - } - if j.ChownOpts != nil { - /* Struct fall back. type=idtools.IDPair kind=struct */ - buf.WriteString(`,"ChownOpts":`) - err = buf.Encode(j.ChownOpts) - if err != nil { - return err - } - } else { - buf.WriteString(`,"ChownOpts":null`) - } - buf.WriteString(`,"WhiteoutConverter":`) - /* Interface types must use runtime reflection. type=archive.TarWhiteoutConverter kind=interface */ - err = buf.Encode(j.WhiteoutConverter) - if err != nil { - return err - } - if j.CopyPass { - buf.WriteString(`,"CopyPass":true`) - } else { - buf.WriteString(`,"CopyPass":false`) - } - buf.WriteByte('}') - return nil -} - -const ( - ffjttarAppenderbase = iota - ffjttarAppendernosuchkey - - ffjttarAppenderTarWriter - - ffjttarAppenderBuffer - - ffjttarAppenderSeenFiles - - ffjttarAppenderIDMappings - - ffjttarAppenderChownOpts - - ffjttarAppenderWhiteoutConverter - - ffjttarAppenderCopyPass -) - -var ffjKeytarAppenderTarWriter = []byte("TarWriter") - -var ffjKeytarAppenderBuffer = []byte("Buffer") - -var ffjKeytarAppenderSeenFiles = []byte("SeenFiles") - -var ffjKeytarAppenderIDMappings = []byte("IDMappings") - -var ffjKeytarAppenderChownOpts = []byte("ChownOpts") - -var ffjKeytarAppenderWhiteoutConverter = []byte("WhiteoutConverter") - -var ffjKeytarAppenderCopyPass = []byte("CopyPass") - -// UnmarshalJSON umarshall json - template of ffjson -func (j *tarAppender) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *tarAppender) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjttarAppenderbase - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. 
- currentKey = ffjttarAppendernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - - case 'B': - - if bytes.Equal(ffjKeytarAppenderBuffer, kn) { - currentKey = ffjttarAppenderBuffer - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'C': - - if bytes.Equal(ffjKeytarAppenderChownOpts, kn) { - currentKey = ffjttarAppenderChownOpts - state = fflib.FFParse_want_colon - goto mainparse - - } else if bytes.Equal(ffjKeytarAppenderCopyPass, kn) { - currentKey = ffjttarAppenderCopyPass - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'I': - - if bytes.Equal(ffjKeytarAppenderIDMappings, kn) { - currentKey = ffjttarAppenderIDMappings - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'S': - - if bytes.Equal(ffjKeytarAppenderSeenFiles, kn) { - currentKey = ffjttarAppenderSeenFiles - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'T': - - if bytes.Equal(ffjKeytarAppenderTarWriter, kn) { - currentKey = ffjttarAppenderTarWriter - state = fflib.FFParse_want_colon - goto mainparse - } - - case 'W': - - if bytes.Equal(ffjKeytarAppenderWhiteoutConverter, kn) { - currentKey = ffjttarAppenderWhiteoutConverter - state = fflib.FFParse_want_colon - goto mainparse - } - - } - - if fflib.EqualFoldRight(ffjKeytarAppenderCopyPass, kn) { - currentKey = ffjttarAppenderCopyPass - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeytarAppenderWhiteoutConverter, kn) { - currentKey = ffjttarAppenderWhiteoutConverter - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeytarAppenderChownOpts, kn) { - currentKey = ffjttarAppenderChownOpts - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeytarAppenderIDMappings, kn) { - currentKey = ffjttarAppenderIDMappings - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.EqualFoldRight(ffjKeytarAppenderSeenFiles, kn) { - currentKey = ffjttarAppenderSeenFiles - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeytarAppenderBuffer, kn) { - currentKey = ffjttarAppenderBuffer - state = fflib.FFParse_want_colon - goto mainparse - } - - if fflib.SimpleLetterEqualFold(ffjKeytarAppenderTarWriter, kn) { - currentKey = ffjttarAppenderTarWriter - state = fflib.FFParse_want_colon - goto mainparse - } - - currentKey = ffjttarAppendernosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null { - switch currentKey { - - case ffjttarAppenderTarWriter: - goto handle_TarWriter - - case ffjttarAppenderBuffer: - goto handle_Buffer - - case ffjttarAppenderSeenFiles: - goto handle_SeenFiles - - case ffjttarAppenderIDMappings: - goto handle_IDMappings - - case ffjttarAppenderChownOpts: - goto handle_ChownOpts - - case ffjttarAppenderWhiteoutConverter: - goto handle_WhiteoutConverter - - case ffjttarAppenderCopyPass: - goto handle_CopyPass - - case ffjttarAppendernosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto 
wantedvalue - } - } - } - -handle_TarWriter: - - /* handler: j.TarWriter type=tar.Writer kind=struct quoted=false*/ - - { - /* Falling back. type=tar.Writer kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.TarWriter) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_Buffer: - - /* handler: j.Buffer type=bufio.Writer kind=struct quoted=false*/ - - { - /* Falling back. type=bufio.Writer kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.Buffer) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_SeenFiles: - - /* handler: j.SeenFiles type=map[uint64]string kind=map quoted=false*/ - - { - - { - if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok)) - } - } - - if tok == fflib.FFTok_null { - j.SeenFiles = nil - } else { - - j.SeenFiles = make(map[uint64]string, 0) - - wantVal := true - - for { - - var k uint64 - - var tmpJSeenFiles string - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - /* handler: k type=uint64 kind=uint64 quoted=false*/ - - { - if tok != fflib.FFTok_integer && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for uint64", tok)) - } - } - - { - - if tok == fflib.FFTok_null { - - } else { - - tval, err := fflib.ParseUint(fs.Output.Bytes(), 10, 64) - - if err != nil { - return fs.WrapErr(err) - } - - k = uint64(tval) - - } - } - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - /* handler: tmpJSeenFiles type=string kind=string quoted=false*/ - - { - - { - if tok != fflib.FFTok_string && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok)) - } - } - - if tok == fflib.FFTok_null { - - } else { - - outBuf := fs.Output.Bytes() - - tmpJSeenFiles = string(string(outBuf)) - - } - } - - j.SeenFiles[k] = tmpJSeenFiles - - wantVal = false - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_IDMappings: - - /* handler: j.IDMappings type=idtools.IDMappings kind=struct quoted=false*/ - - { - /* Falling back. type=idtools.IDMappings kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.IDMappings) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_ChownOpts: - - /* handler: j.ChownOpts type=idtools.IDPair kind=struct quoted=false*/ - - { - /* Falling back. 
type=idtools.IDPair kind=struct */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.ChownOpts) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_WhiteoutConverter: - - /* handler: j.WhiteoutConverter type=archive.TarWhiteoutConverter kind=interface quoted=false*/ - - { - /* Falling back. type=archive.TarWhiteoutConverter kind=interface */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &j.WhiteoutConverter) - if err != nil { - return fs.WrapErr(err) - } - } - - state = fflib.FFParse_after_value - goto mainparse - -handle_CopyPass: - - /* handler: j.CopyPass type=bool kind=bool quoted=false*/ - - { - if tok != fflib.FFTok_bool && tok != fflib.FFTok_null { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for bool", tok)) - } - } - - { - if tok == fflib.FFTok_null { - - } else { - tmpb := fs.Output.Bytes() - - if bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 { - - j.CopyPass = true - - } else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 { - - j.CopyPass = false - - } else { - err = errors.New("unexpected bytes for true/false value") - return fs.WrapErr(err) - } - - } - } - - state = fflib.FFParse_after_value - goto mainparse - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: - - return nil -} diff --git a/tests/tools/go.mod b/tests/tools/go.mod index f45f7c0e8c..ddedb55956 100644 --- a/tests/tools/go.mod +++ b/tests/tools/go.mod @@ -4,6 +4,5 @@ go 1.13 require ( github.com/cpuguy83/go-md2man v1.0.10 - github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7 github.com/vbatts/git-validation v1.0.0 ) diff --git a/tests/tools/go.sum b/tests/tools/go.sum index 9b9589e1a3..d7902fac38 100644 --- a/tests/tools/go.sum +++ b/tests/tools/go.sum @@ -8,8 +8,6 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGi github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7 h1:gGBSHPOU7g8YjTbhwn+lvFm2VDEhhA+PwDIlstkgSxE= -github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7/go.mod h1:YARuvh7BUWHNhzDq2OM5tzR2RiCcN2D7sapiKyCel/M= github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/sirupsen/logrus v1.4.1 h1:GL2rEmy6nsikmW0r8opw9JIRScdMF5hA8cOYLH7In1k= diff --git a/tests/tools/tools.go b/tests/tools/tools.go index b707fdb7de..fe01b9c53b 100644 --- a/tests/tools/tools.go +++ b/tests/tools/tools.go @@ -7,6 +7,5 @@ package tools import ( _ "github.com/cpuguy83/go-md2man" - _ "github.com/pquerna/ffjson" _ "github.com/vbatts/git-validation" ) diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/.gitignore 
b/tests/tools/vendor/github.com/pquerna/ffjson/.gitignore deleted file mode 100644 index 2b62c96e52..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -tests/go.stripe/ff/customer_ffjson.go -tests/goser/ff/goser_ffjson.go -tests/types/ff/everything_ffjson.go -tests/number/ff/number_ffjson.go -tests/ff_ffjson.go diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/.travis.yml b/tests/tools/vendor/github.com/pquerna/ffjson/.travis.yml deleted file mode 100644 index 7ddac450cf..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/.travis.yml +++ /dev/null @@ -1,14 +0,0 @@ -language: go - -install: - - A=${PWD#*github.com/};A=${A%/ffjson};cd ../..;mv $A pquerna;cd pquerna/ffjson - - go get -d -v -t ./... - -script: make clean && make lint && make test && make test - -go: - - "1.10.x" - - "1.11.x" - -env: - - GO15VENDOREXPERIMENT=1 diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/LICENSE b/tests/tools/vendor/github.com/pquerna/ffjson/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/Makefile b/tests/tools/vendor/github.com/pquerna/ffjson/Makefile deleted file mode 100644 index 88e68da69f..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/Makefile +++ /dev/null @@ -1,49 +0,0 @@ - -all: test install - @echo "Done" - -install: - go install github.com/pquerna/ffjson - -deps: - -fmt: - go fmt github.com/pquerna/ffjson/... - -cov: - # TODO: cleanup this make target. - mkdir -p coverage - rm -f coverage/*.html - # gocov test github.com/pquerna/ffjson/generator | gocov-html > coverage/generator.html - # gocov test github.com/pquerna/ffjson/inception | gocov-html > coverage/inception.html - gocov test github.com/pquerna/ffjson/fflib/v1 | gocov-html > coverage/fflib.html - @echo "coverage written" - -test-core: - go test -v github.com/pquerna/ffjson/fflib/v1 github.com/pquerna/ffjson/generator github.com/pquerna/ffjson/inception - -test: ffize test-core - go test -v github.com/pquerna/ffjson/tests/... 
- -ffize: install - ffjson -force-regenerate tests/ff.go - ffjson -force-regenerate tests/goser/ff/goser.go - ffjson -force-regenerate tests/go.stripe/ff/customer.go - ffjson -force-regenerate -reset-fields tests/types/ff/everything.go - ffjson -force-regenerate tests/number/ff/number.go - -lint: ffize - go get github.com/golang/lint/golint - golint --set_exit_status tests/... - -bench: ffize all - go test -v -benchmem -bench MarshalJSON github.com/pquerna/ffjson/tests - go test -v -benchmem -bench MarshalJSON github.com/pquerna/ffjson/tests/goser github.com/pquerna/ffjson/tests/go.stripe - go test -v -benchmem -bench UnmarshalJSON github.com/pquerna/ffjson/tests/goser github.com/pquerna/ffjson/tests/go.stripe - -clean: - go clean -i github.com/pquerna/ffjson/... - find . -name '*_ffjson.go' -delete - find . -name 'ffjson-inception*' -delete - -.PHONY: deps clean test fmt install all diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/NOTICE b/tests/tools/vendor/github.com/pquerna/ffjson/NOTICE deleted file mode 100644 index 405a49618b..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/NOTICE +++ /dev/null @@ -1,8 +0,0 @@ -ffjson -Copyright (c) 2014, Paul Querna - -This product includes software developed by -Paul Querna (http://paul.querna.org/). - -Portions of this software were developed as -part of Go, Copyright (c) 2012 The Go Authors. \ No newline at end of file diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/README.md b/tests/tools/vendor/github.com/pquerna/ffjson/README.md deleted file mode 100644 index 30b239f104..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/README.md +++ /dev/null @@ -1,232 +0,0 @@ -# ffjson: faster JSON for Go - -[![Build Status](https://travis-ci.org/pquerna/ffjson.svg?branch=master)](https://travis-ci.org/pquerna/ffjson) - -`ffjson` generates static `MarshalJSON` and `UnmarshalJSON` functions for structures in Go. The generated functions reduce the reliance upon runtime reflection to do serialization and are generally 2 to 3 times faster. In cases where `ffjson` doesn't understand a Type involved, it falls back to `encoding/json`, meaning it is a safe drop in replacement. By using `ffjson` your JSON serialization just gets faster with no additional code changes. - -When you change your `struct`, you will need to run `ffjson` again (or make it part of your build tools). - -## Blog Posts - -* 2014-03-31: [First Release and Background](https://journal.paul.querna.org/articles/2014/03/31/ffjson-faster-json-in-go/) - -## Getting Started - -If `myfile.go` contains the `struct` types you would like to be faster, and assuming `GOPATH` is set to a reasonable value for an existing project (meaning that in this particular example if `myfile.go` is in the `myproject` directory, the project should be under `$GOPATH/src/myproject`), you can just run: - - go get -u github.com/pquerna/ffjson - ffjson myfile.go - git add myfile_ffjson.go - - -## Performance Status: - -* `MarshalJSON` is **2x to 3x** faster than `encoding/json`. -* `UnmarshalJSON` is **2x to 3x** faster than `encoding/json`. - -## Features - -* **Unmarshal Support:** Since v0.9, `ffjson` supports Unmarshaling of structures. -* **Drop in Replacement:** Because `ffjson` implements the interfaces already defined by `encoding/json` the performance enhancements are transparent to users of your structures. -* **Supports all types:** `ffjson` has native support for most of Go's types -- for any type it doesn't support with fast paths, it falls back to using `encoding/json`. 
This means all structures should work out of the box. If they don't, [open a issue!](https://github.com/pquerna/ffjson/issues) -* **ffjson: skip**: If you have a structure you want `ffjson` to ignore, add `ffjson: skip` to the doc string for this structure. -* **Extensive Tests:** `ffjson` contains an extensive test suite including fuzz'ing against the JSON parser. - - -# Using ffjson - -`ffjson` generates code based upon existing `struct` types. For example, `ffjson foo.go` will by default create a new file `foo_ffjson.go` that contains serialization functions for all structs found in `foo.go`. - -``` -Usage of ffjson: - - ffjson [options] [input_file] - -ffjson generates Go code for optimized JSON serialization. - - -go-cmd="": Path to go command; Useful for `goapp` support. - -import-name="": Override import name in case it cannot be detected. - -nodecoder: Do not generate decoder functions - -noencoder: Do not generate encoder functions - -w="": Write generate code to this path instead of ${input}_ffjson.go. -``` - -Your code must be in a compilable state for `ffjson` to work. If you code doesn't compile ffjson will most likely exit with an error. - -## Disabling code generation for structs - -You might not want all your structs to have JSON code generated. To completely disable generation for a struct, add `ffjson: skip` to the struct comment. For example: - -```Go -// ffjson: skip -type Foo struct { - Bar string -} -``` - -You can also choose not to have either the decoder or encoder generated by including `ffjson: nodecoder` or `ffjson: noencoder` in your comment. For instance, this will only generate the encoder (marshal) part for this struct: - -```Go -// ffjson: nodecoder -type Foo struct { - Bar string -} -``` - -You can also disable encoders/decoders entirely for a file by using the `-noencoder`/`-nodecoder` commandline flags. - -## Using ffjson with `go generate` - -`ffjson` is a great fit with `go generate`. It allows you to specify the ffjson command inside your individual go files and run them all at once. This way you don't have to maintain a separate build file with the files you need to generate. - -Add this comment anywhere inside your go files: - -```Go -//go:generate ffjson $GOFILE -``` - -To re-generate ffjson for all files with the tag in a folder, simply execute: - -```sh -go generate -``` - -To generate for the current package and all sub-packages, use: - -```sh -go generate ./... -``` -This is most of what you need to know about go generate, but you can sese more about [go generate on the golang blog](http://blog.golang.org/generate). - -## Should I include ffjson files in VCS? - -That question is really up to you. If you don't, you will have a more complex build process. If you do, you have to keep the generated files updated if you change the content of your structs. - -That said, ffjson operates deterministically, so it will generate the same code every time it run, so unless your code changes, the generated content should not change. Note however that this is only true if you are using the same ffjson version, so if you have several people working on a project, you might need to synchronize your ffjson version. - -## Performance pitfalls - -`ffjson` has a few cases where it will fall back to using the runtime encoder/decoder. Notable cases are: - -* Interface struct members. Since it isn't possible to know the type of these types before runtime, ffjson has to use the reflect based coder. -* Structs with custom marshal/unmarshal. -* Map with a complex value. 
Simple types like `map[string]int` is fine though. -* Inline struct definitions `type A struct{B struct{ X int} }` are handled by the encoder, but currently has fallback in the decoder. -* Slices of slices / slices of maps are currently falling back when generating the decoder. - -## Reducing Garbage Collection - -`ffjson` already does a lot to help garbage generation. However whenever you go through the json.Marshal you get a new byte slice back. On very high throughput servers this can lead to increased GC pressure. - -### Tip 1: Use ffjson.Marshal() / ffjson.Unmarshal() - -This is probably the easiest optimization for you. Instead of going through encoding/json, you can call ffjson. This will disable the checks that encoding/json does to the json when it receives it from struct functions. - -```Go - import "github.com/pquerna/ffjson/ffjson" - - // BEFORE: - buf, err := json.Marshal(&item) - - // AFTER: - buf, err := ffjson.Marshal(&item) -``` -This simple change is likely to double the speed of your encoding/decoding. - - -[![GoDoc][1]][2] -[1]: https://godoc.org/github.com/pquerna/ffjson/ffjson?status.svg -[2]: https://godoc.org/github.com/pquerna/ffjson/ffjson#Marshal - -### Tip 2: Pooling the buffer - -On servers where you have a lot of concurrent encoding going on, you can hand back the byte buffer you get from json.Marshal once you are done using it. An example could look like this: -```Go -import "github.com/pquerna/ffjson/ffjson" - -func Encode(item interface{}, out io.Writer) { - // Encode - buf, err := ffjson.Marshal(&item) - - // Write the buffer - _,_ = out.Write(buf) - - // We are now no longer need the buffer so we pool it. - ffjson.Pool(buf) -} -``` -Note that the buffers you put back in the pool can still be reclaimed by the garbage collector, so you wont risk your program building up a big memory use by pooling the buffers. - -[![GoDoc][1]][2] -[1]: https://godoc.org/github.com/pquerna/ffjson/ffjson?status.svg -[2]: https://godoc.org/github.com/pquerna/ffjson/ffjson#Pool - -### Tip 3: Creating an Encoder - -There might be cases where you need to encode many objects at once. This could be a server backing up, writing a lot of entries to files, etc. - -To do this, there is an interface similar to `encoding/json`, that allow you to create a re-usable encoder. Here is an example where we want to encode an array of the `Item` type, with a comma between entries: -```Go -import "github.com/pquerna/ffjson/ffjson" - -func EncodeItems(items []Item, out io.Writer) { - // We create an encoder. - enc := ffjson.NewEncoder(out) - - for i, item := range items { - // Encode into the buffer - err := enc.Encode(&item) - - // If err is nil, the content is written to out, so we can write to it as well. - if i != len(items) -1 { - _,_ = out.Write([]byte{','}) - } - } -} -``` - - -Documentation: [![GoDoc][1]][2] -[1]: https://godoc.org/github.com/pquerna/ffjson/ffjson?status.svg -[2]: https://godoc.org/github.com/pquerna/ffjson/ffjson#Encoder - -## Tip 4: Avoid interfaces - -We don't want to dictate how you structure your data, but having interfaces in your code will make ffjson use the golang encoder for these. When ffjson has to do this, it may even become slower than using `json.Marshal` directly. - -To see where that happens, search the generated `_ffjson.go` file for the text `Falling back`, which will indicate where ffjson is unable to generate code for your data structure. - -## Tip 5: `ffjson` all the things! 
- -You should not only create ffjson code for your main struct, but also any structs that is included/used in your json code. - -So if your struct looks like this: -```Go -type Foo struct { - V Bar -} -``` -You should also make sure that code is generated for `Bar` if it is placed in another file. Also note that currently it requires you to do this in order, since generating code for `Foo` will check if code for `Bar` exists. This is only an issue if `Foo` and `Bar` are placed in different files. We are currently working on allowing simultaneous generation of an entire package. - - -## Improvements, bugs, adding features, and taking ffjson new directions! - -Please [open issues in Github](https://github.com/pquerna/ffjson/issues) for ideas, bugs, and general thoughts. Pull requests are of course preferred :) - -## Similar projects - -* [go-codec](https://github.com/ugorji/go/tree/master/codec#readme). Very good project, that also allows streaming en/decoding, but requires you to call the library to use. -* [megajson](https://github.com/benbjohnson/megajson). This has limited support, and development seems to have almost stopped at the time of writing. - -# Credits - -`ffjson` has recieved significant contributions from: - -* [Klaus Post](https://github.com/klauspost) -* [Paul Querna](https://github.com/pquerna) -* [Erik Dubbelboer](https://github.com/erikdubbelboer) - -## License - -`ffjson` is licensed under the [Apache License, Version 2.0](./LICENSE) - diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/ffjson.go b/tests/tools/vendor/github.com/pquerna/ffjson/ffjson.go deleted file mode 100644 index 656a9c3ef0..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/ffjson.go +++ /dev/null @@ -1,85 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package main - -import ( - _ "github.com/pquerna/ffjson/fflib/v1" - "github.com/pquerna/ffjson/generator" - _ "github.com/pquerna/ffjson/inception" - - "flag" - "fmt" - "os" - "path/filepath" - "regexp" -) - -var outputPathFlag = flag.String("w", "", "Write generate code to this path instead of ${input}_ffjson.go.") -var goCmdFlag = flag.String("go-cmd", "", "Path to go command; Useful for `goapp` support.") -var importNameFlag = flag.String("import-name", "", "Override import name in case it cannot be detected.") -var forceRegenerateFlag = flag.Bool("force-regenerate", false, "Regenerate every input file, without checking modification date.") -var resetFields = flag.Bool("reset-fields", false, "When unmarshalling reset all fields missing in the JSON") - -func usage() { - fmt.Fprintf(os.Stderr, "Usage of %s:\n\n", os.Args[0]) - fmt.Fprintf(os.Stderr, "\t%s [options] [input_file]\n\n", os.Args[0]) - fmt.Fprintf(os.Stderr, "%s generates Go code for optimized JSON serialization.\n\n", os.Args[0]) - flag.PrintDefaults() - os.Exit(1) -} - -var extRe = regexp.MustCompile(`(.*)(\.go)$`) - -func main() { - flag.Parse() - extra := flag.Args() - - if len(extra) != 1 { - usage() - } - - inputPath := filepath.ToSlash(extra[0]) - - var outputPath string - if outputPathFlag == nil || *outputPathFlag == "" { - outputPath = extRe.ReplaceAllString(inputPath, "${1}_ffjson.go") - } else { - outputPath = *outputPathFlag - } - - var goCmd string - if goCmdFlag == nil || *goCmdFlag == "" { - goCmd = "go" - } else { - goCmd = *goCmdFlag - } - - var importName string - if importNameFlag != nil && *importNameFlag != "" { - importName = *importNameFlag - } - - err := generator.GenerateFiles(goCmd, inputPath, outputPath, importName, *forceRegenerateFlag, *resetFields) - - if err != nil { - fmt.Fprintf(os.Stderr, "Error: %s:\n\n", err) - os.Exit(1) - } - - println(outputPath) -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer.go deleted file mode 100644 index 7f63a8582d..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer.go +++ /dev/null @@ -1,421 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package v1 - -// Simple byte buffer for marshaling data. - -import ( - "bytes" - "encoding/json" - "errors" - "io" - "unicode/utf8" -) - -type grower interface { - Grow(n int) -} - -type truncater interface { - Truncate(n int) - Reset() -} - -type bytesReader interface { - Bytes() []byte - String() string -} - -type runeWriter interface { - WriteRune(r rune) (n int, err error) -} - -type stringWriter interface { - WriteString(s string) (n int, err error) -} - -type lener interface { - Len() int -} - -type rewinder interface { - Rewind(n int) (err error) -} - -type encoder interface { - Encode(interface{}) error -} - -// TODO(pquerna): continue to reduce these interfaces - -type EncodingBuffer interface { - io.Writer - io.WriterTo - io.ByteWriter - stringWriter - truncater - grower - rewinder - encoder -} - -type DecodingBuffer interface { - io.ReadWriter - io.ByteWriter - stringWriter - runeWriter - truncater - grower - bytesReader - lener -} - -// A Buffer is a variable-sized buffer of bytes with Read and Write methods. -// The zero value for Buffer is an empty buffer ready to use. 
-type Buffer struct { - buf []byte // contents are the bytes buf[off : len(buf)] - off int // read at &buf[off], write at &buf[len(buf)] - runeBytes [utf8.UTFMax]byte // avoid allocation of slice on each WriteByte or Rune - encoder *json.Encoder - skipTrailingByte bool -} - -// ErrTooLarge is passed to panic if memory cannot be allocated to store data in a buffer. -var ErrTooLarge = errors.New("fflib.v1.Buffer: too large") - -// Bytes returns a slice of the contents of the unread portion of the buffer; -// len(b.Bytes()) == b.Len(). If the caller changes the contents of the -// returned slice, the contents of the buffer will change provided there -// are no intervening method calls on the Buffer. -func (b *Buffer) Bytes() []byte { return b.buf[b.off:] } - -// String returns the contents of the unread portion of the buffer -// as a string. If the Buffer is a nil pointer, it returns "". -func (b *Buffer) String() string { - if b == nil { - // Special case, useful in debugging. - return "" - } - return string(b.buf[b.off:]) -} - -// Len returns the number of bytes of the unread portion of the buffer; -// b.Len() == len(b.Bytes()). -func (b *Buffer) Len() int { return len(b.buf) - b.off } - -// Truncate discards all but the first n unread bytes from the buffer. -// It panics if n is negative or greater than the length of the buffer. -func (b *Buffer) Truncate(n int) { - if n == 0 { - b.off = 0 - b.buf = b.buf[0:0] - } else { - b.buf = b.buf[0 : b.off+n] - } -} - -// Reset resets the buffer so it has no content. -// b.Reset() is the same as b.Truncate(0). -func (b *Buffer) Reset() { b.Truncate(0) } - -// grow grows the buffer to guarantee space for n more bytes. -// It returns the index where bytes should be written. -// If the buffer can't grow it will panic with ErrTooLarge. -func (b *Buffer) grow(n int) int { - // If we have no buffer, get one from the pool - m := b.Len() - if m == 0 { - if b.buf == nil { - b.buf = makeSlice(2 * n) - b.off = 0 - } else if b.off != 0 { - // If buffer is empty, reset to recover space. - b.Truncate(0) - } - } - if len(b.buf)+n > cap(b.buf) { - var buf []byte - if m+n <= cap(b.buf)/2 { - // We can slide things down instead of allocating a new - // slice. We only need m+n <= cap(b.buf) to slide, but - // we instead let capacity get twice as large so we - // don't spend all our time copying. - copy(b.buf[:], b.buf[b.off:]) - buf = b.buf[:m] - } else { - // not enough space anywhere - buf = makeSlice(2*cap(b.buf) + n) - copy(buf, b.buf[b.off:]) - Pool(b.buf) - b.buf = buf - } - b.off = 0 - } - b.buf = b.buf[0 : b.off+m+n] - return b.off + m -} - -// Grow grows the buffer's capacity, if necessary, to guarantee space for -// another n bytes. After Grow(n), at least n bytes can be written to the -// buffer without another allocation. -// If n is negative, Grow will panic. -// If the buffer can't grow it will panic with ErrTooLarge. -func (b *Buffer) Grow(n int) { - if n < 0 { - panic("bytes.Buffer.Grow: negative count") - } - m := b.grow(n) - b.buf = b.buf[0:m] -} - -// Write appends the contents of p to the buffer, growing the buffer as -// needed. The return value n is the length of p; err is always nil. If the -// buffer becomes too large, Write will panic with ErrTooLarge. -func (b *Buffer) Write(p []byte) (n int, err error) { - if b.skipTrailingByte { - p = p[:len(p)-1] - } - m := b.grow(len(p)) - return copy(b.buf[m:], p), nil -} - -// WriteString appends the contents of s to the buffer, growing the buffer as -// needed. 
The return value n is the length of s; err is always nil. If the -// buffer becomes too large, WriteString will panic with ErrTooLarge. -func (b *Buffer) WriteString(s string) (n int, err error) { - m := b.grow(len(s)) - return copy(b.buf[m:], s), nil -} - -// MinRead is the minimum slice size passed to a Read call by -// Buffer.ReadFrom. As long as the Buffer has at least MinRead bytes beyond -// what is required to hold the contents of r, ReadFrom will not grow the -// underlying buffer. -const minRead = 512 - -// ReadFrom reads data from r until EOF and appends it to the buffer, growing -// the buffer as needed. The return value n is the number of bytes read. Any -// error except io.EOF encountered during the read is also returned. If the -// buffer becomes too large, ReadFrom will panic with ErrTooLarge. -func (b *Buffer) ReadFrom(r io.Reader) (n int64, err error) { - // If buffer is empty, reset to recover space. - if b.off >= len(b.buf) { - b.Truncate(0) - } - for { - if free := cap(b.buf) - len(b.buf); free < minRead { - // not enough space at end - newBuf := b.buf - if b.off+free < minRead { - // not enough space using beginning of buffer; - // double buffer capacity - newBuf = makeSlice(2*cap(b.buf) + minRead) - } - copy(newBuf, b.buf[b.off:]) - Pool(b.buf) - b.buf = newBuf[:len(b.buf)-b.off] - b.off = 0 - } - m, e := r.Read(b.buf[len(b.buf):cap(b.buf)]) - b.buf = b.buf[0 : len(b.buf)+m] - n += int64(m) - if e == io.EOF { - break - } - if e != nil { - return n, e - } - } - return n, nil // err is EOF, so return nil explicitly -} - -// WriteTo writes data to w until the buffer is drained or an error occurs. -// The return value n is the number of bytes written; it always fits into an -// int, but it is int64 to match the io.WriterTo interface. Any error -// encountered during the write is also returned. -func (b *Buffer) WriteTo(w io.Writer) (n int64, err error) { - if b.off < len(b.buf) { - nBytes := b.Len() - m, e := w.Write(b.buf[b.off:]) - if m > nBytes { - panic("bytes.Buffer.WriteTo: invalid Write count") - } - b.off += m - n = int64(m) - if e != nil { - return n, e - } - // all bytes should have been written, by definition of - // Write method in io.Writer - if m != nBytes { - return n, io.ErrShortWrite - } - } - // Buffer is now empty; reset. - b.Truncate(0) - return -} - -// WriteByte appends the byte c to the buffer, growing the buffer as needed. -// The returned error is always nil, but is included to match bufio.Writer's -// WriteByte. If the buffer becomes too large, WriteByte will panic with -// ErrTooLarge. -func (b *Buffer) WriteByte(c byte) error { - m := b.grow(1) - b.buf[m] = c - return nil -} - -func (b *Buffer) Rewind(n int) error { - b.buf = b.buf[:len(b.buf)-n] - return nil -} - -func (b *Buffer) Encode(v interface{}) error { - if b.encoder == nil { - b.encoder = json.NewEncoder(b) - } - b.skipTrailingByte = true - err := b.encoder.Encode(v) - b.skipTrailingByte = false - return err -} - -// WriteRune appends the UTF-8 encoding of Unicode code point r to the -// buffer, returning its length and an error, which is always nil but is -// included to match bufio.Writer's WriteRune. The buffer is grown as needed; -// if it becomes too large, WriteRune will panic with ErrTooLarge. 
-func (b *Buffer) WriteRune(r rune) (n int, err error) { - if r < utf8.RuneSelf { - b.WriteByte(byte(r)) - return 1, nil - } - n = utf8.EncodeRune(b.runeBytes[0:], r) - b.Write(b.runeBytes[0:n]) - return n, nil -} - -// Read reads the next len(p) bytes from the buffer or until the buffer -// is drained. The return value n is the number of bytes read. If the -// buffer has no data to return, err is io.EOF (unless len(p) is zero); -// otherwise it is nil. -func (b *Buffer) Read(p []byte) (n int, err error) { - if b.off >= len(b.buf) { - // Buffer is empty, reset to recover space. - b.Truncate(0) - if len(p) == 0 { - return - } - return 0, io.EOF - } - n = copy(p, b.buf[b.off:]) - b.off += n - return -} - -// Next returns a slice containing the next n bytes from the buffer, -// advancing the buffer as if the bytes had been returned by Read. -// If there are fewer than n bytes in the buffer, Next returns the entire buffer. -// The slice is only valid until the next call to a read or write method. -func (b *Buffer) Next(n int) []byte { - m := b.Len() - if n > m { - n = m - } - data := b.buf[b.off : b.off+n] - b.off += n - return data -} - -// ReadByte reads and returns the next byte from the buffer. -// If no byte is available, it returns error io.EOF. -func (b *Buffer) ReadByte() (c byte, err error) { - if b.off >= len(b.buf) { - // Buffer is empty, reset to recover space. - b.Truncate(0) - return 0, io.EOF - } - c = b.buf[b.off] - b.off++ - return c, nil -} - -// ReadRune reads and returns the next UTF-8-encoded -// Unicode code point from the buffer. -// If no bytes are available, the error returned is io.EOF. -// If the bytes are an erroneous UTF-8 encoding, it -// consumes one byte and returns U+FFFD, 1. -func (b *Buffer) ReadRune() (r rune, size int, err error) { - if b.off >= len(b.buf) { - // Buffer is empty, reset to recover space. - b.Truncate(0) - return 0, 0, io.EOF - } - c := b.buf[b.off] - if c < utf8.RuneSelf { - b.off++ - return rune(c), 1, nil - } - r, n := utf8.DecodeRune(b.buf[b.off:]) - b.off += n - return r, n, nil -} - -// ReadBytes reads until the first occurrence of delim in the input, -// returning a slice containing the data up to and including the delimiter. -// If ReadBytes encounters an error before finding a delimiter, -// it returns the data read before the error and the error itself (often io.EOF). -// ReadBytes returns err != nil if and only if the returned data does not end in -// delim. -func (b *Buffer) ReadBytes(delim byte) (line []byte, err error) { - slice, err := b.readSlice(delim) - // return a copy of slice. The buffer's backing array may - // be overwritten by later calls. - line = append(line, slice...) - return -} - -// readSlice is like ReadBytes but returns a reference to internal buffer data. -func (b *Buffer) readSlice(delim byte) (line []byte, err error) { - i := bytes.IndexByte(b.buf[b.off:], delim) - end := b.off + i + 1 - if i < 0 { - end = len(b.buf) - err = io.EOF - } - line = b.buf[b.off:end] - b.off = end - return line, err -} - -// ReadString reads until the first occurrence of delim in the input, -// returning a string containing the data up to and including the delimiter. -// If ReadString encounters an error before finding a delimiter, -// it returns the data read before the error and the error itself (often io.EOF). -// ReadString returns err != nil if and only if the returned data does not end -// in delim. 
-func (b *Buffer) ReadString(delim byte) (line string, err error) { - slice, err := b.readSlice(delim) - return string(slice), err -} - -// NewBuffer creates and initializes a new Buffer using buf as its initial -// contents. It is intended to prepare a Buffer to read existing data. It -// can also be used to size the internal buffer for writing. To do that, -// buf should have the desired capacity but a length of zero. -// -// In most cases, new(Buffer) (or just declaring a Buffer variable) is -// sufficient to initialize a Buffer. -func NewBuffer(buf []byte) *Buffer { return &Buffer{buf: buf} } - -// NewBufferString creates and initializes a new Buffer using string s as its -// initial contents. It is intended to prepare a buffer to read an existing -// string. -// -// In most cases, new(Buffer) (or just declaring a Buffer variable) is -// sufficient to initialize a Buffer. -func NewBufferString(s string) *Buffer { - return &Buffer{buf: []byte(s)} -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_nopool.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_nopool.go deleted file mode 100644 index b84af6ff96..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_nopool.go +++ /dev/null @@ -1,11 +0,0 @@ -// +build !go1.3 - -package v1 - -// Stub version of buffer_pool.go for Go 1.2, which doesn't have sync.Pool. - -func Pool(b []byte) {} - -func makeSlice(n int) []byte { - return make([]byte, n) -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_pool.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_pool.go deleted file mode 100644 index a021c57cf4..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_pool.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.3 - -package v1 - -// Allocation pools for Buffers. - -import "sync" - -var pools [14]sync.Pool -var pool64 *sync.Pool - -func init() { - var i uint - // TODO(pquerna): add science here around actual pool sizes. - for i = 6; i < 20; i++ { - n := 1 << i - pools[poolNum(n)].New = func() interface{} { return make([]byte, 0, n) } - } - pool64 = &pools[0] -} - -// This returns the pool number that will give a buffer of -// at least 'i' bytes. -func poolNum(i int) int { - // TODO(pquerna): convert to log2 w/ bsr asm instruction: - // - if i <= 64 { - return 0 - } else if i <= 128 { - return 1 - } else if i <= 256 { - return 2 - } else if i <= 512 { - return 3 - } else if i <= 1024 { - return 4 - } else if i <= 2048 { - return 5 - } else if i <= 4096 { - return 6 - } else if i <= 8192 { - return 7 - } else if i <= 16384 { - return 8 - } else if i <= 32768 { - return 9 - } else if i <= 65536 { - return 10 - } else if i <= 131072 { - return 11 - } else if i <= 262144 { - return 12 - } else if i <= 524288 { - return 13 - } else { - return -1 - } -} - -// Send a buffer to the Pool to reuse for other instances. -// You may no longer utilize the content of the buffer, since it may be used -// by other goroutines. -func Pool(b []byte) { - if b == nil { - return - } - c := cap(b) - - // Our smallest buffer is 64 bytes, so we discard smaller buffers. - if c < 64 { - return - } - - // We need to put the incoming buffer into the NEXT buffer, - // since a buffer guarantees AT LEAST the number of bytes available - // that is the top of this buffer. 
- // That is the reason for dividing the cap by 2, so it gets into the NEXT bucket. - // We add 2 to avoid rounding down if size is exactly power of 2. - pn := poolNum((c + 2) >> 1) - if pn != -1 { - pools[pn].Put(b[0:0]) - } - // if we didn't have a slot for this []byte, we just drop it and let the GC - // take care of it. -} - -// makeSlice allocates a slice of size n -- it will attempt to use a pool'ed -// instance whenever possible. -func makeSlice(n int) []byte { - if n <= 64 { - return pool64.Get().([]byte)[0:n] - } - - pn := poolNum(n) - - if pn != -1 { - return pools[pn].Get().([]byte)[0:n] - } else { - return make([]byte, n) - } -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/bytenum.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/bytenum.go deleted file mode 100644 index 08477409ac..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/bytenum.go +++ /dev/null @@ -1,88 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's strconv/iota.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package v1 - -import ( - "github.com/pquerna/ffjson/fflib/v1/internal" -) - -func ParseFloat(s []byte, bitSize int) (f float64, err error) { - return internal.ParseFloat(s, bitSize) -} - -// ParseUint is like ParseInt but for unsigned numbers, and oeprating on []byte -func ParseUint(s []byte, base int, bitSize int) (n uint64, err error) { - if len(s) == 1 { - switch s[0] { - case '0': - return 0, nil - case '1': - return 1, nil - case '2': - return 2, nil - case '3': - return 3, nil - case '4': - return 4, nil - case '5': - return 5, nil - case '6': - return 6, nil - case '7': - return 7, nil - case '8': - return 8, nil - case '9': - return 9, nil - } - } - return internal.ParseUint(s, base, bitSize) -} - -func ParseInt(s []byte, base int, bitSize int) (i int64, err error) { - if len(s) == 1 { - switch s[0] { - case '0': - return 0, nil - case '1': - return 1, nil - case '2': - return 2, nil - case '3': - return 3, nil - case '4': - return 4, nil - case '5': - return 5, nil - case '6': - return 6, nil - case '7': - return 7, nil - case '8': - return 8, nil - case '9': - return 9, nil - } - } - return internal.ParseInt(s, base, bitSize) -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/decimal.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/decimal.go deleted file mode 100644 index 069df7a02a..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/decimal.go +++ /dev/null @@ -1,378 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Multiprecision decimal numbers. -// For floating-point formatting only; not general purpose. 
-// Only operations are assign and (binary) left/right shift. -// Can do binary floating point in multiprecision decimal precisely -// because 2 divides 10; cannot do decimal floating point -// in multiprecision binary precisely. - -package v1 - -type decimal struct { - d [800]byte // digits - nd int // number of digits used - dp int // decimal point - neg bool - trunc bool // discarded nonzero digits beyond d[:nd] -} - -func (a *decimal) String() string { - n := 10 + a.nd - if a.dp > 0 { - n += a.dp - } - if a.dp < 0 { - n += -a.dp - } - - buf := make([]byte, n) - w := 0 - switch { - case a.nd == 0: - return "0" - - case a.dp <= 0: - // zeros fill space between decimal point and digits - buf[w] = '0' - w++ - buf[w] = '.' - w++ - w += digitZero(buf[w : w+-a.dp]) - w += copy(buf[w:], a.d[0:a.nd]) - - case a.dp < a.nd: - // decimal point in middle of digits - w += copy(buf[w:], a.d[0:a.dp]) - buf[w] = '.' - w++ - w += copy(buf[w:], a.d[a.dp:a.nd]) - - default: - // zeros fill space between digits and decimal point - w += copy(buf[w:], a.d[0:a.nd]) - w += digitZero(buf[w : w+a.dp-a.nd]) - } - return string(buf[0:w]) -} - -func digitZero(dst []byte) int { - for i := range dst { - dst[i] = '0' - } - return len(dst) -} - -// trim trailing zeros from number. -// (They are meaningless; the decimal point is tracked -// independent of the number of digits.) -func trim(a *decimal) { - for a.nd > 0 && a.d[a.nd-1] == '0' { - a.nd-- - } - if a.nd == 0 { - a.dp = 0 - } -} - -// Assign v to a. -func (a *decimal) Assign(v uint64) { - var buf [24]byte - - // Write reversed decimal in buf. - n := 0 - for v > 0 { - v1 := v / 10 - v -= 10 * v1 - buf[n] = byte(v + '0') - n++ - v = v1 - } - - // Reverse again to produce forward decimal in a.d. - a.nd = 0 - for n--; n >= 0; n-- { - a.d[a.nd] = buf[n] - a.nd++ - } - a.dp = a.nd - trim(a) -} - -// Maximum shift that we can do in one pass without overflow. -// Signed int has 31 bits, and we have to be able to accommodate 9<>k == 0; r++ { - if r >= a.nd { - if n == 0 { - // a == 0; shouldn't get here, but handle anyway. - a.nd = 0 - return - } - for n>>k == 0 { - n = n * 10 - r++ - } - break - } - c := int(a.d[r]) - n = n*10 + c - '0' - } - a.dp -= r - 1 - - // Pick up a digit, put down a digit. - for ; r < a.nd; r++ { - c := int(a.d[r]) - dig := n >> k - n -= dig << k - a.d[w] = byte(dig + '0') - w++ - n = n*10 + c - '0' - } - - // Put down extra digits. - for n > 0 { - dig := n >> k - n -= dig << k - if w < len(a.d) { - a.d[w] = byte(dig + '0') - w++ - } else if dig > 0 { - a.trunc = true - } - n = n * 10 - } - - a.nd = w - trim(a) -} - -// Cheat sheet for left shift: table indexed by shift count giving -// number of new digits that will be introduced by that shift. -// -// For example, leftcheats[4] = {2, "625"}. That means that -// if we are shifting by 4 (multiplying by 16), it will add 2 digits -// when the string prefix is "625" through "999", and one fewer digit -// if the string prefix is "000" through "624". -// -// Credit for this trick goes to Ken. - -type leftCheat struct { - delta int // number of new digits - cutoff string // minus one digit if original < a. -} - -var leftcheats = []leftCheat{ - // Leading digits of 1/2^i = 5^i. - // 5^23 is not an exact 64-bit floating point number, - // so have to use bc for the math. 
- /* - seq 27 | sed 's/^/5^/' | bc | - awk 'BEGIN{ print "\tleftCheat{ 0, \"\" }," } - { - log2 = log(2)/log(10) - printf("\tleftCheat{ %d, \"%s\" },\t// * %d\n", - int(log2*NR+1), $0, 2**NR) - }' - */ - {0, ""}, - {1, "5"}, // * 2 - {1, "25"}, // * 4 - {1, "125"}, // * 8 - {2, "625"}, // * 16 - {2, "3125"}, // * 32 - {2, "15625"}, // * 64 - {3, "78125"}, // * 128 - {3, "390625"}, // * 256 - {3, "1953125"}, // * 512 - {4, "9765625"}, // * 1024 - {4, "48828125"}, // * 2048 - {4, "244140625"}, // * 4096 - {4, "1220703125"}, // * 8192 - {5, "6103515625"}, // * 16384 - {5, "30517578125"}, // * 32768 - {5, "152587890625"}, // * 65536 - {6, "762939453125"}, // * 131072 - {6, "3814697265625"}, // * 262144 - {6, "19073486328125"}, // * 524288 - {7, "95367431640625"}, // * 1048576 - {7, "476837158203125"}, // * 2097152 - {7, "2384185791015625"}, // * 4194304 - {7, "11920928955078125"}, // * 8388608 - {8, "59604644775390625"}, // * 16777216 - {8, "298023223876953125"}, // * 33554432 - {8, "1490116119384765625"}, // * 67108864 - {9, "7450580596923828125"}, // * 134217728 -} - -// Is the leading prefix of b lexicographically less than s? -func prefixIsLessThan(b []byte, s string) bool { - for i := 0; i < len(s); i++ { - if i >= len(b) { - return true - } - if b[i] != s[i] { - return b[i] < s[i] - } - } - return false -} - -// Binary shift left (/ 2) by k bits. k <= maxShift to avoid overflow. -func leftShift(a *decimal, k uint) { - delta := leftcheats[k].delta - if prefixIsLessThan(a.d[0:a.nd], leftcheats[k].cutoff) { - delta-- - } - - r := a.nd // read index - w := a.nd + delta // write index - n := 0 - - // Pick up a digit, put down a digit. - for r--; r >= 0; r-- { - n += (int(a.d[r]) - '0') << k - quo := n / 10 - rem := n - 10*quo - w-- - if w < len(a.d) { - a.d[w] = byte(rem + '0') - } else if rem != 0 { - a.trunc = true - } - n = quo - } - - // Put down extra digits. - for n > 0 { - quo := n / 10 - rem := n - 10*quo - w-- - if w < len(a.d) { - a.d[w] = byte(rem + '0') - } else if rem != 0 { - a.trunc = true - } - n = quo - } - - a.nd += delta - if a.nd >= len(a.d) { - a.nd = len(a.d) - } - a.dp += delta - trim(a) -} - -// Binary shift left (k > 0) or right (k < 0). -func (a *decimal) Shift(k int) { - switch { - case a.nd == 0: - // nothing to do: a == 0 - case k > 0: - for k > maxShift { - leftShift(a, maxShift) - k -= maxShift - } - leftShift(a, uint(k)) - case k < 0: - for k < -maxShift { - rightShift(a, maxShift) - k += maxShift - } - rightShift(a, uint(-k)) - } -} - -// If we chop a at nd digits, should we round up? -func shouldRoundUp(a *decimal, nd int) bool { - if nd < 0 || nd >= a.nd { - return false - } - if a.d[nd] == '5' && nd+1 == a.nd { // exactly halfway - round to even - // if we truncated, a little higher than what's recorded - always round up - if a.trunc { - return true - } - return nd > 0 && (a.d[nd-1]-'0')%2 != 0 - } - // not halfway - digit tells all - return a.d[nd] >= '5' -} - -// Round a to nd digits (or fewer). -// If nd is zero, it means we're rounding -// just to the left of the digits, as in -// 0.09 -> 0.1. -func (a *decimal) Round(nd int) { - if nd < 0 || nd >= a.nd { - return - } - if shouldRoundUp(a, nd) { - a.RoundUp(nd) - } else { - a.RoundDown(nd) - } -} - -// Round a down to nd digits (or fewer). -func (a *decimal) RoundDown(nd int) { - if nd < 0 || nd >= a.nd { - return - } - a.nd = nd - trim(a) -} - -// Round a up to nd digits (or fewer). 
-func (a *decimal) RoundUp(nd int) { - if nd < 0 || nd >= a.nd { - return - } - - // round up - for i := nd - 1; i >= 0; i-- { - c := a.d[i] - if c < '9' { // can stop after this digit - a.d[i]++ - a.nd = i + 1 - return - } - } - - // Number is all 9s. - // Change to single 1 with adjusted decimal point. - a.d[0] = '1' - a.nd = 1 - a.dp++ -} - -// Extract integer part, rounded appropriately. -// No guarantees about overflow. -func (a *decimal) RoundedInteger() uint64 { - if a.dp > 20 { - return 0xFFFFFFFFFFFFFFFF - } - var i int - n := uint64(0) - for i = 0; i < a.dp && i < a.nd; i++ { - n = n*10 + uint64(a.d[i]-'0') - } - for ; i < a.dp; i++ { - n *= 10 - } - if shouldRoundUp(a, a.dp) { - n++ - } - return n -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/extfloat.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/extfloat.go deleted file mode 100644 index 508ddc6bed..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/extfloat.go +++ /dev/null @@ -1,668 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package v1 - -// An extFloat represents an extended floating-point number, with more -// precision than a float64. It does not try to save bits: the -// number represented by the structure is mant*(2^exp), with a negative -// sign if neg is true. -type extFloat struct { - mant uint64 - exp int - neg bool -} - -// Powers of ten taken from double-conversion library. -// http://code.google.com/p/double-conversion/ -const ( - firstPowerOfTen = -348 - stepPowerOfTen = 8 -) - -var smallPowersOfTen = [...]extFloat{ - {1 << 63, -63, false}, // 1 - {0xa << 60, -60, false}, // 1e1 - {0x64 << 57, -57, false}, // 1e2 - {0x3e8 << 54, -54, false}, // 1e3 - {0x2710 << 50, -50, false}, // 1e4 - {0x186a0 << 47, -47, false}, // 1e5 - {0xf4240 << 44, -44, false}, // 1e6 - {0x989680 << 40, -40, false}, // 1e7 -} - -var powersOfTen = [...]extFloat{ - {0xfa8fd5a0081c0288, -1220, false}, // 10^-348 - {0xbaaee17fa23ebf76, -1193, false}, // 10^-340 - {0x8b16fb203055ac76, -1166, false}, // 10^-332 - {0xcf42894a5dce35ea, -1140, false}, // 10^-324 - {0x9a6bb0aa55653b2d, -1113, false}, // 10^-316 - {0xe61acf033d1a45df, -1087, false}, // 10^-308 - {0xab70fe17c79ac6ca, -1060, false}, // 10^-300 - {0xff77b1fcbebcdc4f, -1034, false}, // 10^-292 - {0xbe5691ef416bd60c, -1007, false}, // 10^-284 - {0x8dd01fad907ffc3c, -980, false}, // 10^-276 - {0xd3515c2831559a83, -954, false}, // 10^-268 - {0x9d71ac8fada6c9b5, -927, false}, // 10^-260 - {0xea9c227723ee8bcb, -901, false}, // 10^-252 - {0xaecc49914078536d, -874, false}, // 10^-244 - {0x823c12795db6ce57, -847, false}, // 10^-236 - {0xc21094364dfb5637, -821, false}, // 10^-228 - {0x9096ea6f3848984f, -794, false}, // 10^-220 - {0xd77485cb25823ac7, -768, false}, // 10^-212 - {0xa086cfcd97bf97f4, -741, false}, // 10^-204 - {0xef340a98172aace5, -715, false}, // 10^-196 - {0xb23867fb2a35b28e, -688, false}, // 10^-188 - {0x84c8d4dfd2c63f3b, -661, false}, // 10^-180 - {0xc5dd44271ad3cdba, -635, false}, // 10^-172 - {0x936b9fcebb25c996, -608, false}, // 10^-164 - {0xdbac6c247d62a584, -582, false}, // 10^-156 - {0xa3ab66580d5fdaf6, -555, false}, // 10^-148 - {0xf3e2f893dec3f126, -529, false}, // 10^-140 - {0xb5b5ada8aaff80b8, -502, false}, // 10^-132 - {0x87625f056c7c4a8b, -475, false}, // 10^-124 - {0xc9bcff6034c13053, -449, false}, // 10^-116 - {0x964e858c91ba2655, -422, false}, // 10^-108 - {0xdff9772470297ebd, 
-396, false}, // 10^-100 - {0xa6dfbd9fb8e5b88f, -369, false}, // 10^-92 - {0xf8a95fcf88747d94, -343, false}, // 10^-84 - {0xb94470938fa89bcf, -316, false}, // 10^-76 - {0x8a08f0f8bf0f156b, -289, false}, // 10^-68 - {0xcdb02555653131b6, -263, false}, // 10^-60 - {0x993fe2c6d07b7fac, -236, false}, // 10^-52 - {0xe45c10c42a2b3b06, -210, false}, // 10^-44 - {0xaa242499697392d3, -183, false}, // 10^-36 - {0xfd87b5f28300ca0e, -157, false}, // 10^-28 - {0xbce5086492111aeb, -130, false}, // 10^-20 - {0x8cbccc096f5088cc, -103, false}, // 10^-12 - {0xd1b71758e219652c, -77, false}, // 10^-4 - {0x9c40000000000000, -50, false}, // 10^4 - {0xe8d4a51000000000, -24, false}, // 10^12 - {0xad78ebc5ac620000, 3, false}, // 10^20 - {0x813f3978f8940984, 30, false}, // 10^28 - {0xc097ce7bc90715b3, 56, false}, // 10^36 - {0x8f7e32ce7bea5c70, 83, false}, // 10^44 - {0xd5d238a4abe98068, 109, false}, // 10^52 - {0x9f4f2726179a2245, 136, false}, // 10^60 - {0xed63a231d4c4fb27, 162, false}, // 10^68 - {0xb0de65388cc8ada8, 189, false}, // 10^76 - {0x83c7088e1aab65db, 216, false}, // 10^84 - {0xc45d1df942711d9a, 242, false}, // 10^92 - {0x924d692ca61be758, 269, false}, // 10^100 - {0xda01ee641a708dea, 295, false}, // 10^108 - {0xa26da3999aef774a, 322, false}, // 10^116 - {0xf209787bb47d6b85, 348, false}, // 10^124 - {0xb454e4a179dd1877, 375, false}, // 10^132 - {0x865b86925b9bc5c2, 402, false}, // 10^140 - {0xc83553c5c8965d3d, 428, false}, // 10^148 - {0x952ab45cfa97a0b3, 455, false}, // 10^156 - {0xde469fbd99a05fe3, 481, false}, // 10^164 - {0xa59bc234db398c25, 508, false}, // 10^172 - {0xf6c69a72a3989f5c, 534, false}, // 10^180 - {0xb7dcbf5354e9bece, 561, false}, // 10^188 - {0x88fcf317f22241e2, 588, false}, // 10^196 - {0xcc20ce9bd35c78a5, 614, false}, // 10^204 - {0x98165af37b2153df, 641, false}, // 10^212 - {0xe2a0b5dc971f303a, 667, false}, // 10^220 - {0xa8d9d1535ce3b396, 694, false}, // 10^228 - {0xfb9b7cd9a4a7443c, 720, false}, // 10^236 - {0xbb764c4ca7a44410, 747, false}, // 10^244 - {0x8bab8eefb6409c1a, 774, false}, // 10^252 - {0xd01fef10a657842c, 800, false}, // 10^260 - {0x9b10a4e5e9913129, 827, false}, // 10^268 - {0xe7109bfba19c0c9d, 853, false}, // 10^276 - {0xac2820d9623bf429, 880, false}, // 10^284 - {0x80444b5e7aa7cf85, 907, false}, // 10^292 - {0xbf21e44003acdd2d, 933, false}, // 10^300 - {0x8e679c2f5e44ff8f, 960, false}, // 10^308 - {0xd433179d9c8cb841, 986, false}, // 10^316 - {0x9e19db92b4e31ba9, 1013, false}, // 10^324 - {0xeb96bf6ebadf77d9, 1039, false}, // 10^332 - {0xaf87023b9bf0ee6b, 1066, false}, // 10^340 -} - -// floatBits returns the bits of the float64 that best approximates -// the extFloat passed as receiver. Overflow is set to true if -// the resulting float64 is ±Inf. -func (f *extFloat) floatBits(flt *floatInfo) (bits uint64, overflow bool) { - f.Normalize() - - exp := f.exp + 63 - - // Exponent too small. - if exp < flt.bias+1 { - n := flt.bias + 1 - exp - f.mant >>= uint(n) - exp += n - } - - // Extract 1+flt.mantbits bits from the 64-bit mantissa. - mant := f.mant >> (63 - flt.mantbits) - if f.mant&(1<<(62-flt.mantbits)) != 0 { - // Round up. - mant += 1 - } - - // Rounding might have added a bit; shift down. - if mant == 2<>= 1 - exp++ - } - - // Infinities. 
- if exp-flt.bias >= 1<>uint(-f.exp))<>= uint(-f.exp) - f.exp = 0 - return *f, *f - } - expBiased := exp - flt.bias - - upper = extFloat{mant: 2*f.mant + 1, exp: f.exp - 1, neg: f.neg} - if mant != 1<>(64-32) == 0 { - mant <<= 32 - exp -= 32 - } - if mant>>(64-16) == 0 { - mant <<= 16 - exp -= 16 - } - if mant>>(64-8) == 0 { - mant <<= 8 - exp -= 8 - } - if mant>>(64-4) == 0 { - mant <<= 4 - exp -= 4 - } - if mant>>(64-2) == 0 { - mant <<= 2 - exp -= 2 - } - if mant>>(64-1) == 0 { - mant <<= 1 - exp -= 1 - } - shift = uint(f.exp - exp) - f.mant, f.exp = mant, exp - return -} - -// Multiply sets f to the product f*g: the result is correctly rounded, -// but not normalized. -func (f *extFloat) Multiply(g extFloat) { - fhi, flo := f.mant>>32, uint64(uint32(f.mant)) - ghi, glo := g.mant>>32, uint64(uint32(g.mant)) - - // Cross products. - cross1 := fhi * glo - cross2 := flo * ghi - - // f.mant*g.mant is fhi*ghi << 64 + (cross1+cross2) << 32 + flo*glo - f.mant = fhi*ghi + (cross1 >> 32) + (cross2 >> 32) - rem := uint64(uint32(cross1)) + uint64(uint32(cross2)) + ((flo * glo) >> 32) - // Round up. - rem += (1 << 31) - - f.mant += (rem >> 32) - f.exp = f.exp + g.exp + 64 -} - -var uint64pow10 = [...]uint64{ - 1, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, - 1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19, -} - -// AssignDecimal sets f to an approximate value mantissa*10^exp. It -// returns true if the value represented by f is guaranteed to be the -// best approximation of d after being rounded to a float64 or -// float32 depending on flt. -func (f *extFloat) AssignDecimal(mantissa uint64, exp10 int, neg bool, trunc bool, flt *floatInfo) (ok bool) { - const uint64digits = 19 - const errorscale = 8 - errors := 0 // An upper bound for error, computed in errorscale*ulp. - if trunc { - // the decimal number was truncated. - errors += errorscale / 2 - } - - f.mant = mantissa - f.exp = 0 - f.neg = neg - - // Multiply by powers of ten. - i := (exp10 - firstPowerOfTen) / stepPowerOfTen - if exp10 < firstPowerOfTen || i >= len(powersOfTen) { - return false - } - adjExp := (exp10 - firstPowerOfTen) % stepPowerOfTen - - // We multiply by exp%step - if adjExp < uint64digits && mantissa < uint64pow10[uint64digits-adjExp] { - // We can multiply the mantissa exactly. - f.mant *= uint64pow10[adjExp] - f.Normalize() - } else { - f.Normalize() - f.Multiply(smallPowersOfTen[adjExp]) - errors += errorscale / 2 - } - - // We multiply by 10 to the exp - exp%step. - f.Multiply(powersOfTen[i]) - if errors > 0 { - errors += 1 - } - errors += errorscale / 2 - - // Normalize - shift := f.Normalize() - errors <<= shift - - // Now f is a good approximation of the decimal. - // Check whether the error is too large: that is, if the mantissa - // is perturbated by the error, the resulting float64 will change. - // The 64 bits mantissa is 1 + 52 bits for float64 + 11 extra bits. - // - // In many cases the approximation will be good enough. - denormalExp := flt.bias - 63 - var extrabits uint - if f.exp <= denormalExp { - // f.mant * 2^f.exp is smaller than 2^(flt.bias+1). - extrabits = uint(63 - flt.mantbits + 1 + uint(denormalExp-f.exp)) - } else { - extrabits = uint(63 - flt.mantbits) - } - - halfway := uint64(1) << (extrabits - 1) - mant_extra := f.mant & (1< expMax: - i-- - default: - break Loop - } - } - // Apply the desired decimal shift on f. It will have exponent - // in the desired range. This is multiplication by 10^-exp10. 
- f.Multiply(powersOfTen[i]) - - return -(firstPowerOfTen + i*stepPowerOfTen), i -} - -// frexp10Many applies a common shift by a power of ten to a, b, c. -func frexp10Many(a, b, c *extFloat) (exp10 int) { - exp10, i := c.frexp10() - a.Multiply(powersOfTen[i]) - b.Multiply(powersOfTen[i]) - return -} - -// FixedDecimal stores in d the first n significant digits -// of the decimal representation of f. It returns false -// if it cannot be sure of the answer. -func (f *extFloat) FixedDecimal(d *decimalSlice, n int) bool { - if f.mant == 0 { - d.nd = 0 - d.dp = 0 - d.neg = f.neg - return true - } - if n == 0 { - panic("strconv: internal error: extFloat.FixedDecimal called with n == 0") - } - // Multiply by an appropriate power of ten to have a reasonable - // number to process. - f.Normalize() - exp10, _ := f.frexp10() - - shift := uint(-f.exp) - integer := uint32(f.mant >> shift) - fraction := f.mant - (uint64(integer) << shift) - ε := uint64(1) // ε is the uncertainty we have on the mantissa of f. - - // Write exactly n digits to d. - needed := n // how many digits are left to write. - integerDigits := 0 // the number of decimal digits of integer. - pow10 := uint64(1) // the power of ten by which f was scaled. - for i, pow := 0, uint64(1); i < 20; i++ { - if pow > uint64(integer) { - integerDigits = i - break - } - pow *= 10 - } - rest := integer - if integerDigits > needed { - // the integral part is already large, trim the last digits. - pow10 = uint64pow10[integerDigits-needed] - integer /= uint32(pow10) - rest -= integer * uint32(pow10) - } else { - rest = 0 - } - - // Write the digits of integer: the digits of rest are omitted. - var buf [32]byte - pos := len(buf) - for v := integer; v > 0; { - v1 := v / 10 - v -= 10 * v1 - pos-- - buf[pos] = byte(v + '0') - v = v1 - } - for i := pos; i < len(buf); i++ { - d.d[i-pos] = buf[i] - } - nd := len(buf) - pos - d.nd = nd - d.dp = integerDigits + exp10 - needed -= nd - - if needed > 0 { - if rest != 0 || pow10 != 1 { - panic("strconv: internal error, rest != 0 but needed > 0") - } - // Emit digits for the fractional part. Each time, 10*fraction - // fits in a uint64 without overflow. - for needed > 0 { - fraction *= 10 - ε *= 10 // the uncertainty scales as we multiply by ten. - if 2*ε > 1<> shift - d.d[nd] = byte(digit + '0') - fraction -= digit << shift - nd++ - needed-- - } - d.nd = nd - } - - // We have written a truncation of f (a numerator / 10^d.dp). The remaining part - // can be interpreted as a small number (< 1) to be added to the last digit of the - // numerator. - // - // If rest > 0, the amount is: - // (rest< 0 guarantees that pow10 << shift does not overflow a uint64. - // - // If rest = 0, pow10 == 1 and the amount is - // fraction / (1 << shift) - // fraction being known with a ±ε uncertainty. - // - // We pass this information to the rounding routine for adjustment. - - ok := adjustLastDigitFixed(d, uint64(rest)<= 0; i-- { - if d.d[i] != '0' { - d.nd = i + 1 - break - } - } - return true -} - -// adjustLastDigitFixed assumes d contains the representation of the integral part -// of some number, whose fractional part is num / (den << shift). The numerator -// num is only known up to an uncertainty of size ε, assumed to be less than -// (den << shift)/2. -// -// It will increase the last digit by one to account for correct rounding, typically -// when the fractional part is greater than 1/2, and will return false if ε is such -// that no correct answer can be given. 
-func adjustLastDigitFixed(d *decimalSlice, num, den uint64, shift uint, ε uint64) bool { - if num > den< den< den< (den< den<= 0; i-- { - if d.d[i] == '9' { - d.nd-- - } else { - break - } - } - if i < 0 { - d.d[0] = '1' - d.nd = 1 - d.dp++ - } else { - d.d[i]++ - } - return true - } - return false -} - -// ShortestDecimal stores in d the shortest decimal representation of f -// which belongs to the open interval (lower, upper), where f is supposed -// to lie. It returns false whenever the result is unsure. The implementation -// uses the Grisu3 algorithm. -func (f *extFloat) ShortestDecimal(d *decimalSlice, lower, upper *extFloat) bool { - if f.mant == 0 { - d.nd = 0 - d.dp = 0 - d.neg = f.neg - return true - } - if f.exp == 0 && *lower == *f && *lower == *upper { - // an exact integer. - var buf [24]byte - n := len(buf) - 1 - for v := f.mant; v > 0; { - v1 := v / 10 - v -= 10 * v1 - buf[n] = byte(v + '0') - n-- - v = v1 - } - nd := len(buf) - n - 1 - for i := 0; i < nd; i++ { - d.d[i] = buf[n+1+i] - } - d.nd, d.dp = nd, nd - for d.nd > 0 && d.d[d.nd-1] == '0' { - d.nd-- - } - if d.nd == 0 { - d.dp = 0 - } - d.neg = f.neg - return true - } - upper.Normalize() - // Uniformize exponents. - if f.exp > upper.exp { - f.mant <<= uint(f.exp - upper.exp) - f.exp = upper.exp - } - if lower.exp > upper.exp { - lower.mant <<= uint(lower.exp - upper.exp) - lower.exp = upper.exp - } - - exp10 := frexp10Many(lower, f, upper) - // Take a safety margin due to rounding in frexp10Many, but we lose precision. - upper.mant++ - lower.mant-- - - // The shortest representation of f is either rounded up or down, but - // in any case, it is a truncation of upper. - shift := uint(-upper.exp) - integer := uint32(upper.mant >> shift) - fraction := upper.mant - (uint64(integer) << shift) - - // How far we can go down from upper until the result is wrong. - allowance := upper.mant - lower.mant - // How far we should go to get a very precise result. - targetDiff := upper.mant - f.mant - - // Count integral digits: there are at most 10. - var integerDigits int - for i, pow := 0, uint64(1); i < 20; i++ { - if pow > uint64(integer) { - integerDigits = i - break - } - pow *= 10 - } - for i := 0; i < integerDigits; i++ { - pow := uint64pow10[integerDigits-i-1] - digit := integer / uint32(pow) - d.d[i] = byte(digit + '0') - integer -= digit * uint32(pow) - // evaluate whether we should stop. - if currentDiff := uint64(integer)<> shift) - d.d[d.nd] = byte(digit + '0') - d.nd++ - fraction -= uint64(digit) << shift - if fraction < allowance*multiplier { - // We are in the admissible range. Note that if allowance is about to - // overflow, that is, allowance > 2^64/10, the condition is automatically - // true due to the limited range of fraction. - return adjustLastDigit(d, - fraction, targetDiff*multiplier, allowance*multiplier, - 1< maxDiff-ulpBinary { - // we went too far - return false - } - if d.nd == 1 && d.d[0] == '0' { - // the number has actually reached zero. - d.nd = 0 - d.dp = 0 - } - return true -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/fold.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/fold.go deleted file mode 100644 index 4d33e6f77d..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/fold.go +++ /dev/null @@ -1,121 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's encoding/json/fold.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package v1 - -import ( - "unicode/utf8" -) - -const ( - caseMask = ^byte(0x20) // Mask to ignore case in ASCII. - kelvin = '\u212a' - smallLongEss = '\u017f' -) - -// equalFoldRight is a specialization of bytes.EqualFold when s is -// known to be all ASCII (including punctuation), but contains an 's', -// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t. -// See comments on foldFunc. -func EqualFoldRight(s, t []byte) bool { - for _, sb := range s { - if len(t) == 0 { - return false - } - tb := t[0] - if tb < utf8.RuneSelf { - if sb != tb { - sbUpper := sb & caseMask - if 'A' <= sbUpper && sbUpper <= 'Z' { - if sbUpper != tb&caseMask { - return false - } - } else { - return false - } - } - t = t[1:] - continue - } - // sb is ASCII and t is not. t must be either kelvin - // sign or long s; sb must be s, S, k, or K. - tr, size := utf8.DecodeRune(t) - switch sb { - case 's', 'S': - if tr != smallLongEss { - return false - } - case 'k', 'K': - if tr != kelvin { - return false - } - default: - return false - } - t = t[size:] - - } - if len(t) > 0 { - return false - } - return true -} - -// asciiEqualFold is a specialization of bytes.EqualFold for use when -// s is all ASCII (but may contain non-letters) and contains no -// special-folding letters. -// See comments on foldFunc. -func AsciiEqualFold(s, t []byte) bool { - if len(s) != len(t) { - return false - } - for i, sb := range s { - tb := t[i] - if sb == tb { - continue - } - if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') { - if sb&caseMask != tb&caseMask { - return false - } - } else { - return false - } - } - return true -} - -// simpleLetterEqualFold is a specialization of bytes.EqualFold for -// use when s is all ASCII letters (no underscores, etc) and also -// doesn't contain 'k', 'K', 's', or 'S'. -// See comments on foldFunc. -func SimpleLetterEqualFold(s, t []byte) bool { - if len(s) != len(t) { - return false - } - for i, b := range s { - if b&caseMask != t[i]&caseMask { - return false - } - } - return true -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/ftoa.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/ftoa.go deleted file mode 100644 index 360d6dbcf9..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/ftoa.go +++ /dev/null @@ -1,542 +0,0 @@ -package v1 - -/** - * Copyright 2015 Paul Querna, Klaus Post - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Most of this file are on Go stdlib's strconv/ftoa.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -import "math" - -// TODO: move elsewhere? -type floatInfo struct { - mantbits uint - expbits uint - bias int -} - -var optimize = true // can change for testing - -var float32info = floatInfo{23, 8, -127} -var float64info = floatInfo{52, 11, -1023} - -// AppendFloat appends the string form of the floating-point number f, -// as generated by FormatFloat -func AppendFloat(dst EncodingBuffer, val float64, fmt byte, prec, bitSize int) { - var bits uint64 - var flt *floatInfo - switch bitSize { - case 32: - bits = uint64(math.Float32bits(float32(val))) - flt = &float32info - case 64: - bits = math.Float64bits(val) - flt = &float64info - default: - panic("strconv: illegal AppendFloat/FormatFloat bitSize") - } - - neg := bits>>(flt.expbits+flt.mantbits) != 0 - exp := int(bits>>flt.mantbits) & (1< digs.nd && digs.nd >= digs.dp { - eprec = digs.nd - } - // %e is used if the exponent from the conversion - // is less than -4 or greater than or equal to the precision. - // if precision was the shortest possible, use precision 6 for this decision. - if shortest { - eprec = 6 - } - exp := digs.dp - 1 - if exp < -4 || exp >= eprec { - if prec > digs.nd { - prec = digs.nd - } - fmtE(dst, neg, digs, prec-1, fmt+'e'-'g') - return - } - if prec > digs.dp { - prec = digs.nd - } - fmtF(dst, neg, digs, max(prec-digs.dp, 0)) - return - } - - // unknown format - dst.Write([]byte{'%', fmt}) - return -} - -// Round d (= mant * 2^exp) to the shortest number of digits -// that will let the original floating point value be precisely -// reconstructed. Size is original floating point size (64 or 32). -func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) { - // If mantissa is zero, the number is zero; stop now. - if mant == 0 { - d.nd = 0 - return - } - - // Compute upper and lower such that any decimal number - // between upper and lower (possibly inclusive) - // will round to the original floating point number. - - // We may see at once that the number is already shortest. - // - // Suppose d is not denormal, so that 2^exp <= d < 10^dp. - // The closest shorter number is at least 10^(dp-nd) away. - // The lower/upper bounds computed below are at distance - // at most 2^(exp-mantbits). - // - // So the number is already shortest if 10^(dp-nd) > 2^(exp-mantbits), - // or equivalently log2(10)*(dp-nd) > exp-mantbits. - // It is true if 332/100*(dp-nd) >= exp-mantbits (log2(10) > 3.32). - minexp := flt.bias + 1 // minimum possible exponent - if exp > minexp && 332*(d.dp-d.nd) >= 100*(exp-int(flt.mantbits)) { - // The number is already shortest. - return - } - - // d = mant << (exp - mantbits) - // Next highest floating point number is mant+1 << exp-mantbits. - // Our upper bound is halfway between, mant*2+1 << exp-mantbits-1. - upper := new(decimal) - upper.Assign(mant*2 + 1) - upper.Shift(exp - int(flt.mantbits) - 1) - - // d = mant << (exp - mantbits) - // Next lowest floating point number is mant-1 << exp-mantbits, - // unless mant-1 drops the significant bit and exp is not the minimum exp, - // in which case the next lowest is mant*2-1 << exp-mantbits-1. - // Either way, call it mantlo << explo-mantbits. 
- // Our lower bound is halfway between, mantlo*2+1 << explo-mantbits-1. - var mantlo uint64 - var explo int - if mant > 1< 0 { - dst.WriteByte('.') - i := 1 - m := min(d.nd, prec+1) - if i < m { - dst.Write(d.d[i:m]) - i = m - } - for i <= prec { - dst.WriteByte('0') - i++ - } - } - - // e± - dst.WriteByte(fmt) - exp := d.dp - 1 - if d.nd == 0 { // special case: 0 has exponent 0 - exp = 0 - } - if exp < 0 { - ch = '-' - exp = -exp - } else { - ch = '+' - } - dst.WriteByte(ch) - - // dd or ddd - switch { - case exp < 10: - dst.WriteByte('0') - dst.WriteByte(byte(exp) + '0') - case exp < 100: - dst.WriteByte(byte(exp/10) + '0') - dst.WriteByte(byte(exp%10) + '0') - default: - dst.WriteByte(byte(exp/100) + '0') - dst.WriteByte(byte(exp/10)%10 + '0') - dst.WriteByte(byte(exp%10) + '0') - } - - return -} - -// %f: -ddddddd.ddddd -func fmtF(dst EncodingBuffer, neg bool, d decimalSlice, prec int) { - // sign - if neg { - dst.WriteByte('-') - } - - // integer, padded with zeros as needed. - if d.dp > 0 { - m := min(d.nd, d.dp) - dst.Write(d.d[:m]) - for ; m < d.dp; m++ { - dst.WriteByte('0') - } - } else { - dst.WriteByte('0') - } - - // fraction - if prec > 0 { - dst.WriteByte('.') - for i := 0; i < prec; i++ { - ch := byte('0') - if j := d.dp + i; 0 <= j && j < d.nd { - ch = d.d[j] - } - dst.WriteByte(ch) - } - } - - return -} - -// %b: -ddddddddp±ddd -func fmtB(dst EncodingBuffer, neg bool, mant uint64, exp int, flt *floatInfo) { - // sign - if neg { - dst.WriteByte('-') - } - - // mantissa - formatBits(dst, mant, 10, false) - - // p - dst.WriteByte('p') - - // ±exponent - exp -= int(flt.mantbits) - if exp >= 0 { - dst.WriteByte('+') - } - formatBits(dst, uint64(exp), 10, exp < 0) - - return -} - -func min(a, b int) int { - if a < b { - return a - } - return b -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -// formatBits computes the string representation of u in the given base. -// If neg is set, u is treated as negative int64 value. 
-func formatBits(dst EncodingBuffer, u uint64, base int, neg bool) { - if base < 2 || base > len(digits) { - panic("strconv: illegal AppendInt/FormatInt base") - } - // 2 <= base && base <= len(digits) - - var a [64 + 1]byte // +1 for sign of 64bit value in base 2 - i := len(a) - - if neg { - u = -u - } - - // convert bits - if base == 10 { - // common case: use constants for / because - // the compiler can optimize it into a multiply+shift - - if ^uintptr(0)>>32 == 0 { - for u > uint64(^uintptr(0)) { - q := u / 1e9 - us := uintptr(u - q*1e9) // us % 1e9 fits into a uintptr - for j := 9; j > 0; j-- { - i-- - qs := us / 10 - a[i] = byte(us - qs*10 + '0') - us = qs - } - u = q - } - } - - // u guaranteed to fit into a uintptr - us := uintptr(u) - for us >= 10 { - i-- - q := us / 10 - a[i] = byte(us - q*10 + '0') - us = q - } - // u < 10 - i-- - a[i] = byte(us + '0') - - } else if s := shifts[base]; s > 0 { - // base is power of 2: use shifts and masks instead of / and % - b := uint64(base) - m := uintptr(b) - 1 // == 1<= b { - i-- - a[i] = digits[uintptr(u)&m] - u >>= s - } - // u < base - i-- - a[i] = digits[uintptr(u)] - - } else { - // general case - b := uint64(base) - for u >= b { - i-- - q := u / b - a[i] = digits[uintptr(u-q*b)] - u = q - } - // u < base - i-- - a[i] = digits[uintptr(u)] - } - - // add sign, if any - if neg { - i-- - a[i] = '-' - } - - dst.Write(a[i:]) -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atof.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atof.go deleted file mode 100644 index 46c1289ec4..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atof.go +++ /dev/null @@ -1,936 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's strconv/atof.go */ - -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package internal - -// decimal to binary floating point conversion. -// Algorithm: -// 1) Store input in multiprecision decimal. -// 2) Multiply/divide decimal by powers of two until in range [0.5, 1) -// 3) Multiply by 2^precision and round to get mantissa. 
- -import "math" - -var optimize = true // can change for testing - -func equalIgnoreCase(s1 []byte, s2 []byte) bool { - if len(s1) != len(s2) { - return false - } - for i := 0; i < len(s1); i++ { - c1 := s1[i] - if 'A' <= c1 && c1 <= 'Z' { - c1 += 'a' - 'A' - } - c2 := s2[i] - if 'A' <= c2 && c2 <= 'Z' { - c2 += 'a' - 'A' - } - if c1 != c2 { - return false - } - } - return true -} - -func special(s []byte) (f float64, ok bool) { - if len(s) == 0 { - return - } - switch s[0] { - default: - return - case '+': - if equalIgnoreCase(s, []byte("+inf")) || equalIgnoreCase(s, []byte("+infinity")) { - return math.Inf(1), true - } - case '-': - if equalIgnoreCase(s, []byte("-inf")) || equalIgnoreCase(s, []byte("-infinity")) { - return math.Inf(-1), true - } - case 'n', 'N': - if equalIgnoreCase(s, []byte("nan")) { - return math.NaN(), true - } - case 'i', 'I': - if equalIgnoreCase(s, []byte("inf")) || equalIgnoreCase(s, []byte("infinity")) { - return math.Inf(1), true - } - } - return -} - -func (b *decimal) set(s []byte) (ok bool) { - i := 0 - b.neg = false - b.trunc = false - - // optional sign - if i >= len(s) { - return - } - switch { - case s[i] == '+': - i++ - case s[i] == '-': - b.neg = true - i++ - } - - // digits - sawdot := false - sawdigits := false - for ; i < len(s); i++ { - switch { - case s[i] == '.': - if sawdot { - return - } - sawdot = true - b.dp = b.nd - continue - - case '0' <= s[i] && s[i] <= '9': - sawdigits = true - if s[i] == '0' && b.nd == 0 { // ignore leading zeros - b.dp-- - continue - } - if b.nd < len(b.d) { - b.d[b.nd] = s[i] - b.nd++ - } else if s[i] != '0' { - b.trunc = true - } - continue - } - break - } - if !sawdigits { - return - } - if !sawdot { - b.dp = b.nd - } - - // optional exponent moves decimal point. - // if we read a very large, very long number, - // just be sure to move the decimal point by - // a lot (say, 100000). it doesn't matter if it's - // not the exact number. - if i < len(s) && (s[i] == 'e' || s[i] == 'E') { - i++ - if i >= len(s) { - return - } - esign := 1 - if s[i] == '+' { - i++ - } else if s[i] == '-' { - i++ - esign = -1 - } - if i >= len(s) || s[i] < '0' || s[i] > '9' { - return - } - e := 0 - for ; i < len(s) && '0' <= s[i] && s[i] <= '9'; i++ { - if e < 10000 { - e = e*10 + int(s[i]) - '0' - } - } - b.dp += e * esign - } - - if i != len(s) { - return - } - - ok = true - return -} - -// readFloat reads a decimal mantissa and exponent from a float -// string representation. It sets ok to false if the number could -// not fit return types or is invalid. -func readFloat(s []byte) (mantissa uint64, exp int, neg, trunc, ok bool) { - const uint64digits = 19 - i := 0 - - // optional sign - if i >= len(s) { - return - } - switch { - case s[i] == '+': - i++ - case s[i] == '-': - neg = true - i++ - } - - // digits - sawdot := false - sawdigits := false - nd := 0 - ndMant := 0 - dp := 0 - for ; i < len(s); i++ { - switch c := s[i]; true { - case c == '.': - if sawdot { - return - } - sawdot = true - dp = nd - continue - - case '0' <= c && c <= '9': - sawdigits = true - if c == '0' && nd == 0 { // ignore leading zeros - dp-- - continue - } - nd++ - if ndMant < uint64digits { - mantissa *= 10 - mantissa += uint64(c - '0') - ndMant++ - } else if s[i] != '0' { - trunc = true - } - continue - } - break - } - if !sawdigits { - return - } - if !sawdot { - dp = nd - } - - // optional exponent moves decimal point. - // if we read a very large, very long number, - // just be sure to move the decimal point by - // a lot (say, 100000). 
it doesn't matter if it's - // not the exact number. - if i < len(s) && (s[i] == 'e' || s[i] == 'E') { - i++ - if i >= len(s) { - return - } - esign := 1 - if s[i] == '+' { - i++ - } else if s[i] == '-' { - i++ - esign = -1 - } - if i >= len(s) || s[i] < '0' || s[i] > '9' { - return - } - e := 0 - for ; i < len(s) && '0' <= s[i] && s[i] <= '9'; i++ { - if e < 10000 { - e = e*10 + int(s[i]) - '0' - } - } - dp += e * esign - } - - if i != len(s) { - return - } - - exp = dp - ndMant - ok = true - return - -} - -// decimal power of ten to binary power of two. -var powtab = []int{1, 3, 6, 9, 13, 16, 19, 23, 26} - -func (d *decimal) floatBits(flt *floatInfo) (b uint64, overflow bool) { - var exp int - var mant uint64 - - // Zero is always a special case. - if d.nd == 0 { - mant = 0 - exp = flt.bias - goto out - } - - // Obvious overflow/underflow. - // These bounds are for 64-bit floats. - // Will have to change if we want to support 80-bit floats in the future. - if d.dp > 310 { - goto overflow - } - if d.dp < -330 { - // zero - mant = 0 - exp = flt.bias - goto out - } - - // Scale by powers of two until in range [0.5, 1.0) - exp = 0 - for d.dp > 0 { - var n int - if d.dp >= len(powtab) { - n = 27 - } else { - n = powtab[d.dp] - } - d.Shift(-n) - exp += n - } - for d.dp < 0 || d.dp == 0 && d.d[0] < '5' { - var n int - if -d.dp >= len(powtab) { - n = 27 - } else { - n = powtab[-d.dp] - } - d.Shift(n) - exp -= n - } - - // Our range is [0.5,1) but floating point range is [1,2). - exp-- - - // Minimum representable exponent is flt.bias+1. - // If the exponent is smaller, move it up and - // adjust d accordingly. - if exp < flt.bias+1 { - n := flt.bias + 1 - exp - d.Shift(-n) - exp += n - } - - if exp-flt.bias >= 1<>= 1 - exp++ - if exp-flt.bias >= 1<>float64info.mantbits != 0 { - return - } - f = float64(mantissa) - if neg { - f = -f - } - switch { - case exp == 0: - // an integer. - return f, true - // Exact integers are <= 10^15. - // Exact powers of ten are <= 10^22. - case exp > 0 && exp <= 15+22: // int * 10^k - // If exponent is big but number of digits is not, - // can move a few zeros into the integer part. - if exp > 22 { - f *= float64pow10[exp-22] - exp = 22 - } - if f > 1e15 || f < -1e15 { - // the exponent was really too large. - return - } - return f * float64pow10[exp], true - case exp < 0 && exp >= -22: // int / 10^k - return f / float64pow10[-exp], true - } - return -} - -// If possible to compute mantissa*10^exp to 32-bit float f exactly, -// entirely in floating-point math, do so, avoiding the machinery above. -func atof32exact(mantissa uint64, exp int, neg bool) (f float32, ok bool) { - if mantissa>>float32info.mantbits != 0 { - return - } - f = float32(mantissa) - if neg { - f = -f - } - switch { - case exp == 0: - return f, true - // Exact integers are <= 10^7. - // Exact powers of ten are <= 10^10. - case exp > 0 && exp <= 7+10: // int * 10^k - // If exponent is big but number of digits is not, - // can move a few zeros into the integer part. - if exp > 10 { - f *= float32pow10[exp-10] - exp = 10 - } - if f > 1e7 || f < -1e7 { - // the exponent was really too large. - return - } - return f * float32pow10[exp], true - case exp < 0 && exp >= -10: // int / 10^k - return f / float32pow10[-exp], true - } - return -} - -const fnParseFloat = "ParseFloat" - -func atof32(s []byte) (f float32, err error) { - if val, ok := special(s); ok { - return float32(val), nil - } - - if optimize { - // Parse mantissa and exponent. 
- mantissa, exp, neg, trunc, ok := readFloat(s) - if ok { - // Try pure floating-point arithmetic conversion. - if !trunc { - if f, ok := atof32exact(mantissa, exp, neg); ok { - return f, nil - } - } - // Try another fast path. - ext := new(extFloat) - if ok := ext.AssignDecimal(mantissa, exp, neg, trunc, &float32info); ok { - b, ovf := ext.floatBits(&float32info) - f = math.Float32frombits(uint32(b)) - if ovf { - err = rangeError(fnParseFloat, string(s)) - } - return f, err - } - } - } - var d decimal - if !d.set(s) { - return 0, syntaxError(fnParseFloat, string(s)) - } - b, ovf := d.floatBits(&float32info) - f = math.Float32frombits(uint32(b)) - if ovf { - err = rangeError(fnParseFloat, string(s)) - } - return f, err -} - -func atof64(s []byte) (f float64, err error) { - if val, ok := special(s); ok { - return val, nil - } - - if optimize { - // Parse mantissa and exponent. - mantissa, exp, neg, trunc, ok := readFloat(s) - if ok { - // Try pure floating-point arithmetic conversion. - if !trunc { - if f, ok := atof64exact(mantissa, exp, neg); ok { - return f, nil - } - } - // Try another fast path. - ext := new(extFloat) - if ok := ext.AssignDecimal(mantissa, exp, neg, trunc, &float64info); ok { - b, ovf := ext.floatBits(&float64info) - f = math.Float64frombits(b) - if ovf { - err = rangeError(fnParseFloat, string(s)) - } - return f, err - } - } - } - var d decimal - if !d.set(s) { - return 0, syntaxError(fnParseFloat, string(s)) - } - b, ovf := d.floatBits(&float64info) - f = math.Float64frombits(b) - if ovf { - err = rangeError(fnParseFloat, string(s)) - } - return f, err -} - -// ParseFloat converts the string s to a floating-point number -// with the precision specified by bitSize: 32 for float32, or 64 for float64. -// When bitSize=32, the result still has type float64, but it will be -// convertible to float32 without changing its value. -// -// If s is well-formed and near a valid floating point number, -// ParseFloat returns the nearest floating point number rounded -// using IEEE754 unbiased rounding. -// -// The errors that ParseFloat returns have concrete type *NumError -// and include err.Num = s. -// -// If s is not syntactically well-formed, ParseFloat returns err.Err = ErrSyntax. -// -// If s is syntactically well-formed but is more than 1/2 ULP -// away from the largest floating point number of the given size, -// ParseFloat returns f = ±Inf, err.Err = ErrRange. -func ParseFloat(s []byte, bitSize int) (f float64, err error) { - if bitSize == 32 { - f1, err1 := atof32(s) - return float64(f1), err1 - } - f1, err1 := atof64(s) - return f1, err1 -} - -// oroginal: strconv/decimal.go, but not exported, and needed for PareFloat. - -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Multiprecision decimal numbers. -// For floating-point formatting only; not general purpose. -// Only operations are assign and (binary) left/right shift. -// Can do binary floating point in multiprecision decimal precisely -// because 2 divides 10; cannot do decimal floating point -// in multiprecision binary precisely. 
- -type decimal struct { - d [800]byte // digits - nd int // number of digits used - dp int // decimal point - neg bool - trunc bool // discarded nonzero digits beyond d[:nd] -} - -func (a *decimal) String() string { - n := 10 + a.nd - if a.dp > 0 { - n += a.dp - } - if a.dp < 0 { - n += -a.dp - } - - buf := make([]byte, n) - w := 0 - switch { - case a.nd == 0: - return "0" - - case a.dp <= 0: - // zeros fill space between decimal point and digits - buf[w] = '0' - w++ - buf[w] = '.' - w++ - w += digitZero(buf[w : w+-a.dp]) - w += copy(buf[w:], a.d[0:a.nd]) - - case a.dp < a.nd: - // decimal point in middle of digits - w += copy(buf[w:], a.d[0:a.dp]) - buf[w] = '.' - w++ - w += copy(buf[w:], a.d[a.dp:a.nd]) - - default: - // zeros fill space between digits and decimal point - w += copy(buf[w:], a.d[0:a.nd]) - w += digitZero(buf[w : w+a.dp-a.nd]) - } - return string(buf[0:w]) -} - -func digitZero(dst []byte) int { - for i := range dst { - dst[i] = '0' - } - return len(dst) -} - -// trim trailing zeros from number. -// (They are meaningless; the decimal point is tracked -// independent of the number of digits.) -func trim(a *decimal) { - for a.nd > 0 && a.d[a.nd-1] == '0' { - a.nd-- - } - if a.nd == 0 { - a.dp = 0 - } -} - -// Assign v to a. -func (a *decimal) Assign(v uint64) { - var buf [24]byte - - // Write reversed decimal in buf. - n := 0 - for v > 0 { - v1 := v / 10 - v -= 10 * v1 - buf[n] = byte(v + '0') - n++ - v = v1 - } - - // Reverse again to produce forward decimal in a.d. - a.nd = 0 - for n--; n >= 0; n-- { - a.d[a.nd] = buf[n] - a.nd++ - } - a.dp = a.nd - trim(a) -} - -// Maximum shift that we can do in one pass without overflow. -// Signed int has 31 bits, and we have to be able to accommodate 9<>k == 0; r++ { - if r >= a.nd { - if n == 0 { - // a == 0; shouldn't get here, but handle anyway. - a.nd = 0 - return - } - for n>>k == 0 { - n = n * 10 - r++ - } - break - } - c := int(a.d[r]) - n = n*10 + c - '0' - } - a.dp -= r - 1 - - // Pick up a digit, put down a digit. - for ; r < a.nd; r++ { - c := int(a.d[r]) - dig := n >> k - n -= dig << k - a.d[w] = byte(dig + '0') - w++ - n = n*10 + c - '0' - } - - // Put down extra digits. - for n > 0 { - dig := n >> k - n -= dig << k - if w < len(a.d) { - a.d[w] = byte(dig + '0') - w++ - } else if dig > 0 { - a.trunc = true - } - n = n * 10 - } - - a.nd = w - trim(a) -} - -// Cheat sheet for left shift: table indexed by shift count giving -// number of new digits that will be introduced by that shift. -// -// For example, leftcheats[4] = {2, "625"}. That means that -// if we are shifting by 4 (multiplying by 16), it will add 2 digits -// when the string prefix is "625" through "999", and one fewer digit -// if the string prefix is "000" through "624". -// -// Credit for this trick goes to Ken. - -type leftCheat struct { - delta int // number of new digits - cutoff string // minus one digit if original < a. -} - -var leftcheats = []leftCheat{ - // Leading digits of 1/2^i = 5^i. - // 5^23 is not an exact 64-bit floating point number, - // so have to use bc for the math. 
- /* - seq 27 | sed 's/^/5^/' | bc | - awk 'BEGIN{ print "\tleftCheat{ 0, \"\" }," } - { - log2 = log(2)/log(10) - printf("\tleftCheat{ %d, \"%s\" },\t// * %d\n", - int(log2*NR+1), $0, 2**NR) - }' - */ - {0, ""}, - {1, "5"}, // * 2 - {1, "25"}, // * 4 - {1, "125"}, // * 8 - {2, "625"}, // * 16 - {2, "3125"}, // * 32 - {2, "15625"}, // * 64 - {3, "78125"}, // * 128 - {3, "390625"}, // * 256 - {3, "1953125"}, // * 512 - {4, "9765625"}, // * 1024 - {4, "48828125"}, // * 2048 - {4, "244140625"}, // * 4096 - {4, "1220703125"}, // * 8192 - {5, "6103515625"}, // * 16384 - {5, "30517578125"}, // * 32768 - {5, "152587890625"}, // * 65536 - {6, "762939453125"}, // * 131072 - {6, "3814697265625"}, // * 262144 - {6, "19073486328125"}, // * 524288 - {7, "95367431640625"}, // * 1048576 - {7, "476837158203125"}, // * 2097152 - {7, "2384185791015625"}, // * 4194304 - {7, "11920928955078125"}, // * 8388608 - {8, "59604644775390625"}, // * 16777216 - {8, "298023223876953125"}, // * 33554432 - {8, "1490116119384765625"}, // * 67108864 - {9, "7450580596923828125"}, // * 134217728 -} - -// Is the leading prefix of b lexicographically less than s? -func prefixIsLessThan(b []byte, s string) bool { - for i := 0; i < len(s); i++ { - if i >= len(b) { - return true - } - if b[i] != s[i] { - return b[i] < s[i] - } - } - return false -} - -// Binary shift left (/ 2) by k bits. k <= maxShift to avoid overflow. -func leftShift(a *decimal, k uint) { - delta := leftcheats[k].delta - if prefixIsLessThan(a.d[0:a.nd], leftcheats[k].cutoff) { - delta-- - } - - r := a.nd // read index - w := a.nd + delta // write index - n := 0 - - // Pick up a digit, put down a digit. - for r--; r >= 0; r-- { - n += (int(a.d[r]) - '0') << k - quo := n / 10 - rem := n - 10*quo - w-- - if w < len(a.d) { - a.d[w] = byte(rem + '0') - } else if rem != 0 { - a.trunc = true - } - n = quo - } - - // Put down extra digits. - for n > 0 { - quo := n / 10 - rem := n - 10*quo - w-- - if w < len(a.d) { - a.d[w] = byte(rem + '0') - } else if rem != 0 { - a.trunc = true - } - n = quo - } - - a.nd += delta - if a.nd >= len(a.d) { - a.nd = len(a.d) - } - a.dp += delta - trim(a) -} - -// Binary shift left (k > 0) or right (k < 0). -func (a *decimal) Shift(k int) { - switch { - case a.nd == 0: - // nothing to do: a == 0 - case k > 0: - for k > maxShift { - leftShift(a, maxShift) - k -= maxShift - } - leftShift(a, uint(k)) - case k < 0: - for k < -maxShift { - rightShift(a, maxShift) - k += maxShift - } - rightShift(a, uint(-k)) - } -} - -// If we chop a at nd digits, should we round up? -func shouldRoundUp(a *decimal, nd int) bool { - if nd < 0 || nd >= a.nd { - return false - } - if a.d[nd] == '5' && nd+1 == a.nd { // exactly halfway - round to even - // if we truncated, a little higher than what's recorded - always round up - if a.trunc { - return true - } - return nd > 0 && (a.d[nd-1]-'0')%2 != 0 - } - // not halfway - digit tells all - return a.d[nd] >= '5' -} - -// Round a to nd digits (or fewer). -// If nd is zero, it means we're rounding -// just to the left of the digits, as in -// 0.09 -> 0.1. -func (a *decimal) Round(nd int) { - if nd < 0 || nd >= a.nd { - return - } - if shouldRoundUp(a, nd) { - a.RoundUp(nd) - } else { - a.RoundDown(nd) - } -} - -// Round a down to nd digits (or fewer). -func (a *decimal) RoundDown(nd int) { - if nd < 0 || nd >= a.nd { - return - } - a.nd = nd - trim(a) -} - -// Round a up to nd digits (or fewer). 
-func (a *decimal) RoundUp(nd int) { - if nd < 0 || nd >= a.nd { - return - } - - // round up - for i := nd - 1; i >= 0; i-- { - c := a.d[i] - if c < '9' { // can stop after this digit - a.d[i]++ - a.nd = i + 1 - return - } - } - - // Number is all 9s. - // Change to single 1 with adjusted decimal point. - a.d[0] = '1' - a.nd = 1 - a.dp++ -} - -// Extract integer part, rounded appropriately. -// No guarantees about overflow. -func (a *decimal) RoundedInteger() uint64 { - if a.dp > 20 { - return 0xFFFFFFFFFFFFFFFF - } - var i int - n := uint64(0) - for i = 0; i < a.dp && i < a.nd; i++ { - n = n*10 + uint64(a.d[i]-'0') - } - for ; i < a.dp; i++ { - n *= 10 - } - if shouldRoundUp(a, a.dp) { - n++ - } - return n -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atoi.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atoi.go deleted file mode 100644 index 06eb2ec29f..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atoi.go +++ /dev/null @@ -1,213 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's strconv/atoi.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package internal - -import ( - "errors" - "strconv" -) - -// ErrRange indicates that a value is out of range for the target type. -var ErrRange = errors.New("value out of range") - -// ErrSyntax indicates that a value does not have the right syntax for the target type. -var ErrSyntax = errors.New("invalid syntax") - -// A NumError records a failed conversion. -type NumError struct { - Func string // the failing function (ParseBool, ParseInt, ParseUint, ParseFloat) - Num string // the input - Err error // the reason the conversion failed (ErrRange, ErrSyntax) -} - -func (e *NumError) Error() string { - return "strconv." + e.Func + ": " + "parsing " + strconv.Quote(e.Num) + ": " + e.Err.Error() -} - -func syntaxError(fn, str string) *NumError { - return &NumError{fn, str, ErrSyntax} -} - -func rangeError(fn, str string) *NumError { - return &NumError{fn, str, ErrRange} -} - -const intSize = 32 << uint(^uint(0)>>63) - -// IntSize is the size in bits of an int or uint value. -const IntSize = intSize - -// Return the first number n such that n*base >= 1<<64. -func cutoff64(base int) uint64 { - if base < 2 { - return 0 - } - return (1<<64-1)/uint64(base) + 1 -} - -// ParseUint is like ParseInt but for unsigned numbers, and oeprating on []byte -func ParseUint(s []byte, base int, bitSize int) (n uint64, err error) { - var cutoff, maxVal uint64 - - if bitSize == 0 { - bitSize = int(IntSize) - } - - s0 := s - switch { - case len(s) < 1: - err = ErrSyntax - goto Error - - case 2 <= base && base <= 36: - // valid base; nothing to do - - case base == 0: - // Look for octal, hex prefix. 
- switch { - case s[0] == '0' && len(s) > 1 && (s[1] == 'x' || s[1] == 'X'): - base = 16 - s = s[2:] - if len(s) < 1 { - err = ErrSyntax - goto Error - } - case s[0] == '0': - base = 8 - default: - base = 10 - } - - default: - err = errors.New("invalid base " + strconv.Itoa(base)) - goto Error - } - - n = 0 - cutoff = cutoff64(base) - maxVal = 1<= base { - n = 0 - err = ErrSyntax - goto Error - } - - if n >= cutoff { - // n*base overflows - n = 1<<64 - 1 - err = ErrRange - goto Error - } - n *= uint64(base) - - n1 := n + uint64(v) - if n1 < n || n1 > maxVal { - // n+v overflows - n = 1<<64 - 1 - err = ErrRange - goto Error - } - n = n1 - } - - return n, nil - -Error: - return n, &NumError{"ParseUint", string(s0), err} -} - -// ParseInt interprets a string s in the given base (2 to 36) and -// returns the corresponding value i. If base == 0, the base is -// implied by the string's prefix: base 16 for "0x", base 8 for -// "0", and base 10 otherwise. -// -// The bitSize argument specifies the integer type -// that the result must fit into. Bit sizes 0, 8, 16, 32, and 64 -// correspond to int, int8, int16, int32, and int64. -// -// The errors that ParseInt returns have concrete type *NumError -// and include err.Num = s. If s is empty or contains invalid -// digits, err.Err = ErrSyntax and the returned value is 0; -// if the value corresponding to s cannot be represented by a -// signed integer of the given size, err.Err = ErrRange and the -// returned value is the maximum magnitude integer of the -// appropriate bitSize and sign. -func ParseInt(s []byte, base int, bitSize int) (i int64, err error) { - const fnParseInt = "ParseInt" - - if bitSize == 0 { - bitSize = int(IntSize) - } - - // Empty string bad. - if len(s) == 0 { - return 0, syntaxError(fnParseInt, string(s)) - } - - // Pick off leading sign. - s0 := s - neg := false - if s[0] == '+' { - s = s[1:] - } else if s[0] == '-' { - neg = true - s = s[1:] - } - - // Convert unsigned and check range. - var un uint64 - un, err = ParseUint(s, base, bitSize) - if err != nil && err.(*NumError).Err != ErrRange { - err.(*NumError).Func = fnParseInt - err.(*NumError).Num = string(s0) - return 0, err - } - cutoff := uint64(1 << uint(bitSize-1)) - if !neg && un >= cutoff { - return int64(cutoff - 1), rangeError(fnParseInt, string(s0)) - } - if neg && un > cutoff { - return -int64(cutoff), rangeError(fnParseInt, string(s0)) - } - n := int64(un) - if neg { - n = -n - } - return n, nil -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/extfloat.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/extfloat.go deleted file mode 100644 index ab791085a4..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/extfloat.go +++ /dev/null @@ -1,668 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package internal - -// An extFloat represents an extended floating-point number, with more -// precision than a float64. It does not try to save bits: the -// number represented by the structure is mant*(2^exp), with a negative -// sign if neg is true. -type extFloat struct { - mant uint64 - exp int - neg bool -} - -// Powers of ten taken from double-conversion library. 
-// http://code.google.com/p/double-conversion/ -const ( - firstPowerOfTen = -348 - stepPowerOfTen = 8 -) - -var smallPowersOfTen = [...]extFloat{ - {1 << 63, -63, false}, // 1 - {0xa << 60, -60, false}, // 1e1 - {0x64 << 57, -57, false}, // 1e2 - {0x3e8 << 54, -54, false}, // 1e3 - {0x2710 << 50, -50, false}, // 1e4 - {0x186a0 << 47, -47, false}, // 1e5 - {0xf4240 << 44, -44, false}, // 1e6 - {0x989680 << 40, -40, false}, // 1e7 -} - -var powersOfTen = [...]extFloat{ - {0xfa8fd5a0081c0288, -1220, false}, // 10^-348 - {0xbaaee17fa23ebf76, -1193, false}, // 10^-340 - {0x8b16fb203055ac76, -1166, false}, // 10^-332 - {0xcf42894a5dce35ea, -1140, false}, // 10^-324 - {0x9a6bb0aa55653b2d, -1113, false}, // 10^-316 - {0xe61acf033d1a45df, -1087, false}, // 10^-308 - {0xab70fe17c79ac6ca, -1060, false}, // 10^-300 - {0xff77b1fcbebcdc4f, -1034, false}, // 10^-292 - {0xbe5691ef416bd60c, -1007, false}, // 10^-284 - {0x8dd01fad907ffc3c, -980, false}, // 10^-276 - {0xd3515c2831559a83, -954, false}, // 10^-268 - {0x9d71ac8fada6c9b5, -927, false}, // 10^-260 - {0xea9c227723ee8bcb, -901, false}, // 10^-252 - {0xaecc49914078536d, -874, false}, // 10^-244 - {0x823c12795db6ce57, -847, false}, // 10^-236 - {0xc21094364dfb5637, -821, false}, // 10^-228 - {0x9096ea6f3848984f, -794, false}, // 10^-220 - {0xd77485cb25823ac7, -768, false}, // 10^-212 - {0xa086cfcd97bf97f4, -741, false}, // 10^-204 - {0xef340a98172aace5, -715, false}, // 10^-196 - {0xb23867fb2a35b28e, -688, false}, // 10^-188 - {0x84c8d4dfd2c63f3b, -661, false}, // 10^-180 - {0xc5dd44271ad3cdba, -635, false}, // 10^-172 - {0x936b9fcebb25c996, -608, false}, // 10^-164 - {0xdbac6c247d62a584, -582, false}, // 10^-156 - {0xa3ab66580d5fdaf6, -555, false}, // 10^-148 - {0xf3e2f893dec3f126, -529, false}, // 10^-140 - {0xb5b5ada8aaff80b8, -502, false}, // 10^-132 - {0x87625f056c7c4a8b, -475, false}, // 10^-124 - {0xc9bcff6034c13053, -449, false}, // 10^-116 - {0x964e858c91ba2655, -422, false}, // 10^-108 - {0xdff9772470297ebd, -396, false}, // 10^-100 - {0xa6dfbd9fb8e5b88f, -369, false}, // 10^-92 - {0xf8a95fcf88747d94, -343, false}, // 10^-84 - {0xb94470938fa89bcf, -316, false}, // 10^-76 - {0x8a08f0f8bf0f156b, -289, false}, // 10^-68 - {0xcdb02555653131b6, -263, false}, // 10^-60 - {0x993fe2c6d07b7fac, -236, false}, // 10^-52 - {0xe45c10c42a2b3b06, -210, false}, // 10^-44 - {0xaa242499697392d3, -183, false}, // 10^-36 - {0xfd87b5f28300ca0e, -157, false}, // 10^-28 - {0xbce5086492111aeb, -130, false}, // 10^-20 - {0x8cbccc096f5088cc, -103, false}, // 10^-12 - {0xd1b71758e219652c, -77, false}, // 10^-4 - {0x9c40000000000000, -50, false}, // 10^4 - {0xe8d4a51000000000, -24, false}, // 10^12 - {0xad78ebc5ac620000, 3, false}, // 10^20 - {0x813f3978f8940984, 30, false}, // 10^28 - {0xc097ce7bc90715b3, 56, false}, // 10^36 - {0x8f7e32ce7bea5c70, 83, false}, // 10^44 - {0xd5d238a4abe98068, 109, false}, // 10^52 - {0x9f4f2726179a2245, 136, false}, // 10^60 - {0xed63a231d4c4fb27, 162, false}, // 10^68 - {0xb0de65388cc8ada8, 189, false}, // 10^76 - {0x83c7088e1aab65db, 216, false}, // 10^84 - {0xc45d1df942711d9a, 242, false}, // 10^92 - {0x924d692ca61be758, 269, false}, // 10^100 - {0xda01ee641a708dea, 295, false}, // 10^108 - {0xa26da3999aef774a, 322, false}, // 10^116 - {0xf209787bb47d6b85, 348, false}, // 10^124 - {0xb454e4a179dd1877, 375, false}, // 10^132 - {0x865b86925b9bc5c2, 402, false}, // 10^140 - {0xc83553c5c8965d3d, 428, false}, // 10^148 - {0x952ab45cfa97a0b3, 455, false}, // 10^156 - {0xde469fbd99a05fe3, 481, false}, // 10^164 - {0xa59bc234db398c25, 
508, false}, // 10^172 - {0xf6c69a72a3989f5c, 534, false}, // 10^180 - {0xb7dcbf5354e9bece, 561, false}, // 10^188 - {0x88fcf317f22241e2, 588, false}, // 10^196 - {0xcc20ce9bd35c78a5, 614, false}, // 10^204 - {0x98165af37b2153df, 641, false}, // 10^212 - {0xe2a0b5dc971f303a, 667, false}, // 10^220 - {0xa8d9d1535ce3b396, 694, false}, // 10^228 - {0xfb9b7cd9a4a7443c, 720, false}, // 10^236 - {0xbb764c4ca7a44410, 747, false}, // 10^244 - {0x8bab8eefb6409c1a, 774, false}, // 10^252 - {0xd01fef10a657842c, 800, false}, // 10^260 - {0x9b10a4e5e9913129, 827, false}, // 10^268 - {0xe7109bfba19c0c9d, 853, false}, // 10^276 - {0xac2820d9623bf429, 880, false}, // 10^284 - {0x80444b5e7aa7cf85, 907, false}, // 10^292 - {0xbf21e44003acdd2d, 933, false}, // 10^300 - {0x8e679c2f5e44ff8f, 960, false}, // 10^308 - {0xd433179d9c8cb841, 986, false}, // 10^316 - {0x9e19db92b4e31ba9, 1013, false}, // 10^324 - {0xeb96bf6ebadf77d9, 1039, false}, // 10^332 - {0xaf87023b9bf0ee6b, 1066, false}, // 10^340 -} - -// floatBits returns the bits of the float64 that best approximates -// the extFloat passed as receiver. Overflow is set to true if -// the resulting float64 is ±Inf. -func (f *extFloat) floatBits(flt *floatInfo) (bits uint64, overflow bool) { - f.Normalize() - - exp := f.exp + 63 - - // Exponent too small. - if exp < flt.bias+1 { - n := flt.bias + 1 - exp - f.mant >>= uint(n) - exp += n - } - - // Extract 1+flt.mantbits bits from the 64-bit mantissa. - mant := f.mant >> (63 - flt.mantbits) - if f.mant&(1<<(62-flt.mantbits)) != 0 { - // Round up. - mant += 1 - } - - // Rounding might have added a bit; shift down. - if mant == 2<>= 1 - exp++ - } - - // Infinities. - if exp-flt.bias >= 1<>uint(-f.exp))<>= uint(-f.exp) - f.exp = 0 - return *f, *f - } - expBiased := exp - flt.bias - - upper = extFloat{mant: 2*f.mant + 1, exp: f.exp - 1, neg: f.neg} - if mant != 1<>(64-32) == 0 { - mant <<= 32 - exp -= 32 - } - if mant>>(64-16) == 0 { - mant <<= 16 - exp -= 16 - } - if mant>>(64-8) == 0 { - mant <<= 8 - exp -= 8 - } - if mant>>(64-4) == 0 { - mant <<= 4 - exp -= 4 - } - if mant>>(64-2) == 0 { - mant <<= 2 - exp -= 2 - } - if mant>>(64-1) == 0 { - mant <<= 1 - exp -= 1 - } - shift = uint(f.exp - exp) - f.mant, f.exp = mant, exp - return -} - -// Multiply sets f to the product f*g: the result is correctly rounded, -// but not normalized. -func (f *extFloat) Multiply(g extFloat) { - fhi, flo := f.mant>>32, uint64(uint32(f.mant)) - ghi, glo := g.mant>>32, uint64(uint32(g.mant)) - - // Cross products. - cross1 := fhi * glo - cross2 := flo * ghi - - // f.mant*g.mant is fhi*ghi << 64 + (cross1+cross2) << 32 + flo*glo - f.mant = fhi*ghi + (cross1 >> 32) + (cross2 >> 32) - rem := uint64(uint32(cross1)) + uint64(uint32(cross2)) + ((flo * glo) >> 32) - // Round up. - rem += (1 << 31) - - f.mant += (rem >> 32) - f.exp = f.exp + g.exp + 64 -} - -var uint64pow10 = [...]uint64{ - 1, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, - 1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19, -} - -// AssignDecimal sets f to an approximate value mantissa*10^exp. It -// returns true if the value represented by f is guaranteed to be the -// best approximation of d after being rounded to a float64 or -// float32 depending on flt. -func (f *extFloat) AssignDecimal(mantissa uint64, exp10 int, neg bool, trunc bool, flt *floatInfo) (ok bool) { - const uint64digits = 19 - const errorscale = 8 - errors := 0 // An upper bound for error, computed in errorscale*ulp. - if trunc { - // the decimal number was truncated. 
- errors += errorscale / 2 - } - - f.mant = mantissa - f.exp = 0 - f.neg = neg - - // Multiply by powers of ten. - i := (exp10 - firstPowerOfTen) / stepPowerOfTen - if exp10 < firstPowerOfTen || i >= len(powersOfTen) { - return false - } - adjExp := (exp10 - firstPowerOfTen) % stepPowerOfTen - - // We multiply by exp%step - if adjExp < uint64digits && mantissa < uint64pow10[uint64digits-adjExp] { - // We can multiply the mantissa exactly. - f.mant *= uint64pow10[adjExp] - f.Normalize() - } else { - f.Normalize() - f.Multiply(smallPowersOfTen[adjExp]) - errors += errorscale / 2 - } - - // We multiply by 10 to the exp - exp%step. - f.Multiply(powersOfTen[i]) - if errors > 0 { - errors += 1 - } - errors += errorscale / 2 - - // Normalize - shift := f.Normalize() - errors <<= shift - - // Now f is a good approximation of the decimal. - // Check whether the error is too large: that is, if the mantissa - // is perturbated by the error, the resulting float64 will change. - // The 64 bits mantissa is 1 + 52 bits for float64 + 11 extra bits. - // - // In many cases the approximation will be good enough. - denormalExp := flt.bias - 63 - var extrabits uint - if f.exp <= denormalExp { - // f.mant * 2^f.exp is smaller than 2^(flt.bias+1). - extrabits = uint(63 - flt.mantbits + 1 + uint(denormalExp-f.exp)) - } else { - extrabits = uint(63 - flt.mantbits) - } - - halfway := uint64(1) << (extrabits - 1) - mant_extra := f.mant & (1< expMax: - i-- - default: - break Loop - } - } - // Apply the desired decimal shift on f. It will have exponent - // in the desired range. This is multiplication by 10^-exp10. - f.Multiply(powersOfTen[i]) - - return -(firstPowerOfTen + i*stepPowerOfTen), i -} - -// frexp10Many applies a common shift by a power of ten to a, b, c. -func frexp10Many(a, b, c *extFloat) (exp10 int) { - exp10, i := c.frexp10() - a.Multiply(powersOfTen[i]) - b.Multiply(powersOfTen[i]) - return -} - -// FixedDecimal stores in d the first n significant digits -// of the decimal representation of f. It returns false -// if it cannot be sure of the answer. -func (f *extFloat) FixedDecimal(d *decimalSlice, n int) bool { - if f.mant == 0 { - d.nd = 0 - d.dp = 0 - d.neg = f.neg - return true - } - if n == 0 { - panic("strconv: internal error: extFloat.FixedDecimal called with n == 0") - } - // Multiply by an appropriate power of ten to have a reasonable - // number to process. - f.Normalize() - exp10, _ := f.frexp10() - - shift := uint(-f.exp) - integer := uint32(f.mant >> shift) - fraction := f.mant - (uint64(integer) << shift) - ε := uint64(1) // ε is the uncertainty we have on the mantissa of f. - - // Write exactly n digits to d. - needed := n // how many digits are left to write. - integerDigits := 0 // the number of decimal digits of integer. - pow10 := uint64(1) // the power of ten by which f was scaled. - for i, pow := 0, uint64(1); i < 20; i++ { - if pow > uint64(integer) { - integerDigits = i - break - } - pow *= 10 - } - rest := integer - if integerDigits > needed { - // the integral part is already large, trim the last digits. - pow10 = uint64pow10[integerDigits-needed] - integer /= uint32(pow10) - rest -= integer * uint32(pow10) - } else { - rest = 0 - } - - // Write the digits of integer: the digits of rest are omitted. 
- var buf [32]byte - pos := len(buf) - for v := integer; v > 0; { - v1 := v / 10 - v -= 10 * v1 - pos-- - buf[pos] = byte(v + '0') - v = v1 - } - for i := pos; i < len(buf); i++ { - d.d[i-pos] = buf[i] - } - nd := len(buf) - pos - d.nd = nd - d.dp = integerDigits + exp10 - needed -= nd - - if needed > 0 { - if rest != 0 || pow10 != 1 { - panic("strconv: internal error, rest != 0 but needed > 0") - } - // Emit digits for the fractional part. Each time, 10*fraction - // fits in a uint64 without overflow. - for needed > 0 { - fraction *= 10 - ε *= 10 // the uncertainty scales as we multiply by ten. - if 2*ε > 1<> shift - d.d[nd] = byte(digit + '0') - fraction -= digit << shift - nd++ - needed-- - } - d.nd = nd - } - - // We have written a truncation of f (a numerator / 10^d.dp). The remaining part - // can be interpreted as a small number (< 1) to be added to the last digit of the - // numerator. - // - // If rest > 0, the amount is: - // (rest< 0 guarantees that pow10 << shift does not overflow a uint64. - // - // If rest = 0, pow10 == 1 and the amount is - // fraction / (1 << shift) - // fraction being known with a ±ε uncertainty. - // - // We pass this information to the rounding routine for adjustment. - - ok := adjustLastDigitFixed(d, uint64(rest)<= 0; i-- { - if d.d[i] != '0' { - d.nd = i + 1 - break - } - } - return true -} - -// adjustLastDigitFixed assumes d contains the representation of the integral part -// of some number, whose fractional part is num / (den << shift). The numerator -// num is only known up to an uncertainty of size ε, assumed to be less than -// (den << shift)/2. -// -// It will increase the last digit by one to account for correct rounding, typically -// when the fractional part is greater than 1/2, and will return false if ε is such -// that no correct answer can be given. -func adjustLastDigitFixed(d *decimalSlice, num, den uint64, shift uint, ε uint64) bool { - if num > den< den< den< (den< den<= 0; i-- { - if d.d[i] == '9' { - d.nd-- - } else { - break - } - } - if i < 0 { - d.d[0] = '1' - d.nd = 1 - d.dp++ - } else { - d.d[i]++ - } - return true - } - return false -} - -// ShortestDecimal stores in d the shortest decimal representation of f -// which belongs to the open interval (lower, upper), where f is supposed -// to lie. It returns false whenever the result is unsure. The implementation -// uses the Grisu3 algorithm. -func (f *extFloat) ShortestDecimal(d *decimalSlice, lower, upper *extFloat) bool { - if f.mant == 0 { - d.nd = 0 - d.dp = 0 - d.neg = f.neg - return true - } - if f.exp == 0 && *lower == *f && *lower == *upper { - // an exact integer. - var buf [24]byte - n := len(buf) - 1 - for v := f.mant; v > 0; { - v1 := v / 10 - v -= 10 * v1 - buf[n] = byte(v + '0') - n-- - v = v1 - } - nd := len(buf) - n - 1 - for i := 0; i < nd; i++ { - d.d[i] = buf[n+1+i] - } - d.nd, d.dp = nd, nd - for d.nd > 0 && d.d[d.nd-1] == '0' { - d.nd-- - } - if d.nd == 0 { - d.dp = 0 - } - d.neg = f.neg - return true - } - upper.Normalize() - // Uniformize exponents. - if f.exp > upper.exp { - f.mant <<= uint(f.exp - upper.exp) - f.exp = upper.exp - } - if lower.exp > upper.exp { - lower.mant <<= uint(lower.exp - upper.exp) - lower.exp = upper.exp - } - - exp10 := frexp10Many(lower, f, upper) - // Take a safety margin due to rounding in frexp10Many, but we lose precision. - upper.mant++ - lower.mant-- - - // The shortest representation of f is either rounded up or down, but - // in any case, it is a truncation of upper. 
- shift := uint(-upper.exp) - integer := uint32(upper.mant >> shift) - fraction := upper.mant - (uint64(integer) << shift) - - // How far we can go down from upper until the result is wrong. - allowance := upper.mant - lower.mant - // How far we should go to get a very precise result. - targetDiff := upper.mant - f.mant - - // Count integral digits: there are at most 10. - var integerDigits int - for i, pow := 0, uint64(1); i < 20; i++ { - if pow > uint64(integer) { - integerDigits = i - break - } - pow *= 10 - } - for i := 0; i < integerDigits; i++ { - pow := uint64pow10[integerDigits-i-1] - digit := integer / uint32(pow) - d.d[i] = byte(digit + '0') - integer -= digit * uint32(pow) - // evaluate whether we should stop. - if currentDiff := uint64(integer)<> shift) - d.d[d.nd] = byte(digit + '0') - d.nd++ - fraction -= uint64(digit) << shift - if fraction < allowance*multiplier { - // We are in the admissible range. Note that if allowance is about to - // overflow, that is, allowance > 2^64/10, the condition is automatically - // true due to the limited range of fraction. - return adjustLastDigit(d, - fraction, targetDiff*multiplier, allowance*multiplier, - 1< maxDiff-ulpBinary { - // we went too far - return false - } - if d.nd == 1 && d.d[0] == '0' { - // the number has actually reached zero. - d.nd = 0 - d.dp = 0 - } - return true -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/ftoa.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/ftoa.go deleted file mode 100644 index 253f83b45a..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/internal/ftoa.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Binary to decimal floating point conversion. -// Algorithm: -// 1) store mantissa in multiprecision decimal -// 2) shift decimal by exponent -// 3) read digits out & format - -package internal - -import "math" - -// TODO: move elsewhere? -type floatInfo struct { - mantbits uint - expbits uint - bias int -} - -var float32info = floatInfo{23, 8, -127} -var float64info = floatInfo{52, 11, -1023} - -// FormatFloat converts the floating-point number f to a string, -// according to the format fmt and precision prec. It rounds the -// result assuming that the original was obtained from a floating-point -// value of bitSize bits (32 for float32, 64 for float64). -// -// The format fmt is one of -// 'b' (-ddddp±ddd, a binary exponent), -// 'e' (-d.dddde±dd, a decimal exponent), -// 'E' (-d.ddddE±dd, a decimal exponent), -// 'f' (-ddd.dddd, no exponent), -// 'g' ('e' for large exponents, 'f' otherwise), or -// 'G' ('E' for large exponents, 'f' otherwise). -// -// The precision prec controls the number of digits -// (excluding the exponent) printed by the 'e', 'E', 'f', 'g', and 'G' formats. -// For 'e', 'E', and 'f' it is the number of digits after the decimal point. -// For 'g' and 'G' it is the total number of digits. -// The special precision -1 uses the smallest number of digits -// necessary such that ParseFloat will return f exactly. -func formatFloat(f float64, fmt byte, prec, bitSize int) string { - return string(genericFtoa(make([]byte, 0, max(prec+4, 24)), f, fmt, prec, bitSize)) -} - -// AppendFloat appends the string form of the floating-point number f, -// as generated by FormatFloat, to dst and returns the extended buffer. 
-func appendFloat(dst []byte, f float64, fmt byte, prec int, bitSize int) []byte { - return genericFtoa(dst, f, fmt, prec, bitSize) -} - -func genericFtoa(dst []byte, val float64, fmt byte, prec, bitSize int) []byte { - var bits uint64 - var flt *floatInfo - switch bitSize { - case 32: - bits = uint64(math.Float32bits(float32(val))) - flt = &float32info - case 64: - bits = math.Float64bits(val) - flt = &float64info - default: - panic("strconv: illegal AppendFloat/FormatFloat bitSize") - } - - neg := bits>>(flt.expbits+flt.mantbits) != 0 - exp := int(bits>>flt.mantbits) & (1< digs.nd && digs.nd >= digs.dp { - eprec = digs.nd - } - // %e is used if the exponent from the conversion - // is less than -4 or greater than or equal to the precision. - // if precision was the shortest possible, use precision 6 for this decision. - if shortest { - eprec = 6 - } - exp := digs.dp - 1 - if exp < -4 || exp >= eprec { - if prec > digs.nd { - prec = digs.nd - } - return fmtE(dst, neg, digs, prec-1, fmt+'e'-'g') - } - if prec > digs.dp { - prec = digs.nd - } - return fmtF(dst, neg, digs, max(prec-digs.dp, 0)) - } - - // unknown format - return append(dst, '%', fmt) -} - -// Round d (= mant * 2^exp) to the shortest number of digits -// that will let the original floating point value be precisely -// reconstructed. Size is original floating point size (64 or 32). -func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) { - // If mantissa is zero, the number is zero; stop now. - if mant == 0 { - d.nd = 0 - return - } - - // Compute upper and lower such that any decimal number - // between upper and lower (possibly inclusive) - // will round to the original floating point number. - - // We may see at once that the number is already shortest. - // - // Suppose d is not denormal, so that 2^exp <= d < 10^dp. - // The closest shorter number is at least 10^(dp-nd) away. - // The lower/upper bounds computed below are at distance - // at most 2^(exp-mantbits). - // - // So the number is already shortest if 10^(dp-nd) > 2^(exp-mantbits), - // or equivalently log2(10)*(dp-nd) > exp-mantbits. - // It is true if 332/100*(dp-nd) >= exp-mantbits (log2(10) > 3.32). - minexp := flt.bias + 1 // minimum possible exponent - if exp > minexp && 332*(d.dp-d.nd) >= 100*(exp-int(flt.mantbits)) { - // The number is already shortest. - return - } - - // d = mant << (exp - mantbits) - // Next highest floating point number is mant+1 << exp-mantbits. - // Our upper bound is halfway between, mant*2+1 << exp-mantbits-1. - upper := new(decimal) - upper.Assign(mant*2 + 1) - upper.Shift(exp - int(flt.mantbits) - 1) - - // d = mant << (exp - mantbits) - // Next lowest floating point number is mant-1 << exp-mantbits, - // unless mant-1 drops the significant bit and exp is not the minimum exp, - // in which case the next lowest is mant*2-1 << exp-mantbits-1. - // Either way, call it mantlo << explo-mantbits. - // Our lower bound is halfway between, mantlo*2+1 << explo-mantbits-1. 
- var mantlo uint64 - var explo int - if mant > 1< 0 { - dst = append(dst, '.') - i := 1 - m := d.nd + prec + 1 - max(d.nd, prec+1) - for i < m { - dst = append(dst, d.d[i]) - i++ - } - for i <= prec { - dst = append(dst, '0') - i++ - } - } - - // e± - dst = append(dst, fmt) - exp := d.dp - 1 - if d.nd == 0 { // special case: 0 has exponent 0 - exp = 0 - } - if exp < 0 { - ch = '-' - exp = -exp - } else { - ch = '+' - } - dst = append(dst, ch) - - // dddd - var buf [3]byte - i := len(buf) - for exp >= 10 { - i-- - buf[i] = byte(exp%10 + '0') - exp /= 10 - } - // exp < 10 - i-- - buf[i] = byte(exp + '0') - - switch i { - case 0: - dst = append(dst, buf[0], buf[1], buf[2]) - case 1: - dst = append(dst, buf[1], buf[2]) - case 2: - // leading zeroes - dst = append(dst, '0', buf[2]) - } - return dst -} - -// %f: -ddddddd.ddddd -func fmtF(dst []byte, neg bool, d decimalSlice, prec int) []byte { - // sign - if neg { - dst = append(dst, '-') - } - - // integer, padded with zeros as needed. - if d.dp > 0 { - var i int - for i = 0; i < d.dp && i < d.nd; i++ { - dst = append(dst, d.d[i]) - } - for ; i < d.dp; i++ { - dst = append(dst, '0') - } - } else { - dst = append(dst, '0') - } - - // fraction - if prec > 0 { - dst = append(dst, '.') - for i := 0; i < prec; i++ { - ch := byte('0') - if j := d.dp + i; 0 <= j && j < d.nd { - ch = d.d[j] - } - dst = append(dst, ch) - } - } - - return dst -} - -// %b: -ddddddddp+ddd -func fmtB(dst []byte, neg bool, mant uint64, exp int, flt *floatInfo) []byte { - var buf [50]byte - w := len(buf) - exp -= int(flt.mantbits) - esign := byte('+') - if exp < 0 { - esign = '-' - exp = -exp - } - n := 0 - for exp > 0 || n < 1 { - n++ - w-- - buf[w] = byte(exp%10 + '0') - exp /= 10 - } - w-- - buf[w] = esign - w-- - buf[w] = 'p' - n = 0 - for mant > 0 || n < 1 { - n++ - w-- - buf[w] = byte(mant%10 + '0') - mant /= 10 - } - if neg { - w-- - buf[w] = '-' - } - return append(dst, buf[w:]...) -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/iota.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/iota.go deleted file mode 100644 index 3e50f0c418..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/iota.go +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's strconv/iota.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package v1 - -import ( - "io" -) - -const ( - digits = "0123456789abcdefghijklmnopqrstuvwxyz" - digits01 = "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" - digits10 = "0000000000111111111122222222223333333333444444444455555555556666666666777777777788888888889999999999" -) - -var shifts = [len(digits) + 1]uint{ - 1 << 1: 1, - 1 << 2: 2, - 1 << 3: 3, - 1 << 4: 4, - 1 << 5: 5, -} - -var smallNumbers = [][]byte{ - []byte("0"), - []byte("1"), - []byte("2"), - []byte("3"), - []byte("4"), - []byte("5"), - []byte("6"), - []byte("7"), - []byte("8"), - []byte("9"), - []byte("10"), -} - -type FormatBitsWriter interface { - io.Writer - io.ByteWriter -} - -type FormatBitsScratch struct{} - -// -// DEPRECIATED: `scratch` is no longer used, FormatBits2 is available. -// -// FormatBits computes the string representation of u in the given base. -// If neg is set, u is treated as negative int64 value. If append_ is -// set, the string is appended to dst and the resulting byte slice is -// returned as the first result value; otherwise the string is returned -// as the second result value. -// -func FormatBits(scratch *FormatBitsScratch, dst FormatBitsWriter, u uint64, base int, neg bool) { - FormatBits2(dst, u, base, neg) -} - -// FormatBits2 computes the string representation of u in the given base. -// If neg is set, u is treated as negative int64 value. If append_ is -// set, the string is appended to dst and the resulting byte slice is -// returned as the first result value; otherwise the string is returned -// as the second result value. -// -func FormatBits2(dst FormatBitsWriter, u uint64, base int, neg bool) { - if base < 2 || base > len(digits) { - panic("strconv: illegal AppendInt/FormatInt base") - } - // fast path for small common numbers - if u <= 10 { - if neg { - dst.WriteByte('-') - } - dst.Write(smallNumbers[u]) - return - } - - // 2 <= base && base <= len(digits) - - var a = makeSlice(65) - // var a [64 + 1]byte // +1 for sign of 64bit value in base 2 - i := len(a) - - if neg { - u = -u - } - - // convert bits - if base == 10 { - // common case: use constants for / and % because - // the compiler can optimize it into a multiply+shift, - // and unroll loop - for u >= 100 { - i -= 2 - q := u / 100 - j := uintptr(u - q*100) - a[i+1] = digits01[j] - a[i+0] = digits10[j] - u = q - } - if u >= 10 { - i-- - q := u / 10 - a[i] = digits[uintptr(u-q*10)] - u = q - } - - } else if s := shifts[base]; s > 0 { - // base is power of 2: use shifts and masks instead of / and % - b := uint64(base) - m := uintptr(b) - 1 // == 1<= b { - i-- - a[i] = digits[uintptr(u)&m] - u >>= s - } - - } else { - // general case - b := uint64(base) - for u >= b { - i-- - a[i] = digits[uintptr(u%b)] - u /= b - } - } - - // u < base - i-- - a[i] = digits[uintptr(u)] - - // add sign, if any - if neg { - i-- - a[i] = '-' - } - - dst.Write(a[i:]) - - Pool(a) - - return -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/jsonstring.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/jsonstring.go deleted file mode 100644 index 513b45d570..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/jsonstring.go +++ /dev/null @@ -1,512 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's encoding/json/encode.go */ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package v1 - -import ( - "io" - "unicode/utf8" - "strconv" - "unicode/utf16" - "unicode" -) - -const hex = "0123456789abcdef" - -type JsonStringWriter interface { - io.Writer - io.ByteWriter - stringWriter -} - -func WriteJsonString(buf JsonStringWriter, s string) { - WriteJson(buf, []byte(s)) -} - -/** - * Function ported from encoding/json: func (e *encodeState) string(s string) (int, error) - */ -func WriteJson(buf JsonStringWriter, s []byte) { - buf.WriteByte('"') - start := 0 - for i := 0; i < len(s); { - if b := s[i]; b < utf8.RuneSelf { - /* - if 0x20 <= b && b != '\\' && b != '"' && b != '<' && b != '>' && b != '&' { - i++ - continue - } - */ - if lt[b] == true { - i++ - continue - } - - if start < i { - buf.Write(s[start:i]) - } - switch b { - case '\\', '"': - buf.WriteByte('\\') - buf.WriteByte(b) - case '\n': - buf.WriteByte('\\') - buf.WriteByte('n') - case '\r': - buf.WriteByte('\\') - buf.WriteByte('r') - default: - // This encodes bytes < 0x20 except for \n and \r, - // as well as < and >. The latter are escaped because they - // can lead to security holes when user-controlled strings - // are rendered into JSON and served to some browsers. - buf.WriteString(`\u00`) - buf.WriteByte(hex[b>>4]) - buf.WriteByte(hex[b&0xF]) - } - i++ - start = i - continue - } - c, size := utf8.DecodeRune(s[i:]) - if c == utf8.RuneError && size == 1 { - if start < i { - buf.Write(s[start:i]) - } - buf.WriteString(`\ufffd`) - i += size - start = i - continue - } - // U+2028 is LINE SEPARATOR. - // U+2029 is PARAGRAPH SEPARATOR. - // They are both technically valid characters in JSON strings, - // but don't work in JSONP, which has to be evaluated as JavaScript, - // and can lead to security holes there. It is valid JSON to - // escape them, so we do so unconditionally. - // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion. - if c == '\u2028' || c == '\u2029' { - if start < i { - buf.Write(s[start:i]) - } - buf.WriteString(`\u202`) - buf.WriteByte(hex[c&0xF]) - i += size - start = i - continue - } - i += size - } - if start < len(s) { - buf.Write(s[start:]) - } - buf.WriteByte('"') -} - -// UnquoteBytes will decode []byte containing json string to go string -// ported from encoding/json/decode.go -func UnquoteBytes(s []byte) (t []byte, ok bool) { - if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' { - return - } - s = s[1 : len(s)-1] - - // Check for unusual characters. If there are none, - // then no unquoting is needed, so return a slice of the - // original bytes. 
- r := 0 - for r < len(s) { - c := s[r] - if c == '\\' || c == '"' || c < ' ' { - break - } - if c < utf8.RuneSelf { - r++ - continue - } - rr, size := utf8.DecodeRune(s[r:]) - if rr == utf8.RuneError && size == 1 { - break - } - r += size - } - if r == len(s) { - return s, true - } - - b := make([]byte, len(s)+2*utf8.UTFMax) - w := copy(b, s[0:r]) - for r < len(s) { - // Out of room? Can only happen if s is full of - // malformed UTF-8 and we're replacing each - // byte with RuneError. - if w >= len(b)-2*utf8.UTFMax { - nb := make([]byte, (len(b)+utf8.UTFMax)*2) - copy(nb, b[0:w]) - b = nb - } - switch c := s[r]; { - case c == '\\': - r++ - if r >= len(s) { - return - } - switch s[r] { - default: - return - case '"', '\\', '/', '\'': - b[w] = s[r] - r++ - w++ - case 'b': - b[w] = '\b' - r++ - w++ - case 'f': - b[w] = '\f' - r++ - w++ - case 'n': - b[w] = '\n' - r++ - w++ - case 'r': - b[w] = '\r' - r++ - w++ - case 't': - b[w] = '\t' - r++ - w++ - case 'u': - r-- - rr := getu4(s[r:]) - if rr < 0 { - return - } - r += 6 - if utf16.IsSurrogate(rr) { - rr1 := getu4(s[r:]) - if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar { - // A valid pair; consume. - r += 6 - w += utf8.EncodeRune(b[w:], dec) - break - } - // Invalid surrogate; fall back to replacement rune. - rr = unicode.ReplacementChar - } - w += utf8.EncodeRune(b[w:], rr) - } - - // Quote, control characters are invalid. - case c == '"', c < ' ': - return - - // ASCII - case c < utf8.RuneSelf: - b[w] = c - r++ - w++ - - // Coerce to well-formed UTF-8. - default: - rr, size := utf8.DecodeRune(s[r:]) - r += size - w += utf8.EncodeRune(b[w:], rr) - } - } - return b[0:w], true -} - -// getu4 decodes \uXXXX from the beginning of s, returning the hex value, -// or it returns -1. -func getu4(s []byte) rune { - if len(s) < 6 || s[0] != '\\' || s[1] != 'u' { - return -1 - } - r, err := strconv.ParseUint(string(s[2:6]), 16, 64) - if err != nil { - return -1 - } - return rune(r) -} - -// TODO(pquerna): consider combining wibth the normal byte mask. 
-var lt [256]bool = [256]bool{ - false, /* 0 */ - false, /* 1 */ - false, /* 2 */ - false, /* 3 */ - false, /* 4 */ - false, /* 5 */ - false, /* 6 */ - false, /* 7 */ - false, /* 8 */ - false, /* 9 */ - false, /* 10 */ - false, /* 11 */ - false, /* 12 */ - false, /* 13 */ - false, /* 14 */ - false, /* 15 */ - false, /* 16 */ - false, /* 17 */ - false, /* 18 */ - false, /* 19 */ - false, /* 20 */ - false, /* 21 */ - false, /* 22 */ - false, /* 23 */ - false, /* 24 */ - false, /* 25 */ - false, /* 26 */ - false, /* 27 */ - false, /* 28 */ - false, /* 29 */ - false, /* 30 */ - false, /* 31 */ - true, /* 32 */ - true, /* 33 */ - false, /* 34 */ - true, /* 35 */ - true, /* 36 */ - true, /* 37 */ - false, /* 38 */ - true, /* 39 */ - true, /* 40 */ - true, /* 41 */ - true, /* 42 */ - true, /* 43 */ - true, /* 44 */ - true, /* 45 */ - true, /* 46 */ - true, /* 47 */ - true, /* 48 */ - true, /* 49 */ - true, /* 50 */ - true, /* 51 */ - true, /* 52 */ - true, /* 53 */ - true, /* 54 */ - true, /* 55 */ - true, /* 56 */ - true, /* 57 */ - true, /* 58 */ - true, /* 59 */ - false, /* 60 */ - true, /* 61 */ - false, /* 62 */ - true, /* 63 */ - true, /* 64 */ - true, /* 65 */ - true, /* 66 */ - true, /* 67 */ - true, /* 68 */ - true, /* 69 */ - true, /* 70 */ - true, /* 71 */ - true, /* 72 */ - true, /* 73 */ - true, /* 74 */ - true, /* 75 */ - true, /* 76 */ - true, /* 77 */ - true, /* 78 */ - true, /* 79 */ - true, /* 80 */ - true, /* 81 */ - true, /* 82 */ - true, /* 83 */ - true, /* 84 */ - true, /* 85 */ - true, /* 86 */ - true, /* 87 */ - true, /* 88 */ - true, /* 89 */ - true, /* 90 */ - true, /* 91 */ - false, /* 92 */ - true, /* 93 */ - true, /* 94 */ - true, /* 95 */ - true, /* 96 */ - true, /* 97 */ - true, /* 98 */ - true, /* 99 */ - true, /* 100 */ - true, /* 101 */ - true, /* 102 */ - true, /* 103 */ - true, /* 104 */ - true, /* 105 */ - true, /* 106 */ - true, /* 107 */ - true, /* 108 */ - true, /* 109 */ - true, /* 110 */ - true, /* 111 */ - true, /* 112 */ - true, /* 113 */ - true, /* 114 */ - true, /* 115 */ - true, /* 116 */ - true, /* 117 */ - true, /* 118 */ - true, /* 119 */ - true, /* 120 */ - true, /* 121 */ - true, /* 122 */ - true, /* 123 */ - true, /* 124 */ - true, /* 125 */ - true, /* 126 */ - true, /* 127 */ - true, /* 128 */ - true, /* 129 */ - true, /* 130 */ - true, /* 131 */ - true, /* 132 */ - true, /* 133 */ - true, /* 134 */ - true, /* 135 */ - true, /* 136 */ - true, /* 137 */ - true, /* 138 */ - true, /* 139 */ - true, /* 140 */ - true, /* 141 */ - true, /* 142 */ - true, /* 143 */ - true, /* 144 */ - true, /* 145 */ - true, /* 146 */ - true, /* 147 */ - true, /* 148 */ - true, /* 149 */ - true, /* 150 */ - true, /* 151 */ - true, /* 152 */ - true, /* 153 */ - true, /* 154 */ - true, /* 155 */ - true, /* 156 */ - true, /* 157 */ - true, /* 158 */ - true, /* 159 */ - true, /* 160 */ - true, /* 161 */ - true, /* 162 */ - true, /* 163 */ - true, /* 164 */ - true, /* 165 */ - true, /* 166 */ - true, /* 167 */ - true, /* 168 */ - true, /* 169 */ - true, /* 170 */ - true, /* 171 */ - true, /* 172 */ - true, /* 173 */ - true, /* 174 */ - true, /* 175 */ - true, /* 176 */ - true, /* 177 */ - true, /* 178 */ - true, /* 179 */ - true, /* 180 */ - true, /* 181 */ - true, /* 182 */ - true, /* 183 */ - true, /* 184 */ - true, /* 185 */ - true, /* 186 */ - true, /* 187 */ - true, /* 188 */ - true, /* 189 */ - true, /* 190 */ - true, /* 191 */ - true, /* 192 */ - true, /* 193 */ - true, /* 194 */ - true, /* 195 */ - true, /* 196 */ - true, /* 197 */ - true, /* 198 */ - true, /* 199 
*/ - true, /* 200 */ - true, /* 201 */ - true, /* 202 */ - true, /* 203 */ - true, /* 204 */ - true, /* 205 */ - true, /* 206 */ - true, /* 207 */ - true, /* 208 */ - true, /* 209 */ - true, /* 210 */ - true, /* 211 */ - true, /* 212 */ - true, /* 213 */ - true, /* 214 */ - true, /* 215 */ - true, /* 216 */ - true, /* 217 */ - true, /* 218 */ - true, /* 219 */ - true, /* 220 */ - true, /* 221 */ - true, /* 222 */ - true, /* 223 */ - true, /* 224 */ - true, /* 225 */ - true, /* 226 */ - true, /* 227 */ - true, /* 228 */ - true, /* 229 */ - true, /* 230 */ - true, /* 231 */ - true, /* 232 */ - true, /* 233 */ - true, /* 234 */ - true, /* 235 */ - true, /* 236 */ - true, /* 237 */ - true, /* 238 */ - true, /* 239 */ - true, /* 240 */ - true, /* 241 */ - true, /* 242 */ - true, /* 243 */ - true, /* 244 */ - true, /* 245 */ - true, /* 246 */ - true, /* 247 */ - true, /* 248 */ - true, /* 249 */ - true, /* 250 */ - true, /* 251 */ - true, /* 252 */ - true, /* 253 */ - true, /* 254 */ - true, /* 255 */ -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/lexer.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/lexer.go deleted file mode 100644 index 5589292ff2..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/lexer.go +++ /dev/null @@ -1,937 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on derived from yajl: */ -/* - * Copyright (c) 2007-2014, Lloyd Hilaiel - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- */ - -package v1 - -import ( - "errors" - "fmt" - "io" -) - -type FFParseState int - -const ( - FFParse_map_start FFParseState = iota - FFParse_want_key - FFParse_want_colon - FFParse_want_value - FFParse_after_value -) - -type FFTok int - -const ( - FFTok_init FFTok = iota - FFTok_bool FFTok = iota - FFTok_colon FFTok = iota - FFTok_comma FFTok = iota - FFTok_eof FFTok = iota - FFTok_error FFTok = iota - FFTok_left_brace FFTok = iota - FFTok_left_bracket FFTok = iota - FFTok_null FFTok = iota - FFTok_right_brace FFTok = iota - FFTok_right_bracket FFTok = iota - - /* we differentiate between integers and doubles to allow the - * parser to interpret the number without re-scanning */ - FFTok_integer FFTok = iota - FFTok_double FFTok = iota - - FFTok_string FFTok = iota - - /* comment tokens are not currently returned to the parser, ever */ - FFTok_comment FFTok = iota -) - -type FFErr int - -const ( - FFErr_e_ok FFErr = iota - FFErr_io FFErr = iota - FFErr_string_invalid_utf8 FFErr = iota - FFErr_string_invalid_escaped_char FFErr = iota - FFErr_string_invalid_json_char FFErr = iota - FFErr_string_invalid_hex_char FFErr = iota - FFErr_invalid_char FFErr = iota - FFErr_invalid_string FFErr = iota - FFErr_missing_integer_after_decimal FFErr = iota - FFErr_missing_integer_after_exponent FFErr = iota - FFErr_missing_integer_after_minus FFErr = iota - FFErr_unallowed_comment FFErr = iota - FFErr_incomplete_comment FFErr = iota - FFErr_unexpected_token_type FFErr = iota // TODO: improve this error -) - -type FFLexer struct { - reader *ffReader - Output DecodingBuffer - Token FFTok - Error FFErr - BigError error - // TODO: convert all of this to an interface - lastCurrentChar int - captureAll bool - buf Buffer -} - -func NewFFLexer(input []byte) *FFLexer { - fl := &FFLexer{ - Token: FFTok_init, - Error: FFErr_e_ok, - reader: newffReader(input), - Output: &Buffer{}, - } - // TODO: guess size? - //fl.Output.Grow(64) - return fl -} - -type LexerError struct { - offset int - line int - char int - err error -} - -// Reset the Lexer and add new input. 
-func (ffl *FFLexer) Reset(input []byte) { - ffl.Token = FFTok_init - ffl.Error = FFErr_e_ok - ffl.BigError = nil - ffl.reader.Reset(input) - ffl.lastCurrentChar = 0 - ffl.Output.Reset() -} - -func (le *LexerError) Error() string { - return fmt.Sprintf(`ffjson error: (%T)%s offset=%d line=%d char=%d`, - le.err, le.err.Error(), - le.offset, le.line, le.char) -} - -func (ffl *FFLexer) WrapErr(err error) error { - line, char := ffl.reader.PosWithLine() - // TOOD: calcualte lines/characters based on offset - return &LexerError{ - offset: ffl.reader.Pos(), - line: line, - char: char, - err: err, - } -} - -func (ffl *FFLexer) scanReadByte() (byte, error) { - var c byte - var err error - if ffl.captureAll { - c, err = ffl.reader.ReadByte() - } else { - c, err = ffl.reader.ReadByteNoWS() - } - - if err != nil { - ffl.Error = FFErr_io - ffl.BigError = err - return 0, err - } - - return c, nil -} - -func (ffl *FFLexer) readByte() (byte, error) { - - c, err := ffl.reader.ReadByte() - if err != nil { - ffl.Error = FFErr_io - ffl.BigError = err - return 0, err - } - - return c, nil -} - -func (ffl *FFLexer) unreadByte() { - ffl.reader.UnreadByte() -} - -func (ffl *FFLexer) wantBytes(want []byte, iftrue FFTok) FFTok { - startPos := ffl.reader.Pos() - for _, b := range want { - c, err := ffl.readByte() - - if err != nil { - return FFTok_error - } - - if c != b { - ffl.unreadByte() - // fmt.Printf("wanted bytes: %s\n", string(want)) - // TODO(pquerna): thsi is a bad error message - ffl.Error = FFErr_invalid_string - return FFTok_error - } - } - - endPos := ffl.reader.Pos() - ffl.Output.Write(ffl.reader.Slice(startPos, endPos)) - return iftrue -} - -func (ffl *FFLexer) lexComment() FFTok { - c, err := ffl.readByte() - if err != nil { - return FFTok_error - } - - if c == '/' { - // a // comment, scan until line ends. - for { - c, err := ffl.readByte() - if err != nil { - return FFTok_error - } - - if c == '\n' { - return FFTok_comment - } - } - } else if c == '*' { - // a /* */ comment, scan */ - for { - c, err := ffl.readByte() - if err != nil { - return FFTok_error - } - - if c == '*' { - c, err := ffl.readByte() - - if err != nil { - return FFTok_error - } - - if c == '/' { - return FFTok_comment - } - - ffl.Error = FFErr_incomplete_comment - return FFTok_error - } - } - } else { - ffl.Error = FFErr_incomplete_comment - return FFTok_error - } -} - -func (ffl *FFLexer) lexString() FFTok { - if ffl.captureAll { - ffl.buf.Reset() - err := ffl.reader.SliceString(&ffl.buf) - - if err != nil { - ffl.BigError = err - return FFTok_error - } - - WriteJson(ffl.Output, ffl.buf.Bytes()) - - return FFTok_string - } else { - err := ffl.reader.SliceString(ffl.Output) - - if err != nil { - ffl.BigError = err - return FFTok_error - } - - return FFTok_string - } -} - -func (ffl *FFLexer) lexNumber() FFTok { - var numRead int = 0 - tok := FFTok_integer - startPos := ffl.reader.Pos() - - c, err := ffl.readByte() - if err != nil { - return FFTok_error - } - - /* optional leading minus */ - if c == '-' { - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - - /* a single zero, or a series of integers */ - if c == '0' { - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } else if c >= '1' && c <= '9' { - for c >= '0' && c <= '9' { - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - } else { - ffl.unreadByte() - ffl.Error = FFErr_missing_integer_after_minus - return FFTok_error - } - - if c == '.' 
{ - numRead = 0 - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - - for c >= '0' && c <= '9' { - numRead++ - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - - if numRead == 0 { - ffl.unreadByte() - - ffl.Error = FFErr_missing_integer_after_decimal - return FFTok_error - } - - tok = FFTok_double - } - - /* optional exponent (indicates this is floating point) */ - if c == 'e' || c == 'E' { - numRead = 0 - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - - /* optional sign */ - if c == '+' || c == '-' { - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - - for c >= '0' && c <= '9' { - numRead++ - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - - if numRead == 0 { - ffl.Error = FFErr_missing_integer_after_exponent - return FFTok_error - } - - tok = FFTok_double - } - - ffl.unreadByte() - - endPos := ffl.reader.Pos() - ffl.Output.Write(ffl.reader.Slice(startPos, endPos)) - return tok -} - -var true_bytes = []byte{'r', 'u', 'e'} -var false_bytes = []byte{'a', 'l', 's', 'e'} -var null_bytes = []byte{'u', 'l', 'l'} - -func (ffl *FFLexer) Scan() FFTok { - tok := FFTok_error - if ffl.captureAll == false { - ffl.Output.Reset() - } - ffl.Token = FFTok_init - - for { - c, err := ffl.scanReadByte() - if err != nil { - if err == io.EOF { - return FFTok_eof - } else { - return FFTok_error - } - } - - switch c { - case '{': - tok = FFTok_left_bracket - if ffl.captureAll { - ffl.Output.WriteByte('{') - } - goto lexed - case '}': - tok = FFTok_right_bracket - if ffl.captureAll { - ffl.Output.WriteByte('}') - } - goto lexed - case '[': - tok = FFTok_left_brace - if ffl.captureAll { - ffl.Output.WriteByte('[') - } - goto lexed - case ']': - tok = FFTok_right_brace - if ffl.captureAll { - ffl.Output.WriteByte(']') - } - goto lexed - case ',': - tok = FFTok_comma - if ffl.captureAll { - ffl.Output.WriteByte(',') - } - goto lexed - case ':': - tok = FFTok_colon - if ffl.captureAll { - ffl.Output.WriteByte(':') - } - goto lexed - case '\t', '\n', '\v', '\f', '\r', ' ': - if ffl.captureAll { - ffl.Output.WriteByte(c) - } - case 't': - ffl.Output.WriteByte('t') - tok = ffl.wantBytes(true_bytes, FFTok_bool) - goto lexed - case 'f': - ffl.Output.WriteByte('f') - tok = ffl.wantBytes(false_bytes, FFTok_bool) - goto lexed - case 'n': - ffl.Output.WriteByte('n') - tok = ffl.wantBytes(null_bytes, FFTok_null) - goto lexed - case '"': - tok = ffl.lexString() - goto lexed - case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': - ffl.unreadByte() - tok = ffl.lexNumber() - goto lexed - case '/': - tok = ffl.lexComment() - goto lexed - default: - tok = FFTok_error - ffl.Error = FFErr_invalid_char - goto lexed - } - } - -lexed: - ffl.Token = tok - return tok -} - -func (ffl *FFLexer) scanField(start FFTok, capture bool) ([]byte, error) { - switch start { - case FFTok_left_brace, - FFTok_left_bracket: - { - end := FFTok_right_brace - if start == FFTok_left_bracket { - end = FFTok_right_bracket - if capture { - ffl.Output.WriteByte('{') - } - } else { - if capture { - ffl.Output.WriteByte('[') - } - } - - depth := 1 - if capture { - ffl.captureAll = true - } - // TODO: work. 
- scanloop: - for { - tok := ffl.Scan() - //fmt.Printf("capture-token: %v end: %v depth: %v\n", tok, end, depth) - switch tok { - case FFTok_eof: - return nil, errors.New("ffjson: unexpected EOF") - case FFTok_error: - if ffl.BigError != nil { - return nil, ffl.BigError - } - return nil, ffl.Error.ToError() - case end: - depth-- - if depth == 0 { - break scanloop - } - case start: - depth++ - } - } - - if capture { - ffl.captureAll = false - } - - if capture { - return ffl.Output.Bytes(), nil - } else { - return nil, nil - } - } - case FFTok_bool, - FFTok_integer, - FFTok_null, - FFTok_double: - // simple value, return it. - if capture { - return ffl.Output.Bytes(), nil - } else { - return nil, nil - } - - case FFTok_string: - //TODO(pquerna): so, other users expect this to be a quoted string :( - if capture { - ffl.buf.Reset() - WriteJson(&ffl.buf, ffl.Output.Bytes()) - return ffl.buf.Bytes(), nil - } else { - return nil, nil - } - } - - return nil, fmt.Errorf("ffjson: invalid capture type: %v", start) -} - -// Captures an entire field value, including recursive objects, -// and converts them to a []byte suitable to pass to a sub-object's -// UnmarshalJSON -func (ffl *FFLexer) CaptureField(start FFTok) ([]byte, error) { - return ffl.scanField(start, true) -} - -func (ffl *FFLexer) SkipField(start FFTok) error { - _, err := ffl.scanField(start, false) - return err -} - -// TODO(pquerna): return line number and offset. -func (err FFErr) ToError() error { - switch err { - case FFErr_e_ok: - return nil - case FFErr_io: - return errors.New("ffjson: IO error") - case FFErr_string_invalid_utf8: - return errors.New("ffjson: string with invalid UTF-8 sequence") - case FFErr_string_invalid_escaped_char: - return errors.New("ffjson: string with invalid escaped character") - case FFErr_string_invalid_json_char: - return errors.New("ffjson: string with invalid JSON character") - case FFErr_string_invalid_hex_char: - return errors.New("ffjson: string with invalid hex character") - case FFErr_invalid_char: - return errors.New("ffjson: invalid character") - case FFErr_invalid_string: - return errors.New("ffjson: invalid string") - case FFErr_missing_integer_after_decimal: - return errors.New("ffjson: missing integer after decimal") - case FFErr_missing_integer_after_exponent: - return errors.New("ffjson: missing integer after exponent") - case FFErr_missing_integer_after_minus: - return errors.New("ffjson: missing integer after minus") - case FFErr_unallowed_comment: - return errors.New("ffjson: unallowed comment") - case FFErr_incomplete_comment: - return errors.New("ffjson: incomplete comment") - case FFErr_unexpected_token_type: - return errors.New("ffjson: unexpected token sequence") - } - - panic(fmt.Sprintf("unknown error type: %v ", err)) -} - -func (state FFParseState) String() string { - switch state { - case FFParse_map_start: - return "map:start" - case FFParse_want_key: - return "want_key" - case FFParse_want_colon: - return "want_colon" - case FFParse_want_value: - return "want_value" - case FFParse_after_value: - return "after_value" - } - - panic(fmt.Sprintf("unknown parse state: %d", int(state))) -} - -func (tok FFTok) String() string { - switch tok { - case FFTok_init: - return "tok:init" - case FFTok_bool: - return "tok:bool" - case FFTok_colon: - return "tok:colon" - case FFTok_comma: - return "tok:comma" - case FFTok_eof: - return "tok:eof" - case FFTok_error: - return "tok:error" - case FFTok_left_brace: - return "tok:left_brace" - case FFTok_left_bracket: - return "tok:left_bracket" 
- case FFTok_null: - return "tok:null" - case FFTok_right_brace: - return "tok:right_brace" - case FFTok_right_bracket: - return "tok:right_bracket" - case FFTok_integer: - return "tok:integer" - case FFTok_double: - return "tok:double" - case FFTok_string: - return "tok:string" - case FFTok_comment: - return "comment" - } - - panic(fmt.Sprintf("unknown token: %d", int(tok))) -} - -/* a lookup table which lets us quickly determine three things: - * cVEC - valid escaped control char - * note. the solidus '/' may be escaped or not. - * cIJC - invalid json char - * cVHC - valid hex char - * cNFP - needs further processing (from a string scanning perspective) - * cNUC - needs utf8 checking when enabled (from a string scanning perspective) - */ - -const ( - cVEC int8 = 0x01 - cIJC int8 = 0x02 - cVHC int8 = 0x04 - cNFP int8 = 0x08 - cNUC int8 = 0x10 -) - -var byteLookupTable [256]int8 = [256]int8{ - cIJC, /* 0 */ - cIJC, /* 1 */ - cIJC, /* 2 */ - cIJC, /* 3 */ - cIJC, /* 4 */ - cIJC, /* 5 */ - cIJC, /* 6 */ - cIJC, /* 7 */ - cIJC, /* 8 */ - cIJC, /* 9 */ - cIJC, /* 10 */ - cIJC, /* 11 */ - cIJC, /* 12 */ - cIJC, /* 13 */ - cIJC, /* 14 */ - cIJC, /* 15 */ - cIJC, /* 16 */ - cIJC, /* 17 */ - cIJC, /* 18 */ - cIJC, /* 19 */ - cIJC, /* 20 */ - cIJC, /* 21 */ - cIJC, /* 22 */ - cIJC, /* 23 */ - cIJC, /* 24 */ - cIJC, /* 25 */ - cIJC, /* 26 */ - cIJC, /* 27 */ - cIJC, /* 28 */ - cIJC, /* 29 */ - cIJC, /* 30 */ - cIJC, /* 31 */ - 0, /* 32 */ - 0, /* 33 */ - cVEC | cIJC | cNFP, /* 34 */ - 0, /* 35 */ - 0, /* 36 */ - 0, /* 37 */ - 0, /* 38 */ - 0, /* 39 */ - 0, /* 40 */ - 0, /* 41 */ - 0, /* 42 */ - 0, /* 43 */ - 0, /* 44 */ - 0, /* 45 */ - 0, /* 46 */ - cVEC, /* 47 */ - cVHC, /* 48 */ - cVHC, /* 49 */ - cVHC, /* 50 */ - cVHC, /* 51 */ - cVHC, /* 52 */ - cVHC, /* 53 */ - cVHC, /* 54 */ - cVHC, /* 55 */ - cVHC, /* 56 */ - cVHC, /* 57 */ - 0, /* 58 */ - 0, /* 59 */ - 0, /* 60 */ - 0, /* 61 */ - 0, /* 62 */ - 0, /* 63 */ - 0, /* 64 */ - cVHC, /* 65 */ - cVHC, /* 66 */ - cVHC, /* 67 */ - cVHC, /* 68 */ - cVHC, /* 69 */ - cVHC, /* 70 */ - 0, /* 71 */ - 0, /* 72 */ - 0, /* 73 */ - 0, /* 74 */ - 0, /* 75 */ - 0, /* 76 */ - 0, /* 77 */ - 0, /* 78 */ - 0, /* 79 */ - 0, /* 80 */ - 0, /* 81 */ - 0, /* 82 */ - 0, /* 83 */ - 0, /* 84 */ - 0, /* 85 */ - 0, /* 86 */ - 0, /* 87 */ - 0, /* 88 */ - 0, /* 89 */ - 0, /* 90 */ - 0, /* 91 */ - cVEC | cIJC | cNFP, /* 92 */ - 0, /* 93 */ - 0, /* 94 */ - 0, /* 95 */ - 0, /* 96 */ - cVHC, /* 97 */ - cVEC | cVHC, /* 98 */ - cVHC, /* 99 */ - cVHC, /* 100 */ - cVHC, /* 101 */ - cVEC | cVHC, /* 102 */ - 0, /* 103 */ - 0, /* 104 */ - 0, /* 105 */ - 0, /* 106 */ - 0, /* 107 */ - 0, /* 108 */ - 0, /* 109 */ - cVEC, /* 110 */ - 0, /* 111 */ - 0, /* 112 */ - 0, /* 113 */ - cVEC, /* 114 */ - 0, /* 115 */ - cVEC, /* 116 */ - 0, /* 117 */ - 0, /* 118 */ - 0, /* 119 */ - 0, /* 120 */ - 0, /* 121 */ - 0, /* 122 */ - 0, /* 123 */ - 0, /* 124 */ - 0, /* 125 */ - 0, /* 126 */ - 0, /* 127 */ - cNUC, /* 128 */ - cNUC, /* 129 */ - cNUC, /* 130 */ - cNUC, /* 131 */ - cNUC, /* 132 */ - cNUC, /* 133 */ - cNUC, /* 134 */ - cNUC, /* 135 */ - cNUC, /* 136 */ - cNUC, /* 137 */ - cNUC, /* 138 */ - cNUC, /* 139 */ - cNUC, /* 140 */ - cNUC, /* 141 */ - cNUC, /* 142 */ - cNUC, /* 143 */ - cNUC, /* 144 */ - cNUC, /* 145 */ - cNUC, /* 146 */ - cNUC, /* 147 */ - cNUC, /* 148 */ - cNUC, /* 149 */ - cNUC, /* 150 */ - cNUC, /* 151 */ - cNUC, /* 152 */ - cNUC, /* 153 */ - cNUC, /* 154 */ - cNUC, /* 155 */ - cNUC, /* 156 */ - cNUC, /* 157 */ - cNUC, /* 158 */ - cNUC, /* 159 */ - cNUC, /* 160 */ - cNUC, /* 161 */ - 
cNUC, /* 162 */ - cNUC, /* 163 */ - cNUC, /* 164 */ - cNUC, /* 165 */ - cNUC, /* 166 */ - cNUC, /* 167 */ - cNUC, /* 168 */ - cNUC, /* 169 */ - cNUC, /* 170 */ - cNUC, /* 171 */ - cNUC, /* 172 */ - cNUC, /* 173 */ - cNUC, /* 174 */ - cNUC, /* 175 */ - cNUC, /* 176 */ - cNUC, /* 177 */ - cNUC, /* 178 */ - cNUC, /* 179 */ - cNUC, /* 180 */ - cNUC, /* 181 */ - cNUC, /* 182 */ - cNUC, /* 183 */ - cNUC, /* 184 */ - cNUC, /* 185 */ - cNUC, /* 186 */ - cNUC, /* 187 */ - cNUC, /* 188 */ - cNUC, /* 189 */ - cNUC, /* 190 */ - cNUC, /* 191 */ - cNUC, /* 192 */ - cNUC, /* 193 */ - cNUC, /* 194 */ - cNUC, /* 195 */ - cNUC, /* 196 */ - cNUC, /* 197 */ - cNUC, /* 198 */ - cNUC, /* 199 */ - cNUC, /* 200 */ - cNUC, /* 201 */ - cNUC, /* 202 */ - cNUC, /* 203 */ - cNUC, /* 204 */ - cNUC, /* 205 */ - cNUC, /* 206 */ - cNUC, /* 207 */ - cNUC, /* 208 */ - cNUC, /* 209 */ - cNUC, /* 210 */ - cNUC, /* 211 */ - cNUC, /* 212 */ - cNUC, /* 213 */ - cNUC, /* 214 */ - cNUC, /* 215 */ - cNUC, /* 216 */ - cNUC, /* 217 */ - cNUC, /* 218 */ - cNUC, /* 219 */ - cNUC, /* 220 */ - cNUC, /* 221 */ - cNUC, /* 222 */ - cNUC, /* 223 */ - cNUC, /* 224 */ - cNUC, /* 225 */ - cNUC, /* 226 */ - cNUC, /* 227 */ - cNUC, /* 228 */ - cNUC, /* 229 */ - cNUC, /* 230 */ - cNUC, /* 231 */ - cNUC, /* 232 */ - cNUC, /* 233 */ - cNUC, /* 234 */ - cNUC, /* 235 */ - cNUC, /* 236 */ - cNUC, /* 237 */ - cNUC, /* 238 */ - cNUC, /* 239 */ - cNUC, /* 240 */ - cNUC, /* 241 */ - cNUC, /* 242 */ - cNUC, /* 243 */ - cNUC, /* 244 */ - cNUC, /* 245 */ - cNUC, /* 246 */ - cNUC, /* 247 */ - cNUC, /* 248 */ - cNUC, /* 249 */ - cNUC, /* 250 */ - cNUC, /* 251 */ - cNUC, /* 252 */ - cNUC, /* 253 */ - cNUC, /* 254 */ - cNUC, /* 255 */ -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/reader.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/reader.go deleted file mode 100644 index 0f22c469d6..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/reader.go +++ /dev/null @@ -1,512 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package v1 - -import ( - "fmt" - "io" - "unicode" - "unicode/utf16" -) - -const sliceStringMask = cIJC | cNFP - -type ffReader struct { - s []byte - i int - l int -} - -func newffReader(d []byte) *ffReader { - return &ffReader{ - s: d, - i: 0, - l: len(d), - } -} - -func (r *ffReader) Slice(start, stop int) []byte { - return r.s[start:stop] -} - -func (r *ffReader) Pos() int { - return r.i -} - -// Reset the reader, and add new input. -func (r *ffReader) Reset(d []byte) { - r.s = d - r.i = 0 - r.l = len(d) -} - -// Calcuates the Position with line and line offset, -// because this isn't counted for performance reasons, -// it will iterate the buffer from the beginning, and should -// only be used in error-paths. 
-func (r *ffReader) PosWithLine() (int, int) { - currentLine := 1 - currentChar := 0 - - for i := 0; i < r.i; i++ { - c := r.s[i] - currentChar++ - if c == '\n' { - currentLine++ - currentChar = 0 - } - } - - return currentLine, currentChar -} - -func (r *ffReader) ReadByteNoWS() (byte, error) { - if r.i >= r.l { - return 0, io.EOF - } - - j := r.i - - for { - c := r.s[j] - j++ - - // inline whitespace parsing gives another ~8% performance boost - // for many kinds of nicely indented JSON. - // ... and using a [255]bool instead of multiple ifs, gives another 2% - /* - if c != '\t' && - c != '\n' && - c != '\v' && - c != '\f' && - c != '\r' && - c != ' ' { - r.i = j - return c, nil - } - */ - if whitespaceLookupTable[c] == false { - r.i = j - return c, nil - } - - if j >= r.l { - return 0, io.EOF - } - } -} - -func (r *ffReader) ReadByte() (byte, error) { - if r.i >= r.l { - return 0, io.EOF - } - - r.i++ - - return r.s[r.i-1], nil -} - -func (r *ffReader) UnreadByte() error { - if r.i <= 0 { - panic("ffReader.UnreadByte: at beginning of slice") - } - r.i-- - return nil -} - -func (r *ffReader) readU4(j int) (rune, error) { - - var u4 [4]byte - for i := 0; i < 4; i++ { - if j >= r.l { - return -1, io.EOF - } - c := r.s[j] - if byteLookupTable[c]&cVHC != 0 { - u4[i] = c - j++ - continue - } else { - // TODO(pquerna): handle errors better. layering violation. - return -1, fmt.Errorf("lex_string_invalid_hex_char: %v %v", c, string(u4[:])) - } - } - - // TODO(pquerna): utf16.IsSurrogate - rr, err := ParseUint(u4[:], 16, 64) - if err != nil { - return -1, err - } - return rune(rr), nil -} - -func (r *ffReader) handleEscaped(c byte, j int, out DecodingBuffer) (int, error) { - if j >= r.l { - return 0, io.EOF - } - - c = r.s[j] - j++ - - if c == 'u' { - ru, err := r.readU4(j) - if err != nil { - return 0, err - } - - if utf16.IsSurrogate(ru) { - ru2, err := r.readU4(j + 6) - if err != nil { - return 0, err - } - out.Write(r.s[r.i : j-2]) - r.i = j + 10 - j = r.i - rval := utf16.DecodeRune(ru, ru2) - if rval != unicode.ReplacementChar { - out.WriteRune(rval) - } else { - return 0, fmt.Errorf("lex_string_invalid_unicode_surrogate: %v %v", ru, ru2) - } - } else { - out.Write(r.s[r.i : j-2]) - r.i = j + 4 - j = r.i - out.WriteRune(ru) - } - return j, nil - } else if byteLookupTable[c]&cVEC == 0 { - return 0, fmt.Errorf("lex_string_invalid_escaped_char: %v", c) - } else { - out.Write(r.s[r.i : j-2]) - r.i = j - j = r.i - - switch c { - case '"': - out.WriteByte('"') - case '\\': - out.WriteByte('\\') - case '/': - out.WriteByte('/') - case 'b': - out.WriteByte('\b') - case 'f': - out.WriteByte('\f') - case 'n': - out.WriteByte('\n') - case 'r': - out.WriteByte('\r') - case 't': - out.WriteByte('\t') - } - } - - return j, nil -} - -func (r *ffReader) SliceString(out DecodingBuffer) error { - var c byte - // TODO(pquerna): string_with_escapes? de-escape here? - j := r.i - - for { - if j >= r.l { - return io.EOF - } - - j, c = scanString(r.s, j) - - if c == '"' { - if j != r.i { - out.Write(r.s[r.i : j-1]) - r.i = j - } - return nil - } else if c == '\\' { - var err error - j, err = r.handleEscaped(c, j, out) - if err != nil { - return err - } - } else if byteLookupTable[c]&cIJC != 0 { - return fmt.Errorf("lex_string_invalid_json_char: %v", c) - } - continue - } -} - -// TODO(pquerna): consider combining wibth the normal byte mask. 
-var whitespaceLookupTable [256]bool = [256]bool{ - false, /* 0 */ - false, /* 1 */ - false, /* 2 */ - false, /* 3 */ - false, /* 4 */ - false, /* 5 */ - false, /* 6 */ - false, /* 7 */ - false, /* 8 */ - true, /* 9 */ - true, /* 10 */ - true, /* 11 */ - true, /* 12 */ - true, /* 13 */ - false, /* 14 */ - false, /* 15 */ - false, /* 16 */ - false, /* 17 */ - false, /* 18 */ - false, /* 19 */ - false, /* 20 */ - false, /* 21 */ - false, /* 22 */ - false, /* 23 */ - false, /* 24 */ - false, /* 25 */ - false, /* 26 */ - false, /* 27 */ - false, /* 28 */ - false, /* 29 */ - false, /* 30 */ - false, /* 31 */ - true, /* 32 */ - false, /* 33 */ - false, /* 34 */ - false, /* 35 */ - false, /* 36 */ - false, /* 37 */ - false, /* 38 */ - false, /* 39 */ - false, /* 40 */ - false, /* 41 */ - false, /* 42 */ - false, /* 43 */ - false, /* 44 */ - false, /* 45 */ - false, /* 46 */ - false, /* 47 */ - false, /* 48 */ - false, /* 49 */ - false, /* 50 */ - false, /* 51 */ - false, /* 52 */ - false, /* 53 */ - false, /* 54 */ - false, /* 55 */ - false, /* 56 */ - false, /* 57 */ - false, /* 58 */ - false, /* 59 */ - false, /* 60 */ - false, /* 61 */ - false, /* 62 */ - false, /* 63 */ - false, /* 64 */ - false, /* 65 */ - false, /* 66 */ - false, /* 67 */ - false, /* 68 */ - false, /* 69 */ - false, /* 70 */ - false, /* 71 */ - false, /* 72 */ - false, /* 73 */ - false, /* 74 */ - false, /* 75 */ - false, /* 76 */ - false, /* 77 */ - false, /* 78 */ - false, /* 79 */ - false, /* 80 */ - false, /* 81 */ - false, /* 82 */ - false, /* 83 */ - false, /* 84 */ - false, /* 85 */ - false, /* 86 */ - false, /* 87 */ - false, /* 88 */ - false, /* 89 */ - false, /* 90 */ - false, /* 91 */ - false, /* 92 */ - false, /* 93 */ - false, /* 94 */ - false, /* 95 */ - false, /* 96 */ - false, /* 97 */ - false, /* 98 */ - false, /* 99 */ - false, /* 100 */ - false, /* 101 */ - false, /* 102 */ - false, /* 103 */ - false, /* 104 */ - false, /* 105 */ - false, /* 106 */ - false, /* 107 */ - false, /* 108 */ - false, /* 109 */ - false, /* 110 */ - false, /* 111 */ - false, /* 112 */ - false, /* 113 */ - false, /* 114 */ - false, /* 115 */ - false, /* 116 */ - false, /* 117 */ - false, /* 118 */ - false, /* 119 */ - false, /* 120 */ - false, /* 121 */ - false, /* 122 */ - false, /* 123 */ - false, /* 124 */ - false, /* 125 */ - false, /* 126 */ - false, /* 127 */ - false, /* 128 */ - false, /* 129 */ - false, /* 130 */ - false, /* 131 */ - false, /* 132 */ - false, /* 133 */ - false, /* 134 */ - false, /* 135 */ - false, /* 136 */ - false, /* 137 */ - false, /* 138 */ - false, /* 139 */ - false, /* 140 */ - false, /* 141 */ - false, /* 142 */ - false, /* 143 */ - false, /* 144 */ - false, /* 145 */ - false, /* 146 */ - false, /* 147 */ - false, /* 148 */ - false, /* 149 */ - false, /* 150 */ - false, /* 151 */ - false, /* 152 */ - false, /* 153 */ - false, /* 154 */ - false, /* 155 */ - false, /* 156 */ - false, /* 157 */ - false, /* 158 */ - false, /* 159 */ - false, /* 160 */ - false, /* 161 */ - false, /* 162 */ - false, /* 163 */ - false, /* 164 */ - false, /* 165 */ - false, /* 166 */ - false, /* 167 */ - false, /* 168 */ - false, /* 169 */ - false, /* 170 */ - false, /* 171 */ - false, /* 172 */ - false, /* 173 */ - false, /* 174 */ - false, /* 175 */ - false, /* 176 */ - false, /* 177 */ - false, /* 178 */ - false, /* 179 */ - false, /* 180 */ - false, /* 181 */ - false, /* 182 */ - false, /* 183 */ - false, /* 184 */ - false, /* 185 */ - false, /* 186 */ - false, /* 187 */ - false, /* 188 */ - false, /* 189 */ - false, 
/* 190 */ - false, /* 191 */ - false, /* 192 */ - false, /* 193 */ - false, /* 194 */ - false, /* 195 */ - false, /* 196 */ - false, /* 197 */ - false, /* 198 */ - false, /* 199 */ - false, /* 200 */ - false, /* 201 */ - false, /* 202 */ - false, /* 203 */ - false, /* 204 */ - false, /* 205 */ - false, /* 206 */ - false, /* 207 */ - false, /* 208 */ - false, /* 209 */ - false, /* 210 */ - false, /* 211 */ - false, /* 212 */ - false, /* 213 */ - false, /* 214 */ - false, /* 215 */ - false, /* 216 */ - false, /* 217 */ - false, /* 218 */ - false, /* 219 */ - false, /* 220 */ - false, /* 221 */ - false, /* 222 */ - false, /* 223 */ - false, /* 224 */ - false, /* 225 */ - false, /* 226 */ - false, /* 227 */ - false, /* 228 */ - false, /* 229 */ - false, /* 230 */ - false, /* 231 */ - false, /* 232 */ - false, /* 233 */ - false, /* 234 */ - false, /* 235 */ - false, /* 236 */ - false, /* 237 */ - false, /* 238 */ - false, /* 239 */ - false, /* 240 */ - false, /* 241 */ - false, /* 242 */ - false, /* 243 */ - false, /* 244 */ - false, /* 245 */ - false, /* 246 */ - false, /* 247 */ - false, /* 248 */ - false, /* 249 */ - false, /* 250 */ - false, /* 251 */ - false, /* 252 */ - false, /* 253 */ - false, /* 254 */ - false, /* 255 */ -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/reader_scan_generic.go b/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/reader_scan_generic.go deleted file mode 100644 index 47c2607708..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/fflib/v1/reader_scan_generic.go +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package v1 - -func scanString(s []byte, j int) (int, byte) { - for { - if j >= len(s) { - return j, 0 - } - - c := s[j] - j++ - if byteLookupTable[c]&sliceStringMask == 0 { - continue - } - - return j, c - } -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/generator/generator.go b/tests/tools/vendor/github.com/pquerna/ffjson/generator/generator.go deleted file mode 100644 index 1f50380bf8..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/generator/generator.go +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package generator - -import ( - "errors" - "fmt" - "os" -) - -func GenerateFiles(goCmd string, inputPath string, outputPath string, importName string, forceRegenerate bool, resetFields bool) error { - - if _, StatErr := os.Stat(outputPath); !os.IsNotExist(StatErr) { - inputFileInfo, inputFileErr := os.Stat(inputPath) - outputFileInfo, outputFileErr := os.Stat(outputPath) - - if nil == outputFileErr && nil == inputFileErr { - if !forceRegenerate && inputFileInfo.ModTime().Before(outputFileInfo.ModTime()) { - fmt.Println("File " + outputPath + " already exists.") - - return nil - } - } - } - - packageName, structs, err := ExtractStructs(inputPath) - if err != nil { - return err - } - - im := NewInceptionMain(goCmd, inputPath, outputPath, resetFields) - - err = im.Generate(packageName, structs, importName) - if err != nil { - return errors.New(fmt.Sprintf("error=%v path=%q", err, im.TempMainPath)) - } - - err = im.Run() - if err != nil { - return err - } - - return nil -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/generator/inceptionmain.go b/tests/tools/vendor/github.com/pquerna/ffjson/generator/inceptionmain.go deleted file mode 100644 index f5f140b51d..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/generator/inceptionmain.go +++ /dev/null @@ -1,251 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package generator - -import ( - "bytes" - "errors" - "fmt" - "go/format" - "io/ioutil" - "os" - "os/exec" - "path/filepath" - "strings" - "text/template" - - "github.com/pquerna/ffjson/shared" -) - -const inceptionMainTemplate = ` -// DO NOT EDIT! -// Code generated by ffjson -// DO NOT EDIT! - -package main - -import ( - "github.com/pquerna/ffjson/inception" - importedinceptionpackage "{{.ImportName}}" -) - -func main() { - i := ffjsoninception.NewInception("{{.InputPath}}", "{{.PackageName}}", "{{.OutputPath}}", {{.ResetFields}}) - i.AddMany(importedinceptionpackage.FFJSONExpose()) - i.Execute() -} -` - -const ffjsonExposeTemplate = ` -// Code generated by ffjson -// -// This should be automatically deleted by running 'ffjson', -// if leftover, please delete it. 
- -package {{.PackageName}} - -import ( - ffjsonshared "github.com/pquerna/ffjson/shared" -) - -func FFJSONExpose() []ffjsonshared.InceptionType { - rv := make([]ffjsonshared.InceptionType, 0) -{{range .StructNames}} - rv = append(rv, ffjsonshared.InceptionType{Obj: {{.Name}}{}, Options: ffjson{{printf "%#v" .Options}} } ) -{{end}} - return rv -} -` - -type structName struct { - Name string - Options shared.StructOptions -} - -type templateCtx struct { - StructNames []structName - ImportName string - PackageName string - InputPath string - OutputPath string - ResetFields bool -} - -type InceptionMain struct { - goCmd string - inputPath string - exposePath string - outputPath string - TempMainPath string - tempDir string - tempMain *os.File - tempExpose *os.File - resetFields bool -} - -func NewInceptionMain(goCmd string, inputPath string, outputPath string, resetFields bool) *InceptionMain { - exposePath := getExposePath(inputPath) - return &InceptionMain{ - goCmd: goCmd, - inputPath: inputPath, - outputPath: outputPath, - exposePath: exposePath, - resetFields: resetFields, - } -} - -func getImportName(inputPath string) (string, error) { - p, err := filepath.Abs(inputPath) - if err != nil { - return "", err - } - - dir := filepath.Dir(p) - gopaths := strings.Split(os.Getenv("GOPATH"), string(os.PathListSeparator)) - - for _, path := range gopaths { - gpath, err := filepath.Abs(path) - if err != nil { - continue - } - rel, err := filepath.Rel(filepath.ToSlash(gpath), dir) - if err != nil { - return "", err - } - - if len(rel) < 4 || rel[:4] != "src"+string(os.PathSeparator) { - continue - } - return rel[4:], nil - } - return "", errors.New(fmt.Sprintf("Could not find source directory: GOPATH=%q REL=%q", gopaths, dir)) - -} - -func getExposePath(inputPath string) string { - return inputPath[0:len(inputPath)-3] + "_ffjson_expose.go" -} - -func (im *InceptionMain) renderTpl(f *os.File, t *template.Template, tc *templateCtx) error { - buf := new(bytes.Buffer) - err := t.Execute(buf, tc) - if err != nil { - return err - } - formatted, err := format.Source(buf.Bytes()) - if err != nil { - return err - } - _, err = f.Write(formatted) - return err -} - -func (im *InceptionMain) Generate(packageName string, si []*StructInfo, importName string) error { - var err error - - if importName == "" { - importName, err = getImportName(im.inputPath) - if err != nil { - return err - } - } - - im.tempDir, err = ioutil.TempDir(filepath.Dir(im.inputPath), "ffjson-inception") - if err != nil { - return err - } - - importName = filepath.ToSlash(importName) - // for `go run` to work, we must have a file ending in ".go". 
- im.tempMain, err = TempFileWithPostfix(im.tempDir, "ffjson-inception", ".go") - if err != nil { - return err - } - - im.TempMainPath = im.tempMain.Name() - sn := make([]structName, len(si)) - for i, st := range si { - sn[i].Name = st.Name - sn[i].Options = st.Options - } - - tc := &templateCtx{ - ImportName: importName, - PackageName: packageName, - StructNames: sn, - InputPath: im.inputPath, - OutputPath: im.outputPath, - ResetFields: im.resetFields, - } - - t := template.Must(template.New("inception.go").Parse(inceptionMainTemplate)) - - err = im.renderTpl(im.tempMain, t, tc) - if err != nil { - return err - } - - im.tempExpose, err = os.Create(im.exposePath) - if err != nil { - return err - } - - t = template.Must(template.New("ffjson_expose.go").Parse(ffjsonExposeTemplate)) - - err = im.renderTpl(im.tempExpose, t, tc) - if err != nil { - return err - } - - return nil -} - -func (im *InceptionMain) Run() error { - var out bytes.Buffer - var errOut bytes.Buffer - - cmd := exec.Command(im.goCmd, "run", "-a", im.TempMainPath) - cmd.Stdout = &out - cmd.Stderr = &errOut - - err := cmd.Run() - - if err != nil { - return errors.New( - fmt.Sprintf("Go Run Failed for: %s\nSTDOUT:\n%s\nSTDERR:\n%s\n", - im.TempMainPath, - string(out.Bytes()), - string(errOut.Bytes()))) - } - - defer func() { - if im.tempExpose != nil { - im.tempExpose.Close() - } - - if im.tempMain != nil { - im.tempMain.Close() - } - - os.Remove(im.TempMainPath) - os.Remove(im.exposePath) - os.Remove(im.tempDir) - }() - - return nil -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/generator/parser.go b/tests/tools/vendor/github.com/pquerna/ffjson/generator/parser.go deleted file mode 100644 index 76458d6dcf..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/generator/parser.go +++ /dev/null @@ -1,142 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package generator - -import ( - "flag" - "fmt" - "github.com/pquerna/ffjson/shared" - "go/ast" - "go/doc" - "go/parser" - "go/token" - "regexp" - "strings" -) - -var noEncoder = flag.Bool("noencoder", false, "Do not generate encoder functions") -var noDecoder = flag.Bool("nodecoder", false, "Do not generate decoder functions") - -type StructField struct { - Name string -} - -type StructInfo struct { - Name string - Options shared.StructOptions -} - -func NewStructInfo(name string) *StructInfo { - return &StructInfo{ - Name: name, - Options: shared.StructOptions{ - SkipDecoder: *noDecoder, - SkipEncoder: *noEncoder, - }, - } -} - -var skipre = regexp.MustCompile("(.*)ffjson:(\\s*)((skip)|(ignore))(.*)") -var skipdec = regexp.MustCompile("(.*)ffjson:(\\s*)((skipdecoder)|(nodecoder))(.*)") -var skipenc = regexp.MustCompile("(.*)ffjson:(\\s*)((skipencoder)|(noencoder))(.*)") - -func shouldInclude(d *ast.Object) (bool, error) { - ts, ok := d.Decl.(*ast.TypeSpec) - if !ok { - return false, fmt.Errorf("Unknown type without TypeSec: %v", d) - } - - _, ok = ts.Type.(*ast.StructType) - if !ok { - ident, ok := ts.Type.(*ast.Ident) - if !ok || ident.Name == "" { - return false, nil - } - - // It must be in this package, and not a pointer alias - if strings.Contains(ident.Name, ".") || strings.Contains(ident.Name, "*") { - return false, nil - } - - // if Obj is nil, we have an external type or built-in. - if ident.Obj == nil || ident.Obj.Decl == nil { - return false, nil - } - return shouldInclude(ident.Obj) - } - return true, nil -} - -func ExtractStructs(inputPath string) (string, []*StructInfo, error) { - fset := token.NewFileSet() - - f, err := parser.ParseFile(fset, inputPath, nil, parser.ParseComments) - - if err != nil { - return "", nil, err - } - - packageName := f.Name.String() - structs := make(map[string]*StructInfo) - - for k, d := range f.Scope.Objects { - if d.Kind == ast.Typ { - incl, err := shouldInclude(d) - if err != nil { - return "", nil, err - } - if incl { - stobj := NewStructInfo(k) - - structs[k] = stobj - } - } - } - - files := map[string]*ast.File{ - inputPath: f, - } - - pkg, _ := ast.NewPackage(fset, files, nil, nil) - - d := doc.New(pkg, f.Name.String(), doc.AllDecls) - for _, t := range d.Types { - if skipre.MatchString(t.Doc) { - delete(structs, t.Name) - } else { - if skipdec.MatchString(t.Doc) { - s, ok := structs[t.Name] - if ok { - s.Options.SkipDecoder = true - } - } - if skipenc.MatchString(t.Doc) { - s, ok := structs[t.Name] - if ok { - s.Options.SkipEncoder = true - } - } - } - } - - rv := make([]*StructInfo, 0) - for _, v := range structs { - rv = append(rv, v) - } - return packageName, rv, nil -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/generator/tags.go b/tests/tools/vendor/github.com/pquerna/ffjson/generator/tags.go deleted file mode 100644 index d7fca952b9..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/generator/tags.go +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package generator - -import ( - "strings" -) - -// from: http://golang.org/src/pkg/encoding/json/tags.go - -// tagOptions is the string following a comma in a struct field's "json" -// tag, or the empty string. It does not include the leading comma. -type tagOptions string - -// parseTag splits a struct field's json tag into its name and -// comma-separated options. -func parseTag(tag string) (string, tagOptions) { - if idx := strings.Index(tag, ","); idx != -1 { - return tag[:idx], tagOptions(tag[idx+1:]) - } - return tag, tagOptions("") -} - -// Contains reports whether a comma-separated list of options -// contains a particular substr flag. substr must be surrounded by a -// string boundary or commas. -func (o tagOptions) Contains(optionName string) bool { - if len(o) == 0 { - return false - } - s := string(o) - for s != "" { - var next string - i := strings.Index(s, ",") - if i >= 0 { - s, next = s[:i], s[i+1:] - } - if s == optionName { - return true - } - s = next - } - return false -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/generator/tempfile.go b/tests/tools/vendor/github.com/pquerna/ffjson/generator/tempfile.go deleted file mode 100644 index 1d116ad468..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/generator/tempfile.go +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package generator - -import ( - "os" - "path/filepath" - "strconv" - "sync" - "time" -) - -// Random number state. -// We generate random temporary file names so that there's a good -// chance the file doesn't exist yet - keeps the number of tries in -// TempFile to a minimum. -var rand uint32 -var randmu sync.Mutex - -func reseed() uint32 { - return uint32(time.Now().UnixNano() + int64(os.Getpid())) -} - -func nextSuffix() string { - randmu.Lock() - r := rand - if r == 0 { - r = reseed() - } - r = r*1664525 + 1013904223 // constants from Numerical Recipes - rand = r - randmu.Unlock() - return strconv.Itoa(int(1e9 + r%1e9))[1:] -} - -// TempFile creates a new temporary file in the directory dir -// with a name beginning with prefix, opens the file for reading -// and writing, and returns the resulting *os.File. -// If dir is the empty string, TempFile uses the default directory -// for temporary files (see os.TempDir). -// Multiple programs calling TempFile simultaneously -// will not choose the same file. The caller can use f.Name() -// to find the pathname of the file. It is the caller's responsibility -// to remove the file when no longer needed. 
-func TempFileWithPostfix(dir, prefix string, postfix string) (f *os.File, err error) { - if dir == "" { - dir = os.TempDir() - } - - nconflict := 0 - for i := 0; i < 10000; i++ { - name := filepath.Join(dir, prefix+nextSuffix()+postfix) - f, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0600) - if os.IsExist(err) { - if nconflict++; nconflict > 10 { - rand = reseed() - } - continue - } - break - } - return -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/inception/decoder.go b/tests/tools/vendor/github.com/pquerna/ffjson/inception/decoder.go deleted file mode 100644 index 908347a325..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/inception/decoder.go +++ /dev/null @@ -1,323 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "fmt" - "reflect" - "strings" - - "github.com/pquerna/ffjson/shared" -) - -var validValues []string = []string{ - "FFTok_left_brace", - "FFTok_left_bracket", - "FFTok_integer", - "FFTok_double", - "FFTok_string", - "FFTok_bool", - "FFTok_null", -} - -func CreateUnmarshalJSON(ic *Inception, si *StructInfo) error { - out := "" - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - if len(si.Fields) > 0 { - ic.OutputImports[`"bytes"`] = true - } - ic.OutputImports[`"fmt"`] = true - - out += tplStr(decodeTpl["header"], header{ - IC: ic, - SI: si, - }) - - out += tplStr(decodeTpl["ujFunc"], ujFunc{ - SI: si, - IC: ic, - ValidValues: validValues, - ResetFields: ic.ResetFields, - }) - - ic.OutputFuncs = append(ic.OutputFuncs, out) - - return nil -} - -func handleField(ic *Inception, name string, typ reflect.Type, ptr bool, quoted bool) string { - return handleFieldAddr(ic, name, false, typ, ptr, quoted) -} - -func handleFieldAddr(ic *Inception, name string, takeAddr bool, typ reflect.Type, ptr bool, quoted bool) string { - out := fmt.Sprintf("/* handler: %s type=%v kind=%v quoted=%t*/\n", name, typ, typ.Kind(), quoted) - - umlx := typ.Implements(unmarshalFasterType) || typeInInception(ic, typ, shared.MustDecoder) - umlx = umlx || reflect.PtrTo(typ).Implements(unmarshalFasterType) - - umlstd := typ.Implements(unmarshalerType) || reflect.PtrTo(typ).Implements(unmarshalerType) - - out += tplStr(decodeTpl["handleUnmarshaler"], handleUnmarshaler{ - IC: ic, - Name: name, - Typ: typ, - Ptr: reflect.Ptr, - TakeAddr: takeAddr || ptr, - UnmarshalJSONFFLexer: umlx, - Unmarshaler: umlstd, - }) - - if umlx || umlstd { - return out - } - - // TODO(pquerna): generic handling of token type mismatching struct type - switch typ.Kind() { - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Int64: - - allowed := buildTokens(quoted, "FFTok_string", "FFTok_integer", "FFTok_null") - out += getAllowTokens(typ.Name(), allowed...) 
- - out += getNumberHandler(ic, name, takeAddr || ptr, typ, "ParseInt") - - case reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32, - reflect.Uint64: - - allowed := buildTokens(quoted, "FFTok_string", "FFTok_integer", "FFTok_null") - out += getAllowTokens(typ.Name(), allowed...) - - out += getNumberHandler(ic, name, takeAddr || ptr, typ, "ParseUint") - - case reflect.Float32, - reflect.Float64: - - allowed := buildTokens(quoted, "FFTok_string", "FFTok_double", "FFTok_integer", "FFTok_null") - out += getAllowTokens(typ.Name(), allowed...) - - out += getNumberHandler(ic, name, takeAddr || ptr, typ, "ParseFloat") - - case reflect.Bool: - ic.OutputImports[`"bytes"`] = true - ic.OutputImports[`"errors"`] = true - - allowed := buildTokens(quoted, "FFTok_string", "FFTok_bool", "FFTok_null") - out += getAllowTokens(typ.Name(), allowed...) - - out += tplStr(decodeTpl["handleBool"], handleBool{ - Name: name, - Typ: typ, - TakeAddr: takeAddr || ptr, - }) - - case reflect.Ptr: - out += tplStr(decodeTpl["handlePtr"], handlePtr{ - IC: ic, - Name: name, - Typ: typ, - Quoted: quoted, - }) - - case reflect.Array, - reflect.Slice: - out += getArrayHandler(ic, name, typ, ptr) - - case reflect.String: - // Is it a json.Number? - if typ.PkgPath() == "encoding/json" && typ.Name() == "Number" { - // Fall back to json package to rely on the valid number check. - // See: https://github.com/golang/go/blob/f05c3aa24d815cd3869153750c9875e35fc48a6e/src/encoding/json/decode.go#L897 - ic.OutputImports[`"encoding/json"`] = true - out += tplStr(decodeTpl["handleFallback"], handleFallback{ - Name: name, - Typ: typ, - Kind: typ.Kind(), - }) - } else { - out += tplStr(decodeTpl["handleString"], handleString{ - IC: ic, - Name: name, - Typ: typ, - TakeAddr: takeAddr || ptr, - Quoted: quoted, - }) - } - case reflect.Interface: - ic.OutputImports[`"encoding/json"`] = true - out += tplStr(decodeTpl["handleFallback"], handleFallback{ - Name: name, - Typ: typ, - Kind: typ.Kind(), - }) - case reflect.Map: - out += tplStr(decodeTpl["handleObject"], handleObject{ - IC: ic, - Name: name, - Typ: typ, - Ptr: reflect.Ptr, - TakeAddr: takeAddr || ptr, - }) - default: - ic.OutputImports[`"encoding/json"`] = true - out += tplStr(decodeTpl["handleFallback"], handleFallback{ - Name: name, - Typ: typ, - Kind: typ.Kind(), - }) - } - - return out -} - -func getArrayHandler(ic *Inception, name string, typ reflect.Type, ptr bool) string { - if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 { - ic.OutputImports[`"encoding/base64"`] = true - useReflectToSet := false - if typ.Elem().Name() != "byte" { - ic.OutputImports[`"reflect"`] = true - useReflectToSet = true - } - - return tplStr(decodeTpl["handleByteSlice"], handleArray{ - IC: ic, - Name: name, - Typ: typ, - Ptr: reflect.Ptr, - UseReflectToSet: useReflectToSet, - }) - } - - if typ.Elem().Kind() == reflect.Struct && typ.Elem().Name() != "" { - goto sliceOrArray - } - - if (typ.Elem().Kind() == reflect.Struct || typ.Elem().Kind() == reflect.Map) || - typ.Elem().Kind() == reflect.Array || typ.Elem().Kind() == reflect.Slice && - typ.Elem().Name() == "" { - ic.OutputImports[`"encoding/json"`] = true - - return tplStr(decodeTpl["handleFallback"], handleFallback{ - Name: name, - Typ: typ, - Kind: typ.Kind(), - }) - } - -sliceOrArray: - - if typ.Kind() == reflect.Array { - return tplStr(decodeTpl["handleArray"], handleArray{ - IC: ic, - Name: name, - Typ: typ, - IsPtr: ptr, - Ptr: reflect.Ptr, - }) - } - - return tplStr(decodeTpl["handleSlice"], handleArray{ - IC: ic, 
- Name: name, - Typ: typ, - IsPtr: ptr, - Ptr: reflect.Ptr, - }) -} - -func getAllowTokens(name string, tokens ...string) string { - return tplStr(decodeTpl["allowTokens"], allowTokens{ - Name: name, - Tokens: tokens, - }) -} - -func getNumberHandler(ic *Inception, name string, takeAddr bool, typ reflect.Type, parsefunc string) string { - return tplStr(decodeTpl["handlerNumeric"], handlerNumeric{ - IC: ic, - Name: name, - ParseFunc: parsefunc, - TakeAddr: takeAddr, - Typ: typ, - }) -} - -func getNumberSize(typ reflect.Type) string { - return fmt.Sprintf("%d", typ.Bits()) -} - -func getType(ic *Inception, name string, typ reflect.Type) string { - s := typ.Name() - - if typ.PkgPath() != "" && typ.PkgPath() != ic.PackagePath { - path := removeVendor(typ.PkgPath()) - ic.OutputImports[`"`+path+`"`] = true - s = typ.String() - } - - if s == "" { - return typ.String() - } - - return s -} - -// removeVendor removes everything before and including a '/vendor/' -// substring in the package path. -// This is needed becuase that full path can't be used in the -// import statement. -func removeVendor(path string) string { - i := strings.Index(path, "/vendor/") - if i == -1 { - return path - } - return path[i+8:] -} - -func buildTokens(containsOptional bool, optional string, required ...string) []string { - if containsOptional { - return append(required, optional) - } - - return required -} - -func unquoteField(quoted bool) string { - // The outer quote of a string is already stripped out by - // the lexer. We need to check if the inner string is also - // quoted. If so, we will decode it as json string. If decoding - // fails, we will use the original string - if quoted { - return ` - unquoted, ok := fflib.UnquoteBytes(outBuf) - if ok { - outBuf = unquoted - } - ` - } - return "" -} - -func getTmpVarFor(name string) string { - return "tmp" + strings.Replace(strings.Title(name), ".", "", -1) -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/inception/decoder_tpl.go b/tests/tools/vendor/github.com/pquerna/ffjson/inception/decoder_tpl.go deleted file mode 100644 index 0985061228..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/inception/decoder_tpl.go +++ /dev/null @@ -1,773 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package ffjsoninception - -import ( - "reflect" - "strconv" - "text/template" -) - -var decodeTpl map[string]*template.Template - -func init() { - decodeTpl = make(map[string]*template.Template) - - funcs := map[string]string{ - "handlerNumeric": handlerNumericTxt, - "allowTokens": allowTokensTxt, - "handleFallback": handleFallbackTxt, - "handleString": handleStringTxt, - "handleObject": handleObjectTxt, - "handleArray": handleArrayTxt, - "handleSlice": handleSliceTxt, - "handleByteSlice": handleByteSliceTxt, - "handleBool": handleBoolTxt, - "handlePtr": handlePtrTxt, - "header": headerTxt, - "ujFunc": ujFuncTxt, - "handleUnmarshaler": handleUnmarshalerTxt, - } - - tplFuncs := template.FuncMap{ - "getAllowTokens": getAllowTokens, - "getNumberSize": getNumberSize, - "getType": getType, - "handleField": handleField, - "handleFieldAddr": handleFieldAddr, - "unquoteField": unquoteField, - "getTmpVarFor": getTmpVarFor, - } - - for k, v := range funcs { - decodeTpl[k] = template.Must(template.New(k).Funcs(tplFuncs).Parse(v)) - } -} - -type handlerNumeric struct { - IC *Inception - Name string - ParseFunc string - Typ reflect.Type - TakeAddr bool -} - -var handlerNumericTxt = ` -{ - {{$ic := .IC}} - - if tok == fflib.FFTok_null { - {{if eq .TakeAddr true}} - {{.Name}} = nil - {{end}} - } else { - {{if eq .ParseFunc "ParseFloat" }} - tval, err := fflib.{{ .ParseFunc}}(fs.Output.Bytes(), {{getNumberSize .Typ}}) - {{else}} - tval, err := fflib.{{ .ParseFunc}}(fs.Output.Bytes(), 10, {{getNumberSize .Typ}}) - {{end}} - - if err != nil { - return fs.WrapErr(err) - } - {{if eq .TakeAddr true}} - ttypval := {{getType $ic .Name .Typ}}(tval) - {{.Name}} = &ttypval - {{else}} - {{.Name}} = {{getType $ic .Name .Typ}}(tval) - {{end}} - } -} -` - -type allowTokens struct { - Name string - Tokens []string -} - -var allowTokensTxt = ` -{ - if {{range $index, $element := .Tokens}}{{if ne $index 0 }}&&{{end}} tok != fflib.{{$element}}{{end}} { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for {{.Name}}", tok)) - } -} -` - -type handleFallback struct { - Name string - Typ reflect.Type - Kind reflect.Kind -} - -var handleFallbackTxt = ` -{ - /* Falling back. 
type={{printf "%v" .Typ}} kind={{printf "%v" .Kind}} */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &{{.Name}}) - if err != nil { - return fs.WrapErr(err) - } -} -` - -type handleString struct { - IC *Inception - Name string - Typ reflect.Type - TakeAddr bool - Quoted bool -} - -var handleStringTxt = ` -{ - {{$ic := .IC}} - - {{getAllowTokens .Typ.Name "FFTok_string" "FFTok_null"}} - if tok == fflib.FFTok_null { - {{if eq .TakeAddr true}} - {{.Name}} = nil - {{end}} - } else { - {{if eq .TakeAddr true}} - var tval {{getType $ic .Name .Typ}} - outBuf := fs.Output.Bytes() - {{unquoteField .Quoted}} - tval = {{getType $ic .Name .Typ}}(string(outBuf)) - {{.Name}} = &tval - {{else}} - outBuf := fs.Output.Bytes() - {{unquoteField .Quoted}} - {{.Name}} = {{getType $ic .Name .Typ}}(string(outBuf)) - {{end}} - } -} -` - -type handleObject struct { - IC *Inception - Name string - Typ reflect.Type - Ptr reflect.Kind - TakeAddr bool -} - -var handleObjectTxt = ` -{ - {{$ic := .IC}} - {{getAllowTokens .Typ.Name "FFTok_left_bracket" "FFTok_null"}} - if tok == fflib.FFTok_null { - {{.Name}} = nil - } else { - - {{if eq .TakeAddr true}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{if eq .Typ.Key.Kind .Ptr }} - var tval = make(map[*{{getType $ic .Name .Typ.Key.Elem}}]*{{getType $ic .Name .Typ.Elem.Elem}}, 0) - {{else}} - var tval = make(map[{{getType $ic .Name .Typ.Key}}]*{{getType $ic .Name .Typ.Elem.Elem}}, 0) - {{end}} - {{else}} - {{if eq .Typ.Key.Kind .Ptr }} - var tval = make(map[*{{getType $ic .Name .Typ.Key.Elem}}]{{getType $ic .Name .Typ.Elem}}, 0) - {{else}} - var tval = make(map[{{getType $ic .Name .Typ.Key}}]{{getType $ic .Name .Typ.Elem}}, 0) - {{end}} - {{end}} - {{else}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{if eq .Typ.Key.Kind .Ptr }} - {{.Name}} = make(map[*{{getType $ic .Name .Typ.Key.Elem}}]*{{getType $ic .Name .Typ.Elem.Elem}}, 0) - {{else}} - {{.Name}} = make(map[{{getType $ic .Name .Typ.Key}}]*{{getType $ic .Name .Typ.Elem.Elem}}, 0) - {{end}} - {{else}} - {{if eq .Typ.Key.Kind .Ptr }} - {{.Name}} = make(map[*{{getType $ic .Name .Typ.Key.Elem}}]{{getType $ic .Name .Typ.Elem}}, 0) - {{else}} - {{.Name}} = make(map[{{getType $ic .Name .Typ.Key}}]{{getType $ic .Name .Typ.Elem}}, 0) - {{end}} - {{end}} - {{end}} - - wantVal := true - - for { - {{$keyPtr := false}} - {{if eq .Typ.Key.Kind .Ptr }} - {{$keyPtr := true}} - var k *{{getType $ic .Name .Typ.Key.Elem}} - {{else}} - var k {{getType $ic .Name .Typ.Key}} - {{end}} - - {{$valPtr := false}} - {{$tmpVar := getTmpVarFor .Name}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{$valPtr := true}} - var {{$tmpVar}} *{{getType $ic .Name .Typ.Elem.Elem}} - {{else}} - var {{$tmpVar}} {{getType $ic .Name .Typ.Elem}} - {{end}} - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - {{handleField .IC "k" .Typ.Key $keyPtr false}} - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - {{handleField .IC $tmpVar .Typ.Elem $valPtr false}} - - {{if eq .TakeAddr true}} - tval[k] = {{$tmpVar}} - {{else}} - {{.Name}}[k] = {{$tmpVar}} - {{end}} - wantVal = false - } - - {{if eq .TakeAddr true}} - {{.Name}} = &tval - {{end}} - } -} -` - -type handleArray struct { - IC *Inception - Name string - Typ reflect.Type - Ptr reflect.Kind - UseReflectToSet bool - IsPtr bool -} - -var handleArrayTxt = ` -{ - {{$ic := .IC}} - {{getAllowTokens .Typ.Name "FFTok_left_brace" "FFTok_null"}} - {{if eq .Typ.Elem.Kind .Ptr}} - {{.Name}} = [{{.Typ.Len}}]*{{getType $ic .Name .Typ.Elem.Elem}}{} - {{else}} - {{.Name}} = [{{.Typ.Len}}]{{getType $ic .Name .Typ.Elem}}{} - {{end}} - if tok != fflib.FFTok_null { - wantVal := true - - idx := 0 - for { - {{$ptr := false}} - {{$tmpVar := getTmpVarFor .Name}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{$ptr := true}} - var {{$tmpVar}} *{{getType $ic .Name .Typ.Elem.Elem}} - {{else}} - var {{$tmpVar}} {{getType $ic .Name .Typ.Elem}} - {{end}} - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - {{handleField .IC $tmpVar .Typ.Elem $ptr false}} - - // Standard json.Unmarshal ignores elements out of array bounds, - // that what we do as well. - if idx < {{.Typ.Len}} { - {{.Name}}[idx] = {{$tmpVar}} - idx++ - } - - wantVal = false - } - } -} -` - -var handleSliceTxt = ` -{ - {{$ic := .IC}} - {{getAllowTokens .Typ.Name "FFTok_left_brace" "FFTok_null"}} - if tok == fflib.FFTok_null { - {{.Name}} = nil - } else { - {{if eq .Typ.Elem.Kind .Ptr }} - {{if eq .IsPtr true}} - {{.Name}} = &[]*{{getType $ic .Name .Typ.Elem.Elem}}{} - {{else}} - {{.Name}} = []*{{getType $ic .Name .Typ.Elem.Elem}}{} - {{end}} - {{else}} - {{if eq .IsPtr true}} - {{.Name}} = &[]{{getType $ic .Name .Typ.Elem}}{} - {{else}} - {{.Name}} = []{{getType $ic .Name .Typ.Elem}}{} - {{end}} - {{end}} - - wantVal := true - - for { - {{$ptr := false}} - {{$tmpVar := getTmpVarFor .Name}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{$ptr := true}} - var {{$tmpVar}} *{{getType $ic .Name .Typ.Elem.Elem}} - {{else}} - var {{$tmpVar}} {{getType $ic .Name .Typ.Elem}} - {{end}} - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - {{handleField .IC $tmpVar .Typ.Elem $ptr false}} - {{if eq .IsPtr true}} - *{{.Name}} = append(*{{.Name}}, {{$tmpVar}}) - {{else}} - {{.Name}} = append({{.Name}}, {{$tmpVar}}) - {{end}} - wantVal = false - } - } -} -` - -var handleByteSliceTxt = ` -{ - {{getAllowTokens .Typ.Name "FFTok_string" "FFTok_null"}} - if tok == fflib.FFTok_null { - {{.Name}} = nil - } else { - b := make([]byte, base64.StdEncoding.DecodedLen(fs.Output.Len())) - n, err := base64.StdEncoding.Decode(b, fs.Output.Bytes()) - if err != nil { - return fs.WrapErr(err) - } - {{if eq .UseReflectToSet true}} - v := reflect.ValueOf(&{{.Name}}).Elem() - v.SetBytes(b[0:n]) - {{else}} - {{.Name}} = append([]byte(), b[0:n]...) - {{end}} - } -} -` - -type handleBool struct { - Name string - Typ reflect.Type - TakeAddr bool -} - -var handleBoolTxt = ` -{ - if tok == fflib.FFTok_null { - {{if eq .TakeAddr true}} - {{.Name}} = nil - {{end}} - } else { - tmpb := fs.Output.Bytes() - - {{if eq .TakeAddr true}} - var tval bool - {{end}} - - if bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 { - {{if eq .TakeAddr true}} - tval = true - {{else}} - {{.Name}} = true - {{end}} - } else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 { - {{if eq .TakeAddr true}} - tval = false - {{else}} - {{.Name}} = false - {{end}} - } else { - err = errors.New("unexpected bytes for true/false value") - return fs.WrapErr(err) - } - - {{if eq .TakeAddr true}} - {{.Name}} = &tval - {{end}} - } -} -` - -type handlePtr struct { - IC *Inception - Name string - Typ reflect.Type - Quoted bool -} - -var handlePtrTxt = ` -{ - {{$ic := .IC}} - - if tok == fflib.FFTok_null { - {{.Name}} = nil - } else { - if {{.Name}} == nil { - {{.Name}} = new({{getType $ic .Typ.Elem.Name .Typ.Elem}}) - } - - {{handleFieldAddr .IC .Name true .Typ.Elem false .Quoted}} - } -} -` - -type header struct { - IC *Inception - SI *StructInfo -} - -var headerTxt = ` -const ( - ffjt{{.SI.Name}}base = iota - ffjt{{.SI.Name}}nosuchkey - {{with $si := .SI}} - {{range $index, $field := $si.Fields}} - {{if ne $field.JsonName "-"}} - ffjt{{$si.Name}}{{$field.Name}} - {{end}} - {{end}} - {{end}} -) - -{{with $si := .SI}} - {{range $index, $field := $si.Fields}} - {{if ne $field.JsonName "-"}} -var ffjKey{{$si.Name}}{{$field.Name}} = []byte({{$field.JsonName}}) - {{end}} - {{end}} -{{end}} - -` - -type ujFunc struct { - IC *Inception - SI *StructInfo - ValidValues []string - ResetFields bool -} - -var ujFuncTxt = ` -{{$si := .SI}} -{{$ic := .IC}} - -// UnmarshalJSON umarshall json - template of ffjson -func (j *{{.SI.Name}}) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *{{.SI.Name}}) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjt{{.SI.Name}}base - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - - {{if eq .ResetFields true}} - {{range $index, $field := $si.Fields}} - var ffjSet{{$si.Name}}{{$field.Name}} = false - {{end}} - {{end}} - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = 
fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. - currentKey = ffjt{{.SI.Name}}nosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - {{range $byte, $fields := $si.FieldsByFirstByte}} - case '{{$byte}}': - {{range $index, $field := $fields}} - {{if ne $index 0 }}} else if {{else}}if {{end}} bytes.Equal(ffjKey{{$si.Name}}{{$field.Name}}, kn) { - currentKey = ffjt{{$si.Name}}{{$field.Name}} - state = fflib.FFParse_want_colon - goto mainparse - {{end}} } - {{end}} - } - {{range $index, $field := $si.ReverseFields}} - if {{$field.FoldFuncName}}(ffjKey{{$si.Name}}{{$field.Name}}, kn) { - currentKey = ffjt{{$si.Name}}{{$field.Name}} - state = fflib.FFParse_want_colon - goto mainparse - } - {{end}} - currentKey = ffjt{{.SI.Name}}nosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if {{range $index, $v := .ValidValues}}{{if ne $index 0 }}||{{end}}tok == fflib.{{$v}}{{end}} { - switch currentKey { - {{range $index, $field := $si.Fields}} - case ffjt{{$si.Name}}{{$field.Name}}: - goto handle_{{$field.Name}} - {{end}} - case ffjt{{$si.Name}}nosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } -{{range $index, $field := $si.Fields}} -handle_{{$field.Name}}: - {{with $fieldName := $field.Name | printf "j.%s"}} - {{handleField $ic $fieldName $field.Typ $field.Pointer $field.ForceString}} - {{if eq $.ResetFields true}} - ffjSet{{$si.Name}}{{$field.Name}} = true - {{end}} - state = fflib.FFParse_after_value - goto mainparse - {{end}} -{{end}} - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: -{{if eq .ResetFields true}} -{{range $index, $field := $si.Fields}} - if !ffjSet{{$si.Name}}{{$field.Name}} { - {{with $fieldName := $field.Name | printf "j.%s"}} - {{if eq $field.Pointer true}} - {{$fieldName}} = nil - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Interface), 10) + `}} - {{$fieldName}} = nil - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Slice), 10) + `}} - {{$fieldName}} = nil - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Array), 10) + `}} - {{$fieldName}} = [{{$field.Typ.Len}}]{{getType $ic $fieldName $field.Typ.Elem}}{} - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Map), 10) + 
`}} - {{$fieldName}} = nil - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Bool), 10) + `}} - {{$fieldName}} = false - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.String), 10) + `}} - {{$fieldName}} = "" - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Struct), 10) + `}} - {{$fieldName}} = {{getType $ic $fieldName $field.Typ}}{} - {{else}} - {{$fieldName}} = {{getType $ic $fieldName $field.Typ}}(0) - {{end}} - {{end}} - } -{{end}} -{{end}} - return nil -} -` - -type handleUnmarshaler struct { - IC *Inception - Name string - Typ reflect.Type - Ptr reflect.Kind - TakeAddr bool - UnmarshalJSONFFLexer bool - Unmarshaler bool -} - -var handleUnmarshalerTxt = ` - {{$ic := .IC}} - - {{if eq .UnmarshalJSONFFLexer true}} - { - if tok == fflib.FFTok_null { - {{if eq .Typ.Kind .Ptr }} - {{.Name}} = nil - {{end}} - {{if eq .TakeAddr true }} - {{.Name}} = nil - {{end}} - } else { - {{if eq .Typ.Kind .Ptr }} - if {{.Name}} == nil { - {{.Name}} = new({{getType $ic .Typ.Elem.Name .Typ.Elem}}) - } - {{end}} - {{if eq .TakeAddr true }} - if {{.Name}} == nil { - {{.Name}} = new({{getType $ic .Typ.Name .Typ}}) - } - {{end}} - err = {{.Name}}.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key) - if err != nil { - return err - } - } - state = fflib.FFParse_after_value - } - {{else}} - {{if eq .Unmarshaler true}} - { - if tok == fflib.FFTok_null { - {{if eq .TakeAddr true }} - {{.Name}} = nil - {{end}} - } else { - - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - {{if eq .TakeAddr true }} - if {{.Name}} == nil { - {{.Name}} = new({{getType $ic .Typ.Name .Typ}}) - } - {{end}} - err = {{.Name}}.UnmarshalJSON(tbuf) - if err != nil { - return fs.WrapErr(err) - } - } - state = fflib.FFParse_after_value - } - {{end}} - {{end}} -` diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/inception/encoder.go b/tests/tools/vendor/github.com/pquerna/ffjson/inception/encoder.go deleted file mode 100644 index 3e37a2814a..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/inception/encoder.go +++ /dev/null @@ -1,544 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "fmt" - "reflect" - - "github.com/pquerna/ffjson/shared" -) - -func typeInInception(ic *Inception, typ reflect.Type, f shared.Feature) bool { - for _, v := range ic.objs { - if v.Typ == typ { - return v.Options.HasFeature(f) - } - if typ.Kind() == reflect.Ptr { - if v.Typ == typ.Elem() { - return v.Options.HasFeature(f) - } - } - } - - return false -} - -func getOmitEmpty(ic *Inception, sf *StructField) string { - ptname := "j." 
+ sf.Name - if sf.Pointer { - ptname = "*" + ptname - return "if true {\n" - } - switch sf.Typ.Kind() { - - case reflect.Array, reflect.Map, reflect.Slice, reflect.String: - return "if len(" + ptname + ") != 0 {" + "\n" - - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Int64, - reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32, - reflect.Uint64, - reflect.Uintptr, - reflect.Float32, - reflect.Float64: - return "if " + ptname + " != 0 {" + "\n" - - case reflect.Bool: - return "if " + ptname + " != false {" + "\n" - - case reflect.Interface, reflect.Ptr: - return "if " + ptname + " != nil {" + "\n" - - default: - // TODO(pquerna): fix types - return "if true {" + "\n" - } -} - -func getMapValue(ic *Inception, name string, typ reflect.Type, ptr bool, forceString bool) string { - var out = "" - - if typ.Key().Kind() != reflect.String { - out += fmt.Sprintf("/* Falling back. type=%v kind=%v */\n", typ, typ.Kind()) - out += ic.q.Flush() - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - return out - } - - var elemKind reflect.Kind - elemKind = typ.Elem().Kind() - - switch elemKind { - case reflect.String, - reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, - reflect.Float32, - reflect.Float64, - reflect.Bool: - - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - - out += "if " + name + " == nil {" + "\n" - ic.q.Write("null") - out += ic.q.GetQueued() - ic.q.DeleteLast() - out += "} else {" + "\n" - out += ic.q.WriteFlush("{ ") - out += " for key, value := range " + name + " {" + "\n" - out += " fflib.WriteJsonString(buf, key)" + "\n" - out += " buf.WriteString(`:`)" + "\n" - out += getGetInnerValue(ic, "value", typ.Elem(), false, forceString) - out += " buf.WriteByte(',')" + "\n" - out += " }" + "\n" - out += "buf.Rewind(1)" + "\n" - out += ic.q.WriteFlush("}") - out += "}" + "\n" - - default: - out += ic.q.Flush() - out += fmt.Sprintf("/* Falling back. 
type=%v kind=%v */\n", typ, typ.Kind()) - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - } - return out -} - -func getGetInnerValue(ic *Inception, name string, typ reflect.Type, ptr bool, forceString bool) string { - var out = "" - - // Flush if not bool or maps - if typ.Kind() != reflect.Bool && typ.Kind() != reflect.Map && typ.Kind() != reflect.Struct { - out += ic.q.Flush() - } - - if typ.Implements(marshalerFasterType) || - reflect.PtrTo(typ).Implements(marshalerFasterType) || - typeInInception(ic, typ, shared.MustEncoder) || - typ.Implements(marshalerType) || - reflect.PtrTo(typ).Implements(marshalerType) { - - out += ic.q.Flush() - out += tplStr(encodeTpl["handleMarshaler"], handleMarshaler{ - IC: ic, - Name: name, - Typ: typ, - Ptr: reflect.Ptr, - MarshalJSONBuf: typ.Implements(marshalerFasterType) || reflect.PtrTo(typ).Implements(marshalerFasterType) || typeInInception(ic, typ, shared.MustEncoder), - Marshaler: typ.Implements(marshalerType) || reflect.PtrTo(typ).Implements(marshalerType), - }) - return out - } - - ptname := name - if ptr { - ptname = "*" + name - } - - switch typ.Kind() { - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Int64: - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - out += "fflib.FormatBits2(buf, uint64(" + ptname + "), 10, " + ptname + " < 0)" + "\n" - case reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32, - reflect.Uint64, - reflect.Uintptr: - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - out += "fflib.FormatBits2(buf, uint64(" + ptname + "), 10, false)" + "\n" - case reflect.Float32: - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - out += "fflib.AppendFloat(buf, float64(" + ptname + "), 'g', -1, 32)" + "\n" - case reflect.Float64: - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - out += "fflib.AppendFloat(buf, float64(" + ptname + "), 'g', -1, 64)" + "\n" - case reflect.Array, - reflect.Slice: - - // Arrays cannot be nil - if typ.Kind() != reflect.Array { - out += "if " + name + "!= nil {" + "\n" - } - // Array and slice values encode as JSON arrays, except that - // []byte encodes as a base64-encoded string, and a nil slice - // encodes as the null JSON object. - if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 { - ic.OutputImports[`"encoding/base64"`] = true - - out += "buf.WriteString(`\"`)" + "\n" - out += `{` + "\n" - out += `enc := base64.NewEncoder(base64.StdEncoding, buf)` + "\n" - if typ.Elem().Name() != "byte" { - ic.OutputImports[`"reflect"`] = true - out += `enc.Write(reflect.Indirect(reflect.ValueOf(` + ptname + `)).Bytes())` + "\n" - - } else { - out += `enc.Write(` + ptname + `)` + "\n" - } - out += `enc.Close()` + "\n" - out += `}` + "\n" - out += "buf.WriteString(`\"`)" + "\n" - } else { - out += "buf.WriteString(`[`)" + "\n" - out += "for i, v := range " + ptname + "{" + "\n" - out += "if i != 0 {" + "\n" - out += "buf.WriteString(`,`)" + "\n" - out += "}" + "\n" - out += getGetInnerValue(ic, "v", typ.Elem(), false, false) - out += "}" + "\n" - out += "buf.WriteString(`]`)" + "\n" - } - if typ.Kind() != reflect.Array { - out += "} else {" + "\n" - out += "buf.WriteString(`null`)" + "\n" - out += "}" + "\n" - } - case reflect.String: - // Is it a json.Number? 
- if typ.PkgPath() == "encoding/json" && typ.Name() == "Number" { - // Fall back to json package to rely on the valid number check. - // See: https://github.com/golang/go/blob/92cd6e3af9f423ab4d8ac78f24e7fd81c31a8ce6/src/encoding/json/encode.go#L550 - out += fmt.Sprintf("/* json.Number */\n") - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - } else { - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - if forceString { - // Forcestring on strings does double-escaping of the entire value. - // We create a temporary buffer, encode to that an re-encode it. - out += "{" + "\n" - out += "tmpbuf := fflib.Buffer{}" + "\n" - out += "tmpbuf.Grow(len(" + ptname + ") + 16)" + "\n" - out += "fflib.WriteJsonString(&tmpbuf, string(" + ptname + "))" + "\n" - out += "fflib.WriteJsonString(buf, string( tmpbuf.Bytes() " + `))` + "\n" - out += "}" + "\n" - } else { - out += "fflib.WriteJsonString(buf, string(" + ptname + "))" + "\n" - } - } - case reflect.Ptr: - out += "if " + name + "!= nil {" + "\n" - switch typ.Elem().Kind() { - case reflect.Struct: - out += getGetInnerValue(ic, name, typ.Elem(), false, false) - default: - out += getGetInnerValue(ic, "*"+name, typ.Elem(), false, false) - } - out += "} else {" + "\n" - out += "buf.WriteString(`null`)" + "\n" - out += "}" + "\n" - case reflect.Bool: - out += "if " + ptname + " {" + "\n" - ic.q.Write("true") - out += ic.q.GetQueued() - out += "} else {" + "\n" - // Delete 'true' - ic.q.DeleteLast() - out += ic.q.WriteFlush("false") - out += "}" + "\n" - case reflect.Interface: - out += fmt.Sprintf("/* Interface types must use runtime reflection. type=%v kind=%v */\n", typ, typ.Kind()) - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - case reflect.Map: - out += getMapValue(ic, ptname, typ, ptr, forceString) - case reflect.Struct: - if typ.Name() == "" { - ic.q.Write("{") - ic.q.Write(" ") - out += fmt.Sprintf("/* Inline struct. type=%v kind=%v */\n", typ, typ.Kind()) - newV := reflect.Indirect(reflect.New(typ)).Interface() - fields := extractFields(newV) - - // Output all fields - for _, field := range fields { - // Adjust field name - field.Name = name + "." + field.Name - out += getField(ic, field, "") - } - - if lastConditional(fields) { - out += ic.q.Flush() - out += `buf.Rewind(1)` + "\n" - } else { - ic.q.DeleteLast() - } - out += ic.q.WriteFlush("}") - } else { - out += fmt.Sprintf("/* Struct fall back. type=%v kind=%v */\n", typ, typ.Kind()) - out += ic.q.Flush() - if ptr { - out += "err = buf.Encode(" + name + ")" + "\n" - } else { - // We send pointer to avoid copying entire struct - out += "err = buf.Encode(&" + name + ")" + "\n" - } - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - } - default: - out += fmt.Sprintf("/* Falling back. 
type=%v kind=%v */\n", typ, typ.Kind()) - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - } - - return out -} - -func getValue(ic *Inception, sf *StructField, prefix string) string { - closequote := false - if sf.ForceString { - switch sf.Typ.Kind() { - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Int64, - reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32, - reflect.Uint64, - reflect.Uintptr, - reflect.Float32, - reflect.Float64, - reflect.Bool: - ic.q.Write(`"`) - closequote = true - } - } - out := getGetInnerValue(ic, prefix+sf.Name, sf.Typ, sf.Pointer, sf.ForceString) - if closequote { - if sf.Pointer { - out += ic.q.WriteFlush(`"`) - } else { - ic.q.Write(`"`) - } - } - - return out -} - -func p2(v uint32) uint32 { - v-- - v |= v >> 1 - v |= v >> 2 - v |= v >> 4 - v |= v >> 8 - v |= v >> 16 - v++ - return v -} - -func getTypeSize(t reflect.Type) uint32 { - switch t.Kind() { - case reflect.String: - // TODO: consider runtime analysis. - return 32 - case reflect.Array, reflect.Map, reflect.Slice: - // TODO: consider runtime analysis. - return 4 * getTypeSize(t.Elem()) - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32: - return 8 - case reflect.Int64, - reflect.Uint64, - reflect.Uintptr: - return 16 - case reflect.Float32, - reflect.Float64: - return 16 - case reflect.Bool: - return 4 - case reflect.Ptr: - return getTypeSize(t.Elem()) - default: - return 16 - } -} - -func getTotalSize(si *StructInfo) uint32 { - rv := uint32(si.Typ.Size()) - for _, f := range si.Fields { - rv += getTypeSize(f.Typ) - } - return rv -} - -func getBufGrowSize(si *StructInfo) uint32 { - - // TOOD(pquerna): automatically calc a better grow size based on history - // of a struct. - return p2(getTotalSize(si)) -} - -func isIntish(t reflect.Type) bool { - if t.Kind() >= reflect.Int && t.Kind() <= reflect.Uintptr { - return true - } - if t.Kind() == reflect.Array || t.Kind() == reflect.Slice || t.Kind() == reflect.Ptr { - if t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 { - // base64 special case. - return false - } else { - return isIntish(t.Elem()) - } - } - return false -} - -func getField(ic *Inception, f *StructField, prefix string) string { - out := "" - if f.OmitEmpty { - out += ic.q.Flush() - if f.Pointer { - out += "if " + prefix + f.Name + " != nil {" + "\n" - } - out += getOmitEmpty(ic, f) - } - - if f.Pointer && !f.OmitEmpty { - // Pointer values encode as the value pointed to. A nil pointer encodes as the null JSON object. - out += "if " + prefix + f.Name + " != nil {" + "\n" - } - - // JsonName is already escaped and quoted. - // getInnervalue should flush - ic.q.Write(f.JsonName + ":") - // We save a copy in case we need it - t := ic.q - - out += getValue(ic, f, prefix) - ic.q.Write(",") - - if f.Pointer && !f.OmitEmpty { - out += "} else {" + "\n" - out += t.WriteFlush("null") - out += "}" + "\n" - } - - if f.OmitEmpty { - out += ic.q.Flush() - if f.Pointer { - out += "}" + "\n" - } - out += "}" + "\n" - } - return out -} - -// We check if the last field is conditional. 
-func lastConditional(fields []*StructField) bool { - if len(fields) > 0 { - f := fields[len(fields)-1] - return f.OmitEmpty - } - return false -} - -func CreateMarshalJSON(ic *Inception, si *StructInfo) error { - conditionalWrites := lastConditional(si.Fields) - out := "" - - out += "// MarshalJSON marshal bytes to json - template\n" - out += `func (j *` + si.Name + `) MarshalJSON() ([]byte, error) {` + "\n" - out += `var buf fflib.Buffer` + "\n" - - out += `if j == nil {` + "\n" - out += ` buf.WriteString("null")` + "\n" - out += " return buf.Bytes(), nil" + "\n" - out += `}` + "\n" - - out += `err := j.MarshalJSONBuf(&buf)` + "\n" - out += `if err != nil {` + "\n" - out += " return nil, err" + "\n" - out += `}` + "\n" - out += `return buf.Bytes(), nil` + "\n" - out += `}` + "\n" - - out += "// MarshalJSONBuf marshal buff to json - template\n" - out += `func (j *` + si.Name + `) MarshalJSONBuf(buf fflib.EncodingBuffer) (error) {` + "\n" - out += ` if j == nil {` + "\n" - out += ` buf.WriteString("null")` + "\n" - out += " return nil" + "\n" - out += ` }` + "\n" - - out += `var err error` + "\n" - out += `var obj []byte` + "\n" - out += `_ = obj` + "\n" - out += `_ = err` + "\n" - - ic.q.Write("{") - - // The extra space is inserted here. - // If nothing is written to the field this will be deleted - // instead of the last comma. - if conditionalWrites || len(si.Fields) == 0 { - ic.q.Write(" ") - } - - for _, f := range si.Fields { - out += getField(ic, f, "j.") - } - - // Handling the last comma is tricky. - // If the last field has omitempty, conditionalWrites is set. - // If something has been written, we delete the last comma, - // by backing up the buffer, otherwise it will delete a space. - if conditionalWrites { - out += ic.q.Flush() - out += `buf.Rewind(1)` + "\n" - } else { - ic.q.DeleteLast() - } - - out += ic.q.WriteFlush("}") - out += `return nil` + "\n" - out += `}` + "\n" - ic.OutputFuncs = append(ic.OutputFuncs, out) - return nil -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/inception/encoder_tpl.go b/tests/tools/vendor/github.com/pquerna/ffjson/inception/encoder_tpl.go deleted file mode 100644 index 22ab5292e7..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/inception/encoder_tpl.go +++ /dev/null @@ -1,73 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package ffjsoninception - -import ( - "reflect" - "text/template" -) - -var encodeTpl map[string]*template.Template - -func init() { - encodeTpl = make(map[string]*template.Template) - - funcs := map[string]string{ - "handleMarshaler": handleMarshalerTxt, - } - tplFuncs := template.FuncMap{} - - for k, v := range funcs { - encodeTpl[k] = template.Must(template.New(k).Funcs(tplFuncs).Parse(v)) - } -} - -type handleMarshaler struct { - IC *Inception - Name string - Typ reflect.Type - Ptr reflect.Kind - MarshalJSONBuf bool - Marshaler bool -} - -var handleMarshalerTxt = ` - { - {{if eq .Typ.Kind .Ptr}} - if {{.Name}} == nil { - buf.WriteString("null") - } else { - {{end}} - - {{if eq .MarshalJSONBuf true}} - err = {{.Name}}.MarshalJSONBuf(buf) - if err != nil { - return err - } - {{else if eq .Marshaler true}} - obj, err = {{.Name}}.MarshalJSON() - if err != nil { - return err - } - buf.Write(obj) - {{end}} - {{if eq .Typ.Kind .Ptr}} - } - {{end}} - } -` diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/inception/inception.go b/tests/tools/vendor/github.com/pquerna/ffjson/inception/inception.go deleted file mode 100644 index 10cb2712cc..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/inception/inception.go +++ /dev/null @@ -1,160 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "errors" - "fmt" - "github.com/pquerna/ffjson/shared" - "io/ioutil" - "os" - "reflect" - "sort" -) - -type Inception struct { - objs []*StructInfo - InputPath string - OutputPath string - PackageName string - PackagePath string - OutputImports map[string]bool - OutputFuncs []string - q ConditionalWrite - ResetFields bool -} - -func NewInception(inputPath string, packageName string, outputPath string, resetFields bool) *Inception { - return &Inception{ - objs: make([]*StructInfo, 0), - InputPath: inputPath, - OutputPath: outputPath, - PackageName: packageName, - OutputFuncs: make([]string, 0), - OutputImports: make(map[string]bool), - ResetFields: resetFields, - } -} - -func (i *Inception) AddMany(objs []shared.InceptionType) { - for _, obj := range objs { - i.Add(obj) - } -} - -func (i *Inception) Add(obj shared.InceptionType) { - i.objs = append(i.objs, NewStructInfo(obj)) - i.PackagePath = i.objs[0].Typ.PkgPath() -} - -func (i *Inception) wantUnmarshal(si *StructInfo) bool { - if si.Options.SkipDecoder { - return false - } - typ := si.Typ - umlx := typ.Implements(unmarshalFasterType) || reflect.PtrTo(typ).Implements(unmarshalFasterType) - umlstd := typ.Implements(unmarshalerType) || reflect.PtrTo(typ).Implements(unmarshalerType) - if umlstd && !umlx { - // structure has UnmarshalJSON, but not our faster version -- skip it. 
- return false - } - return true -} - -func (i *Inception) wantMarshal(si *StructInfo) bool { - if si.Options.SkipEncoder { - return false - } - typ := si.Typ - mlx := typ.Implements(marshalerFasterType) || reflect.PtrTo(typ).Implements(marshalerFasterType) - mlstd := typ.Implements(marshalerType) || reflect.PtrTo(typ).Implements(marshalerType) - if mlstd && !mlx { - // structure has MarshalJSON, but not our faster version -- skip it. - return false - } - return true -} - -type sortedStructs []*StructInfo - -func (p sortedStructs) Len() int { return len(p) } -func (p sortedStructs) Less(i, j int) bool { return p[i].Name < p[j].Name } -func (p sortedStructs) Swap(i, j int) { p[i], p[j] = p[j], p[i] } -func (p sortedStructs) Sort() { sort.Sort(p) } - -func (i *Inception) generateCode() error { - // We sort the structs by name, so output if predictable. - sorted := sortedStructs(i.objs) - sorted.Sort() - - for _, si := range sorted { - if i.wantMarshal(si) { - err := CreateMarshalJSON(i, si) - if err != nil { - return err - } - } - - if i.wantUnmarshal(si) { - err := CreateUnmarshalJSON(i, si) - if err != nil { - return err - } - } - } - return nil -} - -func (i *Inception) handleError(err error) { - fmt.Fprintf(os.Stderr, "Error: %s:\n\n", err) - os.Exit(1) -} - -func (i *Inception) Execute() { - if len(os.Args) != 1 { - i.handleError(errors.New(fmt.Sprintf("Internal ffjson error: inception executable takes no args: %v", os.Args))) - return - } - - err := i.generateCode() - if err != nil { - i.handleError(err) - return - } - - data, err := RenderTemplate(i) - if err != nil { - i.handleError(err) - return - } - - stat, err := os.Stat(i.InputPath) - - if err != nil { - i.handleError(err) - return - } - - err = ioutil.WriteFile(i.OutputPath, data, stat.Mode()) - - if err != nil { - i.handleError(err) - return - } - -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/inception/reflect.go b/tests/tools/vendor/github.com/pquerna/ffjson/inception/reflect.go deleted file mode 100644 index 8fb0bd5cb5..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/inception/reflect.go +++ /dev/null @@ -1,290 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package ffjsoninception - -import ( - fflib "github.com/pquerna/ffjson/fflib/v1" - "github.com/pquerna/ffjson/shared" - - "bytes" - "encoding/json" - "reflect" - "unicode/utf8" -) - -type StructField struct { - Name string - JsonName string - FoldFuncName string - Typ reflect.Type - OmitEmpty bool - ForceString bool - HasMarshalJSON bool - HasUnmarshalJSON bool - Pointer bool - Tagged bool -} - -type FieldByJsonName []*StructField - -func (a FieldByJsonName) Len() int { return len(a) } -func (a FieldByJsonName) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a FieldByJsonName) Less(i, j int) bool { return a[i].JsonName < a[j].JsonName } - -type StructInfo struct { - Name string - Obj interface{} - Typ reflect.Type - Fields []*StructField - Options shared.StructOptions -} - -func NewStructInfo(obj shared.InceptionType) *StructInfo { - t := reflect.TypeOf(obj.Obj) - return &StructInfo{ - Obj: obj.Obj, - Name: t.Name(), - Typ: t, - Fields: extractFields(obj.Obj), - Options: obj.Options, - } -} - -func (si *StructInfo) FieldsByFirstByte() map[string][]*StructField { - rv := make(map[string][]*StructField) - for _, f := range si.Fields { - b := string(f.JsonName[1]) - rv[b] = append(rv[b], f) - } - return rv -} - -func (si *StructInfo) ReverseFields() []*StructField { - var i int - rv := make([]*StructField, 0) - for i = len(si.Fields) - 1; i >= 0; i-- { - rv = append(rv, si.Fields[i]) - } - return rv -} - -const ( - caseMask = ^byte(0x20) // Mask to ignore case in ASCII. -) - -func foldFunc(key []byte) string { - nonLetter := false - special := false // special letter - for _, b := range key { - if b >= utf8.RuneSelf { - return "bytes.EqualFold" - } - upper := b & caseMask - if upper < 'A' || upper > 'Z' { - nonLetter = true - } else if upper == 'K' || upper == 'S' { - // See above for why these letters are special. - special = true - } - } - if special { - return "fflib.EqualFoldRight" - } - if nonLetter { - return "fflib.AsciiEqualFold" - } - return "fflib.SimpleLetterEqualFold" -} - -type MarshalerFaster interface { - MarshalJSONBuf(buf fflib.EncodingBuffer) error -} - -type UnmarshalFaster interface { - UnmarshalJSONFFLexer(l *fflib.FFLexer, state fflib.FFParseState) error -} - -var marshalerType = reflect.TypeOf(new(json.Marshaler)).Elem() -var marshalerFasterType = reflect.TypeOf(new(MarshalerFaster)).Elem() -var unmarshalerType = reflect.TypeOf(new(json.Unmarshaler)).Elem() -var unmarshalFasterType = reflect.TypeOf(new(UnmarshalFaster)).Elem() - -// extractFields returns a list of fields that JSON should recognize for the given type. -// The algorithm is breadth-first search over the set of structs to include - the top struct -// and then any reachable anonymous structs. -func extractFields(obj interface{}) []*StructField { - t := reflect.TypeOf(obj) - // Anonymous fields to explore at the current level and the next. - current := []StructField{} - next := []StructField{{Typ: t}} - - // Count of queued names for current level and the next. - count := map[reflect.Type]int{} - nextCount := map[reflect.Type]int{} - - // Types already visited at an earlier level. - visited := map[reflect.Type]bool{} - - // Fields found. - var fields []*StructField - - for len(next) > 0 { - current, next = next, current[:0] - count, nextCount = nextCount, map[reflect.Type]int{} - - for _, f := range current { - if visited[f.Typ] { - continue - } - visited[f.Typ] = true - - // Scan f.typ for fields to include. 
- for i := 0; i < f.Typ.NumField(); i++ { - sf := f.Typ.Field(i) - if sf.PkgPath != "" { // unexported - continue - } - tag := sf.Tag.Get("json") - if tag == "-" { - continue - } - name, opts := parseTag(tag) - if !isValidTag(name) { - name = "" - } - - ft := sf.Type - ptr := false - if ft.Kind() == reflect.Ptr { - ptr = true - } - - if ft.Name() == "" && ft.Kind() == reflect.Ptr { - // Follow pointer. - ft = ft.Elem() - } - - // Record found field and index sequence. - if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { - tagged := name != "" - if name == "" { - name = sf.Name - } - - var buf bytes.Buffer - fflib.WriteJsonString(&buf, name) - - field := &StructField{ - Name: sf.Name, - JsonName: string(buf.Bytes()), - FoldFuncName: foldFunc([]byte(name)), - Typ: ft, - HasMarshalJSON: ft.Implements(marshalerType), - HasUnmarshalJSON: ft.Implements(unmarshalerType), - OmitEmpty: opts.Contains("omitempty"), - ForceString: opts.Contains("string"), - Pointer: ptr, - Tagged: tagged, - } - - fields = append(fields, field) - - if count[f.Typ] > 1 { - // If there were multiple instances, add a second, - // so that the annihilation code will see a duplicate. - // It only cares about the distinction between 1 or 2, - // so don't bother generating any more copies. - fields = append(fields, fields[len(fields)-1]) - } - continue - } - - // Record new anonymous struct to explore in next round. - nextCount[ft]++ - if nextCount[ft] == 1 { - next = append(next, StructField{ - Name: ft.Name(), - Typ: ft, - }) - } - } - } - } - - // Delete all fields that are hidden by the Go rules for embedded fields, - // except that fields with JSON tags are promoted. - - // The fields are sorted in primary order of name, secondary order - // of field index length. Loop over names; for each name, delete - // hidden fields by choosing the one dominant field that survives. - out := fields[:0] - for advance, i := 0, 0; i < len(fields); i += advance { - // One iteration per name. - // Find the sequence of fields with the name of this first field. - fi := fields[i] - name := fi.JsonName - for advance = 1; i+advance < len(fields); advance++ { - fj := fields[i+advance] - if fj.JsonName != name { - break - } - } - if advance == 1 { // Only one field with this name - out = append(out, fi) - continue - } - dominant, ok := dominantField(fields[i : i+advance]) - if ok { - out = append(out, dominant) - } - } - - fields = out - - return fields -} - -// dominantField looks through the fields, all of which are known to -// have the same name, to find the single field that dominates the -// others using Go's embedding rules, modified by the presence of -// JSON tags. If there are multiple top-level fields, the boolean -// will be false: This condition is an error in Go and we skip all -// the fields. -func dominantField(fields []*StructField) (*StructField, bool) { - tagged := -1 // Index of first tagged field. - for i, f := range fields { - if f.Tagged { - if tagged >= 0 { - // Multiple tagged fields at the same level: conflict. - // Return no field. - return nil, false - } - tagged = i - } - } - if tagged >= 0 { - return fields[tagged], true - } - // All remaining fields have the same length. If there's more than one, - // we have a conflict (two fields named "X" at the same level) and we - // return no field. 
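The extractFields/dominantField logic here mirrors encoding/json's rules for fields promoted from embedded structs: a tagged field beats untagged ones with the same JSON name, and two equally placed, equally (un)tagged fields cancel each other out. The standard library behaves the same way; the short, self-contained program below (toy types, chosen only to illustrate the rule) shows both outcomes:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type Left struct{ Name string }
    type Right struct{ Name string }

    // Neither promoted Name is tagged and both sit at the same depth,
    // so the field is dropped entirely when marshaling.
    type Conflict struct {
        Left
        Right
    }

    type TaggedLeft struct {
        Name string `json:"Name"` // tagged, so it dominates an untagged sibling
    }

    type Dominant struct {
        TaggedLeft
        Right
    }

    func main() {
        c, _ := json.Marshal(Conflict{Left{"l"}, Right{"r"}})
        fmt.Println(string(c)) // {}

        d, _ := json.Marshal(Dominant{TaggedLeft{"tagged"}, Right{"untagged"}})
        fmt.Println(string(d)) // {"Name":"tagged"}
    }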
- if len(fields) > 1 { - return nil, false - } - return fields[0], true -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/inception/tags.go b/tests/tools/vendor/github.com/pquerna/ffjson/inception/tags.go deleted file mode 100644 index ccce101b84..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/inception/tags.go +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "strings" - "unicode" -) - -// from: http://golang.org/src/pkg/encoding/json/tags.go - -// tagOptions is the string following a comma in a struct field's "json" -// tag, or the empty string. It does not include the leading comma. -type tagOptions string - -// parseTag splits a struct field's json tag into its name and -// comma-separated options. -func parseTag(tag string) (string, tagOptions) { - if idx := strings.Index(tag, ","); idx != -1 { - return tag[:idx], tagOptions(tag[idx+1:]) - } - return tag, tagOptions("") -} - -// Contains reports whether a comma-separated list of options -// contains a particular substr flag. substr must be surrounded by a -// string boundary or commas. -func (o tagOptions) Contains(optionName string) bool { - if len(o) == 0 { - return false - } - s := string(o) - for s != "" { - var next string - i := strings.Index(s, ",") - if i >= 0 { - s, next = s[:i], s[i+1:] - } - if s == optionName { - return true - } - s = next - } - return false -} - -func isValidTag(s string) bool { - if s == "" { - return false - } - for _, c := range s { - switch { - case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): - // Backslash and quote chars are reserved, but - // otherwise any punctuation chars are allowed - // in a tag name. - default: - if !unicode.IsLetter(c) && !unicode.IsDigit(c) { - return false - } - } - } - return true -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/inception/template.go b/tests/tools/vendor/github.com/pquerna/ffjson/inception/template.go deleted file mode 100644 index 121a23dd8d..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/inception/template.go +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "bytes" - "go/format" - "text/template" -) - -const ffjsonTemplate = ` -// Code generated by ffjson . DO NOT EDIT. 
-// source: {{.InputPath}} - -package {{.PackageName}} - -import ( -{{range $k, $v := .OutputImports}}{{$k}} -{{end}} -) - -{{range .OutputFuncs}} -{{.}} -{{end}} - -` - -func RenderTemplate(ic *Inception) ([]byte, error) { - t := template.Must(template.New("ffjson.go").Parse(ffjsonTemplate)) - buf := new(bytes.Buffer) - err := t.Execute(buf, ic) - if err != nil { - return nil, err - } - return format.Source(buf.Bytes()) -} - -func tplStr(t *template.Template, data interface{}) string { - buf := bytes.Buffer{} - err := t.Execute(&buf, data) - if err != nil { - panic(err) - } - return buf.String() -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/inception/writerstack.go b/tests/tools/vendor/github.com/pquerna/ffjson/inception/writerstack.go deleted file mode 100644 index 1521961c94..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/inception/writerstack.go +++ /dev/null @@ -1,65 +0,0 @@ -package ffjsoninception - -import "strings" - -// ConditionalWrite is a stack containing a number of pending writes -type ConditionalWrite struct { - Queued []string -} - -// Write will add a string to be written -func (w *ConditionalWrite) Write(s string) { - w.Queued = append(w.Queued, s) -} - -// DeleteLast will delete the last added write -func (w *ConditionalWrite) DeleteLast() { - if len(w.Queued) == 0 { - return - } - w.Queued = w.Queued[:len(w.Queued)-1] -} - -// Last will return the last added write -func (w *ConditionalWrite) Last() string { - if len(w.Queued) == 0 { - return "" - } - return w.Queued[len(w.Queued)-1] -} - -// Flush will return all queued writes, and return -// "" (empty string) in nothing has been queued -// "buf.WriteByte('" + byte + "')" + '\n' if one bute has been queued. -// "buf.WriteString(`" + string + "`)" + "\n" if more than one byte has been queued. -func (w *ConditionalWrite) Flush() string { - combined := strings.Join(w.Queued, "") - if len(combined) == 0 { - return "" - } - - w.Queued = nil - if len(combined) == 1 { - return "buf.WriteByte('" + combined + "')" + "\n" - } - return "buf.WriteString(`" + combined + "`)" + "\n" -} - -func (w *ConditionalWrite) FlushTo(out string) string { - out += w.Flush() - return out -} - -// WriteFlush will add a string and return the Flush result for the queue -func (w *ConditionalWrite) WriteFlush(s string) string { - w.Write(s) - return w.Flush() -} - -// GetQueued will return the current queued content without flushing. -func (w *ConditionalWrite) GetQueued() string { - t := w.Queued - s := w.Flush() - w.Queued = t - return s -} diff --git a/tests/tools/vendor/github.com/pquerna/ffjson/shared/options.go b/tests/tools/vendor/github.com/pquerna/ffjson/shared/options.go deleted file mode 100644 index d74edc1351..0000000000 --- a/tests/tools/vendor/github.com/pquerna/ffjson/shared/options.go +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright 2014 Paul Querna, Klaus Post - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
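RenderTemplate above drives a text/template over the collected output functions and then runs the result through go/format, so the generated file always comes out gofmt-clean. A small self-contained sketch of that pipeline, with a toy template and data rather than the ffjson one:

    package main

    import (
        "bytes"
        "fmt"
        "go/format"
        "text/template"
    )

    const src = `package {{.Package}}

    func   {{.Func}}()   string { return "generated" }
    `

    func main() {
        t := template.Must(template.New("gen").Parse(src))

        var buf bytes.Buffer
        if err := t.Execute(&buf, map[string]string{"Package": "demo", "Func": "Hello"}); err != nil {
            panic(err)
        }

        // go/format fixes the sloppy spacing the template left behind.
        out, err := format.Source(buf.Bytes())
        if err != nil {
            panic(err)
        }
        fmt.Print(string(out))
    }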
- * - */ - -package shared - -type StructOptions struct { - SkipDecoder bool - SkipEncoder bool -} - -type InceptionType struct { - Obj interface{} - Options StructOptions -} -type Feature int - -const ( - Nothing Feature = 0 - MustDecoder = 1 << 1 - MustEncoder = 1 << 2 - MustEncDec = MustDecoder | MustEncoder -) - -func (i InceptionType) HasFeature(f Feature) bool { - return i.HasFeature(f) -} - -func (s StructOptions) HasFeature(f Feature) bool { - hasNeeded := true - if f&MustDecoder != 0 && s.SkipDecoder { - hasNeeded = false - } - if f&MustEncoder != 0 && s.SkipEncoder { - hasNeeded = false - } - return hasNeeded -} diff --git a/tests/tools/vendor/modules.txt b/tests/tools/vendor/modules.txt index 7932ccce66..8b323fb351 100644 --- a/tests/tools/vendor/modules.txt +++ b/tests/tools/vendor/modules.txt @@ -5,13 +5,6 @@ github.com/cpuguy83/go-md2man/md2man github.com/hashicorp/go-version # github.com/konsorten/go-windows-terminal-sequences v1.0.1 github.com/konsorten/go-windows-terminal-sequences -# github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7 -github.com/pquerna/ffjson -github.com/pquerna/ffjson/fflib/v1 -github.com/pquerna/ffjson/fflib/v1/internal -github.com/pquerna/ffjson/generator -github.com/pquerna/ffjson/inception -github.com/pquerna/ffjson/shared # github.com/russross/blackfriday v1.5.2 github.com/russross/blackfriday # github.com/sirupsen/logrus v1.4.1 diff --git a/vendor/github.com/pquerna/ffjson/LICENSE b/vendor/github.com/pquerna/ffjson/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/vendor/github.com/pquerna/ffjson/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
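The Feature values above form a small bitmask: MustDecoder and MustEncoder each occupy one bit and MustEncDec is their union, so HasFeature is a mask-and-test against the Skip* flags. Note that, as written in the removed file, InceptionType.HasFeature calls itself rather than delegating to its Options field, so it would recurse forever if anything invoked it; the sketch below shows the presumably intended delegation, with the same semantics otherwise:

    package main

    import "fmt"

    type Feature int

    const (
        Nothing     Feature = 0
        MustDecoder Feature = 1 << 1
        MustEncoder Feature = 1 << 2
        MustEncDec          = MustDecoder | MustEncoder
    )

    type StructOptions struct {
        SkipDecoder bool
        SkipEncoder bool
    }

    type InceptionType struct {
        Obj     interface{}
        Options StructOptions
    }

    func (s StructOptions) HasFeature(f Feature) bool {
        if f&MustDecoder != 0 && s.SkipDecoder {
            return false
        }
        if f&MustEncoder != 0 && s.SkipEncoder {
            return false
        }
        return true
    }

    // Delegate to the options rather than to the method itself.
    func (i InceptionType) HasFeature(f Feature) bool {
        return i.Options.HasFeature(f)
    }

    func main() {
        t := InceptionType{Options: StructOptions{SkipEncoder: true}}
        fmt.Println(t.HasFeature(MustDecoder)) // true
        fmt.Println(t.HasFeature(MustEncDec))  // false
    }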
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/pquerna/ffjson/NOTICE b/vendor/github.com/pquerna/ffjson/NOTICE deleted file mode 100644 index 405a49618b..0000000000 --- a/vendor/github.com/pquerna/ffjson/NOTICE +++ /dev/null @@ -1,8 +0,0 @@ -ffjson -Copyright (c) 2014, Paul Querna - -This product includes software developed by -Paul Querna (http://paul.querna.org/). - -Portions of this software were developed as -part of Go, Copyright (c) 2012 The Go Authors. \ No newline at end of file diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/buffer.go b/vendor/github.com/pquerna/ffjson/fflib/v1/buffer.go deleted file mode 100644 index 7f63a8582d..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/buffer.go +++ /dev/null @@ -1,421 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package v1 - -// Simple byte buffer for marshaling data. 
- -import ( - "bytes" - "encoding/json" - "errors" - "io" - "unicode/utf8" -) - -type grower interface { - Grow(n int) -} - -type truncater interface { - Truncate(n int) - Reset() -} - -type bytesReader interface { - Bytes() []byte - String() string -} - -type runeWriter interface { - WriteRune(r rune) (n int, err error) -} - -type stringWriter interface { - WriteString(s string) (n int, err error) -} - -type lener interface { - Len() int -} - -type rewinder interface { - Rewind(n int) (err error) -} - -type encoder interface { - Encode(interface{}) error -} - -// TODO(pquerna): continue to reduce these interfaces - -type EncodingBuffer interface { - io.Writer - io.WriterTo - io.ByteWriter - stringWriter - truncater - grower - rewinder - encoder -} - -type DecodingBuffer interface { - io.ReadWriter - io.ByteWriter - stringWriter - runeWriter - truncater - grower - bytesReader - lener -} - -// A Buffer is a variable-sized buffer of bytes with Read and Write methods. -// The zero value for Buffer is an empty buffer ready to use. -type Buffer struct { - buf []byte // contents are the bytes buf[off : len(buf)] - off int // read at &buf[off], write at &buf[len(buf)] - runeBytes [utf8.UTFMax]byte // avoid allocation of slice on each WriteByte or Rune - encoder *json.Encoder - skipTrailingByte bool -} - -// ErrTooLarge is passed to panic if memory cannot be allocated to store data in a buffer. -var ErrTooLarge = errors.New("fflib.v1.Buffer: too large") - -// Bytes returns a slice of the contents of the unread portion of the buffer; -// len(b.Bytes()) == b.Len(). If the caller changes the contents of the -// returned slice, the contents of the buffer will change provided there -// are no intervening method calls on the Buffer. -func (b *Buffer) Bytes() []byte { return b.buf[b.off:] } - -// String returns the contents of the unread portion of the buffer -// as a string. If the Buffer is a nil pointer, it returns "". -func (b *Buffer) String() string { - if b == nil { - // Special case, useful in debugging. - return "" - } - return string(b.buf[b.off:]) -} - -// Len returns the number of bytes of the unread portion of the buffer; -// b.Len() == len(b.Bytes()). -func (b *Buffer) Len() int { return len(b.buf) - b.off } - -// Truncate discards all but the first n unread bytes from the buffer. -// It panics if n is negative or greater than the length of the buffer. -func (b *Buffer) Truncate(n int) { - if n == 0 { - b.off = 0 - b.buf = b.buf[0:0] - } else { - b.buf = b.buf[0 : b.off+n] - } -} - -// Reset resets the buffer so it has no content. -// b.Reset() is the same as b.Truncate(0). -func (b *Buffer) Reset() { b.Truncate(0) } - -// grow grows the buffer to guarantee space for n more bytes. -// It returns the index where bytes should be written. -// If the buffer can't grow it will panic with ErrTooLarge. -func (b *Buffer) grow(n int) int { - // If we have no buffer, get one from the pool - m := b.Len() - if m == 0 { - if b.buf == nil { - b.buf = makeSlice(2 * n) - b.off = 0 - } else if b.off != 0 { - // If buffer is empty, reset to recover space. - b.Truncate(0) - } - } - if len(b.buf)+n > cap(b.buf) { - var buf []byte - if m+n <= cap(b.buf)/2 { - // We can slide things down instead of allocating a new - // slice. We only need m+n <= cap(b.buf) to slide, but - // we instead let capacity get twice as large so we - // don't spend all our time copying. 
- copy(b.buf[:], b.buf[b.off:]) - buf = b.buf[:m] - } else { - // not enough space anywhere - buf = makeSlice(2*cap(b.buf) + n) - copy(buf, b.buf[b.off:]) - Pool(b.buf) - b.buf = buf - } - b.off = 0 - } - b.buf = b.buf[0 : b.off+m+n] - return b.off + m -} - -// Grow grows the buffer's capacity, if necessary, to guarantee space for -// another n bytes. After Grow(n), at least n bytes can be written to the -// buffer without another allocation. -// If n is negative, Grow will panic. -// If the buffer can't grow it will panic with ErrTooLarge. -func (b *Buffer) Grow(n int) { - if n < 0 { - panic("bytes.Buffer.Grow: negative count") - } - m := b.grow(n) - b.buf = b.buf[0:m] -} - -// Write appends the contents of p to the buffer, growing the buffer as -// needed. The return value n is the length of p; err is always nil. If the -// buffer becomes too large, Write will panic with ErrTooLarge. -func (b *Buffer) Write(p []byte) (n int, err error) { - if b.skipTrailingByte { - p = p[:len(p)-1] - } - m := b.grow(len(p)) - return copy(b.buf[m:], p), nil -} - -// WriteString appends the contents of s to the buffer, growing the buffer as -// needed. The return value n is the length of s; err is always nil. If the -// buffer becomes too large, WriteString will panic with ErrTooLarge. -func (b *Buffer) WriteString(s string) (n int, err error) { - m := b.grow(len(s)) - return copy(b.buf[m:], s), nil -} - -// MinRead is the minimum slice size passed to a Read call by -// Buffer.ReadFrom. As long as the Buffer has at least MinRead bytes beyond -// what is required to hold the contents of r, ReadFrom will not grow the -// underlying buffer. -const minRead = 512 - -// ReadFrom reads data from r until EOF and appends it to the buffer, growing -// the buffer as needed. The return value n is the number of bytes read. Any -// error except io.EOF encountered during the read is also returned. If the -// buffer becomes too large, ReadFrom will panic with ErrTooLarge. -func (b *Buffer) ReadFrom(r io.Reader) (n int64, err error) { - // If buffer is empty, reset to recover space. - if b.off >= len(b.buf) { - b.Truncate(0) - } - for { - if free := cap(b.buf) - len(b.buf); free < minRead { - // not enough space at end - newBuf := b.buf - if b.off+free < minRead { - // not enough space using beginning of buffer; - // double buffer capacity - newBuf = makeSlice(2*cap(b.buf) + minRead) - } - copy(newBuf, b.buf[b.off:]) - Pool(b.buf) - b.buf = newBuf[:len(b.buf)-b.off] - b.off = 0 - } - m, e := r.Read(b.buf[len(b.buf):cap(b.buf)]) - b.buf = b.buf[0 : len(b.buf)+m] - n += int64(m) - if e == io.EOF { - break - } - if e != nil { - return n, e - } - } - return n, nil // err is EOF, so return nil explicitly -} - -// WriteTo writes data to w until the buffer is drained or an error occurs. -// The return value n is the number of bytes written; it always fits into an -// int, but it is int64 to match the io.WriterTo interface. Any error -// encountered during the write is also returned. -func (b *Buffer) WriteTo(w io.Writer) (n int64, err error) { - if b.off < len(b.buf) { - nBytes := b.Len() - m, e := w.Write(b.buf[b.off:]) - if m > nBytes { - panic("bytes.Buffer.WriteTo: invalid Write count") - } - b.off += m - n = int64(m) - if e != nil { - return n, e - } - // all bytes should have been written, by definition of - // Write method in io.Writer - if m != nBytes { - return n, io.ErrShortWrite - } - } - // Buffer is now empty; reset. 
- b.Truncate(0) - return -} - -// WriteByte appends the byte c to the buffer, growing the buffer as needed. -// The returned error is always nil, but is included to match bufio.Writer's -// WriteByte. If the buffer becomes too large, WriteByte will panic with -// ErrTooLarge. -func (b *Buffer) WriteByte(c byte) error { - m := b.grow(1) - b.buf[m] = c - return nil -} - -func (b *Buffer) Rewind(n int) error { - b.buf = b.buf[:len(b.buf)-n] - return nil -} - -func (b *Buffer) Encode(v interface{}) error { - if b.encoder == nil { - b.encoder = json.NewEncoder(b) - } - b.skipTrailingByte = true - err := b.encoder.Encode(v) - b.skipTrailingByte = false - return err -} - -// WriteRune appends the UTF-8 encoding of Unicode code point r to the -// buffer, returning its length and an error, which is always nil but is -// included to match bufio.Writer's WriteRune. The buffer is grown as needed; -// if it becomes too large, WriteRune will panic with ErrTooLarge. -func (b *Buffer) WriteRune(r rune) (n int, err error) { - if r < utf8.RuneSelf { - b.WriteByte(byte(r)) - return 1, nil - } - n = utf8.EncodeRune(b.runeBytes[0:], r) - b.Write(b.runeBytes[0:n]) - return n, nil -} - -// Read reads the next len(p) bytes from the buffer or until the buffer -// is drained. The return value n is the number of bytes read. If the -// buffer has no data to return, err is io.EOF (unless len(p) is zero); -// otherwise it is nil. -func (b *Buffer) Read(p []byte) (n int, err error) { - if b.off >= len(b.buf) { - // Buffer is empty, reset to recover space. - b.Truncate(0) - if len(p) == 0 { - return - } - return 0, io.EOF - } - n = copy(p, b.buf[b.off:]) - b.off += n - return -} - -// Next returns a slice containing the next n bytes from the buffer, -// advancing the buffer as if the bytes had been returned by Read. -// If there are fewer than n bytes in the buffer, Next returns the entire buffer. -// The slice is only valid until the next call to a read or write method. -func (b *Buffer) Next(n int) []byte { - m := b.Len() - if n > m { - n = m - } - data := b.buf[b.off : b.off+n] - b.off += n - return data -} - -// ReadByte reads and returns the next byte from the buffer. -// If no byte is available, it returns error io.EOF. -func (b *Buffer) ReadByte() (c byte, err error) { - if b.off >= len(b.buf) { - // Buffer is empty, reset to recover space. - b.Truncate(0) - return 0, io.EOF - } - c = b.buf[b.off] - b.off++ - return c, nil -} - -// ReadRune reads and returns the next UTF-8-encoded -// Unicode code point from the buffer. -// If no bytes are available, the error returned is io.EOF. -// If the bytes are an erroneous UTF-8 encoding, it -// consumes one byte and returns U+FFFD, 1. -func (b *Buffer) ReadRune() (r rune, size int, err error) { - if b.off >= len(b.buf) { - // Buffer is empty, reset to recover space. - b.Truncate(0) - return 0, 0, io.EOF - } - c := b.buf[b.off] - if c < utf8.RuneSelf { - b.off++ - return rune(c), 1, nil - } - r, n := utf8.DecodeRune(b.buf[b.off:]) - b.off += n - return r, n, nil -} - -// ReadBytes reads until the first occurrence of delim in the input, -// returning a slice containing the data up to and including the delimiter. -// If ReadBytes encounters an error before finding a delimiter, -// it returns the data read before the error and the error itself (often io.EOF). -// ReadBytes returns err != nil if and only if the returned data does not end in -// delim. 
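Buffer.Encode above reuses a cached json.Encoder but sets skipTrailingByte so the newline the encoder always appends never lands in the buffer. The same effect can be had with the standard library alone, as this small sketch shows:

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
    )

    func main() {
        var buf bytes.Buffer
        enc := json.NewEncoder(&buf)

        if err := enc.Encode(map[string]int{"layers": 3}); err != nil {
            panic(err)
        }
        fmt.Printf("%q\n", buf.String()) // "{\"layers\":3}\n" -- note the trailing newline

        // Trim it when embedding the value in a larger document.
        out := bytes.TrimSuffix(buf.Bytes(), []byte("\n"))
        fmt.Printf("%q\n", out) // "{\"layers\":3}"
    }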
-func (b *Buffer) ReadBytes(delim byte) (line []byte, err error) { - slice, err := b.readSlice(delim) - // return a copy of slice. The buffer's backing array may - // be overwritten by later calls. - line = append(line, slice...) - return -} - -// readSlice is like ReadBytes but returns a reference to internal buffer data. -func (b *Buffer) readSlice(delim byte) (line []byte, err error) { - i := bytes.IndexByte(b.buf[b.off:], delim) - end := b.off + i + 1 - if i < 0 { - end = len(b.buf) - err = io.EOF - } - line = b.buf[b.off:end] - b.off = end - return line, err -} - -// ReadString reads until the first occurrence of delim in the input, -// returning a string containing the data up to and including the delimiter. -// If ReadString encounters an error before finding a delimiter, -// it returns the data read before the error and the error itself (often io.EOF). -// ReadString returns err != nil if and only if the returned data does not end -// in delim. -func (b *Buffer) ReadString(delim byte) (line string, err error) { - slice, err := b.readSlice(delim) - return string(slice), err -} - -// NewBuffer creates and initializes a new Buffer using buf as its initial -// contents. It is intended to prepare a Buffer to read existing data. It -// can also be used to size the internal buffer for writing. To do that, -// buf should have the desired capacity but a length of zero. -// -// In most cases, new(Buffer) (or just declaring a Buffer variable) is -// sufficient to initialize a Buffer. -func NewBuffer(buf []byte) *Buffer { return &Buffer{buf: buf} } - -// NewBufferString creates and initializes a new Buffer using string s as its -// initial contents. It is intended to prepare a buffer to read an existing -// string. -// -// In most cases, new(Buffer) (or just declaring a Buffer variable) is -// sufficient to initialize a Buffer. -func NewBufferString(s string) *Buffer { - return &Buffer{buf: []byte(s)} -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_nopool.go b/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_nopool.go deleted file mode 100644 index b84af6ff96..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_nopool.go +++ /dev/null @@ -1,11 +0,0 @@ -// +build !go1.3 - -package v1 - -// Stub version of buffer_pool.go for Go 1.2, which doesn't have sync.Pool. - -func Pool(b []byte) {} - -func makeSlice(n int) []byte { - return make([]byte, n) -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_pool.go b/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_pool.go deleted file mode 100644 index a021c57cf4..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/buffer_pool.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.3 - -package v1 - -// Allocation pools for Buffers. - -import "sync" - -var pools [14]sync.Pool -var pool64 *sync.Pool - -func init() { - var i uint - // TODO(pquerna): add science here around actual pool sizes. - for i = 6; i < 20; i++ { - n := 1 << i - pools[poolNum(n)].New = func() interface{} { return make([]byte, 0, n) } - } - pool64 = &pools[0] -} - -// This returns the pool number that will give a buffer of -// at least 'i' bytes. 
-func poolNum(i int) int { - // TODO(pquerna): convert to log2 w/ bsr asm instruction: - // - if i <= 64 { - return 0 - } else if i <= 128 { - return 1 - } else if i <= 256 { - return 2 - } else if i <= 512 { - return 3 - } else if i <= 1024 { - return 4 - } else if i <= 2048 { - return 5 - } else if i <= 4096 { - return 6 - } else if i <= 8192 { - return 7 - } else if i <= 16384 { - return 8 - } else if i <= 32768 { - return 9 - } else if i <= 65536 { - return 10 - } else if i <= 131072 { - return 11 - } else if i <= 262144 { - return 12 - } else if i <= 524288 { - return 13 - } else { - return -1 - } -} - -// Send a buffer to the Pool to reuse for other instances. -// You may no longer utilize the content of the buffer, since it may be used -// by other goroutines. -func Pool(b []byte) { - if b == nil { - return - } - c := cap(b) - - // Our smallest buffer is 64 bytes, so we discard smaller buffers. - if c < 64 { - return - } - - // We need to put the incoming buffer into the NEXT buffer, - // since a buffer guarantees AT LEAST the number of bytes available - // that is the top of this buffer. - // That is the reason for dividing the cap by 2, so it gets into the NEXT bucket. - // We add 2 to avoid rounding down if size is exactly power of 2. - pn := poolNum((c + 2) >> 1) - if pn != -1 { - pools[pn].Put(b[0:0]) - } - // if we didn't have a slot for this []byte, we just drop it and let the GC - // take care of it. -} - -// makeSlice allocates a slice of size n -- it will attempt to use a pool'ed -// instance whenever possible. -func makeSlice(n int) []byte { - if n <= 64 { - return pool64.Get().([]byte)[0:n] - } - - pn := poolNum(n) - - if pn != -1 { - return pools[pn].Get().([]byte)[0:n] - } else { - return make([]byte, n) - } -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/bytenum.go b/vendor/github.com/pquerna/ffjson/fflib/v1/bytenum.go deleted file mode 100644 index 08477409ac..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/bytenum.go +++ /dev/null @@ -1,88 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's strconv/iota.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
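buffer_pool.go above keeps fourteen sync.Pools, one per power-of-two size class from 64 bytes up to 512 KiB, and poolNum maps a requested size to the smallest class that can hold it; Pool() halves the capacity before bucketing so a returned slice is filed under a class whose size guarantee it still meets. A reduced sketch of the same bucketing idea, with only two hypothetical size classes:

    package main

    import (
        "fmt"
        "sync"
    )

    // Two size classes stand in for the vendored code's fourteen: any request
    // is served by the smallest class that can hold it.
    var classes = [...]int{64, 4096}
    var pools [len(classes)]sync.Pool

    func init() {
        for i, c := range classes {
            c := c // capture the class size for this pool's New function
            pools[i].New = func() interface{} { return make([]byte, 0, c) }
        }
    }

    // getSlice returns a zero-length slice whose capacity is at least n.
    func getSlice(n int) []byte {
        for i, c := range classes {
            if n <= c {
                return pools[i].Get().([]byte)[:0]
            }
        }
        return make([]byte, 0, n) // larger than any class: allocate directly
    }

    // putSlice files b under the largest class whose guarantee it still meets,
    // so a later Get from that class always has enough capacity.
    func putSlice(b []byte) {
        for i := len(classes) - 1; i >= 0; i-- {
            if cap(b) >= classes[i] {
                pools[i].Put(b[:0])
                return
            }
        }
        // Smaller than the smallest class: drop it and let the GC reclaim it.
    }

    func main() {
        b := getSlice(100) // served from the 4096-byte class
        fmt.Println(cap(b) >= 100) // true
        putSlice(b)
    }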
- -package v1 - -import ( - "github.com/pquerna/ffjson/fflib/v1/internal" -) - -func ParseFloat(s []byte, bitSize int) (f float64, err error) { - return internal.ParseFloat(s, bitSize) -} - -// ParseUint is like ParseInt but for unsigned numbers, and oeprating on []byte -func ParseUint(s []byte, base int, bitSize int) (n uint64, err error) { - if len(s) == 1 { - switch s[0] { - case '0': - return 0, nil - case '1': - return 1, nil - case '2': - return 2, nil - case '3': - return 3, nil - case '4': - return 4, nil - case '5': - return 5, nil - case '6': - return 6, nil - case '7': - return 7, nil - case '8': - return 8, nil - case '9': - return 9, nil - } - } - return internal.ParseUint(s, base, bitSize) -} - -func ParseInt(s []byte, base int, bitSize int) (i int64, err error) { - if len(s) == 1 { - switch s[0] { - case '0': - return 0, nil - case '1': - return 1, nil - case '2': - return 2, nil - case '3': - return 3, nil - case '4': - return 4, nil - case '5': - return 5, nil - case '6': - return 6, nil - case '7': - return 7, nil - case '8': - return 8, nil - case '9': - return 9, nil - } - } - return internal.ParseInt(s, base, bitSize) -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/decimal.go b/vendor/github.com/pquerna/ffjson/fflib/v1/decimal.go deleted file mode 100644 index 069df7a02a..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/decimal.go +++ /dev/null @@ -1,378 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Multiprecision decimal numbers. -// For floating-point formatting only; not general purpose. -// Only operations are assign and (binary) left/right shift. -// Can do binary floating point in multiprecision decimal precisely -// because 2 divides 10; cannot do decimal floating point -// in multiprecision binary precisely. - -package v1 - -type decimal struct { - d [800]byte // digits - nd int // number of digits used - dp int // decimal point - neg bool - trunc bool // discarded nonzero digits beyond d[:nd] -} - -func (a *decimal) String() string { - n := 10 + a.nd - if a.dp > 0 { - n += a.dp - } - if a.dp < 0 { - n += -a.dp - } - - buf := make([]byte, n) - w := 0 - switch { - case a.nd == 0: - return "0" - - case a.dp <= 0: - // zeros fill space between decimal point and digits - buf[w] = '0' - w++ - buf[w] = '.' - w++ - w += digitZero(buf[w : w+-a.dp]) - w += copy(buf[w:], a.d[0:a.nd]) - - case a.dp < a.nd: - // decimal point in middle of digits - w += copy(buf[w:], a.d[0:a.dp]) - buf[w] = '.' - w++ - w += copy(buf[w:], a.d[a.dp:a.nd]) - - default: - // zeros fill space between digits and decimal point - w += copy(buf[w:], a.d[0:a.nd]) - w += digitZero(buf[w : w+a.dp-a.nd]) - } - return string(buf[0:w]) -} - -func digitZero(dst []byte) int { - for i := range dst { - dst[i] = '0' - } - return len(dst) -} - -// trim trailing zeros from number. -// (They are meaningless; the decimal point is tracked -// independent of the number of digits.) -func trim(a *decimal) { - for a.nd > 0 && a.d[a.nd-1] == '0' { - a.nd-- - } - if a.nd == 0 { - a.dp = 0 - } -} - -// Assign v to a. -func (a *decimal) Assign(v uint64) { - var buf [24]byte - - // Write reversed decimal in buf. - n := 0 - for v > 0 { - v1 := v / 10 - v -= 10 * v1 - buf[n] = byte(v + '0') - n++ - v = v1 - } - - // Reverse again to produce forward decimal in a.d. 
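ParseUint and ParseInt above special-case single-digit inputs so the common case of small JSON numbers never reaches the full parser, and otherwise fall back to a vendored copy of strconv's parser that works directly on []byte. A stand-alone sketch of the same fast path, using strconv for the fallback instead of the vendored internal package:

    package main

    import (
        "fmt"
        "strconv"
    )

    // parseUint parses a decimal []byte, short-circuiting the one-digit case.
    func parseUint(s []byte) (uint64, error) {
        if len(s) == 1 && s[0] >= '0' && s[0] <= '9' {
            return uint64(s[0] - '0'), nil
        }
        return strconv.ParseUint(string(s), 10, 64)
    }

    func main() {
        for _, in := range []string{"7", "8192", "x"} {
            n, err := parseUint([]byte(in))
            fmt.Println(n, err)
        }
    }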
- a.nd = 0 - for n--; n >= 0; n-- { - a.d[a.nd] = buf[n] - a.nd++ - } - a.dp = a.nd - trim(a) -} - -// Maximum shift that we can do in one pass without overflow. -// Signed int has 31 bits, and we have to be able to accommodate 9<>k == 0; r++ { - if r >= a.nd { - if n == 0 { - // a == 0; shouldn't get here, but handle anyway. - a.nd = 0 - return - } - for n>>k == 0 { - n = n * 10 - r++ - } - break - } - c := int(a.d[r]) - n = n*10 + c - '0' - } - a.dp -= r - 1 - - // Pick up a digit, put down a digit. - for ; r < a.nd; r++ { - c := int(a.d[r]) - dig := n >> k - n -= dig << k - a.d[w] = byte(dig + '0') - w++ - n = n*10 + c - '0' - } - - // Put down extra digits. - for n > 0 { - dig := n >> k - n -= dig << k - if w < len(a.d) { - a.d[w] = byte(dig + '0') - w++ - } else if dig > 0 { - a.trunc = true - } - n = n * 10 - } - - a.nd = w - trim(a) -} - -// Cheat sheet for left shift: table indexed by shift count giving -// number of new digits that will be introduced by that shift. -// -// For example, leftcheats[4] = {2, "625"}. That means that -// if we are shifting by 4 (multiplying by 16), it will add 2 digits -// when the string prefix is "625" through "999", and one fewer digit -// if the string prefix is "000" through "624". -// -// Credit for this trick goes to Ken. - -type leftCheat struct { - delta int // number of new digits - cutoff string // minus one digit if original < a. -} - -var leftcheats = []leftCheat{ - // Leading digits of 1/2^i = 5^i. - // 5^23 is not an exact 64-bit floating point number, - // so have to use bc for the math. - /* - seq 27 | sed 's/^/5^/' | bc | - awk 'BEGIN{ print "\tleftCheat{ 0, \"\" }," } - { - log2 = log(2)/log(10) - printf("\tleftCheat{ %d, \"%s\" },\t// * %d\n", - int(log2*NR+1), $0, 2**NR) - }' - */ - {0, ""}, - {1, "5"}, // * 2 - {1, "25"}, // * 4 - {1, "125"}, // * 8 - {2, "625"}, // * 16 - {2, "3125"}, // * 32 - {2, "15625"}, // * 64 - {3, "78125"}, // * 128 - {3, "390625"}, // * 256 - {3, "1953125"}, // * 512 - {4, "9765625"}, // * 1024 - {4, "48828125"}, // * 2048 - {4, "244140625"}, // * 4096 - {4, "1220703125"}, // * 8192 - {5, "6103515625"}, // * 16384 - {5, "30517578125"}, // * 32768 - {5, "152587890625"}, // * 65536 - {6, "762939453125"}, // * 131072 - {6, "3814697265625"}, // * 262144 - {6, "19073486328125"}, // * 524288 - {7, "95367431640625"}, // * 1048576 - {7, "476837158203125"}, // * 2097152 - {7, "2384185791015625"}, // * 4194304 - {7, "11920928955078125"}, // * 8388608 - {8, "59604644775390625"}, // * 16777216 - {8, "298023223876953125"}, // * 33554432 - {8, "1490116119384765625"}, // * 67108864 - {9, "7450580596923828125"}, // * 134217728 -} - -// Is the leading prefix of b lexicographically less than s? -func prefixIsLessThan(b []byte, s string) bool { - for i := 0; i < len(s); i++ { - if i >= len(b) { - return true - } - if b[i] != s[i] { - return b[i] < s[i] - } - } - return false -} - -// Binary shift left (/ 2) by k bits. k <= maxShift to avoid overflow. -func leftShift(a *decimal, k uint) { - delta := leftcheats[k].delta - if prefixIsLessThan(a.d[0:a.nd], leftcheats[k].cutoff) { - delta-- - } - - r := a.nd // read index - w := a.nd + delta // write index - n := 0 - - // Pick up a digit, put down a digit. - for r--; r >= 0; r-- { - n += (int(a.d[r]) - '0') << k - quo := n / 10 - rem := n - 10*quo - w-- - if w < len(a.d) { - a.d[w] = byte(rem + '0') - } else if rem != 0 { - a.trunc = true - } - n = quo - } - - // Put down extra digits. 
- for n > 0 { - quo := n / 10 - rem := n - 10*quo - w-- - if w < len(a.d) { - a.d[w] = byte(rem + '0') - } else if rem != 0 { - a.trunc = true - } - n = quo - } - - a.nd += delta - if a.nd >= len(a.d) { - a.nd = len(a.d) - } - a.dp += delta - trim(a) -} - -// Binary shift left (k > 0) or right (k < 0). -func (a *decimal) Shift(k int) { - switch { - case a.nd == 0: - // nothing to do: a == 0 - case k > 0: - for k > maxShift { - leftShift(a, maxShift) - k -= maxShift - } - leftShift(a, uint(k)) - case k < 0: - for k < -maxShift { - rightShift(a, maxShift) - k += maxShift - } - rightShift(a, uint(-k)) - } -} - -// If we chop a at nd digits, should we round up? -func shouldRoundUp(a *decimal, nd int) bool { - if nd < 0 || nd >= a.nd { - return false - } - if a.d[nd] == '5' && nd+1 == a.nd { // exactly halfway - round to even - // if we truncated, a little higher than what's recorded - always round up - if a.trunc { - return true - } - return nd > 0 && (a.d[nd-1]-'0')%2 != 0 - } - // not halfway - digit tells all - return a.d[nd] >= '5' -} - -// Round a to nd digits (or fewer). -// If nd is zero, it means we're rounding -// just to the left of the digits, as in -// 0.09 -> 0.1. -func (a *decimal) Round(nd int) { - if nd < 0 || nd >= a.nd { - return - } - if shouldRoundUp(a, nd) { - a.RoundUp(nd) - } else { - a.RoundDown(nd) - } -} - -// Round a down to nd digits (or fewer). -func (a *decimal) RoundDown(nd int) { - if nd < 0 || nd >= a.nd { - return - } - a.nd = nd - trim(a) -} - -// Round a up to nd digits (or fewer). -func (a *decimal) RoundUp(nd int) { - if nd < 0 || nd >= a.nd { - return - } - - // round up - for i := nd - 1; i >= 0; i-- { - c := a.d[i] - if c < '9' { // can stop after this digit - a.d[i]++ - a.nd = i + 1 - return - } - } - - // Number is all 9s. - // Change to single 1 with adjusted decimal point. - a.d[0] = '1' - a.nd = 1 - a.dp++ -} - -// Extract integer part, rounded appropriately. -// No guarantees about overflow. -func (a *decimal) RoundedInteger() uint64 { - if a.dp > 20 { - return 0xFFFFFFFFFFFFFFFF - } - var i int - n := uint64(0) - for i = 0; i < a.dp && i < a.nd; i++ { - n = n*10 + uint64(a.d[i]-'0') - } - for ; i < a.dp; i++ { - n *= 10 - } - if shouldRoundUp(a, a.dp) { - n++ - } - return n -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/extfloat.go b/vendor/github.com/pquerna/ffjson/fflib/v1/extfloat.go deleted file mode 100644 index 508ddc6bed..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/extfloat.go +++ /dev/null @@ -1,668 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package v1 - -// An extFloat represents an extended floating-point number, with more -// precision than a float64. It does not try to save bits: the -// number represented by the structure is mant*(2^exp), with a negative -// sign if neg is true. -type extFloat struct { - mant uint64 - exp int - neg bool -} - -// Powers of ten taken from double-conversion library. 
-// http://code.google.com/p/double-conversion/ -const ( - firstPowerOfTen = -348 - stepPowerOfTen = 8 -) - -var smallPowersOfTen = [...]extFloat{ - {1 << 63, -63, false}, // 1 - {0xa << 60, -60, false}, // 1e1 - {0x64 << 57, -57, false}, // 1e2 - {0x3e8 << 54, -54, false}, // 1e3 - {0x2710 << 50, -50, false}, // 1e4 - {0x186a0 << 47, -47, false}, // 1e5 - {0xf4240 << 44, -44, false}, // 1e6 - {0x989680 << 40, -40, false}, // 1e7 -} - -var powersOfTen = [...]extFloat{ - {0xfa8fd5a0081c0288, -1220, false}, // 10^-348 - {0xbaaee17fa23ebf76, -1193, false}, // 10^-340 - {0x8b16fb203055ac76, -1166, false}, // 10^-332 - {0xcf42894a5dce35ea, -1140, false}, // 10^-324 - {0x9a6bb0aa55653b2d, -1113, false}, // 10^-316 - {0xe61acf033d1a45df, -1087, false}, // 10^-308 - {0xab70fe17c79ac6ca, -1060, false}, // 10^-300 - {0xff77b1fcbebcdc4f, -1034, false}, // 10^-292 - {0xbe5691ef416bd60c, -1007, false}, // 10^-284 - {0x8dd01fad907ffc3c, -980, false}, // 10^-276 - {0xd3515c2831559a83, -954, false}, // 10^-268 - {0x9d71ac8fada6c9b5, -927, false}, // 10^-260 - {0xea9c227723ee8bcb, -901, false}, // 10^-252 - {0xaecc49914078536d, -874, false}, // 10^-244 - {0x823c12795db6ce57, -847, false}, // 10^-236 - {0xc21094364dfb5637, -821, false}, // 10^-228 - {0x9096ea6f3848984f, -794, false}, // 10^-220 - {0xd77485cb25823ac7, -768, false}, // 10^-212 - {0xa086cfcd97bf97f4, -741, false}, // 10^-204 - {0xef340a98172aace5, -715, false}, // 10^-196 - {0xb23867fb2a35b28e, -688, false}, // 10^-188 - {0x84c8d4dfd2c63f3b, -661, false}, // 10^-180 - {0xc5dd44271ad3cdba, -635, false}, // 10^-172 - {0x936b9fcebb25c996, -608, false}, // 10^-164 - {0xdbac6c247d62a584, -582, false}, // 10^-156 - {0xa3ab66580d5fdaf6, -555, false}, // 10^-148 - {0xf3e2f893dec3f126, -529, false}, // 10^-140 - {0xb5b5ada8aaff80b8, -502, false}, // 10^-132 - {0x87625f056c7c4a8b, -475, false}, // 10^-124 - {0xc9bcff6034c13053, -449, false}, // 10^-116 - {0x964e858c91ba2655, -422, false}, // 10^-108 - {0xdff9772470297ebd, -396, false}, // 10^-100 - {0xa6dfbd9fb8e5b88f, -369, false}, // 10^-92 - {0xf8a95fcf88747d94, -343, false}, // 10^-84 - {0xb94470938fa89bcf, -316, false}, // 10^-76 - {0x8a08f0f8bf0f156b, -289, false}, // 10^-68 - {0xcdb02555653131b6, -263, false}, // 10^-60 - {0x993fe2c6d07b7fac, -236, false}, // 10^-52 - {0xe45c10c42a2b3b06, -210, false}, // 10^-44 - {0xaa242499697392d3, -183, false}, // 10^-36 - {0xfd87b5f28300ca0e, -157, false}, // 10^-28 - {0xbce5086492111aeb, -130, false}, // 10^-20 - {0x8cbccc096f5088cc, -103, false}, // 10^-12 - {0xd1b71758e219652c, -77, false}, // 10^-4 - {0x9c40000000000000, -50, false}, // 10^4 - {0xe8d4a51000000000, -24, false}, // 10^12 - {0xad78ebc5ac620000, 3, false}, // 10^20 - {0x813f3978f8940984, 30, false}, // 10^28 - {0xc097ce7bc90715b3, 56, false}, // 10^36 - {0x8f7e32ce7bea5c70, 83, false}, // 10^44 - {0xd5d238a4abe98068, 109, false}, // 10^52 - {0x9f4f2726179a2245, 136, false}, // 10^60 - {0xed63a231d4c4fb27, 162, false}, // 10^68 - {0xb0de65388cc8ada8, 189, false}, // 10^76 - {0x83c7088e1aab65db, 216, false}, // 10^84 - {0xc45d1df942711d9a, 242, false}, // 10^92 - {0x924d692ca61be758, 269, false}, // 10^100 - {0xda01ee641a708dea, 295, false}, // 10^108 - {0xa26da3999aef774a, 322, false}, // 10^116 - {0xf209787bb47d6b85, 348, false}, // 10^124 - {0xb454e4a179dd1877, 375, false}, // 10^132 - {0x865b86925b9bc5c2, 402, false}, // 10^140 - {0xc83553c5c8965d3d, 428, false}, // 10^148 - {0x952ab45cfa97a0b3, 455, false}, // 10^156 - {0xde469fbd99a05fe3, 481, false}, // 10^164 - {0xa59bc234db398c25, 
508, false}, // 10^172 - {0xf6c69a72a3989f5c, 534, false}, // 10^180 - {0xb7dcbf5354e9bece, 561, false}, // 10^188 - {0x88fcf317f22241e2, 588, false}, // 10^196 - {0xcc20ce9bd35c78a5, 614, false}, // 10^204 - {0x98165af37b2153df, 641, false}, // 10^212 - {0xe2a0b5dc971f303a, 667, false}, // 10^220 - {0xa8d9d1535ce3b396, 694, false}, // 10^228 - {0xfb9b7cd9a4a7443c, 720, false}, // 10^236 - {0xbb764c4ca7a44410, 747, false}, // 10^244 - {0x8bab8eefb6409c1a, 774, false}, // 10^252 - {0xd01fef10a657842c, 800, false}, // 10^260 - {0x9b10a4e5e9913129, 827, false}, // 10^268 - {0xe7109bfba19c0c9d, 853, false}, // 10^276 - {0xac2820d9623bf429, 880, false}, // 10^284 - {0x80444b5e7aa7cf85, 907, false}, // 10^292 - {0xbf21e44003acdd2d, 933, false}, // 10^300 - {0x8e679c2f5e44ff8f, 960, false}, // 10^308 - {0xd433179d9c8cb841, 986, false}, // 10^316 - {0x9e19db92b4e31ba9, 1013, false}, // 10^324 - {0xeb96bf6ebadf77d9, 1039, false}, // 10^332 - {0xaf87023b9bf0ee6b, 1066, false}, // 10^340 -} - -// floatBits returns the bits of the float64 that best approximates -// the extFloat passed as receiver. Overflow is set to true if -// the resulting float64 is ±Inf. -func (f *extFloat) floatBits(flt *floatInfo) (bits uint64, overflow bool) { - f.Normalize() - - exp := f.exp + 63 - - // Exponent too small. - if exp < flt.bias+1 { - n := flt.bias + 1 - exp - f.mant >>= uint(n) - exp += n - } - - // Extract 1+flt.mantbits bits from the 64-bit mantissa. - mant := f.mant >> (63 - flt.mantbits) - if f.mant&(1<<(62-flt.mantbits)) != 0 { - // Round up. - mant += 1 - } - - // Rounding might have added a bit; shift down. - if mant == 2<>= 1 - exp++ - } - - // Infinities. - if exp-flt.bias >= 1<>uint(-f.exp))<>= uint(-f.exp) - f.exp = 0 - return *f, *f - } - expBiased := exp - flt.bias - - upper = extFloat{mant: 2*f.mant + 1, exp: f.exp - 1, neg: f.neg} - if mant != 1<>(64-32) == 0 { - mant <<= 32 - exp -= 32 - } - if mant>>(64-16) == 0 { - mant <<= 16 - exp -= 16 - } - if mant>>(64-8) == 0 { - mant <<= 8 - exp -= 8 - } - if mant>>(64-4) == 0 { - mant <<= 4 - exp -= 4 - } - if mant>>(64-2) == 0 { - mant <<= 2 - exp -= 2 - } - if mant>>(64-1) == 0 { - mant <<= 1 - exp -= 1 - } - shift = uint(f.exp - exp) - f.mant, f.exp = mant, exp - return -} - -// Multiply sets f to the product f*g: the result is correctly rounded, -// but not normalized. -func (f *extFloat) Multiply(g extFloat) { - fhi, flo := f.mant>>32, uint64(uint32(f.mant)) - ghi, glo := g.mant>>32, uint64(uint32(g.mant)) - - // Cross products. - cross1 := fhi * glo - cross2 := flo * ghi - - // f.mant*g.mant is fhi*ghi << 64 + (cross1+cross2) << 32 + flo*glo - f.mant = fhi*ghi + (cross1 >> 32) + (cross2 >> 32) - rem := uint64(uint32(cross1)) + uint64(uint32(cross2)) + ((flo * glo) >> 32) - // Round up. - rem += (1 << 31) - - f.mant += (rem >> 32) - f.exp = f.exp + g.exp + 64 -} - -var uint64pow10 = [...]uint64{ - 1, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, - 1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19, -} - -// AssignDecimal sets f to an approximate value mantissa*10^exp. It -// returns true if the value represented by f is guaranteed to be the -// best approximation of d after being rounded to a float64 or -// float32 depending on flt. -func (f *extFloat) AssignDecimal(mantissa uint64, exp10 int, neg bool, trunc bool, flt *floatInfo) (ok bool) { - const uint64digits = 19 - const errorscale = 8 - errors := 0 // An upper bound for error, computed in errorscale*ulp. - if trunc { - // the decimal number was truncated. 
- errors += errorscale / 2 - } - - f.mant = mantissa - f.exp = 0 - f.neg = neg - - // Multiply by powers of ten. - i := (exp10 - firstPowerOfTen) / stepPowerOfTen - if exp10 < firstPowerOfTen || i >= len(powersOfTen) { - return false - } - adjExp := (exp10 - firstPowerOfTen) % stepPowerOfTen - - // We multiply by exp%step - if adjExp < uint64digits && mantissa < uint64pow10[uint64digits-adjExp] { - // We can multiply the mantissa exactly. - f.mant *= uint64pow10[adjExp] - f.Normalize() - } else { - f.Normalize() - f.Multiply(smallPowersOfTen[adjExp]) - errors += errorscale / 2 - } - - // We multiply by 10 to the exp - exp%step. - f.Multiply(powersOfTen[i]) - if errors > 0 { - errors += 1 - } - errors += errorscale / 2 - - // Normalize - shift := f.Normalize() - errors <<= shift - - // Now f is a good approximation of the decimal. - // Check whether the error is too large: that is, if the mantissa - // is perturbated by the error, the resulting float64 will change. - // The 64 bits mantissa is 1 + 52 bits for float64 + 11 extra bits. - // - // In many cases the approximation will be good enough. - denormalExp := flt.bias - 63 - var extrabits uint - if f.exp <= denormalExp { - // f.mant * 2^f.exp is smaller than 2^(flt.bias+1). - extrabits = uint(63 - flt.mantbits + 1 + uint(denormalExp-f.exp)) - } else { - extrabits = uint(63 - flt.mantbits) - } - - halfway := uint64(1) << (extrabits - 1) - mant_extra := f.mant & (1< expMax: - i-- - default: - break Loop - } - } - // Apply the desired decimal shift on f. It will have exponent - // in the desired range. This is multiplication by 10^-exp10. - f.Multiply(powersOfTen[i]) - - return -(firstPowerOfTen + i*stepPowerOfTen), i -} - -// frexp10Many applies a common shift by a power of ten to a, b, c. -func frexp10Many(a, b, c *extFloat) (exp10 int) { - exp10, i := c.frexp10() - a.Multiply(powersOfTen[i]) - b.Multiply(powersOfTen[i]) - return -} - -// FixedDecimal stores in d the first n significant digits -// of the decimal representation of f. It returns false -// if it cannot be sure of the answer. -func (f *extFloat) FixedDecimal(d *decimalSlice, n int) bool { - if f.mant == 0 { - d.nd = 0 - d.dp = 0 - d.neg = f.neg - return true - } - if n == 0 { - panic("strconv: internal error: extFloat.FixedDecimal called with n == 0") - } - // Multiply by an appropriate power of ten to have a reasonable - // number to process. - f.Normalize() - exp10, _ := f.frexp10() - - shift := uint(-f.exp) - integer := uint32(f.mant >> shift) - fraction := f.mant - (uint64(integer) << shift) - ε := uint64(1) // ε is the uncertainty we have on the mantissa of f. - - // Write exactly n digits to d. - needed := n // how many digits are left to write. - integerDigits := 0 // the number of decimal digits of integer. - pow10 := uint64(1) // the power of ten by which f was scaled. - for i, pow := 0, uint64(1); i < 20; i++ { - if pow > uint64(integer) { - integerDigits = i - break - } - pow *= 10 - } - rest := integer - if integerDigits > needed { - // the integral part is already large, trim the last digits. - pow10 = uint64pow10[integerDigits-needed] - integer /= uint32(pow10) - rest -= integer * uint32(pow10) - } else { - rest = 0 - } - - // Write the digits of integer: the digits of rest are omitted. 
- var buf [32]byte - pos := len(buf) - for v := integer; v > 0; { - v1 := v / 10 - v -= 10 * v1 - pos-- - buf[pos] = byte(v + '0') - v = v1 - } - for i := pos; i < len(buf); i++ { - d.d[i-pos] = buf[i] - } - nd := len(buf) - pos - d.nd = nd - d.dp = integerDigits + exp10 - needed -= nd - - if needed > 0 { - if rest != 0 || pow10 != 1 { - panic("strconv: internal error, rest != 0 but needed > 0") - } - // Emit digits for the fractional part. Each time, 10*fraction - // fits in a uint64 without overflow. - for needed > 0 { - fraction *= 10 - ε *= 10 // the uncertainty scales as we multiply by ten. - if 2*ε > 1<> shift - d.d[nd] = byte(digit + '0') - fraction -= digit << shift - nd++ - needed-- - } - d.nd = nd - } - - // We have written a truncation of f (a numerator / 10^d.dp). The remaining part - // can be interpreted as a small number (< 1) to be added to the last digit of the - // numerator. - // - // If rest > 0, the amount is: - // (rest< 0 guarantees that pow10 << shift does not overflow a uint64. - // - // If rest = 0, pow10 == 1 and the amount is - // fraction / (1 << shift) - // fraction being known with a ±ε uncertainty. - // - // We pass this information to the rounding routine for adjustment. - - ok := adjustLastDigitFixed(d, uint64(rest)<= 0; i-- { - if d.d[i] != '0' { - d.nd = i + 1 - break - } - } - return true -} - -// adjustLastDigitFixed assumes d contains the representation of the integral part -// of some number, whose fractional part is num / (den << shift). The numerator -// num is only known up to an uncertainty of size ε, assumed to be less than -// (den << shift)/2. -// -// It will increase the last digit by one to account for correct rounding, typically -// when the fractional part is greater than 1/2, and will return false if ε is such -// that no correct answer can be given. -func adjustLastDigitFixed(d *decimalSlice, num, den uint64, shift uint, ε uint64) bool { - if num > den< den< den< (den< den<= 0; i-- { - if d.d[i] == '9' { - d.nd-- - } else { - break - } - } - if i < 0 { - d.d[0] = '1' - d.nd = 1 - d.dp++ - } else { - d.d[i]++ - } - return true - } - return false -} - -// ShortestDecimal stores in d the shortest decimal representation of f -// which belongs to the open interval (lower, upper), where f is supposed -// to lie. It returns false whenever the result is unsure. The implementation -// uses the Grisu3 algorithm. -func (f *extFloat) ShortestDecimal(d *decimalSlice, lower, upper *extFloat) bool { - if f.mant == 0 { - d.nd = 0 - d.dp = 0 - d.neg = f.neg - return true - } - if f.exp == 0 && *lower == *f && *lower == *upper { - // an exact integer. - var buf [24]byte - n := len(buf) - 1 - for v := f.mant; v > 0; { - v1 := v / 10 - v -= 10 * v1 - buf[n] = byte(v + '0') - n-- - v = v1 - } - nd := len(buf) - n - 1 - for i := 0; i < nd; i++ { - d.d[i] = buf[n+1+i] - } - d.nd, d.dp = nd, nd - for d.nd > 0 && d.d[d.nd-1] == '0' { - d.nd-- - } - if d.nd == 0 { - d.dp = 0 - } - d.neg = f.neg - return true - } - upper.Normalize() - // Uniformize exponents. - if f.exp > upper.exp { - f.mant <<= uint(f.exp - upper.exp) - f.exp = upper.exp - } - if lower.exp > upper.exp { - lower.mant <<= uint(lower.exp - upper.exp) - lower.exp = upper.exp - } - - exp10 := frexp10Many(lower, f, upper) - // Take a safety margin due to rounding in frexp10Many, but we lose precision. - upper.mant++ - lower.mant-- - - // The shortest representation of f is either rounded up or down, but - // in any case, it is a truncation of upper. 
- shift := uint(-upper.exp) - integer := uint32(upper.mant >> shift) - fraction := upper.mant - (uint64(integer) << shift) - - // How far we can go down from upper until the result is wrong. - allowance := upper.mant - lower.mant - // How far we should go to get a very precise result. - targetDiff := upper.mant - f.mant - - // Count integral digits: there are at most 10. - var integerDigits int - for i, pow := 0, uint64(1); i < 20; i++ { - if pow > uint64(integer) { - integerDigits = i - break - } - pow *= 10 - } - for i := 0; i < integerDigits; i++ { - pow := uint64pow10[integerDigits-i-1] - digit := integer / uint32(pow) - d.d[i] = byte(digit + '0') - integer -= digit * uint32(pow) - // evaluate whether we should stop. - if currentDiff := uint64(integer)<> shift) - d.d[d.nd] = byte(digit + '0') - d.nd++ - fraction -= uint64(digit) << shift - if fraction < allowance*multiplier { - // We are in the admissible range. Note that if allowance is about to - // overflow, that is, allowance > 2^64/10, the condition is automatically - // true due to the limited range of fraction. - return adjustLastDigit(d, - fraction, targetDiff*multiplier, allowance*multiplier, - 1< maxDiff-ulpBinary { - // we went too far - return false - } - if d.nd == 1 && d.d[0] == '0' { - // the number has actually reached zero. - d.nd = 0 - d.dp = 0 - } - return true -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/fold.go b/vendor/github.com/pquerna/ffjson/fflib/v1/fold.go deleted file mode 100644 index 4d33e6f77d..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/fold.go +++ /dev/null @@ -1,121 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's encoding/json/fold.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package v1 - -import ( - "unicode/utf8" -) - -const ( - caseMask = ^byte(0x20) // Mask to ignore case in ASCII. - kelvin = '\u212a' - smallLongEss = '\u017f' -) - -// equalFoldRight is a specialization of bytes.EqualFold when s is -// known to be all ASCII (including punctuation), but contains an 's', -// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t. -// See comments on foldFunc. -func EqualFoldRight(s, t []byte) bool { - for _, sb := range s { - if len(t) == 0 { - return false - } - tb := t[0] - if tb < utf8.RuneSelf { - if sb != tb { - sbUpper := sb & caseMask - if 'A' <= sbUpper && sbUpper <= 'Z' { - if sbUpper != tb&caseMask { - return false - } - } else { - return false - } - } - t = t[1:] - continue - } - // sb is ASCII and t is not. t must be either kelvin - // sign or long s; sb must be s, S, k, or K. 
- tr, size := utf8.DecodeRune(t) - switch sb { - case 's', 'S': - if tr != smallLongEss { - return false - } - case 'k', 'K': - if tr != kelvin { - return false - } - default: - return false - } - t = t[size:] - - } - if len(t) > 0 { - return false - } - return true -} - -// asciiEqualFold is a specialization of bytes.EqualFold for use when -// s is all ASCII (but may contain non-letters) and contains no -// special-folding letters. -// See comments on foldFunc. -func AsciiEqualFold(s, t []byte) bool { - if len(s) != len(t) { - return false - } - for i, sb := range s { - tb := t[i] - if sb == tb { - continue - } - if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') { - if sb&caseMask != tb&caseMask { - return false - } - } else { - return false - } - } - return true -} - -// simpleLetterEqualFold is a specialization of bytes.EqualFold for -// use when s is all ASCII letters (no underscores, etc) and also -// doesn't contain 'k', 'K', 's', or 'S'. -// See comments on foldFunc. -func SimpleLetterEqualFold(s, t []byte) bool { - if len(s) != len(t) { - return false - } - for i, b := range s { - if b&caseMask != t[i]&caseMask { - return false - } - } - return true -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/ftoa.go b/vendor/github.com/pquerna/ffjson/fflib/v1/ftoa.go deleted file mode 100644 index 360d6dbcf9..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/ftoa.go +++ /dev/null @@ -1,542 +0,0 @@ -package v1 - -/** - * Copyright 2015 Paul Querna, Klaus Post - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Most of this file are on Go stdlib's strconv/ftoa.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -import "math" - -// TODO: move elsewhere? -type floatInfo struct { - mantbits uint - expbits uint - bias int -} - -var optimize = true // can change for testing - -var float32info = floatInfo{23, 8, -127} -var float64info = floatInfo{52, 11, -1023} - -// AppendFloat appends the string form of the floating-point number f, -// as generated by FormatFloat -func AppendFloat(dst EncodingBuffer, val float64, fmt byte, prec, bitSize int) { - var bits uint64 - var flt *floatInfo - switch bitSize { - case 32: - bits = uint64(math.Float32bits(float32(val))) - flt = &float32info - case 64: - bits = math.Float64bits(val) - flt = &float64info - default: - panic("strconv: illegal AppendFloat/FormatFloat bitSize") - } - - neg := bits>>(flt.expbits+flt.mantbits) != 0 - exp := int(bits>>flt.mantbits) & (1< digs.nd && digs.nd >= digs.dp { - eprec = digs.nd - } - // %e is used if the exponent from the conversion - // is less than -4 or greater than or equal to the precision. - // if precision was the shortest possible, use precision 6 for this decision. 
- if shortest { - eprec = 6 - } - exp := digs.dp - 1 - if exp < -4 || exp >= eprec { - if prec > digs.nd { - prec = digs.nd - } - fmtE(dst, neg, digs, prec-1, fmt+'e'-'g') - return - } - if prec > digs.dp { - prec = digs.nd - } - fmtF(dst, neg, digs, max(prec-digs.dp, 0)) - return - } - - // unknown format - dst.Write([]byte{'%', fmt}) - return -} - -// Round d (= mant * 2^exp) to the shortest number of digits -// that will let the original floating point value be precisely -// reconstructed. Size is original floating point size (64 or 32). -func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) { - // If mantissa is zero, the number is zero; stop now. - if mant == 0 { - d.nd = 0 - return - } - - // Compute upper and lower such that any decimal number - // between upper and lower (possibly inclusive) - // will round to the original floating point number. - - // We may see at once that the number is already shortest. - // - // Suppose d is not denormal, so that 2^exp <= d < 10^dp. - // The closest shorter number is at least 10^(dp-nd) away. - // The lower/upper bounds computed below are at distance - // at most 2^(exp-mantbits). - // - // So the number is already shortest if 10^(dp-nd) > 2^(exp-mantbits), - // or equivalently log2(10)*(dp-nd) > exp-mantbits. - // It is true if 332/100*(dp-nd) >= exp-mantbits (log2(10) > 3.32). - minexp := flt.bias + 1 // minimum possible exponent - if exp > minexp && 332*(d.dp-d.nd) >= 100*(exp-int(flt.mantbits)) { - // The number is already shortest. - return - } - - // d = mant << (exp - mantbits) - // Next highest floating point number is mant+1 << exp-mantbits. - // Our upper bound is halfway between, mant*2+1 << exp-mantbits-1. - upper := new(decimal) - upper.Assign(mant*2 + 1) - upper.Shift(exp - int(flt.mantbits) - 1) - - // d = mant << (exp - mantbits) - // Next lowest floating point number is mant-1 << exp-mantbits, - // unless mant-1 drops the significant bit and exp is not the minimum exp, - // in which case the next lowest is mant*2-1 << exp-mantbits-1. - // Either way, call it mantlo << explo-mantbits. - // Our lower bound is halfway between, mantlo*2+1 << explo-mantbits-1. - var mantlo uint64 - var explo int - if mant > 1< 0 { - dst.WriteByte('.') - i := 1 - m := min(d.nd, prec+1) - if i < m { - dst.Write(d.d[i:m]) - i = m - } - for i <= prec { - dst.WriteByte('0') - i++ - } - } - - // e± - dst.WriteByte(fmt) - exp := d.dp - 1 - if d.nd == 0 { // special case: 0 has exponent 0 - exp = 0 - } - if exp < 0 { - ch = '-' - exp = -exp - } else { - ch = '+' - } - dst.WriteByte(ch) - - // dd or ddd - switch { - case exp < 10: - dst.WriteByte('0') - dst.WriteByte(byte(exp) + '0') - case exp < 100: - dst.WriteByte(byte(exp/10) + '0') - dst.WriteByte(byte(exp%10) + '0') - default: - dst.WriteByte(byte(exp/100) + '0') - dst.WriteByte(byte(exp/10)%10 + '0') - dst.WriteByte(byte(exp%10) + '0') - } - - return -} - -// %f: -ddddddd.ddddd -func fmtF(dst EncodingBuffer, neg bool, d decimalSlice, prec int) { - // sign - if neg { - dst.WriteByte('-') - } - - // integer, padded with zeros as needed. 
- if d.dp > 0 { - m := min(d.nd, d.dp) - dst.Write(d.d[:m]) - for ; m < d.dp; m++ { - dst.WriteByte('0') - } - } else { - dst.WriteByte('0') - } - - // fraction - if prec > 0 { - dst.WriteByte('.') - for i := 0; i < prec; i++ { - ch := byte('0') - if j := d.dp + i; 0 <= j && j < d.nd { - ch = d.d[j] - } - dst.WriteByte(ch) - } - } - - return -} - -// %b: -ddddddddp±ddd -func fmtB(dst EncodingBuffer, neg bool, mant uint64, exp int, flt *floatInfo) { - // sign - if neg { - dst.WriteByte('-') - } - - // mantissa - formatBits(dst, mant, 10, false) - - // p - dst.WriteByte('p') - - // ±exponent - exp -= int(flt.mantbits) - if exp >= 0 { - dst.WriteByte('+') - } - formatBits(dst, uint64(exp), 10, exp < 0) - - return -} - -func min(a, b int) int { - if a < b { - return a - } - return b -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -// formatBits computes the string representation of u in the given base. -// If neg is set, u is treated as negative int64 value. -func formatBits(dst EncodingBuffer, u uint64, base int, neg bool) { - if base < 2 || base > len(digits) { - panic("strconv: illegal AppendInt/FormatInt base") - } - // 2 <= base && base <= len(digits) - - var a [64 + 1]byte // +1 for sign of 64bit value in base 2 - i := len(a) - - if neg { - u = -u - } - - // convert bits - if base == 10 { - // common case: use constants for / because - // the compiler can optimize it into a multiply+shift - - if ^uintptr(0)>>32 == 0 { - for u > uint64(^uintptr(0)) { - q := u / 1e9 - us := uintptr(u - q*1e9) // us % 1e9 fits into a uintptr - for j := 9; j > 0; j-- { - i-- - qs := us / 10 - a[i] = byte(us - qs*10 + '0') - us = qs - } - u = q - } - } - - // u guaranteed to fit into a uintptr - us := uintptr(u) - for us >= 10 { - i-- - q := us / 10 - a[i] = byte(us - q*10 + '0') - us = q - } - // u < 10 - i-- - a[i] = byte(us + '0') - - } else if s := shifts[base]; s > 0 { - // base is power of 2: use shifts and masks instead of / and % - b := uint64(base) - m := uintptr(b) - 1 // == 1<= b { - i-- - a[i] = digits[uintptr(u)&m] - u >>= s - } - // u < base - i-- - a[i] = digits[uintptr(u)] - - } else { - // general case - b := uint64(base) - for u >= b { - i-- - q := u / b - a[i] = digits[uintptr(u-q*b)] - u = q - } - // u < base - i-- - a[i] = digits[uintptr(u)] - } - - // add sign, if any - if neg { - i-- - a[i] = '-' - } - - dst.Write(a[i:]) -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atof.go b/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atof.go deleted file mode 100644 index 46c1289ec4..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atof.go +++ /dev/null @@ -1,936 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's strconv/atof.go */ - -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package internal - -// decimal to binary floating point conversion. -// Algorithm: -// 1) Store input in multiprecision decimal. -// 2) Multiply/divide decimal by powers of two until in range [0.5, 1) -// 3) Multiply by 2^precision and round to get mantissa. - -import "math" - -var optimize = true // can change for testing - -func equalIgnoreCase(s1 []byte, s2 []byte) bool { - if len(s1) != len(s2) { - return false - } - for i := 0; i < len(s1); i++ { - c1 := s1[i] - if 'A' <= c1 && c1 <= 'Z' { - c1 += 'a' - 'A' - } - c2 := s2[i] - if 'A' <= c2 && c2 <= 'Z' { - c2 += 'a' - 'A' - } - if c1 != c2 { - return false - } - } - return true -} - -func special(s []byte) (f float64, ok bool) { - if len(s) == 0 { - return - } - switch s[0] { - default: - return - case '+': - if equalIgnoreCase(s, []byte("+inf")) || equalIgnoreCase(s, []byte("+infinity")) { - return math.Inf(1), true - } - case '-': - if equalIgnoreCase(s, []byte("-inf")) || equalIgnoreCase(s, []byte("-infinity")) { - return math.Inf(-1), true - } - case 'n', 'N': - if equalIgnoreCase(s, []byte("nan")) { - return math.NaN(), true - } - case 'i', 'I': - if equalIgnoreCase(s, []byte("inf")) || equalIgnoreCase(s, []byte("infinity")) { - return math.Inf(1), true - } - } - return -} - -func (b *decimal) set(s []byte) (ok bool) { - i := 0 - b.neg = false - b.trunc = false - - // optional sign - if i >= len(s) { - return - } - switch { - case s[i] == '+': - i++ - case s[i] == '-': - b.neg = true - i++ - } - - // digits - sawdot := false - sawdigits := false - for ; i < len(s); i++ { - switch { - case s[i] == '.': - if sawdot { - return - } - sawdot = true - b.dp = b.nd - continue - - case '0' <= s[i] && s[i] <= '9': - sawdigits = true - if s[i] == '0' && b.nd == 0 { // ignore leading zeros - b.dp-- - continue - } - if b.nd < len(b.d) { - b.d[b.nd] = s[i] - b.nd++ - } else if s[i] != '0' { - b.trunc = true - } - continue - } - break - } - if !sawdigits { - return - } - if !sawdot { - b.dp = b.nd - } - - // optional exponent moves decimal point. - // if we read a very large, very long number, - // just be sure to move the decimal point by - // a lot (say, 100000). it doesn't matter if it's - // not the exact number. - if i < len(s) && (s[i] == 'e' || s[i] == 'E') { - i++ - if i >= len(s) { - return - } - esign := 1 - if s[i] == '+' { - i++ - } else if s[i] == '-' { - i++ - esign = -1 - } - if i >= len(s) || s[i] < '0' || s[i] > '9' { - return - } - e := 0 - for ; i < len(s) && '0' <= s[i] && s[i] <= '9'; i++ { - if e < 10000 { - e = e*10 + int(s[i]) - '0' - } - } - b.dp += e * esign - } - - if i != len(s) { - return - } - - ok = true - return -} - -// readFloat reads a decimal mantissa and exponent from a float -// string representation. It sets ok to false if the number could -// not fit return types or is invalid. 
-func readFloat(s []byte) (mantissa uint64, exp int, neg, trunc, ok bool) { - const uint64digits = 19 - i := 0 - - // optional sign - if i >= len(s) { - return - } - switch { - case s[i] == '+': - i++ - case s[i] == '-': - neg = true - i++ - } - - // digits - sawdot := false - sawdigits := false - nd := 0 - ndMant := 0 - dp := 0 - for ; i < len(s); i++ { - switch c := s[i]; true { - case c == '.': - if sawdot { - return - } - sawdot = true - dp = nd - continue - - case '0' <= c && c <= '9': - sawdigits = true - if c == '0' && nd == 0 { // ignore leading zeros - dp-- - continue - } - nd++ - if ndMant < uint64digits { - mantissa *= 10 - mantissa += uint64(c - '0') - ndMant++ - } else if s[i] != '0' { - trunc = true - } - continue - } - break - } - if !sawdigits { - return - } - if !sawdot { - dp = nd - } - - // optional exponent moves decimal point. - // if we read a very large, very long number, - // just be sure to move the decimal point by - // a lot (say, 100000). it doesn't matter if it's - // not the exact number. - if i < len(s) && (s[i] == 'e' || s[i] == 'E') { - i++ - if i >= len(s) { - return - } - esign := 1 - if s[i] == '+' { - i++ - } else if s[i] == '-' { - i++ - esign = -1 - } - if i >= len(s) || s[i] < '0' || s[i] > '9' { - return - } - e := 0 - for ; i < len(s) && '0' <= s[i] && s[i] <= '9'; i++ { - if e < 10000 { - e = e*10 + int(s[i]) - '0' - } - } - dp += e * esign - } - - if i != len(s) { - return - } - - exp = dp - ndMant - ok = true - return - -} - -// decimal power of ten to binary power of two. -var powtab = []int{1, 3, 6, 9, 13, 16, 19, 23, 26} - -func (d *decimal) floatBits(flt *floatInfo) (b uint64, overflow bool) { - var exp int - var mant uint64 - - // Zero is always a special case. - if d.nd == 0 { - mant = 0 - exp = flt.bias - goto out - } - - // Obvious overflow/underflow. - // These bounds are for 64-bit floats. - // Will have to change if we want to support 80-bit floats in the future. - if d.dp > 310 { - goto overflow - } - if d.dp < -330 { - // zero - mant = 0 - exp = flt.bias - goto out - } - - // Scale by powers of two until in range [0.5, 1.0) - exp = 0 - for d.dp > 0 { - var n int - if d.dp >= len(powtab) { - n = 27 - } else { - n = powtab[d.dp] - } - d.Shift(-n) - exp += n - } - for d.dp < 0 || d.dp == 0 && d.d[0] < '5' { - var n int - if -d.dp >= len(powtab) { - n = 27 - } else { - n = powtab[-d.dp] - } - d.Shift(n) - exp -= n - } - - // Our range is [0.5,1) but floating point range is [1,2). - exp-- - - // Minimum representable exponent is flt.bias+1. - // If the exponent is smaller, move it up and - // adjust d accordingly. - if exp < flt.bias+1 { - n := flt.bias + 1 - exp - d.Shift(-n) - exp += n - } - - if exp-flt.bias >= 1<>= 1 - exp++ - if exp-flt.bias >= 1<>float64info.mantbits != 0 { - return - } - f = float64(mantissa) - if neg { - f = -f - } - switch { - case exp == 0: - // an integer. - return f, true - // Exact integers are <= 10^15. - // Exact powers of ten are <= 10^22. - case exp > 0 && exp <= 15+22: // int * 10^k - // If exponent is big but number of digits is not, - // can move a few zeros into the integer part. - if exp > 22 { - f *= float64pow10[exp-22] - exp = 22 - } - if f > 1e15 || f < -1e15 { - // the exponent was really too large. - return - } - return f * float64pow10[exp], true - case exp < 0 && exp >= -22: // int / 10^k - return f / float64pow10[-exp], true - } - return -} - -// If possible to compute mantissa*10^exp to 32-bit float f exactly, -// entirely in floating-point math, do so, avoiding the machinery above. 
-func atof32exact(mantissa uint64, exp int, neg bool) (f float32, ok bool) { - if mantissa>>float32info.mantbits != 0 { - return - } - f = float32(mantissa) - if neg { - f = -f - } - switch { - case exp == 0: - return f, true - // Exact integers are <= 10^7. - // Exact powers of ten are <= 10^10. - case exp > 0 && exp <= 7+10: // int * 10^k - // If exponent is big but number of digits is not, - // can move a few zeros into the integer part. - if exp > 10 { - f *= float32pow10[exp-10] - exp = 10 - } - if f > 1e7 || f < -1e7 { - // the exponent was really too large. - return - } - return f * float32pow10[exp], true - case exp < 0 && exp >= -10: // int / 10^k - return f / float32pow10[-exp], true - } - return -} - -const fnParseFloat = "ParseFloat" - -func atof32(s []byte) (f float32, err error) { - if val, ok := special(s); ok { - return float32(val), nil - } - - if optimize { - // Parse mantissa and exponent. - mantissa, exp, neg, trunc, ok := readFloat(s) - if ok { - // Try pure floating-point arithmetic conversion. - if !trunc { - if f, ok := atof32exact(mantissa, exp, neg); ok { - return f, nil - } - } - // Try another fast path. - ext := new(extFloat) - if ok := ext.AssignDecimal(mantissa, exp, neg, trunc, &float32info); ok { - b, ovf := ext.floatBits(&float32info) - f = math.Float32frombits(uint32(b)) - if ovf { - err = rangeError(fnParseFloat, string(s)) - } - return f, err - } - } - } - var d decimal - if !d.set(s) { - return 0, syntaxError(fnParseFloat, string(s)) - } - b, ovf := d.floatBits(&float32info) - f = math.Float32frombits(uint32(b)) - if ovf { - err = rangeError(fnParseFloat, string(s)) - } - return f, err -} - -func atof64(s []byte) (f float64, err error) { - if val, ok := special(s); ok { - return val, nil - } - - if optimize { - // Parse mantissa and exponent. - mantissa, exp, neg, trunc, ok := readFloat(s) - if ok { - // Try pure floating-point arithmetic conversion. - if !trunc { - if f, ok := atof64exact(mantissa, exp, neg); ok { - return f, nil - } - } - // Try another fast path. - ext := new(extFloat) - if ok := ext.AssignDecimal(mantissa, exp, neg, trunc, &float64info); ok { - b, ovf := ext.floatBits(&float64info) - f = math.Float64frombits(b) - if ovf { - err = rangeError(fnParseFloat, string(s)) - } - return f, err - } - } - } - var d decimal - if !d.set(s) { - return 0, syntaxError(fnParseFloat, string(s)) - } - b, ovf := d.floatBits(&float64info) - f = math.Float64frombits(b) - if ovf { - err = rangeError(fnParseFloat, string(s)) - } - return f, err -} - -// ParseFloat converts the string s to a floating-point number -// with the precision specified by bitSize: 32 for float32, or 64 for float64. -// When bitSize=32, the result still has type float64, but it will be -// convertible to float32 without changing its value. -// -// If s is well-formed and near a valid floating point number, -// ParseFloat returns the nearest floating point number rounded -// using IEEE754 unbiased rounding. -// -// The errors that ParseFloat returns have concrete type *NumError -// and include err.Num = s. -// -// If s is not syntactically well-formed, ParseFloat returns err.Err = ErrSyntax. -// -// If s is syntactically well-formed but is more than 1/2 ULP -// away from the largest floating point number of the given size, -// ParseFloat returns f = ±Inf, err.Err = ErrRange. 
-func ParseFloat(s []byte, bitSize int) (f float64, err error) { - if bitSize == 32 { - f1, err1 := atof32(s) - return float64(f1), err1 - } - f1, err1 := atof64(s) - return f1, err1 -} - -// oroginal: strconv/decimal.go, but not exported, and needed for PareFloat. - -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Multiprecision decimal numbers. -// For floating-point formatting only; not general purpose. -// Only operations are assign and (binary) left/right shift. -// Can do binary floating point in multiprecision decimal precisely -// because 2 divides 10; cannot do decimal floating point -// in multiprecision binary precisely. - -type decimal struct { - d [800]byte // digits - nd int // number of digits used - dp int // decimal point - neg bool - trunc bool // discarded nonzero digits beyond d[:nd] -} - -func (a *decimal) String() string { - n := 10 + a.nd - if a.dp > 0 { - n += a.dp - } - if a.dp < 0 { - n += -a.dp - } - - buf := make([]byte, n) - w := 0 - switch { - case a.nd == 0: - return "0" - - case a.dp <= 0: - // zeros fill space between decimal point and digits - buf[w] = '0' - w++ - buf[w] = '.' - w++ - w += digitZero(buf[w : w+-a.dp]) - w += copy(buf[w:], a.d[0:a.nd]) - - case a.dp < a.nd: - // decimal point in middle of digits - w += copy(buf[w:], a.d[0:a.dp]) - buf[w] = '.' - w++ - w += copy(buf[w:], a.d[a.dp:a.nd]) - - default: - // zeros fill space between digits and decimal point - w += copy(buf[w:], a.d[0:a.nd]) - w += digitZero(buf[w : w+a.dp-a.nd]) - } - return string(buf[0:w]) -} - -func digitZero(dst []byte) int { - for i := range dst { - dst[i] = '0' - } - return len(dst) -} - -// trim trailing zeros from number. -// (They are meaningless; the decimal point is tracked -// independent of the number of digits.) -func trim(a *decimal) { - for a.nd > 0 && a.d[a.nd-1] == '0' { - a.nd-- - } - if a.nd == 0 { - a.dp = 0 - } -} - -// Assign v to a. -func (a *decimal) Assign(v uint64) { - var buf [24]byte - - // Write reversed decimal in buf. - n := 0 - for v > 0 { - v1 := v / 10 - v -= 10 * v1 - buf[n] = byte(v + '0') - n++ - v = v1 - } - - // Reverse again to produce forward decimal in a.d. - a.nd = 0 - for n--; n >= 0; n-- { - a.d[a.nd] = buf[n] - a.nd++ - } - a.dp = a.nd - trim(a) -} - -// Maximum shift that we can do in one pass without overflow. -// Signed int has 31 bits, and we have to be able to accommodate 9<>k == 0; r++ { - if r >= a.nd { - if n == 0 { - // a == 0; shouldn't get here, but handle anyway. - a.nd = 0 - return - } - for n>>k == 0 { - n = n * 10 - r++ - } - break - } - c := int(a.d[r]) - n = n*10 + c - '0' - } - a.dp -= r - 1 - - // Pick up a digit, put down a digit. - for ; r < a.nd; r++ { - c := int(a.d[r]) - dig := n >> k - n -= dig << k - a.d[w] = byte(dig + '0') - w++ - n = n*10 + c - '0' - } - - // Put down extra digits. - for n > 0 { - dig := n >> k - n -= dig << k - if w < len(a.d) { - a.d[w] = byte(dig + '0') - w++ - } else if dig > 0 { - a.trunc = true - } - n = n * 10 - } - - a.nd = w - trim(a) -} - -// Cheat sheet for left shift: table indexed by shift count giving -// number of new digits that will be introduced by that shift. -// -// For example, leftcheats[4] = {2, "625"}. That means that -// if we are shifting by 4 (multiplying by 16), it will add 2 digits -// when the string prefix is "625" through "999", and one fewer digit -// if the string prefix is "000" through "624". -// -// Credit for this trick goes to Ken. 
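As a minimal illustration of the left-shift "cheat" described just above (an editorial sketch, not part of the removed file; the helper extraDigitsAfterShift and the main function are hypothetical names used only for this example), multiplying a decimal digit string by 2^k adds either as many decimal digits as 2^k has, or one fewer, and which of the two happens is decided by comparing the leading digits against the digits of 5^k:

package main

import (
	"fmt"
	"math/big"
)

// extraDigitsAfterShift reports how many decimal digits are added when the
// number whose decimal digits are d (no leading zeros) is multiplied by 2^k.
func extraDigitsAfterShift(d string, k uint) int {
	pow2 := new(big.Int).Lsh(big.NewInt(1), k).String()                           // digits of 2^k
	cutoff := new(big.Int).Exp(big.NewInt(5), big.NewInt(int64(k)), nil).String() // digits of 5^k
	delta := len(pow2)                                                            // maximum possible digit growth
	// Pad (or truncate) d to the cutoff length so the comparison is purely
	// lexicographic, mirroring what prefixIsLessThan does in the removed code.
	prefix := d
	for len(prefix) < len(cutoff) {
		prefix += "0"
	}
	if prefix[:len(cutoff)] < cutoff {
		delta-- // below the cutoff the product stays one order of magnitude lower
	}
	return delta
}

func main() {
	fmt.Println(extraDigitsAfterShift("625", 4)) // 2: 625 * 16 = 10000
	fmt.Println(extraDigitsAfterShift("624", 4)) // 1: 624 * 16 = 9984
}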
- -type leftCheat struct { - delta int // number of new digits - cutoff string // minus one digit if original < a. -} - -var leftcheats = []leftCheat{ - // Leading digits of 1/2^i = 5^i. - // 5^23 is not an exact 64-bit floating point number, - // so have to use bc for the math. - /* - seq 27 | sed 's/^/5^/' | bc | - awk 'BEGIN{ print "\tleftCheat{ 0, \"\" }," } - { - log2 = log(2)/log(10) - printf("\tleftCheat{ %d, \"%s\" },\t// * %d\n", - int(log2*NR+1), $0, 2**NR) - }' - */ - {0, ""}, - {1, "5"}, // * 2 - {1, "25"}, // * 4 - {1, "125"}, // * 8 - {2, "625"}, // * 16 - {2, "3125"}, // * 32 - {2, "15625"}, // * 64 - {3, "78125"}, // * 128 - {3, "390625"}, // * 256 - {3, "1953125"}, // * 512 - {4, "9765625"}, // * 1024 - {4, "48828125"}, // * 2048 - {4, "244140625"}, // * 4096 - {4, "1220703125"}, // * 8192 - {5, "6103515625"}, // * 16384 - {5, "30517578125"}, // * 32768 - {5, "152587890625"}, // * 65536 - {6, "762939453125"}, // * 131072 - {6, "3814697265625"}, // * 262144 - {6, "19073486328125"}, // * 524288 - {7, "95367431640625"}, // * 1048576 - {7, "476837158203125"}, // * 2097152 - {7, "2384185791015625"}, // * 4194304 - {7, "11920928955078125"}, // * 8388608 - {8, "59604644775390625"}, // * 16777216 - {8, "298023223876953125"}, // * 33554432 - {8, "1490116119384765625"}, // * 67108864 - {9, "7450580596923828125"}, // * 134217728 -} - -// Is the leading prefix of b lexicographically less than s? -func prefixIsLessThan(b []byte, s string) bool { - for i := 0; i < len(s); i++ { - if i >= len(b) { - return true - } - if b[i] != s[i] { - return b[i] < s[i] - } - } - return false -} - -// Binary shift left (/ 2) by k bits. k <= maxShift to avoid overflow. -func leftShift(a *decimal, k uint) { - delta := leftcheats[k].delta - if prefixIsLessThan(a.d[0:a.nd], leftcheats[k].cutoff) { - delta-- - } - - r := a.nd // read index - w := a.nd + delta // write index - n := 0 - - // Pick up a digit, put down a digit. - for r--; r >= 0; r-- { - n += (int(a.d[r]) - '0') << k - quo := n / 10 - rem := n - 10*quo - w-- - if w < len(a.d) { - a.d[w] = byte(rem + '0') - } else if rem != 0 { - a.trunc = true - } - n = quo - } - - // Put down extra digits. - for n > 0 { - quo := n / 10 - rem := n - 10*quo - w-- - if w < len(a.d) { - a.d[w] = byte(rem + '0') - } else if rem != 0 { - a.trunc = true - } - n = quo - } - - a.nd += delta - if a.nd >= len(a.d) { - a.nd = len(a.d) - } - a.dp += delta - trim(a) -} - -// Binary shift left (k > 0) or right (k < 0). -func (a *decimal) Shift(k int) { - switch { - case a.nd == 0: - // nothing to do: a == 0 - case k > 0: - for k > maxShift { - leftShift(a, maxShift) - k -= maxShift - } - leftShift(a, uint(k)) - case k < 0: - for k < -maxShift { - rightShift(a, maxShift) - k += maxShift - } - rightShift(a, uint(-k)) - } -} - -// If we chop a at nd digits, should we round up? -func shouldRoundUp(a *decimal, nd int) bool { - if nd < 0 || nd >= a.nd { - return false - } - if a.d[nd] == '5' && nd+1 == a.nd { // exactly halfway - round to even - // if we truncated, a little higher than what's recorded - always round up - if a.trunc { - return true - } - return nd > 0 && (a.d[nd-1]-'0')%2 != 0 - } - // not halfway - digit tells all - return a.d[nd] >= '5' -} - -// Round a to nd digits (or fewer). -// If nd is zero, it means we're rounding -// just to the left of the digits, as in -// 0.09 -> 0.1. 
-func (a *decimal) Round(nd int) { - if nd < 0 || nd >= a.nd { - return - } - if shouldRoundUp(a, nd) { - a.RoundUp(nd) - } else { - a.RoundDown(nd) - } -} - -// Round a down to nd digits (or fewer). -func (a *decimal) RoundDown(nd int) { - if nd < 0 || nd >= a.nd { - return - } - a.nd = nd - trim(a) -} - -// Round a up to nd digits (or fewer). -func (a *decimal) RoundUp(nd int) { - if nd < 0 || nd >= a.nd { - return - } - - // round up - for i := nd - 1; i >= 0; i-- { - c := a.d[i] - if c < '9' { // can stop after this digit - a.d[i]++ - a.nd = i + 1 - return - } - } - - // Number is all 9s. - // Change to single 1 with adjusted decimal point. - a.d[0] = '1' - a.nd = 1 - a.dp++ -} - -// Extract integer part, rounded appropriately. -// No guarantees about overflow. -func (a *decimal) RoundedInteger() uint64 { - if a.dp > 20 { - return 0xFFFFFFFFFFFFFFFF - } - var i int - n := uint64(0) - for i = 0; i < a.dp && i < a.nd; i++ { - n = n*10 + uint64(a.d[i]-'0') - } - for ; i < a.dp; i++ { - n *= 10 - } - if shouldRoundUp(a, a.dp) { - n++ - } - return n -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atoi.go b/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atoi.go deleted file mode 100644 index 06eb2ec29f..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/internal/atoi.go +++ /dev/null @@ -1,213 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's strconv/atoi.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package internal - -import ( - "errors" - "strconv" -) - -// ErrRange indicates that a value is out of range for the target type. -var ErrRange = errors.New("value out of range") - -// ErrSyntax indicates that a value does not have the right syntax for the target type. -var ErrSyntax = errors.New("invalid syntax") - -// A NumError records a failed conversion. -type NumError struct { - Func string // the failing function (ParseBool, ParseInt, ParseUint, ParseFloat) - Num string // the input - Err error // the reason the conversion failed (ErrRange, ErrSyntax) -} - -func (e *NumError) Error() string { - return "strconv." + e.Func + ": " + "parsing " + strconv.Quote(e.Num) + ": " + e.Err.Error() -} - -func syntaxError(fn, str string) *NumError { - return &NumError{fn, str, ErrSyntax} -} - -func rangeError(fn, str string) *NumError { - return &NumError{fn, str, ErrRange} -} - -const intSize = 32 << uint(^uint(0)>>63) - -// IntSize is the size in bits of an int or uint value. -const IntSize = intSize - -// Return the first number n such that n*base >= 1<<64. 
-func cutoff64(base int) uint64 { - if base < 2 { - return 0 - } - return (1<<64-1)/uint64(base) + 1 -} - -// ParseUint is like ParseInt but for unsigned numbers, and oeprating on []byte -func ParseUint(s []byte, base int, bitSize int) (n uint64, err error) { - var cutoff, maxVal uint64 - - if bitSize == 0 { - bitSize = int(IntSize) - } - - s0 := s - switch { - case len(s) < 1: - err = ErrSyntax - goto Error - - case 2 <= base && base <= 36: - // valid base; nothing to do - - case base == 0: - // Look for octal, hex prefix. - switch { - case s[0] == '0' && len(s) > 1 && (s[1] == 'x' || s[1] == 'X'): - base = 16 - s = s[2:] - if len(s) < 1 { - err = ErrSyntax - goto Error - } - case s[0] == '0': - base = 8 - default: - base = 10 - } - - default: - err = errors.New("invalid base " + strconv.Itoa(base)) - goto Error - } - - n = 0 - cutoff = cutoff64(base) - maxVal = 1<= base { - n = 0 - err = ErrSyntax - goto Error - } - - if n >= cutoff { - // n*base overflows - n = 1<<64 - 1 - err = ErrRange - goto Error - } - n *= uint64(base) - - n1 := n + uint64(v) - if n1 < n || n1 > maxVal { - // n+v overflows - n = 1<<64 - 1 - err = ErrRange - goto Error - } - n = n1 - } - - return n, nil - -Error: - return n, &NumError{"ParseUint", string(s0), err} -} - -// ParseInt interprets a string s in the given base (2 to 36) and -// returns the corresponding value i. If base == 0, the base is -// implied by the string's prefix: base 16 for "0x", base 8 for -// "0", and base 10 otherwise. -// -// The bitSize argument specifies the integer type -// that the result must fit into. Bit sizes 0, 8, 16, 32, and 64 -// correspond to int, int8, int16, int32, and int64. -// -// The errors that ParseInt returns have concrete type *NumError -// and include err.Num = s. If s is empty or contains invalid -// digits, err.Err = ErrSyntax and the returned value is 0; -// if the value corresponding to s cannot be represented by a -// signed integer of the given size, err.Err = ErrRange and the -// returned value is the maximum magnitude integer of the -// appropriate bitSize and sign. -func ParseInt(s []byte, base int, bitSize int) (i int64, err error) { - const fnParseInt = "ParseInt" - - if bitSize == 0 { - bitSize = int(IntSize) - } - - // Empty string bad. - if len(s) == 0 { - return 0, syntaxError(fnParseInt, string(s)) - } - - // Pick off leading sign. - s0 := s - neg := false - if s[0] == '+' { - s = s[1:] - } else if s[0] == '-' { - neg = true - s = s[1:] - } - - // Convert unsigned and check range. - var un uint64 - un, err = ParseUint(s, base, bitSize) - if err != nil && err.(*NumError).Err != ErrRange { - err.(*NumError).Func = fnParseInt - err.(*NumError).Num = string(s0) - return 0, err - } - cutoff := uint64(1 << uint(bitSize-1)) - if !neg && un >= cutoff { - return int64(cutoff - 1), rangeError(fnParseInt, string(s0)) - } - if neg && un > cutoff { - return -int64(cutoff), rangeError(fnParseInt, string(s0)) - } - n := int64(un) - if neg { - n = -n - } - return n, nil -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/internal/extfloat.go b/vendor/github.com/pquerna/ffjson/fflib/v1/internal/extfloat.go deleted file mode 100644 index ab791085a4..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/internal/extfloat.go +++ /dev/null @@ -1,668 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package internal - -// An extFloat represents an extended floating-point number, with more -// precision than a float64. It does not try to save bits: the -// number represented by the structure is mant*(2^exp), with a negative -// sign if neg is true. -type extFloat struct { - mant uint64 - exp int - neg bool -} - -// Powers of ten taken from double-conversion library. -// http://code.google.com/p/double-conversion/ -const ( - firstPowerOfTen = -348 - stepPowerOfTen = 8 -) - -var smallPowersOfTen = [...]extFloat{ - {1 << 63, -63, false}, // 1 - {0xa << 60, -60, false}, // 1e1 - {0x64 << 57, -57, false}, // 1e2 - {0x3e8 << 54, -54, false}, // 1e3 - {0x2710 << 50, -50, false}, // 1e4 - {0x186a0 << 47, -47, false}, // 1e5 - {0xf4240 << 44, -44, false}, // 1e6 - {0x989680 << 40, -40, false}, // 1e7 -} - -var powersOfTen = [...]extFloat{ - {0xfa8fd5a0081c0288, -1220, false}, // 10^-348 - {0xbaaee17fa23ebf76, -1193, false}, // 10^-340 - {0x8b16fb203055ac76, -1166, false}, // 10^-332 - {0xcf42894a5dce35ea, -1140, false}, // 10^-324 - {0x9a6bb0aa55653b2d, -1113, false}, // 10^-316 - {0xe61acf033d1a45df, -1087, false}, // 10^-308 - {0xab70fe17c79ac6ca, -1060, false}, // 10^-300 - {0xff77b1fcbebcdc4f, -1034, false}, // 10^-292 - {0xbe5691ef416bd60c, -1007, false}, // 10^-284 - {0x8dd01fad907ffc3c, -980, false}, // 10^-276 - {0xd3515c2831559a83, -954, false}, // 10^-268 - {0x9d71ac8fada6c9b5, -927, false}, // 10^-260 - {0xea9c227723ee8bcb, -901, false}, // 10^-252 - {0xaecc49914078536d, -874, false}, // 10^-244 - {0x823c12795db6ce57, -847, false}, // 10^-236 - {0xc21094364dfb5637, -821, false}, // 10^-228 - {0x9096ea6f3848984f, -794, false}, // 10^-220 - {0xd77485cb25823ac7, -768, false}, // 10^-212 - {0xa086cfcd97bf97f4, -741, false}, // 10^-204 - {0xef340a98172aace5, -715, false}, // 10^-196 - {0xb23867fb2a35b28e, -688, false}, // 10^-188 - {0x84c8d4dfd2c63f3b, -661, false}, // 10^-180 - {0xc5dd44271ad3cdba, -635, false}, // 10^-172 - {0x936b9fcebb25c996, -608, false}, // 10^-164 - {0xdbac6c247d62a584, -582, false}, // 10^-156 - {0xa3ab66580d5fdaf6, -555, false}, // 10^-148 - {0xf3e2f893dec3f126, -529, false}, // 10^-140 - {0xb5b5ada8aaff80b8, -502, false}, // 10^-132 - {0x87625f056c7c4a8b, -475, false}, // 10^-124 - {0xc9bcff6034c13053, -449, false}, // 10^-116 - {0x964e858c91ba2655, -422, false}, // 10^-108 - {0xdff9772470297ebd, -396, false}, // 10^-100 - {0xa6dfbd9fb8e5b88f, -369, false}, // 10^-92 - {0xf8a95fcf88747d94, -343, false}, // 10^-84 - {0xb94470938fa89bcf, -316, false}, // 10^-76 - {0x8a08f0f8bf0f156b, -289, false}, // 10^-68 - {0xcdb02555653131b6, -263, false}, // 10^-60 - {0x993fe2c6d07b7fac, -236, false}, // 10^-52 - {0xe45c10c42a2b3b06, -210, false}, // 10^-44 - {0xaa242499697392d3, -183, false}, // 10^-36 - {0xfd87b5f28300ca0e, -157, false}, // 10^-28 - {0xbce5086492111aeb, -130, false}, // 10^-20 - {0x8cbccc096f5088cc, -103, false}, // 10^-12 - {0xd1b71758e219652c, -77, false}, // 10^-4 - {0x9c40000000000000, -50, false}, // 10^4 - {0xe8d4a51000000000, -24, false}, // 10^12 - {0xad78ebc5ac620000, 3, false}, // 10^20 - {0x813f3978f8940984, 30, false}, // 10^28 - {0xc097ce7bc90715b3, 56, false}, // 10^36 - {0x8f7e32ce7bea5c70, 83, false}, // 10^44 - {0xd5d238a4abe98068, 109, false}, // 10^52 - {0x9f4f2726179a2245, 136, false}, // 10^60 - {0xed63a231d4c4fb27, 162, false}, // 10^68 - {0xb0de65388cc8ada8, 189, false}, // 10^76 - {0x83c7088e1aab65db, 216, false}, // 10^84 - {0xc45d1df942711d9a, 242, false}, // 10^92 - {0x924d692ca61be758, 269, false}, // 10^100 - 
{0xda01ee641a708dea, 295, false}, // 10^108 - {0xa26da3999aef774a, 322, false}, // 10^116 - {0xf209787bb47d6b85, 348, false}, // 10^124 - {0xb454e4a179dd1877, 375, false}, // 10^132 - {0x865b86925b9bc5c2, 402, false}, // 10^140 - {0xc83553c5c8965d3d, 428, false}, // 10^148 - {0x952ab45cfa97a0b3, 455, false}, // 10^156 - {0xde469fbd99a05fe3, 481, false}, // 10^164 - {0xa59bc234db398c25, 508, false}, // 10^172 - {0xf6c69a72a3989f5c, 534, false}, // 10^180 - {0xb7dcbf5354e9bece, 561, false}, // 10^188 - {0x88fcf317f22241e2, 588, false}, // 10^196 - {0xcc20ce9bd35c78a5, 614, false}, // 10^204 - {0x98165af37b2153df, 641, false}, // 10^212 - {0xe2a0b5dc971f303a, 667, false}, // 10^220 - {0xa8d9d1535ce3b396, 694, false}, // 10^228 - {0xfb9b7cd9a4a7443c, 720, false}, // 10^236 - {0xbb764c4ca7a44410, 747, false}, // 10^244 - {0x8bab8eefb6409c1a, 774, false}, // 10^252 - {0xd01fef10a657842c, 800, false}, // 10^260 - {0x9b10a4e5e9913129, 827, false}, // 10^268 - {0xe7109bfba19c0c9d, 853, false}, // 10^276 - {0xac2820d9623bf429, 880, false}, // 10^284 - {0x80444b5e7aa7cf85, 907, false}, // 10^292 - {0xbf21e44003acdd2d, 933, false}, // 10^300 - {0x8e679c2f5e44ff8f, 960, false}, // 10^308 - {0xd433179d9c8cb841, 986, false}, // 10^316 - {0x9e19db92b4e31ba9, 1013, false}, // 10^324 - {0xeb96bf6ebadf77d9, 1039, false}, // 10^332 - {0xaf87023b9bf0ee6b, 1066, false}, // 10^340 -} - -// floatBits returns the bits of the float64 that best approximates -// the extFloat passed as receiver. Overflow is set to true if -// the resulting float64 is ±Inf. -func (f *extFloat) floatBits(flt *floatInfo) (bits uint64, overflow bool) { - f.Normalize() - - exp := f.exp + 63 - - // Exponent too small. - if exp < flt.bias+1 { - n := flt.bias + 1 - exp - f.mant >>= uint(n) - exp += n - } - - // Extract 1+flt.mantbits bits from the 64-bit mantissa. - mant := f.mant >> (63 - flt.mantbits) - if f.mant&(1<<(62-flt.mantbits)) != 0 { - // Round up. - mant += 1 - } - - // Rounding might have added a bit; shift down. - if mant == 2<>= 1 - exp++ - } - - // Infinities. - if exp-flt.bias >= 1<>uint(-f.exp))<>= uint(-f.exp) - f.exp = 0 - return *f, *f - } - expBiased := exp - flt.bias - - upper = extFloat{mant: 2*f.mant + 1, exp: f.exp - 1, neg: f.neg} - if mant != 1<>(64-32) == 0 { - mant <<= 32 - exp -= 32 - } - if mant>>(64-16) == 0 { - mant <<= 16 - exp -= 16 - } - if mant>>(64-8) == 0 { - mant <<= 8 - exp -= 8 - } - if mant>>(64-4) == 0 { - mant <<= 4 - exp -= 4 - } - if mant>>(64-2) == 0 { - mant <<= 2 - exp -= 2 - } - if mant>>(64-1) == 0 { - mant <<= 1 - exp -= 1 - } - shift = uint(f.exp - exp) - f.mant, f.exp = mant, exp - return -} - -// Multiply sets f to the product f*g: the result is correctly rounded, -// but not normalized. -func (f *extFloat) Multiply(g extFloat) { - fhi, flo := f.mant>>32, uint64(uint32(f.mant)) - ghi, glo := g.mant>>32, uint64(uint32(g.mant)) - - // Cross products. - cross1 := fhi * glo - cross2 := flo * ghi - - // f.mant*g.mant is fhi*ghi << 64 + (cross1+cross2) << 32 + flo*glo - f.mant = fhi*ghi + (cross1 >> 32) + (cross2 >> 32) - rem := uint64(uint32(cross1)) + uint64(uint32(cross2)) + ((flo * glo) >> 32) - // Round up. - rem += (1 << 31) - - f.mant += (rem >> 32) - f.exp = f.exp + g.exp + 64 -} - -var uint64pow10 = [...]uint64{ - 1, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, - 1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19, -} - -// AssignDecimal sets f to an approximate value mantissa*10^exp. 
It -// returns true if the value represented by f is guaranteed to be the -// best approximation of d after being rounded to a float64 or -// float32 depending on flt. -func (f *extFloat) AssignDecimal(mantissa uint64, exp10 int, neg bool, trunc bool, flt *floatInfo) (ok bool) { - const uint64digits = 19 - const errorscale = 8 - errors := 0 // An upper bound for error, computed in errorscale*ulp. - if trunc { - // the decimal number was truncated. - errors += errorscale / 2 - } - - f.mant = mantissa - f.exp = 0 - f.neg = neg - - // Multiply by powers of ten. - i := (exp10 - firstPowerOfTen) / stepPowerOfTen - if exp10 < firstPowerOfTen || i >= len(powersOfTen) { - return false - } - adjExp := (exp10 - firstPowerOfTen) % stepPowerOfTen - - // We multiply by exp%step - if adjExp < uint64digits && mantissa < uint64pow10[uint64digits-adjExp] { - // We can multiply the mantissa exactly. - f.mant *= uint64pow10[adjExp] - f.Normalize() - } else { - f.Normalize() - f.Multiply(smallPowersOfTen[adjExp]) - errors += errorscale / 2 - } - - // We multiply by 10 to the exp - exp%step. - f.Multiply(powersOfTen[i]) - if errors > 0 { - errors += 1 - } - errors += errorscale / 2 - - // Normalize - shift := f.Normalize() - errors <<= shift - - // Now f is a good approximation of the decimal. - // Check whether the error is too large: that is, if the mantissa - // is perturbated by the error, the resulting float64 will change. - // The 64 bits mantissa is 1 + 52 bits for float64 + 11 extra bits. - // - // In many cases the approximation will be good enough. - denormalExp := flt.bias - 63 - var extrabits uint - if f.exp <= denormalExp { - // f.mant * 2^f.exp is smaller than 2^(flt.bias+1). - extrabits = uint(63 - flt.mantbits + 1 + uint(denormalExp-f.exp)) - } else { - extrabits = uint(63 - flt.mantbits) - } - - halfway := uint64(1) << (extrabits - 1) - mant_extra := f.mant & (1< expMax: - i-- - default: - break Loop - } - } - // Apply the desired decimal shift on f. It will have exponent - // in the desired range. This is multiplication by 10^-exp10. - f.Multiply(powersOfTen[i]) - - return -(firstPowerOfTen + i*stepPowerOfTen), i -} - -// frexp10Many applies a common shift by a power of ten to a, b, c. -func frexp10Many(a, b, c *extFloat) (exp10 int) { - exp10, i := c.frexp10() - a.Multiply(powersOfTen[i]) - b.Multiply(powersOfTen[i]) - return -} - -// FixedDecimal stores in d the first n significant digits -// of the decimal representation of f. It returns false -// if it cannot be sure of the answer. -func (f *extFloat) FixedDecimal(d *decimalSlice, n int) bool { - if f.mant == 0 { - d.nd = 0 - d.dp = 0 - d.neg = f.neg - return true - } - if n == 0 { - panic("strconv: internal error: extFloat.FixedDecimal called with n == 0") - } - // Multiply by an appropriate power of ten to have a reasonable - // number to process. - f.Normalize() - exp10, _ := f.frexp10() - - shift := uint(-f.exp) - integer := uint32(f.mant >> shift) - fraction := f.mant - (uint64(integer) << shift) - ε := uint64(1) // ε is the uncertainty we have on the mantissa of f. - - // Write exactly n digits to d. - needed := n // how many digits are left to write. - integerDigits := 0 // the number of decimal digits of integer. - pow10 := uint64(1) // the power of ten by which f was scaled. - for i, pow := 0, uint64(1); i < 20; i++ { - if pow > uint64(integer) { - integerDigits = i - break - } - pow *= 10 - } - rest := integer - if integerDigits > needed { - // the integral part is already large, trim the last digits. 
- pow10 = uint64pow10[integerDigits-needed] - integer /= uint32(pow10) - rest -= integer * uint32(pow10) - } else { - rest = 0 - } - - // Write the digits of integer: the digits of rest are omitted. - var buf [32]byte - pos := len(buf) - for v := integer; v > 0; { - v1 := v / 10 - v -= 10 * v1 - pos-- - buf[pos] = byte(v + '0') - v = v1 - } - for i := pos; i < len(buf); i++ { - d.d[i-pos] = buf[i] - } - nd := len(buf) - pos - d.nd = nd - d.dp = integerDigits + exp10 - needed -= nd - - if needed > 0 { - if rest != 0 || pow10 != 1 { - panic("strconv: internal error, rest != 0 but needed > 0") - } - // Emit digits for the fractional part. Each time, 10*fraction - // fits in a uint64 without overflow. - for needed > 0 { - fraction *= 10 - ε *= 10 // the uncertainty scales as we multiply by ten. - if 2*ε > 1<> shift - d.d[nd] = byte(digit + '0') - fraction -= digit << shift - nd++ - needed-- - } - d.nd = nd - } - - // We have written a truncation of f (a numerator / 10^d.dp). The remaining part - // can be interpreted as a small number (< 1) to be added to the last digit of the - // numerator. - // - // If rest > 0, the amount is: - // (rest< 0 guarantees that pow10 << shift does not overflow a uint64. - // - // If rest = 0, pow10 == 1 and the amount is - // fraction / (1 << shift) - // fraction being known with a ±ε uncertainty. - // - // We pass this information to the rounding routine for adjustment. - - ok := adjustLastDigitFixed(d, uint64(rest)<= 0; i-- { - if d.d[i] != '0' { - d.nd = i + 1 - break - } - } - return true -} - -// adjustLastDigitFixed assumes d contains the representation of the integral part -// of some number, whose fractional part is num / (den << shift). The numerator -// num is only known up to an uncertainty of size ε, assumed to be less than -// (den << shift)/2. -// -// It will increase the last digit by one to account for correct rounding, typically -// when the fractional part is greater than 1/2, and will return false if ε is such -// that no correct answer can be given. -func adjustLastDigitFixed(d *decimalSlice, num, den uint64, shift uint, ε uint64) bool { - if num > den< den< den< (den< den<= 0; i-- { - if d.d[i] == '9' { - d.nd-- - } else { - break - } - } - if i < 0 { - d.d[0] = '1' - d.nd = 1 - d.dp++ - } else { - d.d[i]++ - } - return true - } - return false -} - -// ShortestDecimal stores in d the shortest decimal representation of f -// which belongs to the open interval (lower, upper), where f is supposed -// to lie. It returns false whenever the result is unsure. The implementation -// uses the Grisu3 algorithm. -func (f *extFloat) ShortestDecimal(d *decimalSlice, lower, upper *extFloat) bool { - if f.mant == 0 { - d.nd = 0 - d.dp = 0 - d.neg = f.neg - return true - } - if f.exp == 0 && *lower == *f && *lower == *upper { - // an exact integer. - var buf [24]byte - n := len(buf) - 1 - for v := f.mant; v > 0; { - v1 := v / 10 - v -= 10 * v1 - buf[n] = byte(v + '0') - n-- - v = v1 - } - nd := len(buf) - n - 1 - for i := 0; i < nd; i++ { - d.d[i] = buf[n+1+i] - } - d.nd, d.dp = nd, nd - for d.nd > 0 && d.d[d.nd-1] == '0' { - d.nd-- - } - if d.nd == 0 { - d.dp = 0 - } - d.neg = f.neg - return true - } - upper.Normalize() - // Uniformize exponents. - if f.exp > upper.exp { - f.mant <<= uint(f.exp - upper.exp) - f.exp = upper.exp - } - if lower.exp > upper.exp { - lower.mant <<= uint(lower.exp - upper.exp) - lower.exp = upper.exp - } - - exp10 := frexp10Many(lower, f, upper) - // Take a safety margin due to rounding in frexp10Many, but we lose precision. 
- upper.mant++ - lower.mant-- - - // The shortest representation of f is either rounded up or down, but - // in any case, it is a truncation of upper. - shift := uint(-upper.exp) - integer := uint32(upper.mant >> shift) - fraction := upper.mant - (uint64(integer) << shift) - - // How far we can go down from upper until the result is wrong. - allowance := upper.mant - lower.mant - // How far we should go to get a very precise result. - targetDiff := upper.mant - f.mant - - // Count integral digits: there are at most 10. - var integerDigits int - for i, pow := 0, uint64(1); i < 20; i++ { - if pow > uint64(integer) { - integerDigits = i - break - } - pow *= 10 - } - for i := 0; i < integerDigits; i++ { - pow := uint64pow10[integerDigits-i-1] - digit := integer / uint32(pow) - d.d[i] = byte(digit + '0') - integer -= digit * uint32(pow) - // evaluate whether we should stop. - if currentDiff := uint64(integer)<> shift) - d.d[d.nd] = byte(digit + '0') - d.nd++ - fraction -= uint64(digit) << shift - if fraction < allowance*multiplier { - // We are in the admissible range. Note that if allowance is about to - // overflow, that is, allowance > 2^64/10, the condition is automatically - // true due to the limited range of fraction. - return adjustLastDigit(d, - fraction, targetDiff*multiplier, allowance*multiplier, - 1< maxDiff-ulpBinary { - // we went too far - return false - } - if d.nd == 1 && d.d[0] == '0' { - // the number has actually reached zero. - d.nd = 0 - d.dp = 0 - } - return true -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/internal/ftoa.go b/vendor/github.com/pquerna/ffjson/fflib/v1/internal/ftoa.go deleted file mode 100644 index 253f83b45a..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/internal/ftoa.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Binary to decimal floating point conversion. -// Algorithm: -// 1) store mantissa in multiprecision decimal -// 2) shift decimal by exponent -// 3) read digits out & format - -package internal - -import "math" - -// TODO: move elsewhere? -type floatInfo struct { - mantbits uint - expbits uint - bias int -} - -var float32info = floatInfo{23, 8, -127} -var float64info = floatInfo{52, 11, -1023} - -// FormatFloat converts the floating-point number f to a string, -// according to the format fmt and precision prec. It rounds the -// result assuming that the original was obtained from a floating-point -// value of bitSize bits (32 for float32, 64 for float64). -// -// The format fmt is one of -// 'b' (-ddddp±ddd, a binary exponent), -// 'e' (-d.dddde±dd, a decimal exponent), -// 'E' (-d.ddddE±dd, a decimal exponent), -// 'f' (-ddd.dddd, no exponent), -// 'g' ('e' for large exponents, 'f' otherwise), or -// 'G' ('E' for large exponents, 'f' otherwise). -// -// The precision prec controls the number of digits -// (excluding the exponent) printed by the 'e', 'E', 'f', 'g', and 'G' formats. -// For 'e', 'E', and 'f' it is the number of digits after the decimal point. -// For 'g' and 'G' it is the total number of digits. -// The special precision -1 uses the smallest number of digits -// necessary such that ParseFloat will return f exactly. 
-func formatFloat(f float64, fmt byte, prec, bitSize int) string { - return string(genericFtoa(make([]byte, 0, max(prec+4, 24)), f, fmt, prec, bitSize)) -} - -// AppendFloat appends the string form of the floating-point number f, -// as generated by FormatFloat, to dst and returns the extended buffer. -func appendFloat(dst []byte, f float64, fmt byte, prec int, bitSize int) []byte { - return genericFtoa(dst, f, fmt, prec, bitSize) -} - -func genericFtoa(dst []byte, val float64, fmt byte, prec, bitSize int) []byte { - var bits uint64 - var flt *floatInfo - switch bitSize { - case 32: - bits = uint64(math.Float32bits(float32(val))) - flt = &float32info - case 64: - bits = math.Float64bits(val) - flt = &float64info - default: - panic("strconv: illegal AppendFloat/FormatFloat bitSize") - } - - neg := bits>>(flt.expbits+flt.mantbits) != 0 - exp := int(bits>>flt.mantbits) & (1< digs.nd && digs.nd >= digs.dp { - eprec = digs.nd - } - // %e is used if the exponent from the conversion - // is less than -4 or greater than or equal to the precision. - // if precision was the shortest possible, use precision 6 for this decision. - if shortest { - eprec = 6 - } - exp := digs.dp - 1 - if exp < -4 || exp >= eprec { - if prec > digs.nd { - prec = digs.nd - } - return fmtE(dst, neg, digs, prec-1, fmt+'e'-'g') - } - if prec > digs.dp { - prec = digs.nd - } - return fmtF(dst, neg, digs, max(prec-digs.dp, 0)) - } - - // unknown format - return append(dst, '%', fmt) -} - -// Round d (= mant * 2^exp) to the shortest number of digits -// that will let the original floating point value be precisely -// reconstructed. Size is original floating point size (64 or 32). -func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) { - // If mantissa is zero, the number is zero; stop now. - if mant == 0 { - d.nd = 0 - return - } - - // Compute upper and lower such that any decimal number - // between upper and lower (possibly inclusive) - // will round to the original floating point number. - - // We may see at once that the number is already shortest. - // - // Suppose d is not denormal, so that 2^exp <= d < 10^dp. - // The closest shorter number is at least 10^(dp-nd) away. - // The lower/upper bounds computed below are at distance - // at most 2^(exp-mantbits). - // - // So the number is already shortest if 10^(dp-nd) > 2^(exp-mantbits), - // or equivalently log2(10)*(dp-nd) > exp-mantbits. - // It is true if 332/100*(dp-nd) >= exp-mantbits (log2(10) > 3.32). - minexp := flt.bias + 1 // minimum possible exponent - if exp > minexp && 332*(d.dp-d.nd) >= 100*(exp-int(flt.mantbits)) { - // The number is already shortest. - return - } - - // d = mant << (exp - mantbits) - // Next highest floating point number is mant+1 << exp-mantbits. - // Our upper bound is halfway between, mant*2+1 << exp-mantbits-1. - upper := new(decimal) - upper.Assign(mant*2 + 1) - upper.Shift(exp - int(flt.mantbits) - 1) - - // d = mant << (exp - mantbits) - // Next lowest floating point number is mant-1 << exp-mantbits, - // unless mant-1 drops the significant bit and exp is not the minimum exp, - // in which case the next lowest is mant*2-1 << exp-mantbits-1. - // Either way, call it mantlo << explo-mantbits. - // Our lower bound is halfway between, mantlo*2+1 << explo-mantbits-1. 
- var mantlo uint64 - var explo int - if mant > 1< 0 { - dst = append(dst, '.') - i := 1 - m := d.nd + prec + 1 - max(d.nd, prec+1) - for i < m { - dst = append(dst, d.d[i]) - i++ - } - for i <= prec { - dst = append(dst, '0') - i++ - } - } - - // e± - dst = append(dst, fmt) - exp := d.dp - 1 - if d.nd == 0 { // special case: 0 has exponent 0 - exp = 0 - } - if exp < 0 { - ch = '-' - exp = -exp - } else { - ch = '+' - } - dst = append(dst, ch) - - // dddd - var buf [3]byte - i := len(buf) - for exp >= 10 { - i-- - buf[i] = byte(exp%10 + '0') - exp /= 10 - } - // exp < 10 - i-- - buf[i] = byte(exp + '0') - - switch i { - case 0: - dst = append(dst, buf[0], buf[1], buf[2]) - case 1: - dst = append(dst, buf[1], buf[2]) - case 2: - // leading zeroes - dst = append(dst, '0', buf[2]) - } - return dst -} - -// %f: -ddddddd.ddddd -func fmtF(dst []byte, neg bool, d decimalSlice, prec int) []byte { - // sign - if neg { - dst = append(dst, '-') - } - - // integer, padded with zeros as needed. - if d.dp > 0 { - var i int - for i = 0; i < d.dp && i < d.nd; i++ { - dst = append(dst, d.d[i]) - } - for ; i < d.dp; i++ { - dst = append(dst, '0') - } - } else { - dst = append(dst, '0') - } - - // fraction - if prec > 0 { - dst = append(dst, '.') - for i := 0; i < prec; i++ { - ch := byte('0') - if j := d.dp + i; 0 <= j && j < d.nd { - ch = d.d[j] - } - dst = append(dst, ch) - } - } - - return dst -} - -// %b: -ddddddddp+ddd -func fmtB(dst []byte, neg bool, mant uint64, exp int, flt *floatInfo) []byte { - var buf [50]byte - w := len(buf) - exp -= int(flt.mantbits) - esign := byte('+') - if exp < 0 { - esign = '-' - exp = -exp - } - n := 0 - for exp > 0 || n < 1 { - n++ - w-- - buf[w] = byte(exp%10 + '0') - exp /= 10 - } - w-- - buf[w] = esign - w-- - buf[w] = 'p' - n = 0 - for mant > 0 || n < 1 { - n++ - w-- - buf[w] = byte(mant%10 + '0') - mant /= 10 - } - if neg { - w-- - buf[w] = '-' - } - return append(dst, buf[w:]...) -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/iota.go b/vendor/github.com/pquerna/ffjson/fflib/v1/iota.go deleted file mode 100644 index 3e50f0c418..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/iota.go +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's strconv/iota.go */ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package v1 - -import ( - "io" -) - -const ( - digits = "0123456789abcdefghijklmnopqrstuvwxyz" - digits01 = "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" - digits10 = "0000000000111111111122222222223333333333444444444455555555556666666666777777777788888888889999999999" -) - -var shifts = [len(digits) + 1]uint{ - 1 << 1: 1, - 1 << 2: 2, - 1 << 3: 3, - 1 << 4: 4, - 1 << 5: 5, -} - -var smallNumbers = [][]byte{ - []byte("0"), - []byte("1"), - []byte("2"), - []byte("3"), - []byte("4"), - []byte("5"), - []byte("6"), - []byte("7"), - []byte("8"), - []byte("9"), - []byte("10"), -} - -type FormatBitsWriter interface { - io.Writer - io.ByteWriter -} - -type FormatBitsScratch struct{} - -// -// DEPRECIATED: `scratch` is no longer used, FormatBits2 is available. -// -// FormatBits computes the string representation of u in the given base. -// If neg is set, u is treated as negative int64 value. If append_ is -// set, the string is appended to dst and the resulting byte slice is -// returned as the first result value; otherwise the string is returned -// as the second result value. -// -func FormatBits(scratch *FormatBitsScratch, dst FormatBitsWriter, u uint64, base int, neg bool) { - FormatBits2(dst, u, base, neg) -} - -// FormatBits2 computes the string representation of u in the given base. -// If neg is set, u is treated as negative int64 value. If append_ is -// set, the string is appended to dst and the resulting byte slice is -// returned as the first result value; otherwise the string is returned -// as the second result value. -// -func FormatBits2(dst FormatBitsWriter, u uint64, base int, neg bool) { - if base < 2 || base > len(digits) { - panic("strconv: illegal AppendInt/FormatInt base") - } - // fast path for small common numbers - if u <= 10 { - if neg { - dst.WriteByte('-') - } - dst.Write(smallNumbers[u]) - return - } - - // 2 <= base && base <= len(digits) - - var a = makeSlice(65) - // var a [64 + 1]byte // +1 for sign of 64bit value in base 2 - i := len(a) - - if neg { - u = -u - } - - // convert bits - if base == 10 { - // common case: use constants for / and % because - // the compiler can optimize it into a multiply+shift, - // and unroll loop - for u >= 100 { - i -= 2 - q := u / 100 - j := uintptr(u - q*100) - a[i+1] = digits01[j] - a[i+0] = digits10[j] - u = q - } - if u >= 10 { - i-- - q := u / 10 - a[i] = digits[uintptr(u-q*10)] - u = q - } - - } else if s := shifts[base]; s > 0 { - // base is power of 2: use shifts and masks instead of / and % - b := uint64(base) - m := uintptr(b) - 1 // == 1<= b { - i-- - a[i] = digits[uintptr(u)&m] - u >>= s - } - - } else { - // general case - b := uint64(base) - for u >= b { - i-- - a[i] = digits[uintptr(u%b)] - u /= b - } - } - - // u < base - i-- - a[i] = digits[uintptr(u)] - - // add sign, if any - if neg { - i-- - a[i] = '-' - } - - dst.Write(a[i:]) - - Pool(a) - - return -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/jsonstring.go b/vendor/github.com/pquerna/ffjson/fflib/v1/jsonstring.go deleted file mode 100644 index 513b45d570..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/jsonstring.go +++ /dev/null @@ -1,512 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on Go stdlib's encoding/json/encode.go */ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package v1 - -import ( - "io" - "unicode/utf8" - "strconv" - "unicode/utf16" - "unicode" -) - -const hex = "0123456789abcdef" - -type JsonStringWriter interface { - io.Writer - io.ByteWriter - stringWriter -} - -func WriteJsonString(buf JsonStringWriter, s string) { - WriteJson(buf, []byte(s)) -} - -/** - * Function ported from encoding/json: func (e *encodeState) string(s string) (int, error) - */ -func WriteJson(buf JsonStringWriter, s []byte) { - buf.WriteByte('"') - start := 0 - for i := 0; i < len(s); { - if b := s[i]; b < utf8.RuneSelf { - /* - if 0x20 <= b && b != '\\' && b != '"' && b != '<' && b != '>' && b != '&' { - i++ - continue - } - */ - if lt[b] == true { - i++ - continue - } - - if start < i { - buf.Write(s[start:i]) - } - switch b { - case '\\', '"': - buf.WriteByte('\\') - buf.WriteByte(b) - case '\n': - buf.WriteByte('\\') - buf.WriteByte('n') - case '\r': - buf.WriteByte('\\') - buf.WriteByte('r') - default: - // This encodes bytes < 0x20 except for \n and \r, - // as well as < and >. The latter are escaped because they - // can lead to security holes when user-controlled strings - // are rendered into JSON and served to some browsers. - buf.WriteString(`\u00`) - buf.WriteByte(hex[b>>4]) - buf.WriteByte(hex[b&0xF]) - } - i++ - start = i - continue - } - c, size := utf8.DecodeRune(s[i:]) - if c == utf8.RuneError && size == 1 { - if start < i { - buf.Write(s[start:i]) - } - buf.WriteString(`\ufffd`) - i += size - start = i - continue - } - // U+2028 is LINE SEPARATOR. - // U+2029 is PARAGRAPH SEPARATOR. - // They are both technically valid characters in JSON strings, - // but don't work in JSONP, which has to be evaluated as JavaScript, - // and can lead to security holes there. It is valid JSON to - // escape them, so we do so unconditionally. - // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion. - if c == '\u2028' || c == '\u2029' { - if start < i { - buf.Write(s[start:i]) - } - buf.WriteString(`\u202`) - buf.WriteByte(hex[c&0xF]) - i += size - start = i - continue - } - i += size - } - if start < len(s) { - buf.Write(s[start:]) - } - buf.WriteByte('"') -} - -// UnquoteBytes will decode []byte containing json string to go string -// ported from encoding/json/decode.go -func UnquoteBytes(s []byte) (t []byte, ok bool) { - if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' { - return - } - s = s[1 : len(s)-1] - - // Check for unusual characters. If there are none, - // then no unquoting is needed, so return a slice of the - // original bytes. 
- r := 0 - for r < len(s) { - c := s[r] - if c == '\\' || c == '"' || c < ' ' { - break - } - if c < utf8.RuneSelf { - r++ - continue - } - rr, size := utf8.DecodeRune(s[r:]) - if rr == utf8.RuneError && size == 1 { - break - } - r += size - } - if r == len(s) { - return s, true - } - - b := make([]byte, len(s)+2*utf8.UTFMax) - w := copy(b, s[0:r]) - for r < len(s) { - // Out of room? Can only happen if s is full of - // malformed UTF-8 and we're replacing each - // byte with RuneError. - if w >= len(b)-2*utf8.UTFMax { - nb := make([]byte, (len(b)+utf8.UTFMax)*2) - copy(nb, b[0:w]) - b = nb - } - switch c := s[r]; { - case c == '\\': - r++ - if r >= len(s) { - return - } - switch s[r] { - default: - return - case '"', '\\', '/', '\'': - b[w] = s[r] - r++ - w++ - case 'b': - b[w] = '\b' - r++ - w++ - case 'f': - b[w] = '\f' - r++ - w++ - case 'n': - b[w] = '\n' - r++ - w++ - case 'r': - b[w] = '\r' - r++ - w++ - case 't': - b[w] = '\t' - r++ - w++ - case 'u': - r-- - rr := getu4(s[r:]) - if rr < 0 { - return - } - r += 6 - if utf16.IsSurrogate(rr) { - rr1 := getu4(s[r:]) - if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar { - // A valid pair; consume. - r += 6 - w += utf8.EncodeRune(b[w:], dec) - break - } - // Invalid surrogate; fall back to replacement rune. - rr = unicode.ReplacementChar - } - w += utf8.EncodeRune(b[w:], rr) - } - - // Quote, control characters are invalid. - case c == '"', c < ' ': - return - - // ASCII - case c < utf8.RuneSelf: - b[w] = c - r++ - w++ - - // Coerce to well-formed UTF-8. - default: - rr, size := utf8.DecodeRune(s[r:]) - r += size - w += utf8.EncodeRune(b[w:], rr) - } - } - return b[0:w], true -} - -// getu4 decodes \uXXXX from the beginning of s, returning the hex value, -// or it returns -1. -func getu4(s []byte) rune { - if len(s) < 6 || s[0] != '\\' || s[1] != 'u' { - return -1 - } - r, err := strconv.ParseUint(string(s[2:6]), 16, 64) - if err != nil { - return -1 - } - return rune(r) -} - -// TODO(pquerna): consider combining wibth the normal byte mask. 
-var lt [256]bool = [256]bool{ - false, /* 0 */ - false, /* 1 */ - false, /* 2 */ - false, /* 3 */ - false, /* 4 */ - false, /* 5 */ - false, /* 6 */ - false, /* 7 */ - false, /* 8 */ - false, /* 9 */ - false, /* 10 */ - false, /* 11 */ - false, /* 12 */ - false, /* 13 */ - false, /* 14 */ - false, /* 15 */ - false, /* 16 */ - false, /* 17 */ - false, /* 18 */ - false, /* 19 */ - false, /* 20 */ - false, /* 21 */ - false, /* 22 */ - false, /* 23 */ - false, /* 24 */ - false, /* 25 */ - false, /* 26 */ - false, /* 27 */ - false, /* 28 */ - false, /* 29 */ - false, /* 30 */ - false, /* 31 */ - true, /* 32 */ - true, /* 33 */ - false, /* 34 */ - true, /* 35 */ - true, /* 36 */ - true, /* 37 */ - false, /* 38 */ - true, /* 39 */ - true, /* 40 */ - true, /* 41 */ - true, /* 42 */ - true, /* 43 */ - true, /* 44 */ - true, /* 45 */ - true, /* 46 */ - true, /* 47 */ - true, /* 48 */ - true, /* 49 */ - true, /* 50 */ - true, /* 51 */ - true, /* 52 */ - true, /* 53 */ - true, /* 54 */ - true, /* 55 */ - true, /* 56 */ - true, /* 57 */ - true, /* 58 */ - true, /* 59 */ - false, /* 60 */ - true, /* 61 */ - false, /* 62 */ - true, /* 63 */ - true, /* 64 */ - true, /* 65 */ - true, /* 66 */ - true, /* 67 */ - true, /* 68 */ - true, /* 69 */ - true, /* 70 */ - true, /* 71 */ - true, /* 72 */ - true, /* 73 */ - true, /* 74 */ - true, /* 75 */ - true, /* 76 */ - true, /* 77 */ - true, /* 78 */ - true, /* 79 */ - true, /* 80 */ - true, /* 81 */ - true, /* 82 */ - true, /* 83 */ - true, /* 84 */ - true, /* 85 */ - true, /* 86 */ - true, /* 87 */ - true, /* 88 */ - true, /* 89 */ - true, /* 90 */ - true, /* 91 */ - false, /* 92 */ - true, /* 93 */ - true, /* 94 */ - true, /* 95 */ - true, /* 96 */ - true, /* 97 */ - true, /* 98 */ - true, /* 99 */ - true, /* 100 */ - true, /* 101 */ - true, /* 102 */ - true, /* 103 */ - true, /* 104 */ - true, /* 105 */ - true, /* 106 */ - true, /* 107 */ - true, /* 108 */ - true, /* 109 */ - true, /* 110 */ - true, /* 111 */ - true, /* 112 */ - true, /* 113 */ - true, /* 114 */ - true, /* 115 */ - true, /* 116 */ - true, /* 117 */ - true, /* 118 */ - true, /* 119 */ - true, /* 120 */ - true, /* 121 */ - true, /* 122 */ - true, /* 123 */ - true, /* 124 */ - true, /* 125 */ - true, /* 126 */ - true, /* 127 */ - true, /* 128 */ - true, /* 129 */ - true, /* 130 */ - true, /* 131 */ - true, /* 132 */ - true, /* 133 */ - true, /* 134 */ - true, /* 135 */ - true, /* 136 */ - true, /* 137 */ - true, /* 138 */ - true, /* 139 */ - true, /* 140 */ - true, /* 141 */ - true, /* 142 */ - true, /* 143 */ - true, /* 144 */ - true, /* 145 */ - true, /* 146 */ - true, /* 147 */ - true, /* 148 */ - true, /* 149 */ - true, /* 150 */ - true, /* 151 */ - true, /* 152 */ - true, /* 153 */ - true, /* 154 */ - true, /* 155 */ - true, /* 156 */ - true, /* 157 */ - true, /* 158 */ - true, /* 159 */ - true, /* 160 */ - true, /* 161 */ - true, /* 162 */ - true, /* 163 */ - true, /* 164 */ - true, /* 165 */ - true, /* 166 */ - true, /* 167 */ - true, /* 168 */ - true, /* 169 */ - true, /* 170 */ - true, /* 171 */ - true, /* 172 */ - true, /* 173 */ - true, /* 174 */ - true, /* 175 */ - true, /* 176 */ - true, /* 177 */ - true, /* 178 */ - true, /* 179 */ - true, /* 180 */ - true, /* 181 */ - true, /* 182 */ - true, /* 183 */ - true, /* 184 */ - true, /* 185 */ - true, /* 186 */ - true, /* 187 */ - true, /* 188 */ - true, /* 189 */ - true, /* 190 */ - true, /* 191 */ - true, /* 192 */ - true, /* 193 */ - true, /* 194 */ - true, /* 195 */ - true, /* 196 */ - true, /* 197 */ - true, /* 198 */ - true, /* 199 
*/ - true, /* 200 */ - true, /* 201 */ - true, /* 202 */ - true, /* 203 */ - true, /* 204 */ - true, /* 205 */ - true, /* 206 */ - true, /* 207 */ - true, /* 208 */ - true, /* 209 */ - true, /* 210 */ - true, /* 211 */ - true, /* 212 */ - true, /* 213 */ - true, /* 214 */ - true, /* 215 */ - true, /* 216 */ - true, /* 217 */ - true, /* 218 */ - true, /* 219 */ - true, /* 220 */ - true, /* 221 */ - true, /* 222 */ - true, /* 223 */ - true, /* 224 */ - true, /* 225 */ - true, /* 226 */ - true, /* 227 */ - true, /* 228 */ - true, /* 229 */ - true, /* 230 */ - true, /* 231 */ - true, /* 232 */ - true, /* 233 */ - true, /* 234 */ - true, /* 235 */ - true, /* 236 */ - true, /* 237 */ - true, /* 238 */ - true, /* 239 */ - true, /* 240 */ - true, /* 241 */ - true, /* 242 */ - true, /* 243 */ - true, /* 244 */ - true, /* 245 */ - true, /* 246 */ - true, /* 247 */ - true, /* 248 */ - true, /* 249 */ - true, /* 250 */ - true, /* 251 */ - true, /* 252 */ - true, /* 253 */ - true, /* 254 */ - true, /* 255 */ -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/lexer.go b/vendor/github.com/pquerna/ffjson/fflib/v1/lexer.go deleted file mode 100644 index 5589292ff2..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/lexer.go +++ /dev/null @@ -1,937 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -/* Portions of this file are on derived from yajl: */ -/* - * Copyright (c) 2007-2014, Lloyd Hilaiel - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- */ - -package v1 - -import ( - "errors" - "fmt" - "io" -) - -type FFParseState int - -const ( - FFParse_map_start FFParseState = iota - FFParse_want_key - FFParse_want_colon - FFParse_want_value - FFParse_after_value -) - -type FFTok int - -const ( - FFTok_init FFTok = iota - FFTok_bool FFTok = iota - FFTok_colon FFTok = iota - FFTok_comma FFTok = iota - FFTok_eof FFTok = iota - FFTok_error FFTok = iota - FFTok_left_brace FFTok = iota - FFTok_left_bracket FFTok = iota - FFTok_null FFTok = iota - FFTok_right_brace FFTok = iota - FFTok_right_bracket FFTok = iota - - /* we differentiate between integers and doubles to allow the - * parser to interpret the number without re-scanning */ - FFTok_integer FFTok = iota - FFTok_double FFTok = iota - - FFTok_string FFTok = iota - - /* comment tokens are not currently returned to the parser, ever */ - FFTok_comment FFTok = iota -) - -type FFErr int - -const ( - FFErr_e_ok FFErr = iota - FFErr_io FFErr = iota - FFErr_string_invalid_utf8 FFErr = iota - FFErr_string_invalid_escaped_char FFErr = iota - FFErr_string_invalid_json_char FFErr = iota - FFErr_string_invalid_hex_char FFErr = iota - FFErr_invalid_char FFErr = iota - FFErr_invalid_string FFErr = iota - FFErr_missing_integer_after_decimal FFErr = iota - FFErr_missing_integer_after_exponent FFErr = iota - FFErr_missing_integer_after_minus FFErr = iota - FFErr_unallowed_comment FFErr = iota - FFErr_incomplete_comment FFErr = iota - FFErr_unexpected_token_type FFErr = iota // TODO: improve this error -) - -type FFLexer struct { - reader *ffReader - Output DecodingBuffer - Token FFTok - Error FFErr - BigError error - // TODO: convert all of this to an interface - lastCurrentChar int - captureAll bool - buf Buffer -} - -func NewFFLexer(input []byte) *FFLexer { - fl := &FFLexer{ - Token: FFTok_init, - Error: FFErr_e_ok, - reader: newffReader(input), - Output: &Buffer{}, - } - // TODO: guess size? - //fl.Output.Grow(64) - return fl -} - -type LexerError struct { - offset int - line int - char int - err error -} - -// Reset the Lexer and add new input. 
-func (ffl *FFLexer) Reset(input []byte) { - ffl.Token = FFTok_init - ffl.Error = FFErr_e_ok - ffl.BigError = nil - ffl.reader.Reset(input) - ffl.lastCurrentChar = 0 - ffl.Output.Reset() -} - -func (le *LexerError) Error() string { - return fmt.Sprintf(`ffjson error: (%T)%s offset=%d line=%d char=%d`, - le.err, le.err.Error(), - le.offset, le.line, le.char) -} - -func (ffl *FFLexer) WrapErr(err error) error { - line, char := ffl.reader.PosWithLine() - // TOOD: calcualte lines/characters based on offset - return &LexerError{ - offset: ffl.reader.Pos(), - line: line, - char: char, - err: err, - } -} - -func (ffl *FFLexer) scanReadByte() (byte, error) { - var c byte - var err error - if ffl.captureAll { - c, err = ffl.reader.ReadByte() - } else { - c, err = ffl.reader.ReadByteNoWS() - } - - if err != nil { - ffl.Error = FFErr_io - ffl.BigError = err - return 0, err - } - - return c, nil -} - -func (ffl *FFLexer) readByte() (byte, error) { - - c, err := ffl.reader.ReadByte() - if err != nil { - ffl.Error = FFErr_io - ffl.BigError = err - return 0, err - } - - return c, nil -} - -func (ffl *FFLexer) unreadByte() { - ffl.reader.UnreadByte() -} - -func (ffl *FFLexer) wantBytes(want []byte, iftrue FFTok) FFTok { - startPos := ffl.reader.Pos() - for _, b := range want { - c, err := ffl.readByte() - - if err != nil { - return FFTok_error - } - - if c != b { - ffl.unreadByte() - // fmt.Printf("wanted bytes: %s\n", string(want)) - // TODO(pquerna): thsi is a bad error message - ffl.Error = FFErr_invalid_string - return FFTok_error - } - } - - endPos := ffl.reader.Pos() - ffl.Output.Write(ffl.reader.Slice(startPos, endPos)) - return iftrue -} - -func (ffl *FFLexer) lexComment() FFTok { - c, err := ffl.readByte() - if err != nil { - return FFTok_error - } - - if c == '/' { - // a // comment, scan until line ends. - for { - c, err := ffl.readByte() - if err != nil { - return FFTok_error - } - - if c == '\n' { - return FFTok_comment - } - } - } else if c == '*' { - // a /* */ comment, scan */ - for { - c, err := ffl.readByte() - if err != nil { - return FFTok_error - } - - if c == '*' { - c, err := ffl.readByte() - - if err != nil { - return FFTok_error - } - - if c == '/' { - return FFTok_comment - } - - ffl.Error = FFErr_incomplete_comment - return FFTok_error - } - } - } else { - ffl.Error = FFErr_incomplete_comment - return FFTok_error - } -} - -func (ffl *FFLexer) lexString() FFTok { - if ffl.captureAll { - ffl.buf.Reset() - err := ffl.reader.SliceString(&ffl.buf) - - if err != nil { - ffl.BigError = err - return FFTok_error - } - - WriteJson(ffl.Output, ffl.buf.Bytes()) - - return FFTok_string - } else { - err := ffl.reader.SliceString(ffl.Output) - - if err != nil { - ffl.BigError = err - return FFTok_error - } - - return FFTok_string - } -} - -func (ffl *FFLexer) lexNumber() FFTok { - var numRead int = 0 - tok := FFTok_integer - startPos := ffl.reader.Pos() - - c, err := ffl.readByte() - if err != nil { - return FFTok_error - } - - /* optional leading minus */ - if c == '-' { - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - - /* a single zero, or a series of integers */ - if c == '0' { - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } else if c >= '1' && c <= '9' { - for c >= '0' && c <= '9' { - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - } else { - ffl.unreadByte() - ffl.Error = FFErr_missing_integer_after_minus - return FFTok_error - } - - if c == '.' 
{ - numRead = 0 - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - - for c >= '0' && c <= '9' { - numRead++ - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - - if numRead == 0 { - ffl.unreadByte() - - ffl.Error = FFErr_missing_integer_after_decimal - return FFTok_error - } - - tok = FFTok_double - } - - /* optional exponent (indicates this is floating point) */ - if c == 'e' || c == 'E' { - numRead = 0 - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - - /* optional sign */ - if c == '+' || c == '-' { - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - - for c >= '0' && c <= '9' { - numRead++ - c, err = ffl.readByte() - if err != nil { - return FFTok_error - } - } - - if numRead == 0 { - ffl.Error = FFErr_missing_integer_after_exponent - return FFTok_error - } - - tok = FFTok_double - } - - ffl.unreadByte() - - endPos := ffl.reader.Pos() - ffl.Output.Write(ffl.reader.Slice(startPos, endPos)) - return tok -} - -var true_bytes = []byte{'r', 'u', 'e'} -var false_bytes = []byte{'a', 'l', 's', 'e'} -var null_bytes = []byte{'u', 'l', 'l'} - -func (ffl *FFLexer) Scan() FFTok { - tok := FFTok_error - if ffl.captureAll == false { - ffl.Output.Reset() - } - ffl.Token = FFTok_init - - for { - c, err := ffl.scanReadByte() - if err != nil { - if err == io.EOF { - return FFTok_eof - } else { - return FFTok_error - } - } - - switch c { - case '{': - tok = FFTok_left_bracket - if ffl.captureAll { - ffl.Output.WriteByte('{') - } - goto lexed - case '}': - tok = FFTok_right_bracket - if ffl.captureAll { - ffl.Output.WriteByte('}') - } - goto lexed - case '[': - tok = FFTok_left_brace - if ffl.captureAll { - ffl.Output.WriteByte('[') - } - goto lexed - case ']': - tok = FFTok_right_brace - if ffl.captureAll { - ffl.Output.WriteByte(']') - } - goto lexed - case ',': - tok = FFTok_comma - if ffl.captureAll { - ffl.Output.WriteByte(',') - } - goto lexed - case ':': - tok = FFTok_colon - if ffl.captureAll { - ffl.Output.WriteByte(':') - } - goto lexed - case '\t', '\n', '\v', '\f', '\r', ' ': - if ffl.captureAll { - ffl.Output.WriteByte(c) - } - case 't': - ffl.Output.WriteByte('t') - tok = ffl.wantBytes(true_bytes, FFTok_bool) - goto lexed - case 'f': - ffl.Output.WriteByte('f') - tok = ffl.wantBytes(false_bytes, FFTok_bool) - goto lexed - case 'n': - ffl.Output.WriteByte('n') - tok = ffl.wantBytes(null_bytes, FFTok_null) - goto lexed - case '"': - tok = ffl.lexString() - goto lexed - case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': - ffl.unreadByte() - tok = ffl.lexNumber() - goto lexed - case '/': - tok = ffl.lexComment() - goto lexed - default: - tok = FFTok_error - ffl.Error = FFErr_invalid_char - goto lexed - } - } - -lexed: - ffl.Token = tok - return tok -} - -func (ffl *FFLexer) scanField(start FFTok, capture bool) ([]byte, error) { - switch start { - case FFTok_left_brace, - FFTok_left_bracket: - { - end := FFTok_right_brace - if start == FFTok_left_bracket { - end = FFTok_right_bracket - if capture { - ffl.Output.WriteByte('{') - } - } else { - if capture { - ffl.Output.WriteByte('[') - } - } - - depth := 1 - if capture { - ffl.captureAll = true - } - // TODO: work. 
- scanloop: - for { - tok := ffl.Scan() - //fmt.Printf("capture-token: %v end: %v depth: %v\n", tok, end, depth) - switch tok { - case FFTok_eof: - return nil, errors.New("ffjson: unexpected EOF") - case FFTok_error: - if ffl.BigError != nil { - return nil, ffl.BigError - } - return nil, ffl.Error.ToError() - case end: - depth-- - if depth == 0 { - break scanloop - } - case start: - depth++ - } - } - - if capture { - ffl.captureAll = false - } - - if capture { - return ffl.Output.Bytes(), nil - } else { - return nil, nil - } - } - case FFTok_bool, - FFTok_integer, - FFTok_null, - FFTok_double: - // simple value, return it. - if capture { - return ffl.Output.Bytes(), nil - } else { - return nil, nil - } - - case FFTok_string: - //TODO(pquerna): so, other users expect this to be a quoted string :( - if capture { - ffl.buf.Reset() - WriteJson(&ffl.buf, ffl.Output.Bytes()) - return ffl.buf.Bytes(), nil - } else { - return nil, nil - } - } - - return nil, fmt.Errorf("ffjson: invalid capture type: %v", start) -} - -// Captures an entire field value, including recursive objects, -// and converts them to a []byte suitable to pass to a sub-object's -// UnmarshalJSON -func (ffl *FFLexer) CaptureField(start FFTok) ([]byte, error) { - return ffl.scanField(start, true) -} - -func (ffl *FFLexer) SkipField(start FFTok) error { - _, err := ffl.scanField(start, false) - return err -} - -// TODO(pquerna): return line number and offset. -func (err FFErr) ToError() error { - switch err { - case FFErr_e_ok: - return nil - case FFErr_io: - return errors.New("ffjson: IO error") - case FFErr_string_invalid_utf8: - return errors.New("ffjson: string with invalid UTF-8 sequence") - case FFErr_string_invalid_escaped_char: - return errors.New("ffjson: string with invalid escaped character") - case FFErr_string_invalid_json_char: - return errors.New("ffjson: string with invalid JSON character") - case FFErr_string_invalid_hex_char: - return errors.New("ffjson: string with invalid hex character") - case FFErr_invalid_char: - return errors.New("ffjson: invalid character") - case FFErr_invalid_string: - return errors.New("ffjson: invalid string") - case FFErr_missing_integer_after_decimal: - return errors.New("ffjson: missing integer after decimal") - case FFErr_missing_integer_after_exponent: - return errors.New("ffjson: missing integer after exponent") - case FFErr_missing_integer_after_minus: - return errors.New("ffjson: missing integer after minus") - case FFErr_unallowed_comment: - return errors.New("ffjson: unallowed comment") - case FFErr_incomplete_comment: - return errors.New("ffjson: incomplete comment") - case FFErr_unexpected_token_type: - return errors.New("ffjson: unexpected token sequence") - } - - panic(fmt.Sprintf("unknown error type: %v ", err)) -} - -func (state FFParseState) String() string { - switch state { - case FFParse_map_start: - return "map:start" - case FFParse_want_key: - return "want_key" - case FFParse_want_colon: - return "want_colon" - case FFParse_want_value: - return "want_value" - case FFParse_after_value: - return "after_value" - } - - panic(fmt.Sprintf("unknown parse state: %d", int(state))) -} - -func (tok FFTok) String() string { - switch tok { - case FFTok_init: - return "tok:init" - case FFTok_bool: - return "tok:bool" - case FFTok_colon: - return "tok:colon" - case FFTok_comma: - return "tok:comma" - case FFTok_eof: - return "tok:eof" - case FFTok_error: - return "tok:error" - case FFTok_left_brace: - return "tok:left_brace" - case FFTok_left_bracket: - return "tok:left_bracket" 
- case FFTok_null: - return "tok:null" - case FFTok_right_brace: - return "tok:right_brace" - case FFTok_right_bracket: - return "tok:right_bracket" - case FFTok_integer: - return "tok:integer" - case FFTok_double: - return "tok:double" - case FFTok_string: - return "tok:string" - case FFTok_comment: - return "comment" - } - - panic(fmt.Sprintf("unknown token: %d", int(tok))) -} - -/* a lookup table which lets us quickly determine three things: - * cVEC - valid escaped control char - * note. the solidus '/' may be escaped or not. - * cIJC - invalid json char - * cVHC - valid hex char - * cNFP - needs further processing (from a string scanning perspective) - * cNUC - needs utf8 checking when enabled (from a string scanning perspective) - */ - -const ( - cVEC int8 = 0x01 - cIJC int8 = 0x02 - cVHC int8 = 0x04 - cNFP int8 = 0x08 - cNUC int8 = 0x10 -) - -var byteLookupTable [256]int8 = [256]int8{ - cIJC, /* 0 */ - cIJC, /* 1 */ - cIJC, /* 2 */ - cIJC, /* 3 */ - cIJC, /* 4 */ - cIJC, /* 5 */ - cIJC, /* 6 */ - cIJC, /* 7 */ - cIJC, /* 8 */ - cIJC, /* 9 */ - cIJC, /* 10 */ - cIJC, /* 11 */ - cIJC, /* 12 */ - cIJC, /* 13 */ - cIJC, /* 14 */ - cIJC, /* 15 */ - cIJC, /* 16 */ - cIJC, /* 17 */ - cIJC, /* 18 */ - cIJC, /* 19 */ - cIJC, /* 20 */ - cIJC, /* 21 */ - cIJC, /* 22 */ - cIJC, /* 23 */ - cIJC, /* 24 */ - cIJC, /* 25 */ - cIJC, /* 26 */ - cIJC, /* 27 */ - cIJC, /* 28 */ - cIJC, /* 29 */ - cIJC, /* 30 */ - cIJC, /* 31 */ - 0, /* 32 */ - 0, /* 33 */ - cVEC | cIJC | cNFP, /* 34 */ - 0, /* 35 */ - 0, /* 36 */ - 0, /* 37 */ - 0, /* 38 */ - 0, /* 39 */ - 0, /* 40 */ - 0, /* 41 */ - 0, /* 42 */ - 0, /* 43 */ - 0, /* 44 */ - 0, /* 45 */ - 0, /* 46 */ - cVEC, /* 47 */ - cVHC, /* 48 */ - cVHC, /* 49 */ - cVHC, /* 50 */ - cVHC, /* 51 */ - cVHC, /* 52 */ - cVHC, /* 53 */ - cVHC, /* 54 */ - cVHC, /* 55 */ - cVHC, /* 56 */ - cVHC, /* 57 */ - 0, /* 58 */ - 0, /* 59 */ - 0, /* 60 */ - 0, /* 61 */ - 0, /* 62 */ - 0, /* 63 */ - 0, /* 64 */ - cVHC, /* 65 */ - cVHC, /* 66 */ - cVHC, /* 67 */ - cVHC, /* 68 */ - cVHC, /* 69 */ - cVHC, /* 70 */ - 0, /* 71 */ - 0, /* 72 */ - 0, /* 73 */ - 0, /* 74 */ - 0, /* 75 */ - 0, /* 76 */ - 0, /* 77 */ - 0, /* 78 */ - 0, /* 79 */ - 0, /* 80 */ - 0, /* 81 */ - 0, /* 82 */ - 0, /* 83 */ - 0, /* 84 */ - 0, /* 85 */ - 0, /* 86 */ - 0, /* 87 */ - 0, /* 88 */ - 0, /* 89 */ - 0, /* 90 */ - 0, /* 91 */ - cVEC | cIJC | cNFP, /* 92 */ - 0, /* 93 */ - 0, /* 94 */ - 0, /* 95 */ - 0, /* 96 */ - cVHC, /* 97 */ - cVEC | cVHC, /* 98 */ - cVHC, /* 99 */ - cVHC, /* 100 */ - cVHC, /* 101 */ - cVEC | cVHC, /* 102 */ - 0, /* 103 */ - 0, /* 104 */ - 0, /* 105 */ - 0, /* 106 */ - 0, /* 107 */ - 0, /* 108 */ - 0, /* 109 */ - cVEC, /* 110 */ - 0, /* 111 */ - 0, /* 112 */ - 0, /* 113 */ - cVEC, /* 114 */ - 0, /* 115 */ - cVEC, /* 116 */ - 0, /* 117 */ - 0, /* 118 */ - 0, /* 119 */ - 0, /* 120 */ - 0, /* 121 */ - 0, /* 122 */ - 0, /* 123 */ - 0, /* 124 */ - 0, /* 125 */ - 0, /* 126 */ - 0, /* 127 */ - cNUC, /* 128 */ - cNUC, /* 129 */ - cNUC, /* 130 */ - cNUC, /* 131 */ - cNUC, /* 132 */ - cNUC, /* 133 */ - cNUC, /* 134 */ - cNUC, /* 135 */ - cNUC, /* 136 */ - cNUC, /* 137 */ - cNUC, /* 138 */ - cNUC, /* 139 */ - cNUC, /* 140 */ - cNUC, /* 141 */ - cNUC, /* 142 */ - cNUC, /* 143 */ - cNUC, /* 144 */ - cNUC, /* 145 */ - cNUC, /* 146 */ - cNUC, /* 147 */ - cNUC, /* 148 */ - cNUC, /* 149 */ - cNUC, /* 150 */ - cNUC, /* 151 */ - cNUC, /* 152 */ - cNUC, /* 153 */ - cNUC, /* 154 */ - cNUC, /* 155 */ - cNUC, /* 156 */ - cNUC, /* 157 */ - cNUC, /* 158 */ - cNUC, /* 159 */ - cNUC, /* 160 */ - cNUC, /* 161 */ - 
cNUC, /* 162 */ - cNUC, /* 163 */ - cNUC, /* 164 */ - cNUC, /* 165 */ - cNUC, /* 166 */ - cNUC, /* 167 */ - cNUC, /* 168 */ - cNUC, /* 169 */ - cNUC, /* 170 */ - cNUC, /* 171 */ - cNUC, /* 172 */ - cNUC, /* 173 */ - cNUC, /* 174 */ - cNUC, /* 175 */ - cNUC, /* 176 */ - cNUC, /* 177 */ - cNUC, /* 178 */ - cNUC, /* 179 */ - cNUC, /* 180 */ - cNUC, /* 181 */ - cNUC, /* 182 */ - cNUC, /* 183 */ - cNUC, /* 184 */ - cNUC, /* 185 */ - cNUC, /* 186 */ - cNUC, /* 187 */ - cNUC, /* 188 */ - cNUC, /* 189 */ - cNUC, /* 190 */ - cNUC, /* 191 */ - cNUC, /* 192 */ - cNUC, /* 193 */ - cNUC, /* 194 */ - cNUC, /* 195 */ - cNUC, /* 196 */ - cNUC, /* 197 */ - cNUC, /* 198 */ - cNUC, /* 199 */ - cNUC, /* 200 */ - cNUC, /* 201 */ - cNUC, /* 202 */ - cNUC, /* 203 */ - cNUC, /* 204 */ - cNUC, /* 205 */ - cNUC, /* 206 */ - cNUC, /* 207 */ - cNUC, /* 208 */ - cNUC, /* 209 */ - cNUC, /* 210 */ - cNUC, /* 211 */ - cNUC, /* 212 */ - cNUC, /* 213 */ - cNUC, /* 214 */ - cNUC, /* 215 */ - cNUC, /* 216 */ - cNUC, /* 217 */ - cNUC, /* 218 */ - cNUC, /* 219 */ - cNUC, /* 220 */ - cNUC, /* 221 */ - cNUC, /* 222 */ - cNUC, /* 223 */ - cNUC, /* 224 */ - cNUC, /* 225 */ - cNUC, /* 226 */ - cNUC, /* 227 */ - cNUC, /* 228 */ - cNUC, /* 229 */ - cNUC, /* 230 */ - cNUC, /* 231 */ - cNUC, /* 232 */ - cNUC, /* 233 */ - cNUC, /* 234 */ - cNUC, /* 235 */ - cNUC, /* 236 */ - cNUC, /* 237 */ - cNUC, /* 238 */ - cNUC, /* 239 */ - cNUC, /* 240 */ - cNUC, /* 241 */ - cNUC, /* 242 */ - cNUC, /* 243 */ - cNUC, /* 244 */ - cNUC, /* 245 */ - cNUC, /* 246 */ - cNUC, /* 247 */ - cNUC, /* 248 */ - cNUC, /* 249 */ - cNUC, /* 250 */ - cNUC, /* 251 */ - cNUC, /* 252 */ - cNUC, /* 253 */ - cNUC, /* 254 */ - cNUC, /* 255 */ -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/reader.go b/vendor/github.com/pquerna/ffjson/fflib/v1/reader.go deleted file mode 100644 index 0f22c469d6..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/reader.go +++ /dev/null @@ -1,512 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package v1 - -import ( - "fmt" - "io" - "unicode" - "unicode/utf16" -) - -const sliceStringMask = cIJC | cNFP - -type ffReader struct { - s []byte - i int - l int -} - -func newffReader(d []byte) *ffReader { - return &ffReader{ - s: d, - i: 0, - l: len(d), - } -} - -func (r *ffReader) Slice(start, stop int) []byte { - return r.s[start:stop] -} - -func (r *ffReader) Pos() int { - return r.i -} - -// Reset the reader, and add new input. -func (r *ffReader) Reset(d []byte) { - r.s = d - r.i = 0 - r.l = len(d) -} - -// Calcuates the Position with line and line offset, -// because this isn't counted for performance reasons, -// it will iterate the buffer from the beginning, and should -// only be used in error-paths. 
-func (r *ffReader) PosWithLine() (int, int) { - currentLine := 1 - currentChar := 0 - - for i := 0; i < r.i; i++ { - c := r.s[i] - currentChar++ - if c == '\n' { - currentLine++ - currentChar = 0 - } - } - - return currentLine, currentChar -} - -func (r *ffReader) ReadByteNoWS() (byte, error) { - if r.i >= r.l { - return 0, io.EOF - } - - j := r.i - - for { - c := r.s[j] - j++ - - // inline whitespace parsing gives another ~8% performance boost - // for many kinds of nicely indented JSON. - // ... and using a [255]bool instead of multiple ifs, gives another 2% - /* - if c != '\t' && - c != '\n' && - c != '\v' && - c != '\f' && - c != '\r' && - c != ' ' { - r.i = j - return c, nil - } - */ - if whitespaceLookupTable[c] == false { - r.i = j - return c, nil - } - - if j >= r.l { - return 0, io.EOF - } - } -} - -func (r *ffReader) ReadByte() (byte, error) { - if r.i >= r.l { - return 0, io.EOF - } - - r.i++ - - return r.s[r.i-1], nil -} - -func (r *ffReader) UnreadByte() error { - if r.i <= 0 { - panic("ffReader.UnreadByte: at beginning of slice") - } - r.i-- - return nil -} - -func (r *ffReader) readU4(j int) (rune, error) { - - var u4 [4]byte - for i := 0; i < 4; i++ { - if j >= r.l { - return -1, io.EOF - } - c := r.s[j] - if byteLookupTable[c]&cVHC != 0 { - u4[i] = c - j++ - continue - } else { - // TODO(pquerna): handle errors better. layering violation. - return -1, fmt.Errorf("lex_string_invalid_hex_char: %v %v", c, string(u4[:])) - } - } - - // TODO(pquerna): utf16.IsSurrogate - rr, err := ParseUint(u4[:], 16, 64) - if err != nil { - return -1, err - } - return rune(rr), nil -} - -func (r *ffReader) handleEscaped(c byte, j int, out DecodingBuffer) (int, error) { - if j >= r.l { - return 0, io.EOF - } - - c = r.s[j] - j++ - - if c == 'u' { - ru, err := r.readU4(j) - if err != nil { - return 0, err - } - - if utf16.IsSurrogate(ru) { - ru2, err := r.readU4(j + 6) - if err != nil { - return 0, err - } - out.Write(r.s[r.i : j-2]) - r.i = j + 10 - j = r.i - rval := utf16.DecodeRune(ru, ru2) - if rval != unicode.ReplacementChar { - out.WriteRune(rval) - } else { - return 0, fmt.Errorf("lex_string_invalid_unicode_surrogate: %v %v", ru, ru2) - } - } else { - out.Write(r.s[r.i : j-2]) - r.i = j + 4 - j = r.i - out.WriteRune(ru) - } - return j, nil - } else if byteLookupTable[c]&cVEC == 0 { - return 0, fmt.Errorf("lex_string_invalid_escaped_char: %v", c) - } else { - out.Write(r.s[r.i : j-2]) - r.i = j - j = r.i - - switch c { - case '"': - out.WriteByte('"') - case '\\': - out.WriteByte('\\') - case '/': - out.WriteByte('/') - case 'b': - out.WriteByte('\b') - case 'f': - out.WriteByte('\f') - case 'n': - out.WriteByte('\n') - case 'r': - out.WriteByte('\r') - case 't': - out.WriteByte('\t') - } - } - - return j, nil -} - -func (r *ffReader) SliceString(out DecodingBuffer) error { - var c byte - // TODO(pquerna): string_with_escapes? de-escape here? - j := r.i - - for { - if j >= r.l { - return io.EOF - } - - j, c = scanString(r.s, j) - - if c == '"' { - if j != r.i { - out.Write(r.s[r.i : j-1]) - r.i = j - } - return nil - } else if c == '\\' { - var err error - j, err = r.handleEscaped(c, j, out) - if err != nil { - return err - } - } else if byteLookupTable[c]&cIJC != 0 { - return fmt.Errorf("lex_string_invalid_json_char: %v", c) - } - continue - } -} - -// TODO(pquerna): consider combining wibth the normal byte mask. 
-var whitespaceLookupTable [256]bool = [256]bool{ - false, /* 0 */ - false, /* 1 */ - false, /* 2 */ - false, /* 3 */ - false, /* 4 */ - false, /* 5 */ - false, /* 6 */ - false, /* 7 */ - false, /* 8 */ - true, /* 9 */ - true, /* 10 */ - true, /* 11 */ - true, /* 12 */ - true, /* 13 */ - false, /* 14 */ - false, /* 15 */ - false, /* 16 */ - false, /* 17 */ - false, /* 18 */ - false, /* 19 */ - false, /* 20 */ - false, /* 21 */ - false, /* 22 */ - false, /* 23 */ - false, /* 24 */ - false, /* 25 */ - false, /* 26 */ - false, /* 27 */ - false, /* 28 */ - false, /* 29 */ - false, /* 30 */ - false, /* 31 */ - true, /* 32 */ - false, /* 33 */ - false, /* 34 */ - false, /* 35 */ - false, /* 36 */ - false, /* 37 */ - false, /* 38 */ - false, /* 39 */ - false, /* 40 */ - false, /* 41 */ - false, /* 42 */ - false, /* 43 */ - false, /* 44 */ - false, /* 45 */ - false, /* 46 */ - false, /* 47 */ - false, /* 48 */ - false, /* 49 */ - false, /* 50 */ - false, /* 51 */ - false, /* 52 */ - false, /* 53 */ - false, /* 54 */ - false, /* 55 */ - false, /* 56 */ - false, /* 57 */ - false, /* 58 */ - false, /* 59 */ - false, /* 60 */ - false, /* 61 */ - false, /* 62 */ - false, /* 63 */ - false, /* 64 */ - false, /* 65 */ - false, /* 66 */ - false, /* 67 */ - false, /* 68 */ - false, /* 69 */ - false, /* 70 */ - false, /* 71 */ - false, /* 72 */ - false, /* 73 */ - false, /* 74 */ - false, /* 75 */ - false, /* 76 */ - false, /* 77 */ - false, /* 78 */ - false, /* 79 */ - false, /* 80 */ - false, /* 81 */ - false, /* 82 */ - false, /* 83 */ - false, /* 84 */ - false, /* 85 */ - false, /* 86 */ - false, /* 87 */ - false, /* 88 */ - false, /* 89 */ - false, /* 90 */ - false, /* 91 */ - false, /* 92 */ - false, /* 93 */ - false, /* 94 */ - false, /* 95 */ - false, /* 96 */ - false, /* 97 */ - false, /* 98 */ - false, /* 99 */ - false, /* 100 */ - false, /* 101 */ - false, /* 102 */ - false, /* 103 */ - false, /* 104 */ - false, /* 105 */ - false, /* 106 */ - false, /* 107 */ - false, /* 108 */ - false, /* 109 */ - false, /* 110 */ - false, /* 111 */ - false, /* 112 */ - false, /* 113 */ - false, /* 114 */ - false, /* 115 */ - false, /* 116 */ - false, /* 117 */ - false, /* 118 */ - false, /* 119 */ - false, /* 120 */ - false, /* 121 */ - false, /* 122 */ - false, /* 123 */ - false, /* 124 */ - false, /* 125 */ - false, /* 126 */ - false, /* 127 */ - false, /* 128 */ - false, /* 129 */ - false, /* 130 */ - false, /* 131 */ - false, /* 132 */ - false, /* 133 */ - false, /* 134 */ - false, /* 135 */ - false, /* 136 */ - false, /* 137 */ - false, /* 138 */ - false, /* 139 */ - false, /* 140 */ - false, /* 141 */ - false, /* 142 */ - false, /* 143 */ - false, /* 144 */ - false, /* 145 */ - false, /* 146 */ - false, /* 147 */ - false, /* 148 */ - false, /* 149 */ - false, /* 150 */ - false, /* 151 */ - false, /* 152 */ - false, /* 153 */ - false, /* 154 */ - false, /* 155 */ - false, /* 156 */ - false, /* 157 */ - false, /* 158 */ - false, /* 159 */ - false, /* 160 */ - false, /* 161 */ - false, /* 162 */ - false, /* 163 */ - false, /* 164 */ - false, /* 165 */ - false, /* 166 */ - false, /* 167 */ - false, /* 168 */ - false, /* 169 */ - false, /* 170 */ - false, /* 171 */ - false, /* 172 */ - false, /* 173 */ - false, /* 174 */ - false, /* 175 */ - false, /* 176 */ - false, /* 177 */ - false, /* 178 */ - false, /* 179 */ - false, /* 180 */ - false, /* 181 */ - false, /* 182 */ - false, /* 183 */ - false, /* 184 */ - false, /* 185 */ - false, /* 186 */ - false, /* 187 */ - false, /* 188 */ - false, /* 189 */ - false, 
/* 190 */ - false, /* 191 */ - false, /* 192 */ - false, /* 193 */ - false, /* 194 */ - false, /* 195 */ - false, /* 196 */ - false, /* 197 */ - false, /* 198 */ - false, /* 199 */ - false, /* 200 */ - false, /* 201 */ - false, /* 202 */ - false, /* 203 */ - false, /* 204 */ - false, /* 205 */ - false, /* 206 */ - false, /* 207 */ - false, /* 208 */ - false, /* 209 */ - false, /* 210 */ - false, /* 211 */ - false, /* 212 */ - false, /* 213 */ - false, /* 214 */ - false, /* 215 */ - false, /* 216 */ - false, /* 217 */ - false, /* 218 */ - false, /* 219 */ - false, /* 220 */ - false, /* 221 */ - false, /* 222 */ - false, /* 223 */ - false, /* 224 */ - false, /* 225 */ - false, /* 226 */ - false, /* 227 */ - false, /* 228 */ - false, /* 229 */ - false, /* 230 */ - false, /* 231 */ - false, /* 232 */ - false, /* 233 */ - false, /* 234 */ - false, /* 235 */ - false, /* 236 */ - false, /* 237 */ - false, /* 238 */ - false, /* 239 */ - false, /* 240 */ - false, /* 241 */ - false, /* 242 */ - false, /* 243 */ - false, /* 244 */ - false, /* 245 */ - false, /* 246 */ - false, /* 247 */ - false, /* 248 */ - false, /* 249 */ - false, /* 250 */ - false, /* 251 */ - false, /* 252 */ - false, /* 253 */ - false, /* 254 */ - false, /* 255 */ -} diff --git a/vendor/github.com/pquerna/ffjson/fflib/v1/reader_scan_generic.go b/vendor/github.com/pquerna/ffjson/fflib/v1/reader_scan_generic.go deleted file mode 100644 index 47c2607708..0000000000 --- a/vendor/github.com/pquerna/ffjson/fflib/v1/reader_scan_generic.go +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package v1 - -func scanString(s []byte, j int) (int, byte) { - for { - if j >= len(s) { - return j, 0 - } - - c := s[j] - j++ - if byteLookupTable[c]&sliceStringMask == 0 { - continue - } - - return j, c - } -} diff --git a/vendor/github.com/pquerna/ffjson/inception/decoder.go b/vendor/github.com/pquerna/ffjson/inception/decoder.go deleted file mode 100644 index 908347a325..0000000000 --- a/vendor/github.com/pquerna/ffjson/inception/decoder.go +++ /dev/null @@ -1,323 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package ffjsoninception - -import ( - "fmt" - "reflect" - "strings" - - "github.com/pquerna/ffjson/shared" -) - -var validValues []string = []string{ - "FFTok_left_brace", - "FFTok_left_bracket", - "FFTok_integer", - "FFTok_double", - "FFTok_string", - "FFTok_bool", - "FFTok_null", -} - -func CreateUnmarshalJSON(ic *Inception, si *StructInfo) error { - out := "" - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - if len(si.Fields) > 0 { - ic.OutputImports[`"bytes"`] = true - } - ic.OutputImports[`"fmt"`] = true - - out += tplStr(decodeTpl["header"], header{ - IC: ic, - SI: si, - }) - - out += tplStr(decodeTpl["ujFunc"], ujFunc{ - SI: si, - IC: ic, - ValidValues: validValues, - ResetFields: ic.ResetFields, - }) - - ic.OutputFuncs = append(ic.OutputFuncs, out) - - return nil -} - -func handleField(ic *Inception, name string, typ reflect.Type, ptr bool, quoted bool) string { - return handleFieldAddr(ic, name, false, typ, ptr, quoted) -} - -func handleFieldAddr(ic *Inception, name string, takeAddr bool, typ reflect.Type, ptr bool, quoted bool) string { - out := fmt.Sprintf("/* handler: %s type=%v kind=%v quoted=%t*/\n", name, typ, typ.Kind(), quoted) - - umlx := typ.Implements(unmarshalFasterType) || typeInInception(ic, typ, shared.MustDecoder) - umlx = umlx || reflect.PtrTo(typ).Implements(unmarshalFasterType) - - umlstd := typ.Implements(unmarshalerType) || reflect.PtrTo(typ).Implements(unmarshalerType) - - out += tplStr(decodeTpl["handleUnmarshaler"], handleUnmarshaler{ - IC: ic, - Name: name, - Typ: typ, - Ptr: reflect.Ptr, - TakeAddr: takeAddr || ptr, - UnmarshalJSONFFLexer: umlx, - Unmarshaler: umlstd, - }) - - if umlx || umlstd { - return out - } - - // TODO(pquerna): generic handling of token type mismatching struct type - switch typ.Kind() { - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Int64: - - allowed := buildTokens(quoted, "FFTok_string", "FFTok_integer", "FFTok_null") - out += getAllowTokens(typ.Name(), allowed...) - - out += getNumberHandler(ic, name, takeAddr || ptr, typ, "ParseInt") - - case reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32, - reflect.Uint64: - - allowed := buildTokens(quoted, "FFTok_string", "FFTok_integer", "FFTok_null") - out += getAllowTokens(typ.Name(), allowed...) - - out += getNumberHandler(ic, name, takeAddr || ptr, typ, "ParseUint") - - case reflect.Float32, - reflect.Float64: - - allowed := buildTokens(quoted, "FFTok_string", "FFTok_double", "FFTok_integer", "FFTok_null") - out += getAllowTokens(typ.Name(), allowed...) - - out += getNumberHandler(ic, name, takeAddr || ptr, typ, "ParseFloat") - - case reflect.Bool: - ic.OutputImports[`"bytes"`] = true - ic.OutputImports[`"errors"`] = true - - allowed := buildTokens(quoted, "FFTok_string", "FFTok_bool", "FFTok_null") - out += getAllowTokens(typ.Name(), allowed...) - - out += tplStr(decodeTpl["handleBool"], handleBool{ - Name: name, - Typ: typ, - TakeAddr: takeAddr || ptr, - }) - - case reflect.Ptr: - out += tplStr(decodeTpl["handlePtr"], handlePtr{ - IC: ic, - Name: name, - Typ: typ, - Quoted: quoted, - }) - - case reflect.Array, - reflect.Slice: - out += getArrayHandler(ic, name, typ, ptr) - - case reflect.String: - // Is it a json.Number? - if typ.PkgPath() == "encoding/json" && typ.Name() == "Number" { - // Fall back to json package to rely on the valid number check. 
- // See: https://github.com/golang/go/blob/f05c3aa24d815cd3869153750c9875e35fc48a6e/src/encoding/json/decode.go#L897 - ic.OutputImports[`"encoding/json"`] = true - out += tplStr(decodeTpl["handleFallback"], handleFallback{ - Name: name, - Typ: typ, - Kind: typ.Kind(), - }) - } else { - out += tplStr(decodeTpl["handleString"], handleString{ - IC: ic, - Name: name, - Typ: typ, - TakeAddr: takeAddr || ptr, - Quoted: quoted, - }) - } - case reflect.Interface: - ic.OutputImports[`"encoding/json"`] = true - out += tplStr(decodeTpl["handleFallback"], handleFallback{ - Name: name, - Typ: typ, - Kind: typ.Kind(), - }) - case reflect.Map: - out += tplStr(decodeTpl["handleObject"], handleObject{ - IC: ic, - Name: name, - Typ: typ, - Ptr: reflect.Ptr, - TakeAddr: takeAddr || ptr, - }) - default: - ic.OutputImports[`"encoding/json"`] = true - out += tplStr(decodeTpl["handleFallback"], handleFallback{ - Name: name, - Typ: typ, - Kind: typ.Kind(), - }) - } - - return out -} - -func getArrayHandler(ic *Inception, name string, typ reflect.Type, ptr bool) string { - if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 { - ic.OutputImports[`"encoding/base64"`] = true - useReflectToSet := false - if typ.Elem().Name() != "byte" { - ic.OutputImports[`"reflect"`] = true - useReflectToSet = true - } - - return tplStr(decodeTpl["handleByteSlice"], handleArray{ - IC: ic, - Name: name, - Typ: typ, - Ptr: reflect.Ptr, - UseReflectToSet: useReflectToSet, - }) - } - - if typ.Elem().Kind() == reflect.Struct && typ.Elem().Name() != "" { - goto sliceOrArray - } - - if (typ.Elem().Kind() == reflect.Struct || typ.Elem().Kind() == reflect.Map) || - typ.Elem().Kind() == reflect.Array || typ.Elem().Kind() == reflect.Slice && - typ.Elem().Name() == "" { - ic.OutputImports[`"encoding/json"`] = true - - return tplStr(decodeTpl["handleFallback"], handleFallback{ - Name: name, - Typ: typ, - Kind: typ.Kind(), - }) - } - -sliceOrArray: - - if typ.Kind() == reflect.Array { - return tplStr(decodeTpl["handleArray"], handleArray{ - IC: ic, - Name: name, - Typ: typ, - IsPtr: ptr, - Ptr: reflect.Ptr, - }) - } - - return tplStr(decodeTpl["handleSlice"], handleArray{ - IC: ic, - Name: name, - Typ: typ, - IsPtr: ptr, - Ptr: reflect.Ptr, - }) -} - -func getAllowTokens(name string, tokens ...string) string { - return tplStr(decodeTpl["allowTokens"], allowTokens{ - Name: name, - Tokens: tokens, - }) -} - -func getNumberHandler(ic *Inception, name string, takeAddr bool, typ reflect.Type, parsefunc string) string { - return tplStr(decodeTpl["handlerNumeric"], handlerNumeric{ - IC: ic, - Name: name, - ParseFunc: parsefunc, - TakeAddr: takeAddr, - Typ: typ, - }) -} - -func getNumberSize(typ reflect.Type) string { - return fmt.Sprintf("%d", typ.Bits()) -} - -func getType(ic *Inception, name string, typ reflect.Type) string { - s := typ.Name() - - if typ.PkgPath() != "" && typ.PkgPath() != ic.PackagePath { - path := removeVendor(typ.PkgPath()) - ic.OutputImports[`"`+path+`"`] = true - s = typ.String() - } - - if s == "" { - return typ.String() - } - - return s -} - -// removeVendor removes everything before and including a '/vendor/' -// substring in the package path. -// This is needed becuase that full path can't be used in the -// import statement. 
-func removeVendor(path string) string { - i := strings.Index(path, "/vendor/") - if i == -1 { - return path - } - return path[i+8:] -} - -func buildTokens(containsOptional bool, optional string, required ...string) []string { - if containsOptional { - return append(required, optional) - } - - return required -} - -func unquoteField(quoted bool) string { - // The outer quote of a string is already stripped out by - // the lexer. We need to check if the inner string is also - // quoted. If so, we will decode it as json string. If decoding - // fails, we will use the original string - if quoted { - return ` - unquoted, ok := fflib.UnquoteBytes(outBuf) - if ok { - outBuf = unquoted - } - ` - } - return "" -} - -func getTmpVarFor(name string) string { - return "tmp" + strings.Replace(strings.Title(name), ".", "", -1) -} diff --git a/vendor/github.com/pquerna/ffjson/inception/decoder_tpl.go b/vendor/github.com/pquerna/ffjson/inception/decoder_tpl.go deleted file mode 100644 index 0985061228..0000000000 --- a/vendor/github.com/pquerna/ffjson/inception/decoder_tpl.go +++ /dev/null @@ -1,773 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "reflect" - "strconv" - "text/template" -) - -var decodeTpl map[string]*template.Template - -func init() { - decodeTpl = make(map[string]*template.Template) - - funcs := map[string]string{ - "handlerNumeric": handlerNumericTxt, - "allowTokens": allowTokensTxt, - "handleFallback": handleFallbackTxt, - "handleString": handleStringTxt, - "handleObject": handleObjectTxt, - "handleArray": handleArrayTxt, - "handleSlice": handleSliceTxt, - "handleByteSlice": handleByteSliceTxt, - "handleBool": handleBoolTxt, - "handlePtr": handlePtrTxt, - "header": headerTxt, - "ujFunc": ujFuncTxt, - "handleUnmarshaler": handleUnmarshalerTxt, - } - - tplFuncs := template.FuncMap{ - "getAllowTokens": getAllowTokens, - "getNumberSize": getNumberSize, - "getType": getType, - "handleField": handleField, - "handleFieldAddr": handleFieldAddr, - "unquoteField": unquoteField, - "getTmpVarFor": getTmpVarFor, - } - - for k, v := range funcs { - decodeTpl[k] = template.Must(template.New(k).Funcs(tplFuncs).Parse(v)) - } -} - -type handlerNumeric struct { - IC *Inception - Name string - ParseFunc string - Typ reflect.Type - TakeAddr bool -} - -var handlerNumericTxt = ` -{ - {{$ic := .IC}} - - if tok == fflib.FFTok_null { - {{if eq .TakeAddr true}} - {{.Name}} = nil - {{end}} - } else { - {{if eq .ParseFunc "ParseFloat" }} - tval, err := fflib.{{ .ParseFunc}}(fs.Output.Bytes(), {{getNumberSize .Typ}}) - {{else}} - tval, err := fflib.{{ .ParseFunc}}(fs.Output.Bytes(), 10, {{getNumberSize .Typ}}) - {{end}} - - if err != nil { - return fs.WrapErr(err) - } - {{if eq .TakeAddr true}} - ttypval := {{getType $ic .Name .Typ}}(tval) - {{.Name}} = &ttypval - {{else}} - {{.Name}} = {{getType $ic .Name .Typ}}(tval) - {{end}} - } -} -` - -type allowTokens struct { - Name string - Tokens []string -} - -var 
allowTokensTxt = ` -{ - if {{range $index, $element := .Tokens}}{{if ne $index 0 }}&&{{end}} tok != fflib.{{$element}}{{end}} { - return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for {{.Name}}", tok)) - } -} -` - -type handleFallback struct { - Name string - Typ reflect.Type - Kind reflect.Kind -} - -var handleFallbackTxt = ` -{ - /* Falling back. type={{printf "%v" .Typ}} kind={{printf "%v" .Kind}} */ - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - err = json.Unmarshal(tbuf, &{{.Name}}) - if err != nil { - return fs.WrapErr(err) - } -} -` - -type handleString struct { - IC *Inception - Name string - Typ reflect.Type - TakeAddr bool - Quoted bool -} - -var handleStringTxt = ` -{ - {{$ic := .IC}} - - {{getAllowTokens .Typ.Name "FFTok_string" "FFTok_null"}} - if tok == fflib.FFTok_null { - {{if eq .TakeAddr true}} - {{.Name}} = nil - {{end}} - } else { - {{if eq .TakeAddr true}} - var tval {{getType $ic .Name .Typ}} - outBuf := fs.Output.Bytes() - {{unquoteField .Quoted}} - tval = {{getType $ic .Name .Typ}}(string(outBuf)) - {{.Name}} = &tval - {{else}} - outBuf := fs.Output.Bytes() - {{unquoteField .Quoted}} - {{.Name}} = {{getType $ic .Name .Typ}}(string(outBuf)) - {{end}} - } -} -` - -type handleObject struct { - IC *Inception - Name string - Typ reflect.Type - Ptr reflect.Kind - TakeAddr bool -} - -var handleObjectTxt = ` -{ - {{$ic := .IC}} - {{getAllowTokens .Typ.Name "FFTok_left_bracket" "FFTok_null"}} - if tok == fflib.FFTok_null { - {{.Name}} = nil - } else { - - {{if eq .TakeAddr true}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{if eq .Typ.Key.Kind .Ptr }} - var tval = make(map[*{{getType $ic .Name .Typ.Key.Elem}}]*{{getType $ic .Name .Typ.Elem.Elem}}, 0) - {{else}} - var tval = make(map[{{getType $ic .Name .Typ.Key}}]*{{getType $ic .Name .Typ.Elem.Elem}}, 0) - {{end}} - {{else}} - {{if eq .Typ.Key.Kind .Ptr }} - var tval = make(map[*{{getType $ic .Name .Typ.Key.Elem}}]{{getType $ic .Name .Typ.Elem}}, 0) - {{else}} - var tval = make(map[{{getType $ic .Name .Typ.Key}}]{{getType $ic .Name .Typ.Elem}}, 0) - {{end}} - {{end}} - {{else}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{if eq .Typ.Key.Kind .Ptr }} - {{.Name}} = make(map[*{{getType $ic .Name .Typ.Key.Elem}}]*{{getType $ic .Name .Typ.Elem.Elem}}, 0) - {{else}} - {{.Name}} = make(map[{{getType $ic .Name .Typ.Key}}]*{{getType $ic .Name .Typ.Elem.Elem}}, 0) - {{end}} - {{else}} - {{if eq .Typ.Key.Kind .Ptr }} - {{.Name}} = make(map[*{{getType $ic .Name .Typ.Key.Elem}}]{{getType $ic .Name .Typ.Elem}}, 0) - {{else}} - {{.Name}} = make(map[{{getType $ic .Name .Typ.Key}}]{{getType $ic .Name .Typ.Elem}}, 0) - {{end}} - {{end}} - {{end}} - - wantVal := true - - for { - {{$keyPtr := false}} - {{if eq .Typ.Key.Kind .Ptr }} - {{$keyPtr := true}} - var k *{{getType $ic .Name .Typ.Key.Elem}} - {{else}} - var k {{getType $ic .Name .Typ.Key}} - {{end}} - - {{$valPtr := false}} - {{$tmpVar := getTmpVarFor .Name}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{$valPtr := true}} - var {{$tmpVar}} *{{getType $ic .Name .Typ.Elem.Elem}} - {{else}} - var {{$tmpVar}} {{getType $ic .Name .Typ.Elem}} - {{end}} - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_bracket { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - {{handleField .IC "k" .Typ.Key $keyPtr false}} - - // Expect ':' after key - tok = fs.Scan() - if tok != fflib.FFTok_colon { - return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok)) - } - - tok = fs.Scan() - {{handleField .IC $tmpVar .Typ.Elem $valPtr false}} - - {{if eq .TakeAddr true}} - tval[k] = {{$tmpVar}} - {{else}} - {{.Name}}[k] = {{$tmpVar}} - {{end}} - wantVal = false - } - - {{if eq .TakeAddr true}} - {{.Name}} = &tval - {{end}} - } -} -` - -type handleArray struct { - IC *Inception - Name string - Typ reflect.Type - Ptr reflect.Kind - UseReflectToSet bool - IsPtr bool -} - -var handleArrayTxt = ` -{ - {{$ic := .IC}} - {{getAllowTokens .Typ.Name "FFTok_left_brace" "FFTok_null"}} - {{if eq .Typ.Elem.Kind .Ptr}} - {{.Name}} = [{{.Typ.Len}}]*{{getType $ic .Name .Typ.Elem.Elem}}{} - {{else}} - {{.Name}} = [{{.Typ.Len}}]{{getType $ic .Name .Typ.Elem}}{} - {{end}} - if tok != fflib.FFTok_null { - wantVal := true - - idx := 0 - for { - {{$ptr := false}} - {{$tmpVar := getTmpVarFor .Name}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{$ptr := true}} - var {{$tmpVar}} *{{getType $ic .Name .Typ.Elem.Elem}} - {{else}} - var {{$tmpVar}} {{getType $ic .Name .Typ.Elem}} - {{end}} - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - {{handleField .IC $tmpVar .Typ.Elem $ptr false}} - - // Standard json.Unmarshal ignores elements out of array bounds, - // that what we do as well. - if idx < {{.Typ.Len}} { - {{.Name}}[idx] = {{$tmpVar}} - idx++ - } - - wantVal = false - } - } -} -` - -var handleSliceTxt = ` -{ - {{$ic := .IC}} - {{getAllowTokens .Typ.Name "FFTok_left_brace" "FFTok_null"}} - if tok == fflib.FFTok_null { - {{.Name}} = nil - } else { - {{if eq .Typ.Elem.Kind .Ptr }} - {{if eq .IsPtr true}} - {{.Name}} = &[]*{{getType $ic .Name .Typ.Elem.Elem}}{} - {{else}} - {{.Name}} = []*{{getType $ic .Name .Typ.Elem.Elem}}{} - {{end}} - {{else}} - {{if eq .IsPtr true}} - {{.Name}} = &[]{{getType $ic .Name .Typ.Elem}}{} - {{else}} - {{.Name}} = []{{getType $ic .Name .Typ.Elem}}{} - {{end}} - {{end}} - - wantVal := true - - for { - {{$ptr := false}} - {{$tmpVar := getTmpVarFor .Name}} - {{if eq .Typ.Elem.Kind .Ptr }} - {{$ptr := true}} - var {{$tmpVar}} *{{getType $ic .Name .Typ.Elem.Elem}} - {{else}} - var {{$tmpVar}} {{getType $ic .Name .Typ.Elem}} - {{end}} - - tok = fs.Scan() - if tok == fflib.FFTok_error { - goto tokerror - } - if tok == fflib.FFTok_right_brace { - break - } - - if tok == fflib.FFTok_comma { - if wantVal == true { - // TODO(pquerna): this isn't an ideal error message, this handles - // things like [,,,] as an array value. 
- return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) - } - continue - } else { - wantVal = true - } - - {{handleField .IC $tmpVar .Typ.Elem $ptr false}} - {{if eq .IsPtr true}} - *{{.Name}} = append(*{{.Name}}, {{$tmpVar}}) - {{else}} - {{.Name}} = append({{.Name}}, {{$tmpVar}}) - {{end}} - wantVal = false - } - } -} -` - -var handleByteSliceTxt = ` -{ - {{getAllowTokens .Typ.Name "FFTok_string" "FFTok_null"}} - if tok == fflib.FFTok_null { - {{.Name}} = nil - } else { - b := make([]byte, base64.StdEncoding.DecodedLen(fs.Output.Len())) - n, err := base64.StdEncoding.Decode(b, fs.Output.Bytes()) - if err != nil { - return fs.WrapErr(err) - } - {{if eq .UseReflectToSet true}} - v := reflect.ValueOf(&{{.Name}}).Elem() - v.SetBytes(b[0:n]) - {{else}} - {{.Name}} = append([]byte(), b[0:n]...) - {{end}} - } -} -` - -type handleBool struct { - Name string - Typ reflect.Type - TakeAddr bool -} - -var handleBoolTxt = ` -{ - if tok == fflib.FFTok_null { - {{if eq .TakeAddr true}} - {{.Name}} = nil - {{end}} - } else { - tmpb := fs.Output.Bytes() - - {{if eq .TakeAddr true}} - var tval bool - {{end}} - - if bytes.Compare([]byte{'t', 'r', 'u', 'e'}, tmpb) == 0 { - {{if eq .TakeAddr true}} - tval = true - {{else}} - {{.Name}} = true - {{end}} - } else if bytes.Compare([]byte{'f', 'a', 'l', 's', 'e'}, tmpb) == 0 { - {{if eq .TakeAddr true}} - tval = false - {{else}} - {{.Name}} = false - {{end}} - } else { - err = errors.New("unexpected bytes for true/false value") - return fs.WrapErr(err) - } - - {{if eq .TakeAddr true}} - {{.Name}} = &tval - {{end}} - } -} -` - -type handlePtr struct { - IC *Inception - Name string - Typ reflect.Type - Quoted bool -} - -var handlePtrTxt = ` -{ - {{$ic := .IC}} - - if tok == fflib.FFTok_null { - {{.Name}} = nil - } else { - if {{.Name}} == nil { - {{.Name}} = new({{getType $ic .Typ.Elem.Name .Typ.Elem}}) - } - - {{handleFieldAddr .IC .Name true .Typ.Elem false .Quoted}} - } -} -` - -type header struct { - IC *Inception - SI *StructInfo -} - -var headerTxt = ` -const ( - ffjt{{.SI.Name}}base = iota - ffjt{{.SI.Name}}nosuchkey - {{with $si := .SI}} - {{range $index, $field := $si.Fields}} - {{if ne $field.JsonName "-"}} - ffjt{{$si.Name}}{{$field.Name}} - {{end}} - {{end}} - {{end}} -) - -{{with $si := .SI}} - {{range $index, $field := $si.Fields}} - {{if ne $field.JsonName "-"}} -var ffjKey{{$si.Name}}{{$field.Name}} = []byte({{$field.JsonName}}) - {{end}} - {{end}} -{{end}} - -` - -type ujFunc struct { - IC *Inception - SI *StructInfo - ValidValues []string - ResetFields bool -} - -var ujFuncTxt = ` -{{$si := .SI}} -{{$ic := .IC}} - -// UnmarshalJSON umarshall json - template of ffjson -func (j *{{.SI.Name}}) UnmarshalJSON(input []byte) error { - fs := fflib.NewFFLexer(input) - return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start) -} - -// UnmarshalJSONFFLexer fast json unmarshall - template ffjson -func (j *{{.SI.Name}}) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error { - var err error - currentKey := ffjt{{.SI.Name}}base - _ = currentKey - tok := fflib.FFTok_init - wantedTok := fflib.FFTok_init - - {{if eq .ResetFields true}} - {{range $index, $field := $si.Fields}} - var ffjSet{{$si.Name}}{{$field.Name}} = false - {{end}} - {{end}} - -mainparse: - for { - tok = fs.Scan() - // println(fmt.Sprintf("debug: tok: %v state: %v", tok, state)) - if tok == fflib.FFTok_error { - goto tokerror - } - - switch state { - - case fflib.FFParse_map_start: - if tok != fflib.FFTok_left_bracket { - wantedTok = 
fflib.FFTok_left_bracket - goto wrongtokenerror - } - state = fflib.FFParse_want_key - continue - - case fflib.FFParse_after_value: - if tok == fflib.FFTok_comma { - state = fflib.FFParse_want_key - } else if tok == fflib.FFTok_right_bracket { - goto done - } else { - wantedTok = fflib.FFTok_comma - goto wrongtokenerror - } - - case fflib.FFParse_want_key: - // json {} ended. goto exit. woo. - if tok == fflib.FFTok_right_bracket { - goto done - } - if tok != fflib.FFTok_string { - wantedTok = fflib.FFTok_string - goto wrongtokenerror - } - - kn := fs.Output.Bytes() - if len(kn) <= 0 { - // "" case. hrm. - currentKey = ffjt{{.SI.Name}}nosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } else { - switch kn[0] { - {{range $byte, $fields := $si.FieldsByFirstByte}} - case '{{$byte}}': - {{range $index, $field := $fields}} - {{if ne $index 0 }}} else if {{else}}if {{end}} bytes.Equal(ffjKey{{$si.Name}}{{$field.Name}}, kn) { - currentKey = ffjt{{$si.Name}}{{$field.Name}} - state = fflib.FFParse_want_colon - goto mainparse - {{end}} } - {{end}} - } - {{range $index, $field := $si.ReverseFields}} - if {{$field.FoldFuncName}}(ffjKey{{$si.Name}}{{$field.Name}}, kn) { - currentKey = ffjt{{$si.Name}}{{$field.Name}} - state = fflib.FFParse_want_colon - goto mainparse - } - {{end}} - currentKey = ffjt{{.SI.Name}}nosuchkey - state = fflib.FFParse_want_colon - goto mainparse - } - - case fflib.FFParse_want_colon: - if tok != fflib.FFTok_colon { - wantedTok = fflib.FFTok_colon - goto wrongtokenerror - } - state = fflib.FFParse_want_value - continue - case fflib.FFParse_want_value: - - if {{range $index, $v := .ValidValues}}{{if ne $index 0 }}||{{end}}tok == fflib.{{$v}}{{end}} { - switch currentKey { - {{range $index, $field := $si.Fields}} - case ffjt{{$si.Name}}{{$field.Name}}: - goto handle_{{$field.Name}} - {{end}} - case ffjt{{$si.Name}}nosuchkey: - err = fs.SkipField(tok) - if err != nil { - return fs.WrapErr(err) - } - state = fflib.FFParse_after_value - goto mainparse - } - } else { - goto wantedvalue - } - } - } -{{range $index, $field := $si.Fields}} -handle_{{$field.Name}}: - {{with $fieldName := $field.Name | printf "j.%s"}} - {{handleField $ic $fieldName $field.Typ $field.Pointer $field.ForceString}} - {{if eq $.ResetFields true}} - ffjSet{{$si.Name}}{{$field.Name}} = true - {{end}} - state = fflib.FFParse_after_value - goto mainparse - {{end}} -{{end}} - -wantedvalue: - return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok)) -wrongtokenerror: - return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String())) -tokerror: - if fs.BigError != nil { - return fs.WrapErr(fs.BigError) - } - err = fs.Error.ToError() - if err != nil { - return fs.WrapErr(err) - } - panic("ffjson-generated: unreachable, please report bug.") -done: -{{if eq .ResetFields true}} -{{range $index, $field := $si.Fields}} - if !ffjSet{{$si.Name}}{{$field.Name}} { - {{with $fieldName := $field.Name | printf "j.%s"}} - {{if eq $field.Pointer true}} - {{$fieldName}} = nil - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Interface), 10) + `}} - {{$fieldName}} = nil - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Slice), 10) + `}} - {{$fieldName}} = nil - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Array), 10) + `}} - {{$fieldName}} = [{{$field.Typ.Len}}]{{getType $ic $fieldName $field.Typ.Elem}}{} - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Map), 10) + 
`}} - {{$fieldName}} = nil - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Bool), 10) + `}} - {{$fieldName}} = false - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.String), 10) + `}} - {{$fieldName}} = "" - {{else if eq $field.Typ.Kind ` + strconv.FormatUint(uint64(reflect.Struct), 10) + `}} - {{$fieldName}} = {{getType $ic $fieldName $field.Typ}}{} - {{else}} - {{$fieldName}} = {{getType $ic $fieldName $field.Typ}}(0) - {{end}} - {{end}} - } -{{end}} -{{end}} - return nil -} -` - -type handleUnmarshaler struct { - IC *Inception - Name string - Typ reflect.Type - Ptr reflect.Kind - TakeAddr bool - UnmarshalJSONFFLexer bool - Unmarshaler bool -} - -var handleUnmarshalerTxt = ` - {{$ic := .IC}} - - {{if eq .UnmarshalJSONFFLexer true}} - { - if tok == fflib.FFTok_null { - {{if eq .Typ.Kind .Ptr }} - {{.Name}} = nil - {{end}} - {{if eq .TakeAddr true }} - {{.Name}} = nil - {{end}} - } else { - {{if eq .Typ.Kind .Ptr }} - if {{.Name}} == nil { - {{.Name}} = new({{getType $ic .Typ.Elem.Name .Typ.Elem}}) - } - {{end}} - {{if eq .TakeAddr true }} - if {{.Name}} == nil { - {{.Name}} = new({{getType $ic .Typ.Name .Typ}}) - } - {{end}} - err = {{.Name}}.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key) - if err != nil { - return err - } - } - state = fflib.FFParse_after_value - } - {{else}} - {{if eq .Unmarshaler true}} - { - if tok == fflib.FFTok_null { - {{if eq .TakeAddr true }} - {{.Name}} = nil - {{end}} - } else { - - tbuf, err := fs.CaptureField(tok) - if err != nil { - return fs.WrapErr(err) - } - - {{if eq .TakeAddr true }} - if {{.Name}} == nil { - {{.Name}} = new({{getType $ic .Typ.Name .Typ}}) - } - {{end}} - err = {{.Name}}.UnmarshalJSON(tbuf) - if err != nil { - return fs.WrapErr(err) - } - } - state = fflib.FFParse_after_value - } - {{end}} - {{end}} -` diff --git a/vendor/github.com/pquerna/ffjson/inception/encoder.go b/vendor/github.com/pquerna/ffjson/inception/encoder.go deleted file mode 100644 index 3e37a2814a..0000000000 --- a/vendor/github.com/pquerna/ffjson/inception/encoder.go +++ /dev/null @@ -1,544 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "fmt" - "reflect" - - "github.com/pquerna/ffjson/shared" -) - -func typeInInception(ic *Inception, typ reflect.Type, f shared.Feature) bool { - for _, v := range ic.objs { - if v.Typ == typ { - return v.Options.HasFeature(f) - } - if typ.Kind() == reflect.Ptr { - if v.Typ == typ.Elem() { - return v.Options.HasFeature(f) - } - } - } - - return false -} - -func getOmitEmpty(ic *Inception, sf *StructField) string { - ptname := "j." 
+ sf.Name - if sf.Pointer { - ptname = "*" + ptname - return "if true {\n" - } - switch sf.Typ.Kind() { - - case reflect.Array, reflect.Map, reflect.Slice, reflect.String: - return "if len(" + ptname + ") != 0 {" + "\n" - - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Int64, - reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32, - reflect.Uint64, - reflect.Uintptr, - reflect.Float32, - reflect.Float64: - return "if " + ptname + " != 0 {" + "\n" - - case reflect.Bool: - return "if " + ptname + " != false {" + "\n" - - case reflect.Interface, reflect.Ptr: - return "if " + ptname + " != nil {" + "\n" - - default: - // TODO(pquerna): fix types - return "if true {" + "\n" - } -} - -func getMapValue(ic *Inception, name string, typ reflect.Type, ptr bool, forceString bool) string { - var out = "" - - if typ.Key().Kind() != reflect.String { - out += fmt.Sprintf("/* Falling back. type=%v kind=%v */\n", typ, typ.Kind()) - out += ic.q.Flush() - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - return out - } - - var elemKind reflect.Kind - elemKind = typ.Elem().Kind() - - switch elemKind { - case reflect.String, - reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, - reflect.Float32, - reflect.Float64, - reflect.Bool: - - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - - out += "if " + name + " == nil {" + "\n" - ic.q.Write("null") - out += ic.q.GetQueued() - ic.q.DeleteLast() - out += "} else {" + "\n" - out += ic.q.WriteFlush("{ ") - out += " for key, value := range " + name + " {" + "\n" - out += " fflib.WriteJsonString(buf, key)" + "\n" - out += " buf.WriteString(`:`)" + "\n" - out += getGetInnerValue(ic, "value", typ.Elem(), false, forceString) - out += " buf.WriteByte(',')" + "\n" - out += " }" + "\n" - out += "buf.Rewind(1)" + "\n" - out += ic.q.WriteFlush("}") - out += "}" + "\n" - - default: - out += ic.q.Flush() - out += fmt.Sprintf("/* Falling back. 
type=%v kind=%v */\n", typ, typ.Kind()) - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - } - return out -} - -func getGetInnerValue(ic *Inception, name string, typ reflect.Type, ptr bool, forceString bool) string { - var out = "" - - // Flush if not bool or maps - if typ.Kind() != reflect.Bool && typ.Kind() != reflect.Map && typ.Kind() != reflect.Struct { - out += ic.q.Flush() - } - - if typ.Implements(marshalerFasterType) || - reflect.PtrTo(typ).Implements(marshalerFasterType) || - typeInInception(ic, typ, shared.MustEncoder) || - typ.Implements(marshalerType) || - reflect.PtrTo(typ).Implements(marshalerType) { - - out += ic.q.Flush() - out += tplStr(encodeTpl["handleMarshaler"], handleMarshaler{ - IC: ic, - Name: name, - Typ: typ, - Ptr: reflect.Ptr, - MarshalJSONBuf: typ.Implements(marshalerFasterType) || reflect.PtrTo(typ).Implements(marshalerFasterType) || typeInInception(ic, typ, shared.MustEncoder), - Marshaler: typ.Implements(marshalerType) || reflect.PtrTo(typ).Implements(marshalerType), - }) - return out - } - - ptname := name - if ptr { - ptname = "*" + name - } - - switch typ.Kind() { - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Int64: - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - out += "fflib.FormatBits2(buf, uint64(" + ptname + "), 10, " + ptname + " < 0)" + "\n" - case reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32, - reflect.Uint64, - reflect.Uintptr: - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - out += "fflib.FormatBits2(buf, uint64(" + ptname + "), 10, false)" + "\n" - case reflect.Float32: - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - out += "fflib.AppendFloat(buf, float64(" + ptname + "), 'g', -1, 32)" + "\n" - case reflect.Float64: - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - out += "fflib.AppendFloat(buf, float64(" + ptname + "), 'g', -1, 64)" + "\n" - case reflect.Array, - reflect.Slice: - - // Arrays cannot be nil - if typ.Kind() != reflect.Array { - out += "if " + name + "!= nil {" + "\n" - } - // Array and slice values encode as JSON arrays, except that - // []byte encodes as a base64-encoded string, and a nil slice - // encodes as the null JSON object. - if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 { - ic.OutputImports[`"encoding/base64"`] = true - - out += "buf.WriteString(`\"`)" + "\n" - out += `{` + "\n" - out += `enc := base64.NewEncoder(base64.StdEncoding, buf)` + "\n" - if typ.Elem().Name() != "byte" { - ic.OutputImports[`"reflect"`] = true - out += `enc.Write(reflect.Indirect(reflect.ValueOf(` + ptname + `)).Bytes())` + "\n" - - } else { - out += `enc.Write(` + ptname + `)` + "\n" - } - out += `enc.Close()` + "\n" - out += `}` + "\n" - out += "buf.WriteString(`\"`)" + "\n" - } else { - out += "buf.WriteString(`[`)" + "\n" - out += "for i, v := range " + ptname + "{" + "\n" - out += "if i != 0 {" + "\n" - out += "buf.WriteString(`,`)" + "\n" - out += "}" + "\n" - out += getGetInnerValue(ic, "v", typ.Elem(), false, false) - out += "}" + "\n" - out += "buf.WriteString(`]`)" + "\n" - } - if typ.Kind() != reflect.Array { - out += "} else {" + "\n" - out += "buf.WriteString(`null`)" + "\n" - out += "}" + "\n" - } - case reflect.String: - // Is it a json.Number? 
- if typ.PkgPath() == "encoding/json" && typ.Name() == "Number" { - // Fall back to json package to rely on the valid number check. - // See: https://github.com/golang/go/blob/92cd6e3af9f423ab4d8ac78f24e7fd81c31a8ce6/src/encoding/json/encode.go#L550 - out += fmt.Sprintf("/* json.Number */\n") - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - } else { - ic.OutputImports[`fflib "github.com/pquerna/ffjson/fflib/v1"`] = true - if forceString { - // Forcestring on strings does double-escaping of the entire value. - // We create a temporary buffer, encode to that an re-encode it. - out += "{" + "\n" - out += "tmpbuf := fflib.Buffer{}" + "\n" - out += "tmpbuf.Grow(len(" + ptname + ") + 16)" + "\n" - out += "fflib.WriteJsonString(&tmpbuf, string(" + ptname + "))" + "\n" - out += "fflib.WriteJsonString(buf, string( tmpbuf.Bytes() " + `))` + "\n" - out += "}" + "\n" - } else { - out += "fflib.WriteJsonString(buf, string(" + ptname + "))" + "\n" - } - } - case reflect.Ptr: - out += "if " + name + "!= nil {" + "\n" - switch typ.Elem().Kind() { - case reflect.Struct: - out += getGetInnerValue(ic, name, typ.Elem(), false, false) - default: - out += getGetInnerValue(ic, "*"+name, typ.Elem(), false, false) - } - out += "} else {" + "\n" - out += "buf.WriteString(`null`)" + "\n" - out += "}" + "\n" - case reflect.Bool: - out += "if " + ptname + " {" + "\n" - ic.q.Write("true") - out += ic.q.GetQueued() - out += "} else {" + "\n" - // Delete 'true' - ic.q.DeleteLast() - out += ic.q.WriteFlush("false") - out += "}" + "\n" - case reflect.Interface: - out += fmt.Sprintf("/* Interface types must use runtime reflection. type=%v kind=%v */\n", typ, typ.Kind()) - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - case reflect.Map: - out += getMapValue(ic, ptname, typ, ptr, forceString) - case reflect.Struct: - if typ.Name() == "" { - ic.q.Write("{") - ic.q.Write(" ") - out += fmt.Sprintf("/* Inline struct. type=%v kind=%v */\n", typ, typ.Kind()) - newV := reflect.Indirect(reflect.New(typ)).Interface() - fields := extractFields(newV) - - // Output all fields - for _, field := range fields { - // Adjust field name - field.Name = name + "." + field.Name - out += getField(ic, field, "") - } - - if lastConditional(fields) { - out += ic.q.Flush() - out += `buf.Rewind(1)` + "\n" - } else { - ic.q.DeleteLast() - } - out += ic.q.WriteFlush("}") - } else { - out += fmt.Sprintf("/* Struct fall back. type=%v kind=%v */\n", typ, typ.Kind()) - out += ic.q.Flush() - if ptr { - out += "err = buf.Encode(" + name + ")" + "\n" - } else { - // We send pointer to avoid copying entire struct - out += "err = buf.Encode(&" + name + ")" + "\n" - } - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - } - default: - out += fmt.Sprintf("/* Falling back. 
type=%v kind=%v */\n", typ, typ.Kind()) - out += "err = buf.Encode(" + name + ")" + "\n" - out += "if err != nil {" + "\n" - out += " return err" + "\n" - out += "}" + "\n" - } - - return out -} - -func getValue(ic *Inception, sf *StructField, prefix string) string { - closequote := false - if sf.ForceString { - switch sf.Typ.Kind() { - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Int64, - reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32, - reflect.Uint64, - reflect.Uintptr, - reflect.Float32, - reflect.Float64, - reflect.Bool: - ic.q.Write(`"`) - closequote = true - } - } - out := getGetInnerValue(ic, prefix+sf.Name, sf.Typ, sf.Pointer, sf.ForceString) - if closequote { - if sf.Pointer { - out += ic.q.WriteFlush(`"`) - } else { - ic.q.Write(`"`) - } - } - - return out -} - -func p2(v uint32) uint32 { - v-- - v |= v >> 1 - v |= v >> 2 - v |= v >> 4 - v |= v >> 8 - v |= v >> 16 - v++ - return v -} - -func getTypeSize(t reflect.Type) uint32 { - switch t.Kind() { - case reflect.String: - // TODO: consider runtime analysis. - return 32 - case reflect.Array, reflect.Map, reflect.Slice: - // TODO: consider runtime analysis. - return 4 * getTypeSize(t.Elem()) - case reflect.Int, - reflect.Int8, - reflect.Int16, - reflect.Int32, - reflect.Uint, - reflect.Uint8, - reflect.Uint16, - reflect.Uint32: - return 8 - case reflect.Int64, - reflect.Uint64, - reflect.Uintptr: - return 16 - case reflect.Float32, - reflect.Float64: - return 16 - case reflect.Bool: - return 4 - case reflect.Ptr: - return getTypeSize(t.Elem()) - default: - return 16 - } -} - -func getTotalSize(si *StructInfo) uint32 { - rv := uint32(si.Typ.Size()) - for _, f := range si.Fields { - rv += getTypeSize(f.Typ) - } - return rv -} - -func getBufGrowSize(si *StructInfo) uint32 { - - // TOOD(pquerna): automatically calc a better grow size based on history - // of a struct. - return p2(getTotalSize(si)) -} - -func isIntish(t reflect.Type) bool { - if t.Kind() >= reflect.Int && t.Kind() <= reflect.Uintptr { - return true - } - if t.Kind() == reflect.Array || t.Kind() == reflect.Slice || t.Kind() == reflect.Ptr { - if t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 { - // base64 special case. - return false - } else { - return isIntish(t.Elem()) - } - } - return false -} - -func getField(ic *Inception, f *StructField, prefix string) string { - out := "" - if f.OmitEmpty { - out += ic.q.Flush() - if f.Pointer { - out += "if " + prefix + f.Name + " != nil {" + "\n" - } - out += getOmitEmpty(ic, f) - } - - if f.Pointer && !f.OmitEmpty { - // Pointer values encode as the value pointed to. A nil pointer encodes as the null JSON object. - out += "if " + prefix + f.Name + " != nil {" + "\n" - } - - // JsonName is already escaped and quoted. - // getInnervalue should flush - ic.q.Write(f.JsonName + ":") - // We save a copy in case we need it - t := ic.q - - out += getValue(ic, f, prefix) - ic.q.Write(",") - - if f.Pointer && !f.OmitEmpty { - out += "} else {" + "\n" - out += t.WriteFlush("null") - out += "}" + "\n" - } - - if f.OmitEmpty { - out += ic.q.Flush() - if f.Pointer { - out += "}" + "\n" - } - out += "}" + "\n" - } - return out -} - -// We check if the last field is conditional. 
-func lastConditional(fields []*StructField) bool { - if len(fields) > 0 { - f := fields[len(fields)-1] - return f.OmitEmpty - } - return false -} - -func CreateMarshalJSON(ic *Inception, si *StructInfo) error { - conditionalWrites := lastConditional(si.Fields) - out := "" - - out += "// MarshalJSON marshal bytes to json - template\n" - out += `func (j *` + si.Name + `) MarshalJSON() ([]byte, error) {` + "\n" - out += `var buf fflib.Buffer` + "\n" - - out += `if j == nil {` + "\n" - out += ` buf.WriteString("null")` + "\n" - out += " return buf.Bytes(), nil" + "\n" - out += `}` + "\n" - - out += `err := j.MarshalJSONBuf(&buf)` + "\n" - out += `if err != nil {` + "\n" - out += " return nil, err" + "\n" - out += `}` + "\n" - out += `return buf.Bytes(), nil` + "\n" - out += `}` + "\n" - - out += "// MarshalJSONBuf marshal buff to json - template\n" - out += `func (j *` + si.Name + `) MarshalJSONBuf(buf fflib.EncodingBuffer) (error) {` + "\n" - out += ` if j == nil {` + "\n" - out += ` buf.WriteString("null")` + "\n" - out += " return nil" + "\n" - out += ` }` + "\n" - - out += `var err error` + "\n" - out += `var obj []byte` + "\n" - out += `_ = obj` + "\n" - out += `_ = err` + "\n" - - ic.q.Write("{") - - // The extra space is inserted here. - // If nothing is written to the field this will be deleted - // instead of the last comma. - if conditionalWrites || len(si.Fields) == 0 { - ic.q.Write(" ") - } - - for _, f := range si.Fields { - out += getField(ic, f, "j.") - } - - // Handling the last comma is tricky. - // If the last field has omitempty, conditionalWrites is set. - // If something has been written, we delete the last comma, - // by backing up the buffer, otherwise it will delete a space. - if conditionalWrites { - out += ic.q.Flush() - out += `buf.Rewind(1)` + "\n" - } else { - ic.q.DeleteLast() - } - - out += ic.q.WriteFlush("}") - out += `return nil` + "\n" - out += `}` + "\n" - ic.OutputFuncs = append(ic.OutputFuncs, out) - return nil -} diff --git a/vendor/github.com/pquerna/ffjson/inception/encoder_tpl.go b/vendor/github.com/pquerna/ffjson/inception/encoder_tpl.go deleted file mode 100644 index 22ab5292e7..0000000000 --- a/vendor/github.com/pquerna/ffjson/inception/encoder_tpl.go +++ /dev/null @@ -1,73 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package ffjsoninception - -import ( - "reflect" - "text/template" -) - -var encodeTpl map[string]*template.Template - -func init() { - encodeTpl = make(map[string]*template.Template) - - funcs := map[string]string{ - "handleMarshaler": handleMarshalerTxt, - } - tplFuncs := template.FuncMap{} - - for k, v := range funcs { - encodeTpl[k] = template.Must(template.New(k).Funcs(tplFuncs).Parse(v)) - } -} - -type handleMarshaler struct { - IC *Inception - Name string - Typ reflect.Type - Ptr reflect.Kind - MarshalJSONBuf bool - Marshaler bool -} - -var handleMarshalerTxt = ` - { - {{if eq .Typ.Kind .Ptr}} - if {{.Name}} == nil { - buf.WriteString("null") - } else { - {{end}} - - {{if eq .MarshalJSONBuf true}} - err = {{.Name}}.MarshalJSONBuf(buf) - if err != nil { - return err - } - {{else if eq .Marshaler true}} - obj, err = {{.Name}}.MarshalJSON() - if err != nil { - return err - } - buf.Write(obj) - {{end}} - {{if eq .Typ.Kind .Ptr}} - } - {{end}} - } -` diff --git a/vendor/github.com/pquerna/ffjson/inception/inception.go b/vendor/github.com/pquerna/ffjson/inception/inception.go deleted file mode 100644 index 10cb2712cc..0000000000 --- a/vendor/github.com/pquerna/ffjson/inception/inception.go +++ /dev/null @@ -1,160 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "errors" - "fmt" - "github.com/pquerna/ffjson/shared" - "io/ioutil" - "os" - "reflect" - "sort" -) - -type Inception struct { - objs []*StructInfo - InputPath string - OutputPath string - PackageName string - PackagePath string - OutputImports map[string]bool - OutputFuncs []string - q ConditionalWrite - ResetFields bool -} - -func NewInception(inputPath string, packageName string, outputPath string, resetFields bool) *Inception { - return &Inception{ - objs: make([]*StructInfo, 0), - InputPath: inputPath, - OutputPath: outputPath, - PackageName: packageName, - OutputFuncs: make([]string, 0), - OutputImports: make(map[string]bool), - ResetFields: resetFields, - } -} - -func (i *Inception) AddMany(objs []shared.InceptionType) { - for _, obj := range objs { - i.Add(obj) - } -} - -func (i *Inception) Add(obj shared.InceptionType) { - i.objs = append(i.objs, NewStructInfo(obj)) - i.PackagePath = i.objs[0].Typ.PkgPath() -} - -func (i *Inception) wantUnmarshal(si *StructInfo) bool { - if si.Options.SkipDecoder { - return false - } - typ := si.Typ - umlx := typ.Implements(unmarshalFasterType) || reflect.PtrTo(typ).Implements(unmarshalFasterType) - umlstd := typ.Implements(unmarshalerType) || reflect.PtrTo(typ).Implements(unmarshalerType) - if umlstd && !umlx { - // structure has UnmarshalJSON, but not our faster version -- skip it. 
- return false - } - return true -} - -func (i *Inception) wantMarshal(si *StructInfo) bool { - if si.Options.SkipEncoder { - return false - } - typ := si.Typ - mlx := typ.Implements(marshalerFasterType) || reflect.PtrTo(typ).Implements(marshalerFasterType) - mlstd := typ.Implements(marshalerType) || reflect.PtrTo(typ).Implements(marshalerType) - if mlstd && !mlx { - // structure has MarshalJSON, but not our faster version -- skip it. - return false - } - return true -} - -type sortedStructs []*StructInfo - -func (p sortedStructs) Len() int { return len(p) } -func (p sortedStructs) Less(i, j int) bool { return p[i].Name < p[j].Name } -func (p sortedStructs) Swap(i, j int) { p[i], p[j] = p[j], p[i] } -func (p sortedStructs) Sort() { sort.Sort(p) } - -func (i *Inception) generateCode() error { - // We sort the structs by name, so output if predictable. - sorted := sortedStructs(i.objs) - sorted.Sort() - - for _, si := range sorted { - if i.wantMarshal(si) { - err := CreateMarshalJSON(i, si) - if err != nil { - return err - } - } - - if i.wantUnmarshal(si) { - err := CreateUnmarshalJSON(i, si) - if err != nil { - return err - } - } - } - return nil -} - -func (i *Inception) handleError(err error) { - fmt.Fprintf(os.Stderr, "Error: %s:\n\n", err) - os.Exit(1) -} - -func (i *Inception) Execute() { - if len(os.Args) != 1 { - i.handleError(errors.New(fmt.Sprintf("Internal ffjson error: inception executable takes no args: %v", os.Args))) - return - } - - err := i.generateCode() - if err != nil { - i.handleError(err) - return - } - - data, err := RenderTemplate(i) - if err != nil { - i.handleError(err) - return - } - - stat, err := os.Stat(i.InputPath) - - if err != nil { - i.handleError(err) - return - } - - err = ioutil.WriteFile(i.OutputPath, data, stat.Mode()) - - if err != nil { - i.handleError(err) - return - } - -} diff --git a/vendor/github.com/pquerna/ffjson/inception/reflect.go b/vendor/github.com/pquerna/ffjson/inception/reflect.go deleted file mode 100644 index 8fb0bd5cb5..0000000000 --- a/vendor/github.com/pquerna/ffjson/inception/reflect.go +++ /dev/null @@ -1,290 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package ffjsoninception - -import ( - fflib "github.com/pquerna/ffjson/fflib/v1" - "github.com/pquerna/ffjson/shared" - - "bytes" - "encoding/json" - "reflect" - "unicode/utf8" -) - -type StructField struct { - Name string - JsonName string - FoldFuncName string - Typ reflect.Type - OmitEmpty bool - ForceString bool - HasMarshalJSON bool - HasUnmarshalJSON bool - Pointer bool - Tagged bool -} - -type FieldByJsonName []*StructField - -func (a FieldByJsonName) Len() int { return len(a) } -func (a FieldByJsonName) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a FieldByJsonName) Less(i, j int) bool { return a[i].JsonName < a[j].JsonName } - -type StructInfo struct { - Name string - Obj interface{} - Typ reflect.Type - Fields []*StructField - Options shared.StructOptions -} - -func NewStructInfo(obj shared.InceptionType) *StructInfo { - t := reflect.TypeOf(obj.Obj) - return &StructInfo{ - Obj: obj.Obj, - Name: t.Name(), - Typ: t, - Fields: extractFields(obj.Obj), - Options: obj.Options, - } -} - -func (si *StructInfo) FieldsByFirstByte() map[string][]*StructField { - rv := make(map[string][]*StructField) - for _, f := range si.Fields { - b := string(f.JsonName[1]) - rv[b] = append(rv[b], f) - } - return rv -} - -func (si *StructInfo) ReverseFields() []*StructField { - var i int - rv := make([]*StructField, 0) - for i = len(si.Fields) - 1; i >= 0; i-- { - rv = append(rv, si.Fields[i]) - } - return rv -} - -const ( - caseMask = ^byte(0x20) // Mask to ignore case in ASCII. -) - -func foldFunc(key []byte) string { - nonLetter := false - special := false // special letter - for _, b := range key { - if b >= utf8.RuneSelf { - return "bytes.EqualFold" - } - upper := b & caseMask - if upper < 'A' || upper > 'Z' { - nonLetter = true - } else if upper == 'K' || upper == 'S' { - // See above for why these letters are special. - special = true - } - } - if special { - return "fflib.EqualFoldRight" - } - if nonLetter { - return "fflib.AsciiEqualFold" - } - return "fflib.SimpleLetterEqualFold" -} - -type MarshalerFaster interface { - MarshalJSONBuf(buf fflib.EncodingBuffer) error -} - -type UnmarshalFaster interface { - UnmarshalJSONFFLexer(l *fflib.FFLexer, state fflib.FFParseState) error -} - -var marshalerType = reflect.TypeOf(new(json.Marshaler)).Elem() -var marshalerFasterType = reflect.TypeOf(new(MarshalerFaster)).Elem() -var unmarshalerType = reflect.TypeOf(new(json.Unmarshaler)).Elem() -var unmarshalFasterType = reflect.TypeOf(new(UnmarshalFaster)).Elem() - -// extractFields returns a list of fields that JSON should recognize for the given type. -// The algorithm is breadth-first search over the set of structs to include - the top struct -// and then any reachable anonymous structs. -func extractFields(obj interface{}) []*StructField { - t := reflect.TypeOf(obj) - // Anonymous fields to explore at the current level and the next. - current := []StructField{} - next := []StructField{{Typ: t}} - - // Count of queued names for current level and the next. - count := map[reflect.Type]int{} - nextCount := map[reflect.Type]int{} - - // Types already visited at an earlier level. - visited := map[reflect.Type]bool{} - - // Fields found. - var fields []*StructField - - for len(next) > 0 { - current, next = next, current[:0] - count, nextCount = nextCount, map[reflect.Type]int{} - - for _, f := range current { - if visited[f.Typ] { - continue - } - visited[f.Typ] = true - - // Scan f.typ for fields to include. 
- for i := 0; i < f.Typ.NumField(); i++ { - sf := f.Typ.Field(i) - if sf.PkgPath != "" { // unexported - continue - } - tag := sf.Tag.Get("json") - if tag == "-" { - continue - } - name, opts := parseTag(tag) - if !isValidTag(name) { - name = "" - } - - ft := sf.Type - ptr := false - if ft.Kind() == reflect.Ptr { - ptr = true - } - - if ft.Name() == "" && ft.Kind() == reflect.Ptr { - // Follow pointer. - ft = ft.Elem() - } - - // Record found field and index sequence. - if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { - tagged := name != "" - if name == "" { - name = sf.Name - } - - var buf bytes.Buffer - fflib.WriteJsonString(&buf, name) - - field := &StructField{ - Name: sf.Name, - JsonName: string(buf.Bytes()), - FoldFuncName: foldFunc([]byte(name)), - Typ: ft, - HasMarshalJSON: ft.Implements(marshalerType), - HasUnmarshalJSON: ft.Implements(unmarshalerType), - OmitEmpty: opts.Contains("omitempty"), - ForceString: opts.Contains("string"), - Pointer: ptr, - Tagged: tagged, - } - - fields = append(fields, field) - - if count[f.Typ] > 1 { - // If there were multiple instances, add a second, - // so that the annihilation code will see a duplicate. - // It only cares about the distinction between 1 or 2, - // so don't bother generating any more copies. - fields = append(fields, fields[len(fields)-1]) - } - continue - } - - // Record new anonymous struct to explore in next round. - nextCount[ft]++ - if nextCount[ft] == 1 { - next = append(next, StructField{ - Name: ft.Name(), - Typ: ft, - }) - } - } - } - } - - // Delete all fields that are hidden by the Go rules for embedded fields, - // except that fields with JSON tags are promoted. - - // The fields are sorted in primary order of name, secondary order - // of field index length. Loop over names; for each name, delete - // hidden fields by choosing the one dominant field that survives. - out := fields[:0] - for advance, i := 0, 0; i < len(fields); i += advance { - // One iteration per name. - // Find the sequence of fields with the name of this first field. - fi := fields[i] - name := fi.JsonName - for advance = 1; i+advance < len(fields); advance++ { - fj := fields[i+advance] - if fj.JsonName != name { - break - } - } - if advance == 1 { // Only one field with this name - out = append(out, fi) - continue - } - dominant, ok := dominantField(fields[i : i+advance]) - if ok { - out = append(out, dominant) - } - } - - fields = out - - return fields -} - -// dominantField looks through the fields, all of which are known to -// have the same name, to find the single field that dominates the -// others using Go's embedding rules, modified by the presence of -// JSON tags. If there are multiple top-level fields, the boolean -// will be false: This condition is an error in Go and we skip all -// the fields. -func dominantField(fields []*StructField) (*StructField, bool) { - tagged := -1 // Index of first tagged field. - for i, f := range fields { - if f.Tagged { - if tagged >= 0 { - // Multiple tagged fields at the same level: conflict. - // Return no field. - return nil, false - } - tagged = i - } - } - if tagged >= 0 { - return fields[tagged], true - } - // All remaining fields have the same length. If there's more than one, - // we have a conflict (two fields named "X" at the same level) and we - // return no field. 
- if len(fields) > 1 { - return nil, false - } - return fields[0], true -} diff --git a/vendor/github.com/pquerna/ffjson/inception/tags.go b/vendor/github.com/pquerna/ffjson/inception/tags.go deleted file mode 100644 index ccce101b84..0000000000 --- a/vendor/github.com/pquerna/ffjson/inception/tags.go +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "strings" - "unicode" -) - -// from: http://golang.org/src/pkg/encoding/json/tags.go - -// tagOptions is the string following a comma in a struct field's "json" -// tag, or the empty string. It does not include the leading comma. -type tagOptions string - -// parseTag splits a struct field's json tag into its name and -// comma-separated options. -func parseTag(tag string) (string, tagOptions) { - if idx := strings.Index(tag, ","); idx != -1 { - return tag[:idx], tagOptions(tag[idx+1:]) - } - return tag, tagOptions("") -} - -// Contains reports whether a comma-separated list of options -// contains a particular substr flag. substr must be surrounded by a -// string boundary or commas. -func (o tagOptions) Contains(optionName string) bool { - if len(o) == 0 { - return false - } - s := string(o) - for s != "" { - var next string - i := strings.Index(s, ",") - if i >= 0 { - s, next = s[:i], s[i+1:] - } - if s == optionName { - return true - } - s = next - } - return false -} - -func isValidTag(s string) bool { - if s == "" { - return false - } - for _, c := range s { - switch { - case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): - // Backslash and quote chars are reserved, but - // otherwise any punctuation chars are allowed - // in a tag name. - default: - if !unicode.IsLetter(c) && !unicode.IsDigit(c) { - return false - } - } - } - return true -} diff --git a/vendor/github.com/pquerna/ffjson/inception/template.go b/vendor/github.com/pquerna/ffjson/inception/template.go deleted file mode 100644 index 121a23dd8d..0000000000 --- a/vendor/github.com/pquerna/ffjson/inception/template.go +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Copyright 2014 Paul Querna - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package ffjsoninception - -import ( - "bytes" - "go/format" - "text/template" -) - -const ffjsonTemplate = ` -// Code generated by ffjson . DO NOT EDIT. 
-// source: {{.InputPath}} - -package {{.PackageName}} - -import ( -{{range $k, $v := .OutputImports}}{{$k}} -{{end}} -) - -{{range .OutputFuncs}} -{{.}} -{{end}} - -` - -func RenderTemplate(ic *Inception) ([]byte, error) { - t := template.Must(template.New("ffjson.go").Parse(ffjsonTemplate)) - buf := new(bytes.Buffer) - err := t.Execute(buf, ic) - if err != nil { - return nil, err - } - return format.Source(buf.Bytes()) -} - -func tplStr(t *template.Template, data interface{}) string { - buf := bytes.Buffer{} - err := t.Execute(&buf, data) - if err != nil { - panic(err) - } - return buf.String() -} diff --git a/vendor/github.com/pquerna/ffjson/inception/writerstack.go b/vendor/github.com/pquerna/ffjson/inception/writerstack.go deleted file mode 100644 index 1521961c94..0000000000 --- a/vendor/github.com/pquerna/ffjson/inception/writerstack.go +++ /dev/null @@ -1,65 +0,0 @@ -package ffjsoninception - -import "strings" - -// ConditionalWrite is a stack containing a number of pending writes -type ConditionalWrite struct { - Queued []string -} - -// Write will add a string to be written -func (w *ConditionalWrite) Write(s string) { - w.Queued = append(w.Queued, s) -} - -// DeleteLast will delete the last added write -func (w *ConditionalWrite) DeleteLast() { - if len(w.Queued) == 0 { - return - } - w.Queued = w.Queued[:len(w.Queued)-1] -} - -// Last will return the last added write -func (w *ConditionalWrite) Last() string { - if len(w.Queued) == 0 { - return "" - } - return w.Queued[len(w.Queued)-1] -} - -// Flush will return all queued writes, and return -// "" (empty string) in nothing has been queued -// "buf.WriteByte('" + byte + "')" + '\n' if one bute has been queued. -// "buf.WriteString(`" + string + "`)" + "\n" if more than one byte has been queued. -func (w *ConditionalWrite) Flush() string { - combined := strings.Join(w.Queued, "") - if len(combined) == 0 { - return "" - } - - w.Queued = nil - if len(combined) == 1 { - return "buf.WriteByte('" + combined + "')" + "\n" - } - return "buf.WriteString(`" + combined + "`)" + "\n" -} - -func (w *ConditionalWrite) FlushTo(out string) string { - out += w.Flush() - return out -} - -// WriteFlush will add a string and return the Flush result for the queue -func (w *ConditionalWrite) WriteFlush(s string) string { - w.Write(s) - return w.Flush() -} - -// GetQueued will return the current queued content without flushing. -func (w *ConditionalWrite) GetQueued() string { - t := w.Queued - s := w.Flush() - w.Queued = t - return s -} diff --git a/vendor/github.com/pquerna/ffjson/shared/options.go b/vendor/github.com/pquerna/ffjson/shared/options.go deleted file mode 100644 index d74edc1351..0000000000 --- a/vendor/github.com/pquerna/ffjson/shared/options.go +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright 2014 Paul Querna, Klaus Post - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package shared - -type StructOptions struct { - SkipDecoder bool - SkipEncoder bool -} - -type InceptionType struct { - Obj interface{} - Options StructOptions -} -type Feature int - -const ( - Nothing Feature = 0 - MustDecoder = 1 << 1 - MustEncoder = 1 << 2 - MustEncDec = MustDecoder | MustEncoder -) - -func (i InceptionType) HasFeature(f Feature) bool { - return i.HasFeature(f) -} - -func (s StructOptions) HasFeature(f Feature) bool { - hasNeeded := true - if f&MustDecoder != 0 && s.SkipDecoder { - hasNeeded = false - } - if f&MustEncoder != 0 && s.SkipEncoder { - hasNeeded = false - } - return hasNeeded -} diff --git a/vendor/modules.txt b/vendor/modules.txt index c2ed81c6d9..a48648c583 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -100,12 +100,6 @@ github.com/opencontainers/selinux/pkg/pwalk github.com/pkg/errors # github.com/pmezard/go-difflib v1.0.0 github.com/pmezard/go-difflib/difflib -# github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7 -## explicit -github.com/pquerna/ffjson/fflib/v1 -github.com/pquerna/ffjson/fflib/v1/internal -github.com/pquerna/ffjson/inception -github.com/pquerna/ffjson/shared # github.com/sirupsen/logrus v1.8.1 ## explicit github.com/sirupsen/logrus From 5ef1e9d68b2653e8cf616e226555af7b1ff525e0 Mon Sep 17 00:00:00 2001 From: Nalin Dahyabhai Date: Thu, 6 May 2021 11:58:22 -0400 Subject: [PATCH 2/2] Use json-iterator instead of encoding/json Signed-off-by: Nalin Dahyabhai --- containers.go | 1 - drivers/chown.go | 1 - drivers/devmapper/device_setup.go | 1 - drivers/devmapper/deviceset.go | 1 - drivers/devmapper/jsoniter.go | 5 + drivers/jsoniter.go | 5 + drivers/overlay/jsoniter.go | 5 + drivers/overlay/mount.go | 1 - drivers/windows/jsoniter_windows.go | 5 + drivers/windows/windows.go | 1 - go.mod | 1 + go.sum | 4 + images.go | 1 - jsoniter.go | 5 + layers.go | 1 - pkg/chrootarchive/archive_unix.go | 1 - pkg/chrootarchive/diff_unix.go | 1 - pkg/chrootarchive/jsoniter.go | 5 + store.go | 1 - .../github.com/json-iterator/go/.codecov.yml | 3 + vendor/github.com/json-iterator/go/.gitignore | 4 + .../github.com/json-iterator/go/.travis.yml | 14 + vendor/github.com/json-iterator/go/Gopkg.lock | 21 + vendor/github.com/json-iterator/go/Gopkg.toml | 26 + vendor/github.com/json-iterator/go/LICENSE | 21 + vendor/github.com/json-iterator/go/README.md | 87 ++ vendor/github.com/json-iterator/go/adapter.go | 150 +++ vendor/github.com/json-iterator/go/any.go | 325 +++++ .../github.com/json-iterator/go/any_array.go | 278 +++++ .../github.com/json-iterator/go/any_bool.go | 137 ++ .../github.com/json-iterator/go/any_float.go | 83 ++ .../github.com/json-iterator/go/any_int32.go | 74 ++ .../github.com/json-iterator/go/any_int64.go | 74 ++ .../json-iterator/go/any_invalid.go | 82 ++ vendor/github.com/json-iterator/go/any_nil.go | 69 ++ .../github.com/json-iterator/go/any_number.go | 123 ++ .../github.com/json-iterator/go/any_object.go | 374 ++++++ vendor/github.com/json-iterator/go/any_str.go | 166 +++ .../github.com/json-iterator/go/any_uint32.go | 74 ++ .../github.com/json-iterator/go/any_uint64.go | 74 ++ vendor/github.com/json-iterator/go/build.sh | 12 + vendor/github.com/json-iterator/go/config.go | 375 ++++++ .../go/fuzzy_mode_convert_table.md | 7 + vendor/github.com/json-iterator/go/go.mod | 11 + vendor/github.com/json-iterator/go/go.sum | 15 + vendor/github.com/json-iterator/go/iter.go | 349 ++++++ .../github.com/json-iterator/go/iter_array.go | 64 + .../github.com/json-iterator/go/iter_float.go | 342 +++++ 
.../github.com/json-iterator/go/iter_int.go | 346 ++++++ .../json-iterator/go/iter_object.go | 267 ++++ .../github.com/json-iterator/go/iter_skip.go | 130 ++ .../json-iterator/go/iter_skip_sloppy.go | 163 +++ .../json-iterator/go/iter_skip_strict.go | 99 ++ .../github.com/json-iterator/go/iter_str.go | 215 ++++ .../github.com/json-iterator/go/jsoniter.go | 18 + vendor/github.com/json-iterator/go/pool.go | 42 + vendor/github.com/json-iterator/go/reflect.go | 337 +++++ .../json-iterator/go/reflect_array.go | 104 ++ .../json-iterator/go/reflect_dynamic.go | 70 ++ .../json-iterator/go/reflect_extension.go | 483 ++++++++ .../json-iterator/go/reflect_json_number.go | 112 ++ .../go/reflect_json_raw_message.go | 76 ++ .../json-iterator/go/reflect_map.go | 346 ++++++ .../json-iterator/go/reflect_marshaler.go | 225 ++++ .../json-iterator/go/reflect_native.go | 453 +++++++ .../json-iterator/go/reflect_optional.go | 129 ++ .../json-iterator/go/reflect_slice.go | 99 ++ .../go/reflect_struct_decoder.go | 1097 +++++++++++++++++ .../go/reflect_struct_encoder.go | 211 ++++ vendor/github.com/json-iterator/go/stream.go | 210 ++++ .../json-iterator/go/stream_float.go | 111 ++ .../github.com/json-iterator/go/stream_int.go | 190 +++ .../github.com/json-iterator/go/stream_str.go | 372 ++++++ vendor/github.com/json-iterator/go/test.sh | 12 + .../modern-go/concurrent/.gitignore | 1 + .../modern-go/concurrent/.travis.yml | 14 + .../github.com/modern-go/concurrent/LICENSE | 201 +++ .../github.com/modern-go/concurrent/README.md | 49 + .../modern-go/concurrent/executor.go | 14 + .../modern-go/concurrent/go_above_19.go | 15 + .../modern-go/concurrent/go_below_19.go | 33 + vendor/github.com/modern-go/concurrent/log.go | 13 + .../github.com/modern-go/concurrent/test.sh | 12 + .../concurrent/unbounded_executor.go | 119 ++ .../github.com/modern-go/reflect2/.gitignore | 2 + .../github.com/modern-go/reflect2/.travis.yml | 15 + .../github.com/modern-go/reflect2/Gopkg.lock | 15 + .../github.com/modern-go/reflect2/Gopkg.toml | 35 + vendor/github.com/modern-go/reflect2/LICENSE | 201 +++ .../github.com/modern-go/reflect2/README.md | 71 ++ .../modern-go/reflect2/go_above_17.go | 8 + .../modern-go/reflect2/go_above_19.go | 14 + .../modern-go/reflect2/go_below_17.go | 9 + .../modern-go/reflect2/go_below_19.go | 14 + .../github.com/modern-go/reflect2/reflect2.go | 298 +++++ .../modern-go/reflect2/reflect2_amd64.s | 0 .../modern-go/reflect2/reflect2_kind.go | 30 + .../modern-go/reflect2/relfect2_386.s | 0 .../modern-go/reflect2/relfect2_amd64p32.s | 0 .../modern-go/reflect2/relfect2_arm.s | 0 .../modern-go/reflect2/relfect2_arm64.s | 0 .../modern-go/reflect2/relfect2_mips64x.s | 0 .../modern-go/reflect2/relfect2_mipsx.s | 0 .../modern-go/reflect2/relfect2_ppc64x.s | 0 .../modern-go/reflect2/relfect2_s390x.s | 0 .../modern-go/reflect2/safe_field.go | 58 + .../github.com/modern-go/reflect2/safe_map.go | 101 ++ .../modern-go/reflect2/safe_slice.go | 92 ++ .../modern-go/reflect2/safe_struct.go | 29 + .../modern-go/reflect2/safe_type.go | 78 ++ vendor/github.com/modern-go/reflect2/test.sh | 12 + .../github.com/modern-go/reflect2/type_map.go | 113 ++ .../modern-go/reflect2/unsafe_array.go | 65 + .../modern-go/reflect2/unsafe_eface.go | 59 + .../modern-go/reflect2/unsafe_field.go | 74 ++ .../modern-go/reflect2/unsafe_iface.go | 64 + .../modern-go/reflect2/unsafe_link.go | 70 ++ .../modern-go/reflect2/unsafe_map.go | 138 +++ .../modern-go/reflect2/unsafe_ptr.go | 46 + .../modern-go/reflect2/unsafe_slice.go | 177 +++ 
.../modern-go/reflect2/unsafe_struct.go | 59 + .../modern-go/reflect2/unsafe_type.go | 85 ++ vendor/modules.txt | 7 + 123 files changed, 11896 insertions(+), 11 deletions(-) create mode 100644 drivers/devmapper/jsoniter.go create mode 100644 drivers/jsoniter.go create mode 100644 drivers/overlay/jsoniter.go create mode 100644 drivers/windows/jsoniter_windows.go create mode 100644 jsoniter.go create mode 100644 pkg/chrootarchive/jsoniter.go create mode 100644 vendor/github.com/json-iterator/go/.codecov.yml create mode 100644 vendor/github.com/json-iterator/go/.gitignore create mode 100644 vendor/github.com/json-iterator/go/.travis.yml create mode 100644 vendor/github.com/json-iterator/go/Gopkg.lock create mode 100644 vendor/github.com/json-iterator/go/Gopkg.toml create mode 100644 vendor/github.com/json-iterator/go/LICENSE create mode 100644 vendor/github.com/json-iterator/go/README.md create mode 100644 vendor/github.com/json-iterator/go/adapter.go create mode 100644 vendor/github.com/json-iterator/go/any.go create mode 100644 vendor/github.com/json-iterator/go/any_array.go create mode 100644 vendor/github.com/json-iterator/go/any_bool.go create mode 100644 vendor/github.com/json-iterator/go/any_float.go create mode 100644 vendor/github.com/json-iterator/go/any_int32.go create mode 100644 vendor/github.com/json-iterator/go/any_int64.go create mode 100644 vendor/github.com/json-iterator/go/any_invalid.go create mode 100644 vendor/github.com/json-iterator/go/any_nil.go create mode 100644 vendor/github.com/json-iterator/go/any_number.go create mode 100644 vendor/github.com/json-iterator/go/any_object.go create mode 100644 vendor/github.com/json-iterator/go/any_str.go create mode 100644 vendor/github.com/json-iterator/go/any_uint32.go create mode 100644 vendor/github.com/json-iterator/go/any_uint64.go create mode 100644 vendor/github.com/json-iterator/go/build.sh create mode 100644 vendor/github.com/json-iterator/go/config.go create mode 100644 vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md create mode 100644 vendor/github.com/json-iterator/go/go.mod create mode 100644 vendor/github.com/json-iterator/go/go.sum create mode 100644 vendor/github.com/json-iterator/go/iter.go create mode 100644 vendor/github.com/json-iterator/go/iter_array.go create mode 100644 vendor/github.com/json-iterator/go/iter_float.go create mode 100644 vendor/github.com/json-iterator/go/iter_int.go create mode 100644 vendor/github.com/json-iterator/go/iter_object.go create mode 100644 vendor/github.com/json-iterator/go/iter_skip.go create mode 100644 vendor/github.com/json-iterator/go/iter_skip_sloppy.go create mode 100644 vendor/github.com/json-iterator/go/iter_skip_strict.go create mode 100644 vendor/github.com/json-iterator/go/iter_str.go create mode 100644 vendor/github.com/json-iterator/go/jsoniter.go create mode 100644 vendor/github.com/json-iterator/go/pool.go create mode 100644 vendor/github.com/json-iterator/go/reflect.go create mode 100644 vendor/github.com/json-iterator/go/reflect_array.go create mode 100644 vendor/github.com/json-iterator/go/reflect_dynamic.go create mode 100644 vendor/github.com/json-iterator/go/reflect_extension.go create mode 100644 vendor/github.com/json-iterator/go/reflect_json_number.go create mode 100644 vendor/github.com/json-iterator/go/reflect_json_raw_message.go create mode 100644 vendor/github.com/json-iterator/go/reflect_map.go create mode 100644 vendor/github.com/json-iterator/go/reflect_marshaler.go create mode 100644 
vendor/github.com/json-iterator/go/reflect_native.go create mode 100644 vendor/github.com/json-iterator/go/reflect_optional.go create mode 100644 vendor/github.com/json-iterator/go/reflect_slice.go create mode 100644 vendor/github.com/json-iterator/go/reflect_struct_decoder.go create mode 100644 vendor/github.com/json-iterator/go/reflect_struct_encoder.go create mode 100644 vendor/github.com/json-iterator/go/stream.go create mode 100644 vendor/github.com/json-iterator/go/stream_float.go create mode 100644 vendor/github.com/json-iterator/go/stream_int.go create mode 100644 vendor/github.com/json-iterator/go/stream_str.go create mode 100644 vendor/github.com/json-iterator/go/test.sh create mode 100644 vendor/github.com/modern-go/concurrent/.gitignore create mode 100644 vendor/github.com/modern-go/concurrent/.travis.yml create mode 100644 vendor/github.com/modern-go/concurrent/LICENSE create mode 100644 vendor/github.com/modern-go/concurrent/README.md create mode 100644 vendor/github.com/modern-go/concurrent/executor.go create mode 100644 vendor/github.com/modern-go/concurrent/go_above_19.go create mode 100644 vendor/github.com/modern-go/concurrent/go_below_19.go create mode 100644 vendor/github.com/modern-go/concurrent/log.go create mode 100644 vendor/github.com/modern-go/concurrent/test.sh create mode 100644 vendor/github.com/modern-go/concurrent/unbounded_executor.go create mode 100644 vendor/github.com/modern-go/reflect2/.gitignore create mode 100644 vendor/github.com/modern-go/reflect2/.travis.yml create mode 100644 vendor/github.com/modern-go/reflect2/Gopkg.lock create mode 100644 vendor/github.com/modern-go/reflect2/Gopkg.toml create mode 100644 vendor/github.com/modern-go/reflect2/LICENSE create mode 100644 vendor/github.com/modern-go/reflect2/README.md create mode 100644 vendor/github.com/modern-go/reflect2/go_above_17.go create mode 100644 vendor/github.com/modern-go/reflect2/go_above_19.go create mode 100644 vendor/github.com/modern-go/reflect2/go_below_17.go create mode 100644 vendor/github.com/modern-go/reflect2/go_below_19.go create mode 100644 vendor/github.com/modern-go/reflect2/reflect2.go create mode 100644 vendor/github.com/modern-go/reflect2/reflect2_amd64.s create mode 100644 vendor/github.com/modern-go/reflect2/reflect2_kind.go create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_386.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_amd64p32.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_arm.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_arm64.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_mips64x.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_mipsx.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_ppc64x.s create mode 100644 vendor/github.com/modern-go/reflect2/relfect2_s390x.s create mode 100644 vendor/github.com/modern-go/reflect2/safe_field.go create mode 100644 vendor/github.com/modern-go/reflect2/safe_map.go create mode 100644 vendor/github.com/modern-go/reflect2/safe_slice.go create mode 100644 vendor/github.com/modern-go/reflect2/safe_struct.go create mode 100644 vendor/github.com/modern-go/reflect2/safe_type.go create mode 100644 vendor/github.com/modern-go/reflect2/test.sh create mode 100644 vendor/github.com/modern-go/reflect2/type_map.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_array.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_eface.go create mode 100644 
vendor/github.com/modern-go/reflect2/unsafe_field.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_iface.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_link.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_map.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_ptr.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_slice.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_struct.go create mode 100644 vendor/github.com/modern-go/reflect2/unsafe_type.go diff --git a/containers.go b/containers.go index 513f96c2c3..90b58dedab 100644 --- a/containers.go +++ b/containers.go @@ -1,7 +1,6 @@ package storage import ( - "encoding/json" "fmt" "io/ioutil" "os" diff --git a/drivers/chown.go b/drivers/chown.go index 7604a86db1..63bfd2d136 100644 --- a/drivers/chown.go +++ b/drivers/chown.go @@ -2,7 +2,6 @@ package graphdriver import ( "bytes" - "encoding/json" "fmt" "os" diff --git a/drivers/devmapper/device_setup.go b/drivers/devmapper/device_setup.go index 5bfbb49e2a..c23097b763 100644 --- a/drivers/devmapper/device_setup.go +++ b/drivers/devmapper/device_setup.go @@ -5,7 +5,6 @@ package devmapper import ( "bufio" "bytes" - "encoding/json" "fmt" "io/ioutil" "os" diff --git a/drivers/devmapper/deviceset.go b/drivers/devmapper/deviceset.go index 775dc1685d..3b74bcf4d3 100644 --- a/drivers/devmapper/deviceset.go +++ b/drivers/devmapper/deviceset.go @@ -4,7 +4,6 @@ package devmapper import ( "bufio" - "encoding/json" "fmt" "io" "io/ioutil" diff --git a/drivers/devmapper/jsoniter.go b/drivers/devmapper/jsoniter.go new file mode 100644 index 0000000000..54db6ab4ae --- /dev/null +++ b/drivers/devmapper/jsoniter.go @@ -0,0 +1,5 @@ +package devmapper + +import jsoniter "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary diff --git a/drivers/jsoniter.go b/drivers/jsoniter.go new file mode 100644 index 0000000000..097f923ab5 --- /dev/null +++ b/drivers/jsoniter.go @@ -0,0 +1,5 @@ +package graphdriver + +import jsoniter "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary diff --git a/drivers/overlay/jsoniter.go b/drivers/overlay/jsoniter.go new file mode 100644 index 0000000000..2a1e9d0cc1 --- /dev/null +++ b/drivers/overlay/jsoniter.go @@ -0,0 +1,5 @@ +package overlay + +import jsoniter "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary diff --git a/drivers/overlay/mount.go b/drivers/overlay/mount.go index feb0395924..7c8fd50a3a 100644 --- a/drivers/overlay/mount.go +++ b/drivers/overlay/mount.go @@ -4,7 +4,6 @@ package overlay import ( "bytes" - "encoding/json" "flag" "fmt" "os" diff --git a/drivers/windows/jsoniter_windows.go b/drivers/windows/jsoniter_windows.go new file mode 100644 index 0000000000..fa14126301 --- /dev/null +++ b/drivers/windows/jsoniter_windows.go @@ -0,0 +1,5 @@ +package windows + +import jsoniter "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary diff --git a/drivers/windows/windows.go b/drivers/windows/windows.go index c8a3408015..1491517413 100644 --- a/drivers/windows/windows.go +++ b/drivers/windows/windows.go @@ -6,7 +6,6 @@ import ( "archive/tar" "bufio" "bytes" - "encoding/json" "errors" "fmt" "io" diff --git a/go.mod b/go.mod index 1600a03636..08a54d60d1 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/docker/go-units v0.4.0 github.com/google/go-intervals v0.0.2 github.com/hashicorp/go-multierror v1.1.1 + 
github.com/json-iterator/go v1.1.11 github.com/klauspost/compress v1.12.2 github.com/klauspost/pgzip v1.2.5 github.com/mattn/go-shellwords v1.0.11 diff --git a/go.sum b/go.sum index 825ab3ff85..dc2cbc338b 100644 --- a/go.sum +++ b/go.sum @@ -330,6 +330,8 @@ github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22 github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= @@ -377,8 +379,10 @@ github.com/moby/sys/mountinfo v0.4.1/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2J github.com/moby/sys/symlink v0.1.0/go.mod h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ= github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= diff --git a/images.go b/images.go index 2808f579f2..b3a2821b6a 100644 --- a/images.go +++ b/images.go @@ -1,7 +1,6 @@ package storage import ( - "encoding/json" "io/ioutil" "os" "path/filepath" diff --git a/jsoniter.go b/jsoniter.go new file mode 100644 index 0000000000..7dd6388d7f --- /dev/null +++ b/jsoniter.go @@ -0,0 +1,5 @@ +package storage + +import jsoniter "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary diff --git a/layers.go b/layers.go index d398a3ff94..b0cea50d0e 100644 --- a/layers.go +++ b/layers.go @@ -2,7 +2,6 @@ package storage import ( "bytes" - "encoding/json" "fmt" "io" "io/ioutil" diff --git a/pkg/chrootarchive/archive_unix.go b/pkg/chrootarchive/archive_unix.go index 630826db1e..9da10fe33c 100644 --- a/pkg/chrootarchive/archive_unix.go +++ b/pkg/chrootarchive/archive_unix.go @@ -4,7 +4,6 @@ package chrootarchive import ( "bytes" - "encoding/json" "flag" "fmt" "io" diff --git a/pkg/chrootarchive/diff_unix.go b/pkg/chrootarchive/diff_unix.go index 4369f30c99..c884d3784b 100644 --- a/pkg/chrootarchive/diff_unix.go +++ b/pkg/chrootarchive/diff_unix.go @@ -4,7 +4,6 @@ package chrootarchive import ( "bytes" - "encoding/json" "flag" "fmt" "io" diff --git a/pkg/chrootarchive/jsoniter.go 
b/pkg/chrootarchive/jsoniter.go new file mode 100644 index 0000000000..63f9704564 --- /dev/null +++ b/pkg/chrootarchive/jsoniter.go @@ -0,0 +1,5 @@ +package chrootarchive + +import jsoniter "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary diff --git a/store.go b/store.go index 25767b7a2f..b449d000cc 100644 --- a/store.go +++ b/store.go @@ -2,7 +2,6 @@ package storage import ( "encoding/base64" - "encoding/json" "fmt" "io" "io/ioutil" diff --git a/vendor/github.com/json-iterator/go/.codecov.yml b/vendor/github.com/json-iterator/go/.codecov.yml new file mode 100644 index 0000000000..955dc0be5f --- /dev/null +++ b/vendor/github.com/json-iterator/go/.codecov.yml @@ -0,0 +1,3 @@ +ignore: + - "output_tests/.*" + diff --git a/vendor/github.com/json-iterator/go/.gitignore b/vendor/github.com/json-iterator/go/.gitignore new file mode 100644 index 0000000000..15556530a8 --- /dev/null +++ b/vendor/github.com/json-iterator/go/.gitignore @@ -0,0 +1,4 @@ +/vendor +/bug_test.go +/coverage.txt +/.idea diff --git a/vendor/github.com/json-iterator/go/.travis.yml b/vendor/github.com/json-iterator/go/.travis.yml new file mode 100644 index 0000000000..449e67cd01 --- /dev/null +++ b/vendor/github.com/json-iterator/go/.travis.yml @@ -0,0 +1,14 @@ +language: go + +go: + - 1.8.x + - 1.x + +before_install: + - go get -t -v ./... + +script: + - ./test.sh + +after_success: + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/json-iterator/go/Gopkg.lock b/vendor/github.com/json-iterator/go/Gopkg.lock new file mode 100644 index 0000000000..c8a9fbb387 --- /dev/null +++ b/vendor/github.com/json-iterator/go/Gopkg.lock @@ -0,0 +1,21 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + name = "github.com/modern-go/concurrent" + packages = ["."] + revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a" + version = "1.0.0" + +[[projects]] + name = "github.com/modern-go/reflect2" + packages = ["."] + revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd" + version = "1.0.1" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/json-iterator/go/Gopkg.toml b/vendor/github.com/json-iterator/go/Gopkg.toml new file mode 100644 index 0000000000..313a0f887b --- /dev/null +++ b/vendor/github.com/json-iterator/go/Gopkg.toml @@ -0,0 +1,26 @@ +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. 
+# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" + +ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"] + +[[constraint]] + name = "github.com/modern-go/reflect2" + version = "1.0.1" diff --git a/vendor/github.com/json-iterator/go/LICENSE b/vendor/github.com/json-iterator/go/LICENSE new file mode 100644 index 0000000000..2cf4f5ab28 --- /dev/null +++ b/vendor/github.com/json-iterator/go/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 json-iterator + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
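The patch wires json-iterator in through small per-package shim files (jsoniter.go, drivers/jsoniter.go, drivers/devmapper/jsoniter.go, drivers/overlay/jsoniter.go, drivers/windows/jsoniter_windows.go, pkg/chrootarchive/jsoniter.go), each of which declares "var json = jsoniter.ConfigCompatibleWithStandardLibrary". Because that package-level variable takes over the identifier that previously referred to the encoding/json package, existing json.Marshal and json.Unmarshal call sites compile unchanged once the import is dropped, which is why the hunks for containers.go, images.go, layers.go, store.go and the driver packages only remove an import line. The following is a minimal, self-contained sketch of that pattern; the shim line is taken verbatim from the files added above, while the container type and the main function are illustrative only and are not part of this change.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Shim in the style of the jsoniter.go files added by this patch: the
// package-level "json" variable stands in for the encoding/json package,
// so existing json.Marshal / json.Unmarshal call sites need no edits.
var json = jsoniter.ConfigCompatibleWithStandardLibrary

// container is a hypothetical type used only for this illustration.
type container struct {
	ID    string   `json:"id"`
	Names []string `json:"names,omitempty"`
}

func main() {
	src := container{ID: "abc123", Names: []string{"web"}}

	// Encode through jsoniter's standard-library-compatible configuration.
	data, err := json.Marshal(&src)
	if err != nil {
		panic(err)
	}

	// Decode back through the same shim.
	var dst container
	if err := json.Unmarshal(data, &dst); err != nil {
		panic(err)
	}

	fmt.Printf("%s -> %+v\n", data, dst)
}
```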
diff --git a/vendor/github.com/json-iterator/go/README.md b/vendor/github.com/json-iterator/go/README.md new file mode 100644 index 0000000000..52b111d5f3 --- /dev/null +++ b/vendor/github.com/json-iterator/go/README.md @@ -0,0 +1,87 @@ +[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge) +[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](https://pkg.go.dev/github.com/json-iterator/go) +[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go) +[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go) +[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go) +[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE) +[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby) + +A high-performance 100% compatible drop-in replacement of "encoding/json" + +You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go) + +# Benchmark + +![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png) + +Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go + +Raw Result (easyjson requires static code generation) + +| | ns/op | allocation bytes | allocation times | +| --------------- | ----------- | ---------------- | ---------------- | +| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op | +| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op | +| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op | +| std encode | 2213 ns/op | 712 B/op | 5 allocs/op | +| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op | +| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op | + +Always benchmark with your own workload. +The result depends heavily on the data input. + +# Usage + +100% compatibility with standard lib + +Replace + +```go +import "encoding/json" +json.Marshal(&data) +``` + +with + +```go +import jsoniter "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary +json.Marshal(&data) +``` + +Replace + +```go +import "encoding/json" +json.Unmarshal(input, &data) +``` + +with + +```go +import jsoniter "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary +json.Unmarshal(input, &data) +``` + +[More documentation](http://jsoniter.com/migrate-from-go-std.html) + +# How to get + +``` +go get github.com/json-iterator/go +``` + +# Contribution Welcomed ! 
+ +Contributors + +- [thockin](https://github.com/thockin) +- [mattn](https://github.com/mattn) +- [cch123](https://github.com/cch123) +- [Oleg Shaldybin](https://github.com/olegshaldybin) +- [Jason Toffaletti](https://github.com/toffaletti) + +Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby) diff --git a/vendor/github.com/json-iterator/go/adapter.go b/vendor/github.com/json-iterator/go/adapter.go new file mode 100644 index 0000000000..92d2cc4a3d --- /dev/null +++ b/vendor/github.com/json-iterator/go/adapter.go @@ -0,0 +1,150 @@ +package jsoniter + +import ( + "bytes" + "io" +) + +// RawMessage to make replace json with jsoniter +type RawMessage []byte + +// Unmarshal adapts to json/encoding Unmarshal API +// +// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v. +// Refer to https://godoc.org/encoding/json#Unmarshal for more information +func Unmarshal(data []byte, v interface{}) error { + return ConfigDefault.Unmarshal(data, v) +} + +// UnmarshalFromString is a convenient method to read from string instead of []byte +func UnmarshalFromString(str string, v interface{}) error { + return ConfigDefault.UnmarshalFromString(str, v) +} + +// Get quick method to get value from deeply nested JSON structure +func Get(data []byte, path ...interface{}) Any { + return ConfigDefault.Get(data, path...) +} + +// Marshal adapts to json/encoding Marshal API +// +// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API +// Refer to https://godoc.org/encoding/json#Marshal for more information +func Marshal(v interface{}) ([]byte, error) { + return ConfigDefault.Marshal(v) +} + +// MarshalIndent same as json.MarshalIndent. Prefix is not supported. +func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { + return ConfigDefault.MarshalIndent(v, prefix, indent) +} + +// MarshalToString convenient method to write as string instead of []byte +func MarshalToString(v interface{}) (string, error) { + return ConfigDefault.MarshalToString(v) +} + +// NewDecoder adapts to json/stream NewDecoder API. +// +// NewDecoder returns a new decoder that reads from r. +// +// Instead of a json/encoding Decoder, an Decoder is returned +// Refer to https://godoc.org/encoding/json#NewDecoder for more information +func NewDecoder(reader io.Reader) *Decoder { + return ConfigDefault.NewDecoder(reader) +} + +// Decoder reads and decodes JSON values from an input stream. +// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress) +type Decoder struct { + iter *Iterator +} + +// Decode decode JSON into interface{} +func (adapter *Decoder) Decode(obj interface{}) error { + if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil { + if !adapter.iter.loadMore() { + return io.EOF + } + } + adapter.iter.ReadVal(obj) + err := adapter.iter.Error + if err == io.EOF { + return nil + } + return adapter.iter.Error +} + +// More is there more? 
+func (adapter *Decoder) More() bool { + iter := adapter.iter + if iter.Error != nil { + return false + } + c := iter.nextToken() + if c == 0 { + return false + } + iter.unreadByte() + return c != ']' && c != '}' +} + +// Buffered remaining buffer +func (adapter *Decoder) Buffered() io.Reader { + remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail] + return bytes.NewReader(remaining) +} + +// UseNumber causes the Decoder to unmarshal a number into an interface{} as a +// Number instead of as a float64. +func (adapter *Decoder) UseNumber() { + cfg := adapter.iter.cfg.configBeforeFrozen + cfg.UseNumber = true + adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions) +} + +// DisallowUnknownFields causes the Decoder to return an error when the destination +// is a struct and the input contains object keys which do not match any +// non-ignored, exported fields in the destination. +func (adapter *Decoder) DisallowUnknownFields() { + cfg := adapter.iter.cfg.configBeforeFrozen + cfg.DisallowUnknownFields = true + adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions) +} + +// NewEncoder same as json.NewEncoder +func NewEncoder(writer io.Writer) *Encoder { + return ConfigDefault.NewEncoder(writer) +} + +// Encoder same as json.Encoder +type Encoder struct { + stream *Stream +} + +// Encode encode interface{} as JSON to io.Writer +func (adapter *Encoder) Encode(val interface{}) error { + adapter.stream.WriteVal(val) + adapter.stream.WriteRaw("\n") + adapter.stream.Flush() + return adapter.stream.Error +} + +// SetIndent set the indention. Prefix is not supported +func (adapter *Encoder) SetIndent(prefix, indent string) { + config := adapter.stream.cfg.configBeforeFrozen + config.IndentionStep = len(indent) + adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions) +} + +// SetEscapeHTML escape html by default, set to false to disable +func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) { + config := adapter.stream.cfg.configBeforeFrozen + config.EscapeHTML = escapeHTML + adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions) +} + +// Valid reports whether data is a valid JSON encoding. +func Valid(data []byte) bool { + return ConfigDefault.Valid(data) +} diff --git a/vendor/github.com/json-iterator/go/any.go b/vendor/github.com/json-iterator/go/any.go new file mode 100644 index 0000000000..f6b8aeab0a --- /dev/null +++ b/vendor/github.com/json-iterator/go/any.go @@ -0,0 +1,325 @@ +package jsoniter + +import ( + "errors" + "fmt" + "github.com/modern-go/reflect2" + "io" + "reflect" + "strconv" + "unsafe" +) + +// Any generic object representation. +// The lazy json implementation holds []byte and parse lazily. 
+type Any interface { + LastError() error + ValueType() ValueType + MustBeValid() Any + ToBool() bool + ToInt() int + ToInt32() int32 + ToInt64() int64 + ToUint() uint + ToUint32() uint32 + ToUint64() uint64 + ToFloat32() float32 + ToFloat64() float64 + ToString() string + ToVal(val interface{}) + Get(path ...interface{}) Any + Size() int + Keys() []string + GetInterface() interface{} + WriteTo(stream *Stream) +} + +type baseAny struct{} + +func (any *baseAny) Get(path ...interface{}) Any { + return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)} +} + +func (any *baseAny) Size() int { + return 0 +} + +func (any *baseAny) Keys() []string { + return []string{} +} + +func (any *baseAny) ToVal(obj interface{}) { + panic("not implemented") +} + +// WrapInt32 turn int32 into Any interface +func WrapInt32(val int32) Any { + return &int32Any{baseAny{}, val} +} + +// WrapInt64 turn int64 into Any interface +func WrapInt64(val int64) Any { + return &int64Any{baseAny{}, val} +} + +// WrapUint32 turn uint32 into Any interface +func WrapUint32(val uint32) Any { + return &uint32Any{baseAny{}, val} +} + +// WrapUint64 turn uint64 into Any interface +func WrapUint64(val uint64) Any { + return &uint64Any{baseAny{}, val} +} + +// WrapFloat64 turn float64 into Any interface +func WrapFloat64(val float64) Any { + return &floatAny{baseAny{}, val} +} + +// WrapString turn string into Any interface +func WrapString(val string) Any { + return &stringAny{baseAny{}, val} +} + +// Wrap turn a go object into Any interface +func Wrap(val interface{}) Any { + if val == nil { + return &nilAny{} + } + asAny, isAny := val.(Any) + if isAny { + return asAny + } + typ := reflect2.TypeOf(val) + switch typ.Kind() { + case reflect.Slice: + return wrapArray(val) + case reflect.Struct: + return wrapStruct(val) + case reflect.Map: + return wrapMap(val) + case reflect.String: + return WrapString(val.(string)) + case reflect.Int: + if strconv.IntSize == 32 { + return WrapInt32(int32(val.(int))) + } + return WrapInt64(int64(val.(int))) + case reflect.Int8: + return WrapInt32(int32(val.(int8))) + case reflect.Int16: + return WrapInt32(int32(val.(int16))) + case reflect.Int32: + return WrapInt32(val.(int32)) + case reflect.Int64: + return WrapInt64(val.(int64)) + case reflect.Uint: + if strconv.IntSize == 32 { + return WrapUint32(uint32(val.(uint))) + } + return WrapUint64(uint64(val.(uint))) + case reflect.Uintptr: + if ptrSize == 32 { + return WrapUint32(uint32(val.(uintptr))) + } + return WrapUint64(uint64(val.(uintptr))) + case reflect.Uint8: + return WrapUint32(uint32(val.(uint8))) + case reflect.Uint16: + return WrapUint32(uint32(val.(uint16))) + case reflect.Uint32: + return WrapUint32(uint32(val.(uint32))) + case reflect.Uint64: + return WrapUint64(val.(uint64)) + case reflect.Float32: + return WrapFloat64(float64(val.(float32))) + case reflect.Float64: + return WrapFloat64(val.(float64)) + case reflect.Bool: + if val.(bool) == true { + return &trueAny{} + } + return &falseAny{} + } + return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)} +} + +// ReadAny read next JSON element as an Any object. It is a better json.RawMessage. 
+func (iter *Iterator) ReadAny() Any { + return iter.readAny() +} + +func (iter *Iterator) readAny() Any { + c := iter.nextToken() + switch c { + case '"': + iter.unreadByte() + return &stringAny{baseAny{}, iter.ReadString()} + case 'n': + iter.skipThreeBytes('u', 'l', 'l') // null + return &nilAny{} + case 't': + iter.skipThreeBytes('r', 'u', 'e') // true + return &trueAny{} + case 'f': + iter.skipFourBytes('a', 'l', 's', 'e') // false + return &falseAny{} + case '{': + return iter.readObjectAny() + case '[': + return iter.readArrayAny() + case '-': + return iter.readNumberAny(false) + case 0: + return &invalidAny{baseAny{}, errors.New("input is empty")} + default: + return iter.readNumberAny(true) + } +} + +func (iter *Iterator) readNumberAny(positive bool) Any { + iter.startCapture(iter.head - 1) + iter.skipNumber() + lazyBuf := iter.stopCapture() + return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func (iter *Iterator) readObjectAny() Any { + iter.startCapture(iter.head - 1) + iter.skipObject() + lazyBuf := iter.stopCapture() + return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func (iter *Iterator) readArrayAny() Any { + iter.startCapture(iter.head - 1) + iter.skipArray() + lazyBuf := iter.stopCapture() + return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func locateObjectField(iter *Iterator, target string) []byte { + var found []byte + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + if field == target { + found = iter.SkipAndReturnBytes() + return false + } + iter.Skip() + return true + }) + return found +} + +func locateArrayElement(iter *Iterator, target int) []byte { + var found []byte + n := 0 + iter.ReadArrayCB(func(iter *Iterator) bool { + if n == target { + found = iter.SkipAndReturnBytes() + return false + } + iter.Skip() + n++ + return true + }) + return found +} + +func locatePath(iter *Iterator, path []interface{}) Any { + for i, pathKeyObj := range path { + switch pathKey := pathKeyObj.(type) { + case string: + valueBytes := locateObjectField(iter, pathKey) + if valueBytes == nil { + return newInvalidAny(path[i:]) + } + iter.ResetBytes(valueBytes) + case int: + valueBytes := locateArrayElement(iter, pathKey) + if valueBytes == nil { + return newInvalidAny(path[i:]) + } + iter.ResetBytes(valueBytes) + case int32: + if '*' == pathKey { + return iter.readAny().Get(path[i:]...) 
+ } + return newInvalidAny(path[i:]) + default: + return newInvalidAny(path[i:]) + } + } + if iter.Error != nil && iter.Error != io.EOF { + return &invalidAny{baseAny{}, iter.Error} + } + return iter.readAny() +} + +var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem() + +func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder { + if typ == anyType { + return &directAnyCodec{} + } + if typ.Implements(anyType) { + return &anyCodec{ + valType: typ, + } + } + return nil +} + +func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ == anyType { + return &directAnyCodec{} + } + if typ.Implements(anyType) { + return &anyCodec{ + valType: typ, + } + } + return nil +} + +type anyCodec struct { + valType reflect2.Type +} + +func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + panic("not implemented") +} + +func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := codec.valType.UnsafeIndirect(ptr) + any := obj.(Any) + any.WriteTo(stream) +} + +func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool { + obj := codec.valType.UnsafeIndirect(ptr) + any := obj.(Any) + return any.Size() == 0 +} + +type directAnyCodec struct { +} + +func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *(*Any)(ptr) = iter.readAny() +} + +func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + any := *(*Any)(ptr) + if any == nil { + stream.WriteNil() + return + } + any.WriteTo(stream) +} + +func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool { + any := *(*Any)(ptr) + return any.Size() == 0 +} diff --git a/vendor/github.com/json-iterator/go/any_array.go b/vendor/github.com/json-iterator/go/any_array.go new file mode 100644 index 0000000000..0449e9aa42 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_array.go @@ -0,0 +1,278 @@ +package jsoniter + +import ( + "reflect" + "unsafe" +) + +type arrayLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *arrayLazyAny) ValueType() ValueType { + return ArrayValue +} + +func (any *arrayLazyAny) MustBeValid() Any { + return any +} + +func (any *arrayLazyAny) LastError() error { + return any.err +} + +func (any *arrayLazyAny) ToBool() bool { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.ReadArray() +} + +func (any *arrayLazyAny) ToInt() int { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToInt32() int32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToInt64() int64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint() uint { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint32() uint32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint64() uint64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToFloat32() float32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToFloat64() float64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *arrayLazyAny) ToVal(val interface{}) { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadVal(val) +} + +func (any *arrayLazyAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int: + iter := 
any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + valueBytes := locateArrayElement(iter, firstPath) + if valueBytes == nil { + return newInvalidAny(path) + } + iter.ResetBytes(valueBytes) + return locatePath(iter, path[1:]) + case int32: + if '*' == firstPath { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + arr := make([]Any, 0) + iter.ReadArrayCB(func(iter *Iterator) bool { + found := iter.readAny().Get(path[1:]...) + if found.ValueType() != InvalidValue { + arr = append(arr, found) + } + return true + }) + return wrapArray(arr) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *arrayLazyAny) Size() int { + size := 0 + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadArrayCB(func(iter *Iterator) bool { + size++ + iter.Skip() + return true + }) + return size +} + +func (any *arrayLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *arrayLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} + +type arrayAny struct { + baseAny + val reflect.Value +} + +func wrapArray(val interface{}) *arrayAny { + return &arrayAny{baseAny{}, reflect.ValueOf(val)} +} + +func (any *arrayAny) ValueType() ValueType { + return ArrayValue +} + +func (any *arrayAny) MustBeValid() Any { + return any +} + +func (any *arrayAny) LastError() error { + return nil +} + +func (any *arrayAny) ToBool() bool { + return any.val.Len() != 0 +} + +func (any *arrayAny) ToInt() int { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToInt32() int32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToInt64() int64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint() uint { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint32() uint32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint64() uint64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToFloat32() float32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToFloat64() float64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToString() string { + str, _ := MarshalToString(any.val.Interface()) + return str +} + +func (any *arrayAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int: + if firstPath < 0 || firstPath >= any.val.Len() { + return newInvalidAny(path) + } + return Wrap(any.val.Index(firstPath).Interface()) + case int32: + if '*' == firstPath { + mappedAll := make([]Any, 0) + for i := 0; i < any.val.Len(); i++ { + mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...) 
+ if mapped.ValueType() != InvalidValue { + mappedAll = append(mappedAll, mapped) + } + } + return wrapArray(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *arrayAny) Size() int { + return any.val.Len() +} + +func (any *arrayAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *arrayAny) GetInterface() interface{} { + return any.val.Interface() +} diff --git a/vendor/github.com/json-iterator/go/any_bool.go b/vendor/github.com/json-iterator/go/any_bool.go new file mode 100644 index 0000000000..9452324af5 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_bool.go @@ -0,0 +1,137 @@ +package jsoniter + +type trueAny struct { + baseAny +} + +func (any *trueAny) LastError() error { + return nil +} + +func (any *trueAny) ToBool() bool { + return true +} + +func (any *trueAny) ToInt() int { + return 1 +} + +func (any *trueAny) ToInt32() int32 { + return 1 +} + +func (any *trueAny) ToInt64() int64 { + return 1 +} + +func (any *trueAny) ToUint() uint { + return 1 +} + +func (any *trueAny) ToUint32() uint32 { + return 1 +} + +func (any *trueAny) ToUint64() uint64 { + return 1 +} + +func (any *trueAny) ToFloat32() float32 { + return 1 +} + +func (any *trueAny) ToFloat64() float64 { + return 1 +} + +func (any *trueAny) ToString() string { + return "true" +} + +func (any *trueAny) WriteTo(stream *Stream) { + stream.WriteTrue() +} + +func (any *trueAny) Parse() *Iterator { + return nil +} + +func (any *trueAny) GetInterface() interface{} { + return true +} + +func (any *trueAny) ValueType() ValueType { + return BoolValue +} + +func (any *trueAny) MustBeValid() Any { + return any +} + +type falseAny struct { + baseAny +} + +func (any *falseAny) LastError() error { + return nil +} + +func (any *falseAny) ToBool() bool { + return false +} + +func (any *falseAny) ToInt() int { + return 0 +} + +func (any *falseAny) ToInt32() int32 { + return 0 +} + +func (any *falseAny) ToInt64() int64 { + return 0 +} + +func (any *falseAny) ToUint() uint { + return 0 +} + +func (any *falseAny) ToUint32() uint32 { + return 0 +} + +func (any *falseAny) ToUint64() uint64 { + return 0 +} + +func (any *falseAny) ToFloat32() float32 { + return 0 +} + +func (any *falseAny) ToFloat64() float64 { + return 0 +} + +func (any *falseAny) ToString() string { + return "false" +} + +func (any *falseAny) WriteTo(stream *Stream) { + stream.WriteFalse() +} + +func (any *falseAny) Parse() *Iterator { + return nil +} + +func (any *falseAny) GetInterface() interface{} { + return false +} + +func (any *falseAny) ValueType() ValueType { + return BoolValue +} + +func (any *falseAny) MustBeValid() Any { + return any +} diff --git a/vendor/github.com/json-iterator/go/any_float.go b/vendor/github.com/json-iterator/go/any_float.go new file mode 100644 index 0000000000..35fdb09497 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_float.go @@ -0,0 +1,83 @@ +package jsoniter + +import ( + "strconv" +) + +type floatAny struct { + baseAny + val float64 +} + +func (any *floatAny) Parse() *Iterator { + return nil +} + +func (any *floatAny) ValueType() ValueType { + return NumberValue +} + +func (any *floatAny) MustBeValid() Any { + return any +} + +func (any *floatAny) LastError() error { + return nil +} + +func (any *floatAny) ToBool() bool { + return any.ToFloat64() != 0 +} + +func (any *floatAny) ToInt() int { + return int(any.val) +} + +func (any *floatAny) ToInt32() int32 { + return int32(any.val) +} + +func (any *floatAny) ToInt64() int64 { + return 
int64(any.val) +} + +func (any *floatAny) ToUint() uint { + if any.val > 0 { + return uint(any.val) + } + return 0 +} + +func (any *floatAny) ToUint32() uint32 { + if any.val > 0 { + return uint32(any.val) + } + return 0 +} + +func (any *floatAny) ToUint64() uint64 { + if any.val > 0 { + return uint64(any.val) + } + return 0 +} + +func (any *floatAny) ToFloat32() float32 { + return float32(any.val) +} + +func (any *floatAny) ToFloat64() float64 { + return any.val +} + +func (any *floatAny) ToString() string { + return strconv.FormatFloat(any.val, 'E', -1, 64) +} + +func (any *floatAny) WriteTo(stream *Stream) { + stream.WriteFloat64(any.val) +} + +func (any *floatAny) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_int32.go b/vendor/github.com/json-iterator/go/any_int32.go new file mode 100644 index 0000000000..1b56f39915 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_int32.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type int32Any struct { + baseAny + val int32 +} + +func (any *int32Any) LastError() error { + return nil +} + +func (any *int32Any) ValueType() ValueType { + return NumberValue +} + +func (any *int32Any) MustBeValid() Any { + return any +} + +func (any *int32Any) ToBool() bool { + return any.val != 0 +} + +func (any *int32Any) ToInt() int { + return int(any.val) +} + +func (any *int32Any) ToInt32() int32 { + return any.val +} + +func (any *int32Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *int32Any) ToUint() uint { + return uint(any.val) +} + +func (any *int32Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *int32Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *int32Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *int32Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *int32Any) ToString() string { + return strconv.FormatInt(int64(any.val), 10) +} + +func (any *int32Any) WriteTo(stream *Stream) { + stream.WriteInt32(any.val) +} + +func (any *int32Any) Parse() *Iterator { + return nil +} + +func (any *int32Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_int64.go b/vendor/github.com/json-iterator/go/any_int64.go new file mode 100644 index 0000000000..c440d72b6d --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_int64.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type int64Any struct { + baseAny + val int64 +} + +func (any *int64Any) LastError() error { + return nil +} + +func (any *int64Any) ValueType() ValueType { + return NumberValue +} + +func (any *int64Any) MustBeValid() Any { + return any +} + +func (any *int64Any) ToBool() bool { + return any.val != 0 +} + +func (any *int64Any) ToInt() int { + return int(any.val) +} + +func (any *int64Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *int64Any) ToInt64() int64 { + return any.val +} + +func (any *int64Any) ToUint() uint { + return uint(any.val) +} + +func (any *int64Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *int64Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *int64Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *int64Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *int64Any) ToString() string { + return strconv.FormatInt(any.val, 10) +} + +func (any *int64Any) WriteTo(stream *Stream) { + stream.WriteInt64(any.val) +} + +func (any *int64Any) Parse() *Iterator { 
+ return nil +} + +func (any *int64Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_invalid.go b/vendor/github.com/json-iterator/go/any_invalid.go new file mode 100644 index 0000000000..1d859eac32 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_invalid.go @@ -0,0 +1,82 @@ +package jsoniter + +import "fmt" + +type invalidAny struct { + baseAny + err error +} + +func newInvalidAny(path []interface{}) *invalidAny { + return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)} +} + +func (any *invalidAny) LastError() error { + return any.err +} + +func (any *invalidAny) ValueType() ValueType { + return InvalidValue +} + +func (any *invalidAny) MustBeValid() Any { + panic(any.err) +} + +func (any *invalidAny) ToBool() bool { + return false +} + +func (any *invalidAny) ToInt() int { + return 0 +} + +func (any *invalidAny) ToInt32() int32 { + return 0 +} + +func (any *invalidAny) ToInt64() int64 { + return 0 +} + +func (any *invalidAny) ToUint() uint { + return 0 +} + +func (any *invalidAny) ToUint32() uint32 { + return 0 +} + +func (any *invalidAny) ToUint64() uint64 { + return 0 +} + +func (any *invalidAny) ToFloat32() float32 { + return 0 +} + +func (any *invalidAny) ToFloat64() float64 { + return 0 +} + +func (any *invalidAny) ToString() string { + return "" +} + +func (any *invalidAny) WriteTo(stream *Stream) { +} + +func (any *invalidAny) Get(path ...interface{}) Any { + if any.err == nil { + return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)} + } + return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)} +} + +func (any *invalidAny) Parse() *Iterator { + return nil +} + +func (any *invalidAny) GetInterface() interface{} { + return nil +} diff --git a/vendor/github.com/json-iterator/go/any_nil.go b/vendor/github.com/json-iterator/go/any_nil.go new file mode 100644 index 0000000000..d04cb54c11 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_nil.go @@ -0,0 +1,69 @@ +package jsoniter + +type nilAny struct { + baseAny +} + +func (any *nilAny) LastError() error { + return nil +} + +func (any *nilAny) ValueType() ValueType { + return NilValue +} + +func (any *nilAny) MustBeValid() Any { + return any +} + +func (any *nilAny) ToBool() bool { + return false +} + +func (any *nilAny) ToInt() int { + return 0 +} + +func (any *nilAny) ToInt32() int32 { + return 0 +} + +func (any *nilAny) ToInt64() int64 { + return 0 +} + +func (any *nilAny) ToUint() uint { + return 0 +} + +func (any *nilAny) ToUint32() uint32 { + return 0 +} + +func (any *nilAny) ToUint64() uint64 { + return 0 +} + +func (any *nilAny) ToFloat32() float32 { + return 0 +} + +func (any *nilAny) ToFloat64() float64 { + return 0 +} + +func (any *nilAny) ToString() string { + return "" +} + +func (any *nilAny) WriteTo(stream *Stream) { + stream.WriteNil() +} + +func (any *nilAny) Parse() *Iterator { + return nil +} + +func (any *nilAny) GetInterface() interface{} { + return nil +} diff --git a/vendor/github.com/json-iterator/go/any_number.go b/vendor/github.com/json-iterator/go/any_number.go new file mode 100644 index 0000000000..9d1e901a66 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_number.go @@ -0,0 +1,123 @@ +package jsoniter + +import ( + "io" + "unsafe" +) + +type numberLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *numberLazyAny) ValueType() ValueType { + return NumberValue +} + +func (any *numberLazyAny) MustBeValid() Any { + return any +} + +func (any 
*numberLazyAny) LastError() error { + return any.err +} + +func (any *numberLazyAny) ToBool() bool { + return any.ToFloat64() != 0 +} + +func (any *numberLazyAny) ToInt() int { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToInt32() int32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt32() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToInt64() int64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt64() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToUint() uint { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToUint32() uint32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint32() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToUint64() uint64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint64() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToFloat32() float32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadFloat32() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToFloat64() float64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadFloat64() + if iter.Error != nil && iter.Error != io.EOF { + any.err = iter.Error + } + return val +} + +func (any *numberLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *numberLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *numberLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} diff --git a/vendor/github.com/json-iterator/go/any_object.go b/vendor/github.com/json-iterator/go/any_object.go new file mode 100644 index 0000000000..c44ef5c989 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_object.go @@ -0,0 +1,374 @@ +package jsoniter + +import ( + "reflect" + "unsafe" +) + +type objectLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *objectLazyAny) ValueType() ValueType { + return ObjectValue +} + +func (any *objectLazyAny) MustBeValid() Any { + return any +} + +func (any *objectLazyAny) LastError() error { + return any.err +} + +func (any *objectLazyAny) ToBool() bool { + return true +} + +func (any *objectLazyAny) ToInt() int { + return 0 +} + +func (any *objectLazyAny) ToInt32() int32 { + return 0 +} + +func (any *objectLazyAny) ToInt64() int64 { + return 0 +} + +func (any *objectLazyAny) ToUint() uint { + return 0 +} + +func (any *objectLazyAny) ToUint32() uint32 { + return 0 +} + +func (any *objectLazyAny) ToUint64() uint64 { + return 0 +} + +func (any *objectLazyAny) ToFloat32() float32 { + return 0 +} + +func (any 
*objectLazyAny) ToFloat64() float64 { + return 0 +} + +func (any *objectLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *objectLazyAny) ToVal(obj interface{}) { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadVal(obj) +} + +func (any *objectLazyAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case string: + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + valueBytes := locateObjectField(iter, firstPath) + if valueBytes == nil { + return newInvalidAny(path) + } + iter.ResetBytes(valueBytes) + return locatePath(iter, path[1:]) + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadMapCB(func(iter *Iterator, field string) bool { + mapped := locatePath(iter, path[1:]) + if mapped.ValueType() != InvalidValue { + mappedAll[field] = mapped + } + return true + }) + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *objectLazyAny) Keys() []string { + keys := []string{} + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadMapCB(func(iter *Iterator, field string) bool { + iter.Skip() + keys = append(keys, field) + return true + }) + return keys +} + +func (any *objectLazyAny) Size() int { + size := 0 + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + iter.Skip() + size++ + return true + }) + return size +} + +func (any *objectLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *objectLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} + +type objectAny struct { + baseAny + err error + val reflect.Value +} + +func wrapStruct(val interface{}) *objectAny { + return &objectAny{baseAny{}, nil, reflect.ValueOf(val)} +} + +func (any *objectAny) ValueType() ValueType { + return ObjectValue +} + +func (any *objectAny) MustBeValid() Any { + return any +} + +func (any *objectAny) Parse() *Iterator { + return nil +} + +func (any *objectAny) LastError() error { + return any.err +} + +func (any *objectAny) ToBool() bool { + return any.val.NumField() != 0 +} + +func (any *objectAny) ToInt() int { + return 0 +} + +func (any *objectAny) ToInt32() int32 { + return 0 +} + +func (any *objectAny) ToInt64() int64 { + return 0 +} + +func (any *objectAny) ToUint() uint { + return 0 +} + +func (any *objectAny) ToUint32() uint32 { + return 0 +} + +func (any *objectAny) ToUint64() uint64 { + return 0 +} + +func (any *objectAny) ToFloat32() float32 { + return 0 +} + +func (any *objectAny) ToFloat64() float64 { + return 0 +} + +func (any *objectAny) ToString() string { + str, err := MarshalToString(any.val.Interface()) + any.err = err + return str +} + +func (any *objectAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case string: + field := any.val.FieldByName(firstPath) + if !field.IsValid() { + return newInvalidAny(path) + } + return Wrap(field.Interface()) + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + for i := 0; i < any.val.NumField(); i++ { + field := any.val.Field(i) + if field.CanInterface() { + mapped := 
Wrap(field.Interface()).Get(path[1:]...) + if mapped.ValueType() != InvalidValue { + mappedAll[any.val.Type().Field(i).Name] = mapped + } + } + } + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *objectAny) Keys() []string { + keys := make([]string, 0, any.val.NumField()) + for i := 0; i < any.val.NumField(); i++ { + keys = append(keys, any.val.Type().Field(i).Name) + } + return keys +} + +func (any *objectAny) Size() int { + return any.val.NumField() +} + +func (any *objectAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *objectAny) GetInterface() interface{} { + return any.val.Interface() +} + +type mapAny struct { + baseAny + err error + val reflect.Value +} + +func wrapMap(val interface{}) *mapAny { + return &mapAny{baseAny{}, nil, reflect.ValueOf(val)} +} + +func (any *mapAny) ValueType() ValueType { + return ObjectValue +} + +func (any *mapAny) MustBeValid() Any { + return any +} + +func (any *mapAny) Parse() *Iterator { + return nil +} + +func (any *mapAny) LastError() error { + return any.err +} + +func (any *mapAny) ToBool() bool { + return true +} + +func (any *mapAny) ToInt() int { + return 0 +} + +func (any *mapAny) ToInt32() int32 { + return 0 +} + +func (any *mapAny) ToInt64() int64 { + return 0 +} + +func (any *mapAny) ToUint() uint { + return 0 +} + +func (any *mapAny) ToUint32() uint32 { + return 0 +} + +func (any *mapAny) ToUint64() uint64 { + return 0 +} + +func (any *mapAny) ToFloat32() float32 { + return 0 +} + +func (any *mapAny) ToFloat64() float64 { + return 0 +} + +func (any *mapAny) ToString() string { + str, err := MarshalToString(any.val.Interface()) + any.err = err + return str +} + +func (any *mapAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + for _, key := range any.val.MapKeys() { + keyAsStr := key.String() + element := Wrap(any.val.MapIndex(key).Interface()) + mapped := element.Get(path[1:]...) 
+ if mapped.ValueType() != InvalidValue { + mappedAll[keyAsStr] = mapped + } + } + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + value := any.val.MapIndex(reflect.ValueOf(firstPath)) + if !value.IsValid() { + return newInvalidAny(path) + } + return Wrap(value.Interface()) + } +} + +func (any *mapAny) Keys() []string { + keys := make([]string, 0, any.val.Len()) + for _, key := range any.val.MapKeys() { + keys = append(keys, key.String()) + } + return keys +} + +func (any *mapAny) Size() int { + return any.val.Len() +} + +func (any *mapAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *mapAny) GetInterface() interface{} { + return any.val.Interface() +} diff --git a/vendor/github.com/json-iterator/go/any_str.go b/vendor/github.com/json-iterator/go/any_str.go new file mode 100644 index 0000000000..1f12f6612d --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_str.go @@ -0,0 +1,166 @@ +package jsoniter + +import ( + "fmt" + "strconv" +) + +type stringAny struct { + baseAny + val string +} + +func (any *stringAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)} +} + +func (any *stringAny) Parse() *Iterator { + return nil +} + +func (any *stringAny) ValueType() ValueType { + return StringValue +} + +func (any *stringAny) MustBeValid() Any { + return any +} + +func (any *stringAny) LastError() error { + return nil +} + +func (any *stringAny) ToBool() bool { + str := any.ToString() + if str == "0" { + return false + } + for _, c := range str { + switch c { + case ' ', '\n', '\r', '\t': + default: + return true + } + } + return false +} + +func (any *stringAny) ToInt() int { + return int(any.ToInt64()) + +} + +func (any *stringAny) ToInt32() int32 { + return int32(any.ToInt64()) +} + +func (any *stringAny) ToInt64() int64 { + if any.val == "" { + return 0 + } + + flag := 1 + startPos := 0 + if any.val[0] == '+' || any.val[0] == '-' { + startPos = 1 + } + + if any.val[0] == '-' { + flag = -1 + } + + endPos := startPos + for i := startPos; i < len(any.val); i++ { + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + break + } + } + parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64) + return int64(flag) * parsed +} + +func (any *stringAny) ToUint() uint { + return uint(any.ToUint64()) +} + +func (any *stringAny) ToUint32() uint32 { + return uint32(any.ToUint64()) +} + +func (any *stringAny) ToUint64() uint64 { + if any.val == "" { + return 0 + } + + startPos := 0 + + if any.val[0] == '-' { + return 0 + } + if any.val[0] == '+' { + startPos = 1 + } + + endPos := startPos + for i := startPos; i < len(any.val); i++ { + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + break + } + } + parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64) + return parsed +} + +func (any *stringAny) ToFloat32() float32 { + return float32(any.ToFloat64()) +} + +func (any *stringAny) ToFloat64() float64 { + if len(any.val) == 0 { + return 0 + } + + // first char invalid + if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') { + return 0 + } + + // extract valid num expression from string + // eg 123true => 123, -12.12xxa => -12.12 + endPos := 1 + for i := 1; i < len(any.val); i++ { + if any.val[i] == '.' 
|| any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' { + endPos = i + 1 + continue + } + + // end position is the first char which is not digit + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + endPos = i + break + } + } + parsed, _ := strconv.ParseFloat(any.val[:endPos], 64) + return parsed +} + +func (any *stringAny) ToString() string { + return any.val +} + +func (any *stringAny) WriteTo(stream *Stream) { + stream.WriteString(any.val) +} + +func (any *stringAny) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_uint32.go b/vendor/github.com/json-iterator/go/any_uint32.go new file mode 100644 index 0000000000..656bbd33d7 --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_uint32.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type uint32Any struct { + baseAny + val uint32 +} + +func (any *uint32Any) LastError() error { + return nil +} + +func (any *uint32Any) ValueType() ValueType { + return NumberValue +} + +func (any *uint32Any) MustBeValid() Any { + return any +} + +func (any *uint32Any) ToBool() bool { + return any.val != 0 +} + +func (any *uint32Any) ToInt() int { + return int(any.val) +} + +func (any *uint32Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *uint32Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *uint32Any) ToUint() uint { + return uint(any.val) +} + +func (any *uint32Any) ToUint32() uint32 { + return any.val +} + +func (any *uint32Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *uint32Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *uint32Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *uint32Any) ToString() string { + return strconv.FormatInt(int64(any.val), 10) +} + +func (any *uint32Any) WriteTo(stream *Stream) { + stream.WriteUint32(any.val) +} + +func (any *uint32Any) Parse() *Iterator { + return nil +} + +func (any *uint32Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/any_uint64.go b/vendor/github.com/json-iterator/go/any_uint64.go new file mode 100644 index 0000000000..7df2fce33b --- /dev/null +++ b/vendor/github.com/json-iterator/go/any_uint64.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type uint64Any struct { + baseAny + val uint64 +} + +func (any *uint64Any) LastError() error { + return nil +} + +func (any *uint64Any) ValueType() ValueType { + return NumberValue +} + +func (any *uint64Any) MustBeValid() Any { + return any +} + +func (any *uint64Any) ToBool() bool { + return any.val != 0 +} + +func (any *uint64Any) ToInt() int { + return int(any.val) +} + +func (any *uint64Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *uint64Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *uint64Any) ToUint() uint { + return uint(any.val) +} + +func (any *uint64Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *uint64Any) ToUint64() uint64 { + return any.val +} + +func (any *uint64Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *uint64Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *uint64Any) ToString() string { + return strconv.FormatUint(any.val, 10) +} + +func (any *uint64Any) WriteTo(stream *Stream) { + stream.WriteUint64(any.val) +} + +func (any *uint64Any) Parse() *Iterator { + return nil +} + +func (any *uint64Any) GetInterface() interface{} { + return any.val +} diff --git 
a/vendor/github.com/json-iterator/go/build.sh b/vendor/github.com/json-iterator/go/build.sh new file mode 100644 index 0000000000..b45ef68831 --- /dev/null +++ b/vendor/github.com/json-iterator/go/build.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -e +set -x + +if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then + mkdir -p /tmp/build-golang/src/github.com/json-iterator + ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go +fi +export GOPATH=/tmp/build-golang +go get -u github.com/golang/dep/cmd/dep +cd /tmp/build-golang/src/github.com/json-iterator/go +exec $GOPATH/bin/dep ensure -update diff --git a/vendor/github.com/json-iterator/go/config.go b/vendor/github.com/json-iterator/go/config.go new file mode 100644 index 0000000000..2adcdc3b79 --- /dev/null +++ b/vendor/github.com/json-iterator/go/config.go @@ -0,0 +1,375 @@ +package jsoniter + +import ( + "encoding/json" + "io" + "reflect" + "sync" + "unsafe" + + "github.com/modern-go/concurrent" + "github.com/modern-go/reflect2" +) + +// Config customize how the API should behave. +// The API is created from Config by Froze. +type Config struct { + IndentionStep int + MarshalFloatWith6Digits bool + EscapeHTML bool + SortMapKeys bool + UseNumber bool + DisallowUnknownFields bool + TagKey string + OnlyTaggedField bool + ValidateJsonRawMessage bool + ObjectFieldMustBeSimpleString bool + CaseSensitive bool +} + +// API the public interface of this package. +// Primary Marshal and Unmarshal. +type API interface { + IteratorPool + StreamPool + MarshalToString(v interface{}) (string, error) + Marshal(v interface{}) ([]byte, error) + MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) + UnmarshalFromString(str string, v interface{}) error + Unmarshal(data []byte, v interface{}) error + Get(data []byte, path ...interface{}) Any + NewEncoder(writer io.Writer) *Encoder + NewDecoder(reader io.Reader) *Decoder + Valid(data []byte) bool + RegisterExtension(extension Extension) + DecoderOf(typ reflect2.Type) ValDecoder + EncoderOf(typ reflect2.Type) ValEncoder +} + +// ConfigDefault the default API +var ConfigDefault = Config{ + EscapeHTML: true, +}.Froze() + +// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior +var ConfigCompatibleWithStandardLibrary = Config{ + EscapeHTML: true, + SortMapKeys: true, + ValidateJsonRawMessage: true, +}.Froze() + +// ConfigFastest marshals float with only 6 digits precision +var ConfigFastest = Config{ + EscapeHTML: false, + MarshalFloatWith6Digits: true, // will lose precession + ObjectFieldMustBeSimpleString: true, // do not unescape object field +}.Froze() + +type frozenConfig struct { + configBeforeFrozen Config + sortMapKeys bool + indentionStep int + objectFieldMustBeSimpleString bool + onlyTaggedField bool + disallowUnknownFields bool + decoderCache *concurrent.Map + encoderCache *concurrent.Map + encoderExtension Extension + decoderExtension Extension + extraExtensions []Extension + streamPool *sync.Pool + iteratorPool *sync.Pool + caseSensitive bool +} + +func (cfg *frozenConfig) initCache() { + cfg.decoderCache = concurrent.NewMap() + cfg.encoderCache = concurrent.NewMap() +} + +func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) { + cfg.decoderCache.Store(cacheKey, decoder) +} + +func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) { + cfg.encoderCache.Store(cacheKey, encoder) +} + +func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder { + decoder, found := 
cfg.decoderCache.Load(cacheKey) + if found { + return decoder.(ValDecoder) + } + return nil +} + +func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder { + encoder, found := cfg.encoderCache.Load(cacheKey) + if found { + return encoder.(ValEncoder) + } + return nil +} + +var cfgCache = concurrent.NewMap() + +func getFrozenConfigFromCache(cfg Config) *frozenConfig { + obj, found := cfgCache.Load(cfg) + if found { + return obj.(*frozenConfig) + } + return nil +} + +func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) { + cfgCache.Store(cfg, frozenConfig) +} + +// Froze forge API from config +func (cfg Config) Froze() API { + api := &frozenConfig{ + sortMapKeys: cfg.SortMapKeys, + indentionStep: cfg.IndentionStep, + objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString, + onlyTaggedField: cfg.OnlyTaggedField, + disallowUnknownFields: cfg.DisallowUnknownFields, + caseSensitive: cfg.CaseSensitive, + } + api.streamPool = &sync.Pool{ + New: func() interface{} { + return NewStream(api, nil, 512) + }, + } + api.iteratorPool = &sync.Pool{ + New: func() interface{} { + return NewIterator(api) + }, + } + api.initCache() + encoderExtension := EncoderExtension{} + decoderExtension := DecoderExtension{} + if cfg.MarshalFloatWith6Digits { + api.marshalFloatWith6Digits(encoderExtension) + } + if cfg.EscapeHTML { + api.escapeHTML(encoderExtension) + } + if cfg.UseNumber { + api.useNumber(decoderExtension) + } + if cfg.ValidateJsonRawMessage { + api.validateJsonRawMessage(encoderExtension) + } + api.encoderExtension = encoderExtension + api.decoderExtension = decoderExtension + api.configBeforeFrozen = cfg + return api +} + +func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig { + api := getFrozenConfigFromCache(cfg) + if api != nil { + return api + } + api = cfg.Froze().(*frozenConfig) + for _, extension := range extraExtensions { + api.RegisterExtension(extension) + } + addFrozenConfigToCache(cfg, api) + return api +} + +func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) { + encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) { + rawMessage := *(*json.RawMessage)(ptr) + iter := cfg.BorrowIterator([]byte(rawMessage)) + defer cfg.ReturnIterator(iter) + iter.Read() + if iter.Error != nil && iter.Error != io.EOF { + stream.WriteRaw("null") + } else { + stream.WriteRaw(string(rawMessage)) + } + }, func(ptr unsafe.Pointer) bool { + return len(*((*json.RawMessage)(ptr))) == 0 + }} + extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder + extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder +} + +func (cfg *frozenConfig) useNumber(extension DecoderExtension) { + extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) { + exitingValue := *((*interface{})(ptr)) + if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr { + iter.ReadVal(exitingValue) + return + } + if iter.WhatIsNext() == NumberValue { + *((*interface{})(ptr)) = json.Number(iter.readNumberAsString()) + } else { + *((*interface{})(ptr)) = iter.Read() + } + }} +} +func (cfg *frozenConfig) getTagKey() string { + tagKey := cfg.configBeforeFrozen.TagKey + if tagKey == "" { + return "json" + } + return tagKey +} + +func (cfg *frozenConfig) RegisterExtension(extension Extension) { + cfg.extraExtensions = append(cfg.extraExtensions, extension) + copied := cfg.configBeforeFrozen + cfg.configBeforeFrozen = copied +} + +type 
lossyFloat32Encoder struct { +} + +func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat32Lossy(*((*float32)(ptr))) +} + +func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float32)(ptr)) == 0 +} + +type lossyFloat64Encoder struct { +} + +func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat64Lossy(*((*float64)(ptr))) +} + +func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float64)(ptr)) == 0 +} + +// EnableLossyFloatMarshalling keeps 10**(-6) precision +// for float variables for better performance. +func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) { + // for better performance + extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{} + extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{} +} + +type htmlEscapedStringEncoder struct { +} + +func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + str := *((*string)(ptr)) + stream.WriteStringWithHTMLEscaped(str) +} + +func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*string)(ptr)) == "" +} + +func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) { + encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{} +} + +func (cfg *frozenConfig) cleanDecoders() { + typeDecoders = map[string]ValDecoder{} + fieldDecoders = map[string]ValDecoder{} + *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig)) +} + +func (cfg *frozenConfig) cleanEncoders() { + typeEncoders = map[string]ValEncoder{} + fieldEncoders = map[string]ValEncoder{} + *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig)) +} + +func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) { + stream := cfg.BorrowStream(nil) + defer cfg.ReturnStream(stream) + stream.WriteVal(v) + if stream.Error != nil { + return "", stream.Error + } + return string(stream.Buffer()), nil +} + +func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) { + stream := cfg.BorrowStream(nil) + defer cfg.ReturnStream(stream) + stream.WriteVal(v) + if stream.Error != nil { + return nil, stream.Error + } + result := stream.Buffer() + copied := make([]byte, len(result)) + copy(copied, result) + return copied, nil +} + +func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { + if prefix != "" { + panic("prefix is not supported") + } + for _, r := range indent { + if r != ' ' { + panic("indent can only be space") + } + } + newCfg := cfg.configBeforeFrozen + newCfg.IndentionStep = len(indent) + return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v) +} + +func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error { + data := []byte(str) + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.ReadVal(v) + c := iter.nextToken() + if c == 0 { + if iter.Error == io.EOF { + return nil + } + return iter.Error + } + iter.ReportError("Unmarshal", "there are bytes left after unmarshal") + return iter.Error +} + +func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + return locatePath(iter, path) +} + +func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.ReadVal(v) + c := iter.nextToken() + if c == 
0 { + if iter.Error == io.EOF { + return nil + } + return iter.Error + } + iter.ReportError("Unmarshal", "there are bytes left after unmarshal") + return iter.Error +} + +func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder { + stream := NewStream(cfg, writer, 512) + return &Encoder{stream} +} + +func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder { + iter := Parse(cfg, reader, 512) + return &Decoder{iter} +} + +func (cfg *frozenConfig) Valid(data []byte) bool { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.Skip() + return iter.Error == nil +} diff --git a/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md new file mode 100644 index 0000000000..3095662b06 --- /dev/null +++ b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md @@ -0,0 +1,7 @@ +| json type \ dest type | bool | int | uint | float |string| +| --- | --- | --- | --- |--|--| +| number | positive => true<br/>negative => true<br/>zero => false| 23.2 => 23<br/>-32.1 => -32| 12.1 => 12<br/>-12.1 => 0|as normal|same as origin| +| string | empty string => false<br/>string "0" => false<br/>other strings => true | "123.32" => 123<br/>"-123.4" => -123<br/>"123.23xxxw" => 123<br/>"abcde12" => 0<br/>"-32.1" => -32| 13.2 => 13<br/>-1.1 => 0 |12.1 => 12.1<br/>-12.3 => -12.3<br/>12.4xxa => 12.4<br/>+1.1e2 =>110 |same as origin| +| bool | true => true<br/>false => false| true => 1<br/>false => 0 | true => 1<br/>false => 0 |true => 1<br/>false => 0|true => "true"<br/>false => "false"| +| object | true | 0 | 0 |0|originnal json| +| array | empty array => false<br/>nonempty array => true| [] => 0<br/>[1,2] => 1 | [] => 0<br/>[1,2] => 1 |[] => 0<br/>[1,2] => 1|original json| \ No newline at end of file diff --git a/vendor/github.com/json-iterator/go/go.mod b/vendor/github.com/json-iterator/go/go.mod new file mode 100644 index 0000000000..e05c42ff58 --- /dev/null +++ b/vendor/github.com/json-iterator/go/go.mod @@ -0,0 +1,11 @@ +module github.com/json-iterator/go + +go 1.12 + +require ( + github.com/davecgh/go-spew v1.1.1 + github.com/google/gofuzz v1.0.0 + github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 + github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 + github.com/stretchr/testify v1.3.0 +) diff --git a/vendor/github.com/json-iterator/go/go.sum b/vendor/github.com/json-iterator/go/go.sum new file mode 100644 index 0000000000..be00a6df96 --- /dev/null +++ b/vendor/github.com/json-iterator/go/go.sum @@ -0,0 +1,15 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= diff --git a/vendor/github.com/json-iterator/go/iter.go b/vendor/github.com/json-iterator/go/iter.go new file mode 100644 index 0000000000..29b31cf789 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter.go @@ -0,0 +1,349 @@ +package jsoniter + +import ( + "encoding/json" + "fmt" + "io" +) + +// ValueType the type for JSON element +type ValueType int + +const ( + // InvalidValue invalid JSON element + InvalidValue ValueType = iota + // StringValue JSON element "string" + StringValue + // NumberValue JSON element 100 or 0.10 + NumberValue + // NilValue JSON element null + NilValue + // BoolValue JSON element true or false + BoolValue + // ArrayValue JSON element [] + ArrayValue + // ObjectValue JSON element {} + ObjectValue +) + +var hexDigits []byte +var valueTypes []ValueType + +func init() { + hexDigits = make([]byte, 256) + for i := 0; i < len(hexDigits); i++ { + hexDigits[i] = 255 + } + for i := '0'; i <= '9'; i++ { + hexDigits[i] = byte(i - '0') + } + for i := 'a'; i <= 'f'; i++ { + hexDigits[i] = byte((i - 'a') + 10) + } + for i := 'A'; i <= 'F'; i++ { + hexDigits[i] = byte((i - 'A') + 10) + } + valueTypes = make([]ValueType, 256) + for i := 0; i < len(valueTypes); i++ { + valueTypes[i] = InvalidValue + } + valueTypes['"'] = StringValue + valueTypes['-'] = NumberValue +
valueTypes['0'] = NumberValue + valueTypes['1'] = NumberValue + valueTypes['2'] = NumberValue + valueTypes['3'] = NumberValue + valueTypes['4'] = NumberValue + valueTypes['5'] = NumberValue + valueTypes['6'] = NumberValue + valueTypes['7'] = NumberValue + valueTypes['8'] = NumberValue + valueTypes['9'] = NumberValue + valueTypes['t'] = BoolValue + valueTypes['f'] = BoolValue + valueTypes['n'] = NilValue + valueTypes['['] = ArrayValue + valueTypes['{'] = ObjectValue +} + +// Iterator is a io.Reader like object, with JSON specific read functions. +// Error is not returned as return value, but stored as Error member on this iterator instance. +type Iterator struct { + cfg *frozenConfig + reader io.Reader + buf []byte + head int + tail int + depth int + captureStartedAt int + captured []byte + Error error + Attachment interface{} // open for customized decoder +} + +// NewIterator creates an empty Iterator instance +func NewIterator(cfg API) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: nil, + buf: nil, + head: 0, + tail: 0, + depth: 0, + } +} + +// Parse creates an Iterator instance from io.Reader +func Parse(cfg API, reader io.Reader, bufSize int) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: reader, + buf: make([]byte, bufSize), + head: 0, + tail: 0, + depth: 0, + } +} + +// ParseBytes creates an Iterator instance from byte array +func ParseBytes(cfg API, input []byte) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: nil, + buf: input, + head: 0, + tail: len(input), + depth: 0, + } +} + +// ParseString creates an Iterator instance from string +func ParseString(cfg API, input string) *Iterator { + return ParseBytes(cfg, []byte(input)) +} + +// Pool returns a pool can provide more iterator with same configuration +func (iter *Iterator) Pool() IteratorPool { + return iter.cfg +} + +// Reset reuse iterator instance by specifying another reader +func (iter *Iterator) Reset(reader io.Reader) *Iterator { + iter.reader = reader + iter.head = 0 + iter.tail = 0 + iter.depth = 0 + return iter +} + +// ResetBytes reuse iterator instance by specifying another byte array as input +func (iter *Iterator) ResetBytes(input []byte) *Iterator { + iter.reader = nil + iter.buf = input + iter.head = 0 + iter.tail = len(input) + iter.depth = 0 + return iter +} + +// WhatIsNext gets ValueType of relatively next json element +func (iter *Iterator) WhatIsNext() ValueType { + valueType := valueTypes[iter.nextToken()] + iter.unreadByte() + return valueType +} + +func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\t', '\r': + continue + } + iter.head = i + return false + } + return true +} + +func (iter *Iterator) isObjectEnd() bool { + c := iter.nextToken() + if c == ',' { + return false + } + if c == '}' { + return true + } + iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c})) + return true +} + +func (iter *Iterator) nextToken() byte { + // a variation of skip whitespaces, returning the next non-whitespace token + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\t', '\r': + continue + } + iter.head = i + 1 + return c + } + if !iter.loadMore() { + return 0 + } + } +} + +// ReportError record a error in iterator instance with current position. 
+func (iter *Iterator) ReportError(operation string, msg string) { + if iter.Error != nil { + if iter.Error != io.EOF { + return + } + } + peekStart := iter.head - 10 + if peekStart < 0 { + peekStart = 0 + } + peekEnd := iter.head + 10 + if peekEnd > iter.tail { + peekEnd = iter.tail + } + parsing := string(iter.buf[peekStart:peekEnd]) + contextStart := iter.head - 50 + if contextStart < 0 { + contextStart = 0 + } + contextEnd := iter.head + 50 + if contextEnd > iter.tail { + contextEnd = iter.tail + } + context := string(iter.buf[contextStart:contextEnd]) + iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...", + operation, msg, iter.head-peekStart, parsing, context) +} + +// CurrentBuffer gets current buffer as string for debugging purpose +func (iter *Iterator) CurrentBuffer() string { + peekStart := iter.head - 10 + if peekStart < 0 { + peekStart = 0 + } + return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head, + string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail])) +} + +func (iter *Iterator) readByte() (ret byte) { + if iter.head == iter.tail { + if iter.loadMore() { + ret = iter.buf[iter.head] + iter.head++ + return ret + } + return 0 + } + ret = iter.buf[iter.head] + iter.head++ + return ret +} + +func (iter *Iterator) loadMore() bool { + if iter.reader == nil { + if iter.Error == nil { + iter.head = iter.tail + iter.Error = io.EOF + } + return false + } + if iter.captured != nil { + iter.captured = append(iter.captured, + iter.buf[iter.captureStartedAt:iter.tail]...) + iter.captureStartedAt = 0 + } + for { + n, err := iter.reader.Read(iter.buf) + if n == 0 { + if err != nil { + if iter.Error == nil { + iter.Error = err + } + return false + } + } else { + iter.head = 0 + iter.tail = n + return true + } + } +} + +func (iter *Iterator) unreadByte() { + if iter.Error != nil { + return + } + iter.head-- + return +} + +// Read read the next JSON element as generic interface{}. 
+func (iter *Iterator) Read() interface{} { + valueType := iter.WhatIsNext() + switch valueType { + case StringValue: + return iter.ReadString() + case NumberValue: + if iter.cfg.configBeforeFrozen.UseNumber { + return json.Number(iter.readNumberAsString()) + } + return iter.ReadFloat64() + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + return nil + case BoolValue: + return iter.ReadBool() + case ArrayValue: + arr := []interface{}{} + iter.ReadArrayCB(func(iter *Iterator) bool { + var elem interface{} + iter.ReadVal(&elem) + arr = append(arr, elem) + return true + }) + return arr + case ObjectValue: + obj := map[string]interface{}{} + iter.ReadMapCB(func(Iter *Iterator, field string) bool { + var elem interface{} + iter.ReadVal(&elem) + obj[field] = elem + return true + }) + return obj + default: + iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType)) + return nil + } +} + +// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9 +const maxDepth = 10000 + +func (iter *Iterator) incrementDepth() (success bool) { + iter.depth++ + if iter.depth <= maxDepth { + return true + } + iter.ReportError("incrementDepth", "exceeded max depth") + return false +} + +func (iter *Iterator) decrementDepth() (success bool) { + iter.depth-- + if iter.depth >= 0 { + return true + } + iter.ReportError("decrementDepth", "unexpected negative nesting") + return false +} diff --git a/vendor/github.com/json-iterator/go/iter_array.go b/vendor/github.com/json-iterator/go/iter_array.go new file mode 100644 index 0000000000..204fe0e092 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_array.go @@ -0,0 +1,64 @@ +package jsoniter + +// ReadArray read array element, tells if the array has more element to read. 
+func (iter *Iterator) ReadArray() (ret bool) { + c := iter.nextToken() + switch c { + case 'n': + iter.skipThreeBytes('u', 'l', 'l') + return false // null + case '[': + c = iter.nextToken() + if c != ']' { + iter.unreadByte() + return true + } + return false + case ']': + return false + case ',': + return true + default: + iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c})) + return + } +} + +// ReadArrayCB read array with callback +func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) { + c := iter.nextToken() + if c == '[' { + if !iter.incrementDepth() { + return false + } + c = iter.nextToken() + if c != ']' { + iter.unreadByte() + if !callback(iter) { + iter.decrementDepth() + return false + } + c = iter.nextToken() + for c == ',' { + if !callback(iter) { + iter.decrementDepth() + return false + } + c = iter.nextToken() + } + if c != ']' { + iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c})) + iter.decrementDepth() + return false + } + return iter.decrementDepth() + } + return iter.decrementDepth() + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c})) + return false +} diff --git a/vendor/github.com/json-iterator/go/iter_float.go b/vendor/github.com/json-iterator/go/iter_float.go new file mode 100644 index 0000000000..8a3d8b6fb4 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_float.go @@ -0,0 +1,342 @@ +package jsoniter + +import ( + "encoding/json" + "io" + "math/big" + "strconv" + "strings" + "unsafe" +) + +var floatDigits []int8 + +const invalidCharForNumber = int8(-1) +const endOfNumber = int8(-2) +const dotInNumber = int8(-3) + +func init() { + floatDigits = make([]int8, 256) + for i := 0; i < len(floatDigits); i++ { + floatDigits[i] = invalidCharForNumber + } + for i := int8('0'); i <= int8('9'); i++ { + floatDigits[i] = i - int8('0') + } + floatDigits[','] = endOfNumber + floatDigits[']'] = endOfNumber + floatDigits['}'] = endOfNumber + floatDigits[' '] = endOfNumber + floatDigits['\t'] = endOfNumber + floatDigits['\n'] = endOfNumber + floatDigits['.'] = dotInNumber +} + +// ReadBigFloat read big.Float +func (iter *Iterator) ReadBigFloat() (ret *big.Float) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return nil + } + prec := 64 + if len(str) > prec { + prec = len(str) + } + val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero) + if err != nil { + iter.Error = err + return nil + } + return val +} + +// ReadBigInt read big.Int +func (iter *Iterator) ReadBigInt() (ret *big.Int) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return nil + } + ret = big.NewInt(0) + var success bool + ret, success = ret.SetString(str, 10) + if !success { + iter.ReportError("ReadBigInt", "invalid big int") + return nil + } + return ret +} + +//ReadFloat32 read float32 +func (iter *Iterator) ReadFloat32() (ret float32) { + c := iter.nextToken() + if c == '-' { + return -iter.readPositiveFloat32() + } + iter.unreadByte() + return iter.readPositiveFloat32() +} + +func (iter *Iterator) readPositiveFloat32() (ret float32) { + i := iter.head + // first char + if i == iter.tail { + return iter.readFloat32SlowPath() + } + c := iter.buf[i] + i++ + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat32SlowPath() + case endOfNumber: + iter.ReportError("readFloat32", "empty 
number") + return + case dotInNumber: + iter.ReportError("readFloat32", "leading dot is invalid") + return + case 0: + if i == iter.tail { + return iter.readFloat32SlowPath() + } + c = iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.ReportError("readFloat32", "leading zero is invalid") + return + } + } + value := uint64(ind) + // chars before dot +non_decimal_loop: + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat32SlowPath() + case endOfNumber: + iter.head = i + return float32(value) + case dotInNumber: + break non_decimal_loop + } + if value > uint64SafeToMultiple10 { + return iter.readFloat32SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind; + } + // chars after dot + if c == '.' { + i++ + decimalPlaces := 0 + if i == iter.tail { + return iter.readFloat32SlowPath() + } + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case endOfNumber: + if decimalPlaces > 0 && decimalPlaces < len(pow10) { + iter.head = i + return float32(float64(value) / float64(pow10[decimalPlaces])) + } + // too many decimal places + return iter.readFloat32SlowPath() + case invalidCharForNumber, dotInNumber: + return iter.readFloat32SlowPath() + } + decimalPlaces++ + if value > uint64SafeToMultiple10 { + return iter.readFloat32SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) + } + } + return iter.readFloat32SlowPath() +} + +func (iter *Iterator) readNumberAsString() (ret string) { + strBuf := [16]byte{} + str := strBuf[0:0] +load_loop: + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + str = append(str, c) + continue + default: + iter.head = i + break load_loop + } + } + if !iter.loadMore() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + return + } + if len(str) == 0 { + iter.ReportError("readNumberAsString", "invalid number") + } + return *(*string)(unsafe.Pointer(&str)) +} + +func (iter *Iterator) readFloat32SlowPath() (ret float32) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return + } + errMsg := validateFloat(str) + if errMsg != "" { + iter.ReportError("readFloat32SlowPath", errMsg) + return + } + val, err := strconv.ParseFloat(str, 32) + if err != nil { + iter.Error = err + return + } + return float32(val) +} + +// ReadFloat64 read float64 +func (iter *Iterator) ReadFloat64() (ret float64) { + c := iter.nextToken() + if c == '-' { + return -iter.readPositiveFloat64() + } + iter.unreadByte() + return iter.readPositiveFloat64() +} + +func (iter *Iterator) readPositiveFloat64() (ret float64) { + i := iter.head + // first char + if i == iter.tail { + return iter.readFloat64SlowPath() + } + c := iter.buf[i] + i++ + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat64SlowPath() + case endOfNumber: + iter.ReportError("readFloat64", "empty number") + return + case dotInNumber: + iter.ReportError("readFloat64", "leading dot is invalid") + return + case 0: + if i == iter.tail { + return iter.readFloat64SlowPath() + } + c = iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.ReportError("readFloat64", "leading zero is invalid") + return + } + } + value := uint64(ind) + // chars before dot +non_decimal_loop: + for ; i < iter.tail; i++ { + c = 
iter.buf[i] + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat64SlowPath() + case endOfNumber: + iter.head = i + return float64(value) + case dotInNumber: + break non_decimal_loop + } + if value > uint64SafeToMultiple10 { + return iter.readFloat64SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind; + } + // chars after dot + if c == '.' { + i++ + decimalPlaces := 0 + if i == iter.tail { + return iter.readFloat64SlowPath() + } + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case endOfNumber: + if decimalPlaces > 0 && decimalPlaces < len(pow10) { + iter.head = i + return float64(value) / float64(pow10[decimalPlaces]) + } + // too many decimal places + return iter.readFloat64SlowPath() + case invalidCharForNumber, dotInNumber: + return iter.readFloat64SlowPath() + } + decimalPlaces++ + if value > uint64SafeToMultiple10 { + return iter.readFloat64SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) + if value > maxFloat64 { + return iter.readFloat64SlowPath() + } + } + } + return iter.readFloat64SlowPath() +} + +func (iter *Iterator) readFloat64SlowPath() (ret float64) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return + } + errMsg := validateFloat(str) + if errMsg != "" { + iter.ReportError("readFloat64SlowPath", errMsg) + return + } + val, err := strconv.ParseFloat(str, 64) + if err != nil { + iter.Error = err + return + } + return val +} + +func validateFloat(str string) string { + // strconv.ParseFloat is not validating `1.` or `1.e1` + if len(str) == 0 { + return "empty number" + } + if str[0] == '-' { + return "-- is not valid" + } + dotPos := strings.IndexByte(str, '.') + if dotPos != -1 { + if dotPos == len(str)-1 { + return "dot can not be last character" + } + switch str[dotPos+1] { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + default: + return "missing digit after dot" + } + } + return "" +} + +// ReadNumber read json.Number +func (iter *Iterator) ReadNumber() (ret json.Number) { + return json.Number(iter.readNumberAsString()) +} diff --git a/vendor/github.com/json-iterator/go/iter_int.go b/vendor/github.com/json-iterator/go/iter_int.go new file mode 100644 index 0000000000..d786a89fe1 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_int.go @@ -0,0 +1,346 @@ +package jsoniter + +import ( + "math" + "strconv" +) + +var intDigits []int8 + +const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1 +const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1 +const maxFloat64 = 1<<53 - 1 + +func init() { + intDigits = make([]int8, 256) + for i := 0; i < len(intDigits); i++ { + intDigits[i] = invalidCharForNumber + } + for i := int8('0'); i <= int8('9'); i++ { + intDigits[i] = i - int8('0') + } +} + +// ReadUint read uint +func (iter *Iterator) ReadUint() uint { + if strconv.IntSize == 32 { + return uint(iter.ReadUint32()) + } + return uint(iter.ReadUint64()) +} + +// ReadInt read int +func (iter *Iterator) ReadInt() int { + if strconv.IntSize == 32 { + return int(iter.ReadInt32()) + } + return int(iter.ReadInt64()) +} + +// ReadInt8 read int8 +func (iter *Iterator) ReadInt8() (ret int8) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt8+1 { + iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int8(val) + } + val := iter.readUint32(c) + if val > math.MaxInt8 { + 
iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int8(val) +} + +// ReadUint8 read uint8 +func (iter *Iterator) ReadUint8() (ret uint8) { + val := iter.readUint32(iter.nextToken()) + if val > math.MaxUint8 { + iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return uint8(val) +} + +// ReadInt16 read int16 +func (iter *Iterator) ReadInt16() (ret int16) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt16+1 { + iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int16(val) + } + val := iter.readUint32(c) + if val > math.MaxInt16 { + iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int16(val) +} + +// ReadUint16 read uint16 +func (iter *Iterator) ReadUint16() (ret uint16) { + val := iter.readUint32(iter.nextToken()) + if val > math.MaxUint16 { + iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return uint16(val) +} + +// ReadInt32 read int32 +func (iter *Iterator) ReadInt32() (ret int32) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt32+1 { + iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int32(val) + } + val := iter.readUint32(c) + if val > math.MaxInt32 { + iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int32(val) +} + +// ReadUint32 read uint32 +func (iter *Iterator) ReadUint32() (ret uint32) { + return iter.readUint32(iter.nextToken()) +} + +func (iter *Iterator) readUint32(c byte) (ret uint32) { + ind := intDigits[c] + if ind == 0 { + iter.assertInteger() + return 0 // single zero + } + if ind == invalidCharForNumber { + iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)})) + return + } + value := uint32(ind) + if iter.tail-iter.head > 10 { + i := iter.head + ind2 := intDigits[iter.buf[i]] + if ind2 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + i++ + ind3 := intDigits[iter.buf[i]] + if ind3 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10 + uint32(ind2) + } + //iter.head = i + 1 + //value = value * 100 + uint32(ind2) * 10 + uint32(ind3) + i++ + ind4 := intDigits[iter.buf[i]] + if ind4 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100 + uint32(ind2)*10 + uint32(ind3) + } + i++ + ind5 := intDigits[iter.buf[i]] + if ind5 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4) + } + i++ + ind6 := intDigits[iter.buf[i]] + if ind6 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5) + } + i++ + ind7 := intDigits[iter.buf[i]] + if ind7 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6) + } + i++ + ind8 := intDigits[iter.buf[i]] + if ind8 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7) + } + i++ + ind9 := intDigits[iter.buf[i]] + value = 
value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8) + iter.head = i + if ind9 == invalidCharForNumber { + iter.assertInteger() + return value + } + } + for { + for i := iter.head; i < iter.tail; i++ { + ind = intDigits[iter.buf[i]] + if ind == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + if value > uint32SafeToMultiply10 { + value2 := (value << 3) + (value << 1) + uint32(ind) + if value2 < value { + iter.ReportError("readUint32", "overflow") + return + } + value = value2 + continue + } + value = (value << 3) + (value << 1) + uint32(ind) + } + if !iter.loadMore() { + iter.assertInteger() + return value + } + } +} + +// ReadInt64 read int64 +func (iter *Iterator) ReadInt64() (ret int64) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint64(iter.readByte()) + if val > math.MaxInt64+1 { + iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10)) + return + } + return -int64(val) + } + val := iter.readUint64(c) + if val > math.MaxInt64 { + iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10)) + return + } + return int64(val) +} + +// ReadUint64 read uint64 +func (iter *Iterator) ReadUint64() uint64 { + return iter.readUint64(iter.nextToken()) +} + +func (iter *Iterator) readUint64(c byte) (ret uint64) { + ind := intDigits[c] + if ind == 0 { + iter.assertInteger() + return 0 // single zero + } + if ind == invalidCharForNumber { + iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)})) + return + } + value := uint64(ind) + if iter.tail-iter.head > 10 { + i := iter.head + ind2 := intDigits[iter.buf[i]] + if ind2 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + i++ + ind3 := intDigits[iter.buf[i]] + if ind3 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10 + uint64(ind2) + } + //iter.head = i + 1 + //value = value * 100 + uint32(ind2) * 10 + uint32(ind3) + i++ + ind4 := intDigits[iter.buf[i]] + if ind4 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100 + uint64(ind2)*10 + uint64(ind3) + } + i++ + ind5 := intDigits[iter.buf[i]] + if ind5 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4) + } + i++ + ind6 := intDigits[iter.buf[i]] + if ind6 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5) + } + i++ + ind7 := intDigits[iter.buf[i]] + if ind7 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6) + } + i++ + ind8 := intDigits[iter.buf[i]] + if ind8 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7) + } + i++ + ind9 := intDigits[iter.buf[i]] + value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8) + iter.head = i + if ind9 == invalidCharForNumber { + iter.assertInteger() + return value + } + } + for { + for i := iter.head; i < iter.tail; i++ { + ind = intDigits[iter.buf[i]] + if ind == invalidCharForNumber { + 
iter.head = i + iter.assertInteger() + return value + } + if value > uint64SafeToMultiple10 { + value2 := (value << 3) + (value << 1) + uint64(ind) + if value2 < value { + iter.ReportError("readUint64", "overflow") + return + } + value = value2 + continue + } + value = (value << 3) + (value << 1) + uint64(ind) + } + if !iter.loadMore() { + iter.assertInteger() + return value + } + } +} + +func (iter *Iterator) assertInteger() { + if iter.head < iter.tail && iter.buf[iter.head] == '.' { + iter.ReportError("assertInteger", "can not decode float as int") + } +} diff --git a/vendor/github.com/json-iterator/go/iter_object.go b/vendor/github.com/json-iterator/go/iter_object.go new file mode 100644 index 0000000000..58ee89c849 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_object.go @@ -0,0 +1,267 @@ +package jsoniter + +import ( + "fmt" + "strings" +) + +// ReadObject read one field from object. +// If object ended, returns empty string. +// Otherwise, returns the field name. +func (iter *Iterator) ReadObject() (ret string) { + c := iter.nextToken() + switch c { + case 'n': + iter.skipThreeBytes('u', 'l', 'l') + return "" // null + case '{': + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + field := iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + return field + } + if c == '}' { + return "" // end of object + } + iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c})) + return + case ',': + field := iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + return field + case '}': + return "" // end of object + default: + iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c}))) + return + } +} + +// CaseInsensitive +func (iter *Iterator) readFieldHash() int64 { + hash := int64(0x811c9dc5) + c := iter.nextToken() + if c != '"' { + iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c})) + return 0 + } + for { + for i := iter.head; i < iter.tail; i++ { + // require ascii string and no escape + b := iter.buf[i] + if b == '\\' { + iter.head = i + for _, b := range iter.readStringSlowPath() { + if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive { + b += 'a' - 'A' + } + hash ^= int64(b) + hash *= 0x1000193 + } + c = iter.nextToken() + if c != ':' { + iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c})) + return 0 + } + return hash + } + if b == '"' { + iter.head = i + 1 + c = iter.nextToken() + if c != ':' { + iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c})) + return 0 + } + return hash + } + if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive { + b += 'a' - 'A' + } + hash ^= int64(b) + hash *= 0x1000193 + } + if !iter.loadMore() { + iter.ReportError("readFieldHash", `incomplete field name`) + return 0 + } + } +} + +func calcHash(str string, caseSensitive bool) int64 { + if !caseSensitive { + str = strings.ToLower(str) + } + hash := int64(0x811c9dc5) + for _, b := range []byte(str) { + hash ^= int64(b) + hash *= 0x1000193 + } + return int64(hash) +} + +// ReadObjectCB read object with callback, the key is ascii only and field name not copied +func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool { + c := iter.nextToken() + var field string + if c == '{' { + if !iter.incrementDepth() { + return false + } + c = 
iter.nextToken() + if c == '"' { + iter.unreadByte() + field = iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + if !callback(iter, field) { + iter.decrementDepth() + return false + } + c = iter.nextToken() + for c == ',' { + field = iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + if !callback(iter, field) { + iter.decrementDepth() + return false + } + c = iter.nextToken() + } + if c != '}' { + iter.ReportError("ReadObjectCB", `object not ended with }`) + iter.decrementDepth() + return false + } + return iter.decrementDepth() + } + if c == '}' { + return iter.decrementDepth() + } + iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c})) + iter.decrementDepth() + return false + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c})) + return false +} + +// ReadMapCB read map with callback, the key can be any string +func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool { + c := iter.nextToken() + if c == '{' { + if !iter.incrementDepth() { + return false + } + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + field := iter.ReadString() + if iter.nextToken() != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + iter.decrementDepth() + return false + } + if !callback(iter, field) { + iter.decrementDepth() + return false + } + c = iter.nextToken() + for c == ',' { + field = iter.ReadString() + if iter.nextToken() != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + iter.decrementDepth() + return false + } + if !callback(iter, field) { + iter.decrementDepth() + return false + } + c = iter.nextToken() + } + if c != '}' { + iter.ReportError("ReadMapCB", `object not ended with }`) + iter.decrementDepth() + return false + } + return iter.decrementDepth() + } + if c == '}' { + return iter.decrementDepth() + } + iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c})) + iter.decrementDepth() + return false + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c})) + return false +} + +func (iter *Iterator) readObjectStart() bool { + c := iter.nextToken() + if c == '{' { + c = iter.nextToken() + if c == '}' { + return false + } + iter.unreadByte() + return true + } else if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return false + } + iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c})) + return false +} + +func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) { + str := iter.ReadStringAsSlice() + if iter.skipWhitespacesWithoutLoadMore() { + if ret == nil { + ret = make([]byte, len(str)) + copy(ret, str) + } + if !iter.loadMore() { + return + } + } + if iter.buf[iter.head] != ':' { + iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]})) + return + } + iter.head++ + if iter.skipWhitespacesWithoutLoadMore() { + if ret == nil { + ret = make([]byte, len(str)) + copy(ret, str) + } + if !iter.loadMore() { + return + } + } + if ret == nil { + return str + } + return ret +} diff --git 
a/vendor/github.com/json-iterator/go/iter_skip.go b/vendor/github.com/json-iterator/go/iter_skip.go new file mode 100644 index 0000000000..e91eefb15b --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_skip.go @@ -0,0 +1,130 @@ +package jsoniter + +import "fmt" + +// ReadNil reads a json object as nil and +// returns whether it's a nil or not +func (iter *Iterator) ReadNil() (ret bool) { + c := iter.nextToken() + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') // null + return true + } + iter.unreadByte() + return false +} + +// ReadBool reads a json object as BoolValue +func (iter *Iterator) ReadBool() (ret bool) { + c := iter.nextToken() + if c == 't' { + iter.skipThreeBytes('r', 'u', 'e') + return true + } + if c == 'f' { + iter.skipFourBytes('a', 'l', 's', 'e') + return false + } + iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c})) + return +} + +// SkipAndReturnBytes skip next JSON element, and return its content as []byte. +// The []byte can be kept, it is a copy of data. +func (iter *Iterator) SkipAndReturnBytes() []byte { + iter.startCapture(iter.head) + iter.Skip() + return iter.stopCapture() +} + +// SkipAndAppendBytes skips next JSON element and appends its content to +// buffer, returning the result. +func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte { + iter.startCaptureTo(buf, iter.head) + iter.Skip() + return iter.stopCapture() +} + +func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) { + if iter.captured != nil { + panic("already in capture mode") + } + iter.captureStartedAt = captureStartedAt + iter.captured = buf +} + +func (iter *Iterator) startCapture(captureStartedAt int) { + iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt) +} + +func (iter *Iterator) stopCapture() []byte { + if iter.captured == nil { + panic("not in capture mode") + } + captured := iter.captured + remaining := iter.buf[iter.captureStartedAt:iter.head] + iter.captureStartedAt = -1 + iter.captured = nil + return append(captured, remaining...) 
+} + +// Skip skips a json object and positions to relatively the next json object +func (iter *Iterator) Skip() { + c := iter.nextToken() + switch c { + case '"': + iter.skipString() + case 'n': + iter.skipThreeBytes('u', 'l', 'l') // null + case 't': + iter.skipThreeBytes('r', 'u', 'e') // true + case 'f': + iter.skipFourBytes('a', 'l', 's', 'e') // false + case '0': + iter.unreadByte() + iter.ReadFloat32() + case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.skipNumber() + case '[': + iter.skipArray() + case '{': + iter.skipObject() + default: + iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c)) + return + } +} + +func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) { + if iter.readByte() != b1 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b2 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b3 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b4 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } +} + +func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) { + if iter.readByte() != b1 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } + if iter.readByte() != b2 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } + if iter.readByte() != b3 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } +} diff --git a/vendor/github.com/json-iterator/go/iter_skip_sloppy.go b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go new file mode 100644 index 0000000000..9303de41e4 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go @@ -0,0 +1,163 @@ +//+build jsoniter_sloppy + +package jsoniter + +// sloppy but faster implementation, do not validate the input json + +func (iter *Iterator) skipNumber() { + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\r', '\t', ',', '}', ']': + iter.head = i + return + } + } + if !iter.loadMore() { + return + } + } +} + +func (iter *Iterator) skipArray() { + level := 1 + if !iter.incrementDepth() { + return + } + for { + for i := iter.head; i < iter.tail; i++ { + switch iter.buf[i] { + case '"': // If inside string, skip it + iter.head = i + 1 + iter.skipString() + i = iter.head - 1 // it will be i++ soon + case '[': // If open symbol, increase level + level++ + if !iter.incrementDepth() { + return + } + case ']': // If close symbol, increase level + level-- + if !iter.decrementDepth() { + return + } + + // If we have returned to the original level, we're done + if level == 0 { + iter.head = i + 1 + return + } + } + } + if !iter.loadMore() { + iter.ReportError("skipObject", "incomplete array") + return + } + } +} + +func (iter *Iterator) skipObject() { + level := 1 + if !iter.incrementDepth() { + return + } + + for { + for i := iter.head; i < iter.tail; i++ { + switch iter.buf[i] { + case '"': // If inside string, skip it + iter.head = i + 1 + iter.skipString() + i = iter.head - 1 // it will be i++ soon + case '{': // If open symbol, increase level + level++ + if !iter.incrementDepth() { + return + } + case '}': // If close symbol, increase level + level-- + if !iter.decrementDepth() { + return 
+ } + + // If we have returned to the original level, we're done + if level == 0 { + iter.head = i + 1 + return + } + } + } + if !iter.loadMore() { + iter.ReportError("skipObject", "incomplete object") + return + } + } +} + +func (iter *Iterator) skipString() { + for { + end, escaped := iter.findStringEnd() + if end == -1 { + if !iter.loadMore() { + iter.ReportError("skipString", "incomplete string") + return + } + if escaped { + iter.head = 1 // skip the first char as last char read is \ + } + } else { + iter.head = end + return + } + } +} + +// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go +// Tries to find the end of string +// Support if string contains escaped quote symbols. +func (iter *Iterator) findStringEnd() (int, bool) { + escaped := false + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + if !escaped { + return i + 1, false + } + j := i - 1 + for { + if j < iter.head || iter.buf[j] != '\\' { + // even number of backslashes + // either end of buffer, or " found + return i + 1, true + } + j-- + if j < iter.head || iter.buf[j] != '\\' { + // odd number of backslashes + // it is \" or \\\" + break + } + j-- + } + } else if c == '\\' { + escaped = true + } + } + j := iter.tail - 1 + for { + if j < iter.head || iter.buf[j] != '\\' { + // even number of backslashes + // either end of buffer, or " found + return -1, false // do not end with \ + } + j-- + if j < iter.head || iter.buf[j] != '\\' { + // odd number of backslashes + // it is \" or \\\" + break + } + j-- + + } + return -1, true // end with \ +} diff --git a/vendor/github.com/json-iterator/go/iter_skip_strict.go b/vendor/github.com/json-iterator/go/iter_skip_strict.go new file mode 100644 index 0000000000..6cf66d0438 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_skip_strict.go @@ -0,0 +1,99 @@ +//+build !jsoniter_sloppy + +package jsoniter + +import ( + "fmt" + "io" +) + +func (iter *Iterator) skipNumber() { + if !iter.trySkipNumber() { + iter.unreadByte() + if iter.Error != nil && iter.Error != io.EOF { + return + } + iter.ReadFloat64() + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = nil + iter.ReadBigFloat() + } + } +} + +func (iter *Iterator) trySkipNumber() bool { + dotFound := false + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + case '.': + if dotFound { + iter.ReportError("validateNumber", `more than one dot found in number`) + return true // already failed + } + if i+1 == iter.tail { + return false + } + c = iter.buf[i+1] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + default: + iter.ReportError("validateNumber", `missing digit after dot`) + return true // already failed + } + dotFound = true + default: + switch c { + case ',', ']', '}', ' ', '\t', '\n', '\r': + if iter.head == i { + return false // if - without following digits + } + iter.head = i + return true // must be valid + } + return false // may be invalid + } + } + return false +} + +func (iter *Iterator) skipString() { + if !iter.trySkipString() { + iter.unreadByte() + iter.ReadString() + } +} + +func (iter *Iterator) trySkipString() bool { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + iter.head = i + 1 + return true // valid + } else if c == '\\' { + return false + } else if c < ' ' { + iter.ReportError("trySkipString", + fmt.Sprintf(`invalid control character found: %d`, c)) + return true // already failed + } + } + return false 
+} + +func (iter *Iterator) skipObject() { + iter.unreadByte() + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + iter.Skip() + return true + }) +} + +func (iter *Iterator) skipArray() { + iter.unreadByte() + iter.ReadArrayCB(func(iter *Iterator) bool { + iter.Skip() + return true + }) +} diff --git a/vendor/github.com/json-iterator/go/iter_str.go b/vendor/github.com/json-iterator/go/iter_str.go new file mode 100644 index 0000000000..adc487ea80 --- /dev/null +++ b/vendor/github.com/json-iterator/go/iter_str.go @@ -0,0 +1,215 @@ +package jsoniter + +import ( + "fmt" + "unicode/utf16" +) + +// ReadString read string from iterator +func (iter *Iterator) ReadString() (ret string) { + c := iter.nextToken() + if c == '"' { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + ret = string(iter.buf[iter.head:i]) + iter.head = i + 1 + return ret + } else if c == '\\' { + break + } else if c < ' ' { + iter.ReportError("ReadString", + fmt.Sprintf(`invalid control character found: %d`, c)) + return + } + } + return iter.readStringSlowPath() + } else if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return "" + } + iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c})) + return +} + +func (iter *Iterator) readStringSlowPath() (ret string) { + var str []byte + var c byte + for iter.Error == nil { + c = iter.readByte() + if c == '"' { + return string(str) + } + if c == '\\' { + c = iter.readByte() + str = iter.readEscapedChar(c, str) + } else { + str = append(str, c) + } + } + iter.ReportError("readStringSlowPath", "unexpected end of input") + return +} + +func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte { + switch c { + case 'u': + r := iter.readU4() + if utf16.IsSurrogate(r) { + c = iter.readByte() + if iter.Error != nil { + return nil + } + if c != '\\' { + iter.unreadByte() + str = appendRune(str, r) + return str + } + c = iter.readByte() + if iter.Error != nil { + return nil + } + if c != 'u' { + str = appendRune(str, r) + return iter.readEscapedChar(c, str) + } + r2 := iter.readU4() + if iter.Error != nil { + return nil + } + combined := utf16.DecodeRune(r, r2) + if combined == '\uFFFD' { + str = appendRune(str, r) + str = appendRune(str, r2) + } else { + str = appendRune(str, combined) + } + } else { + str = appendRune(str, r) + } + case '"': + str = append(str, '"') + case '\\': + str = append(str, '\\') + case '/': + str = append(str, '/') + case 'b': + str = append(str, '\b') + case 'f': + str = append(str, '\f') + case 'n': + str = append(str, '\n') + case 'r': + str = append(str, '\r') + case 't': + str = append(str, '\t') + default: + iter.ReportError("readEscapedChar", + `invalid escape char after \`) + return nil + } + return str +} + +// ReadStringAsSlice read string from iterator without copying into string form. +// The []byte can not be kept, as it will change after next iterator call. 
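+// Illustrative usage sketch (editorial addition, not upstream jsoniter code):
+// a caller that needs to keep the bytes beyond the next iterator call must
+// copy them first, for example:
+//
+//	transient := iter.ReadStringAsSlice() // only valid until the next iterator call
+//	kept := make([]byte, len(transient))
+//	copy(kept, transient)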
+func (iter *Iterator) ReadStringAsSlice() (ret []byte) { + c := iter.nextToken() + if c == '"' { + for i := iter.head; i < iter.tail; i++ { + // require ascii string and no escape + // for: field name, base64, number + if iter.buf[i] == '"' { + // fast path: reuse the underlying buffer + ret = iter.buf[iter.head:i] + iter.head = i + 1 + return ret + } + } + readLen := iter.tail - iter.head + copied := make([]byte, readLen, readLen*2) + copy(copied, iter.buf[iter.head:iter.tail]) + iter.head = iter.tail + for iter.Error == nil { + c := iter.readByte() + if c == '"' { + return copied + } + copied = append(copied, c) + } + return copied + } + iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c})) + return +} + +func (iter *Iterator) readU4() (ret rune) { + for i := 0; i < 4; i++ { + c := iter.readByte() + if iter.Error != nil { + return + } + if c >= '0' && c <= '9' { + ret = ret*16 + rune(c-'0') + } else if c >= 'a' && c <= 'f' { + ret = ret*16 + rune(c-'a'+10) + } else if c >= 'A' && c <= 'F' { + ret = ret*16 + rune(c-'A'+10) + } else { + iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c})) + return + } + } + return ret +} + +const ( + t1 = 0x00 // 0000 0000 + tx = 0x80 // 1000 0000 + t2 = 0xC0 // 1100 0000 + t3 = 0xE0 // 1110 0000 + t4 = 0xF0 // 1111 0000 + t5 = 0xF8 // 1111 1000 + + maskx = 0x3F // 0011 1111 + mask2 = 0x1F // 0001 1111 + mask3 = 0x0F // 0000 1111 + mask4 = 0x07 // 0000 0111 + + rune1Max = 1<<7 - 1 + rune2Max = 1<<11 - 1 + rune3Max = 1<<16 - 1 + + surrogateMin = 0xD800 + surrogateMax = 0xDFFF + + maxRune = '\U0010FFFF' // Maximum valid Unicode code point. + runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character" +) + +func appendRune(p []byte, r rune) []byte { + // Negative values are erroneous. Making it unsigned addresses the problem. + switch i := uint32(r); { + case i <= rune1Max: + p = append(p, byte(r)) + return p + case i <= rune2Max: + p = append(p, t2|byte(r>>6)) + p = append(p, tx|byte(r)&maskx) + return p + case i > maxRune, surrogateMin <= i && i <= surrogateMax: + r = runeError + fallthrough + case i <= rune3Max: + p = append(p, t3|byte(r>>12)) + p = append(p, tx|byte(r>>6)&maskx) + p = append(p, tx|byte(r)&maskx) + return p + default: + p = append(p, t4|byte(r>>18)) + p = append(p, tx|byte(r>>12)&maskx) + p = append(p, tx|byte(r>>6)&maskx) + p = append(p, tx|byte(r)&maskx) + return p + } +} diff --git a/vendor/github.com/json-iterator/go/jsoniter.go b/vendor/github.com/json-iterator/go/jsoniter.go new file mode 100644 index 0000000000..c2934f916e --- /dev/null +++ b/vendor/github.com/json-iterator/go/jsoniter.go @@ -0,0 +1,18 @@ +// Package jsoniter implements encoding and decoding of JSON as defined in +// RFC 4627 and provides interfaces with identical syntax of standard lib encoding/json. +// Converting from encoding/json to jsoniter is no more than replacing the package with jsoniter +// and variable type declarations (if any). +// jsoniter interfaces gives 100% compatibility with code using standard lib. +// +// "JSON and Go" +// (https://golang.org/doc/articles/json_and_go.html) +// gives a description of how Marshal/Unmarshal operate +// between arbitrary or predefined json objects and bytes, +// and it applies to jsoniter.Marshal/Unmarshal as well. +// +// Besides, jsoniter.Iterator provides a different set of interfaces +// iterating given bytes/string/reader +// and yielding parsed elements one by one. 
+// This set of interfaces reads input as required and gives +// better performance. +package jsoniter diff --git a/vendor/github.com/json-iterator/go/pool.go b/vendor/github.com/json-iterator/go/pool.go new file mode 100644 index 0000000000..e2389b56cf --- /dev/null +++ b/vendor/github.com/json-iterator/go/pool.go @@ -0,0 +1,42 @@ +package jsoniter + +import ( + "io" +) + +// IteratorPool a thread safe pool of iterators with same configuration +type IteratorPool interface { + BorrowIterator(data []byte) *Iterator + ReturnIterator(iter *Iterator) +} + +// StreamPool a thread safe pool of streams with same configuration +type StreamPool interface { + BorrowStream(writer io.Writer) *Stream + ReturnStream(stream *Stream) +} + +func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream { + stream := cfg.streamPool.Get().(*Stream) + stream.Reset(writer) + return stream +} + +func (cfg *frozenConfig) ReturnStream(stream *Stream) { + stream.out = nil + stream.Error = nil + stream.Attachment = nil + cfg.streamPool.Put(stream) +} + +func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator { + iter := cfg.iteratorPool.Get().(*Iterator) + iter.ResetBytes(data) + return iter +} + +func (cfg *frozenConfig) ReturnIterator(iter *Iterator) { + iter.Error = nil + iter.Attachment = nil + cfg.iteratorPool.Put(iter) +} diff --git a/vendor/github.com/json-iterator/go/reflect.go b/vendor/github.com/json-iterator/go/reflect.go new file mode 100644 index 0000000000..39acb320ac --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect.go @@ -0,0 +1,337 @@ +package jsoniter + +import ( + "fmt" + "reflect" + "unsafe" + + "github.com/modern-go/reflect2" +) + +// ValDecoder is an internal type registered to cache as needed. +// Don't confuse jsoniter.ValDecoder with json.Decoder. +// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link). +// +// Reflection on type to create decoders, which is then cached +// Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions +// 1. create instance of new value, for example *int will need a int to be allocated +// 2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New +// 3. assignment to map, both key and value will be reflect.Value +// For a simple struct binding, it will be reflect.Value free and allocation free +type ValDecoder interface { + Decode(ptr unsafe.Pointer, iter *Iterator) +} + +// ValEncoder is an internal type registered to cache as needed. +// Don't confuse jsoniter.ValEncoder with json.Encoder. +// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link). 
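+// Illustrative sketch (editorial addition, not upstream jsoniter code): a
+// minimal hand-written ValEncoder for a uint32 value could look like the
+// following; in practice encoders are built and cached by EncoderOf below.
+//
+//	type exampleUint32Encoder struct{}
+//
+//	func (exampleUint32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
+//		return *(*uint32)(ptr) == 0
+//	}
+//
+//	func (exampleUint32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+//		stream.WriteUint32(*(*uint32)(ptr))
+//	}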
+type ValEncoder interface { + IsEmpty(ptr unsafe.Pointer) bool + Encode(ptr unsafe.Pointer, stream *Stream) +} + +type checkIsEmpty interface { + IsEmpty(ptr unsafe.Pointer) bool +} + +type ctx struct { + *frozenConfig + prefix string + encoders map[reflect2.Type]ValEncoder + decoders map[reflect2.Type]ValDecoder +} + +func (b *ctx) caseSensitive() bool { + if b.frozenConfig == nil { + // default is case-insensitive + return false + } + return b.frozenConfig.caseSensitive +} + +func (b *ctx) append(prefix string) *ctx { + return &ctx{ + frozenConfig: b.frozenConfig, + prefix: b.prefix + " " + prefix, + encoders: b.encoders, + decoders: b.decoders, + } +} + +// ReadVal copy the underlying JSON into go interface, same as json.Unmarshal +func (iter *Iterator) ReadVal(obj interface{}) { + depth := iter.depth + cacheKey := reflect2.RTypeOf(obj) + decoder := iter.cfg.getDecoderFromCache(cacheKey) + if decoder == nil { + typ := reflect2.TypeOf(obj) + if typ == nil || typ.Kind() != reflect.Ptr { + iter.ReportError("ReadVal", "can only unmarshal into pointer") + return + } + decoder = iter.cfg.DecoderOf(typ) + } + ptr := reflect2.PtrOf(obj) + if ptr == nil { + iter.ReportError("ReadVal", "can not read into nil pointer") + return + } + decoder.Decode(ptr, iter) + if iter.depth != depth { + iter.ReportError("ReadVal", "unexpected mismatched nesting") + return + } +} + +// WriteVal copy the go interface into underlying JSON, same as json.Marshal +func (stream *Stream) WriteVal(val interface{}) { + if nil == val { + stream.WriteNil() + return + } + cacheKey := reflect2.RTypeOf(val) + encoder := stream.cfg.getEncoderFromCache(cacheKey) + if encoder == nil { + typ := reflect2.TypeOf(val) + encoder = stream.cfg.EncoderOf(typ) + } + encoder.Encode(reflect2.PtrOf(val), stream) +} + +func (cfg *frozenConfig) DecoderOf(typ reflect2.Type) ValDecoder { + cacheKey := typ.RType() + decoder := cfg.getDecoderFromCache(cacheKey) + if decoder != nil { + return decoder + } + ctx := &ctx{ + frozenConfig: cfg, + prefix: "", + decoders: map[reflect2.Type]ValDecoder{}, + encoders: map[reflect2.Type]ValEncoder{}, + } + ptrType := typ.(*reflect2.UnsafePtrType) + decoder = decoderOfType(ctx, ptrType.Elem()) + cfg.addDecoderToCache(cacheKey, decoder) + return decoder +} + +func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder { + decoder := getTypeDecoderFromExtension(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfType(ctx, typ) + for _, extension := range extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder) + for _, extension := range ctx.extraExtensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + return decoder +} + +func createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder { + decoder := ctx.decoders[typ] + if decoder != nil { + return decoder + } + placeholder := &placeholderDecoder{} + ctx.decoders[typ] = placeholder + decoder = _createDecoderOfType(ctx, typ) + placeholder.decoder = decoder + return decoder +} + +func _createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder { + decoder := createDecoderOfJsonRawMessage(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfJsonNumber(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfMarshaler(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfAny(ctx, typ) + if decoder != nil { + return decoder + } + decoder = createDecoderOfNative(ctx, typ) + if 
decoder != nil { + return decoder + } + switch typ.Kind() { + case reflect.Interface: + ifaceType, isIFace := typ.(*reflect2.UnsafeIFaceType) + if isIFace { + return &ifaceDecoder{valType: ifaceType} + } + return &efaceDecoder{} + case reflect.Struct: + return decoderOfStruct(ctx, typ) + case reflect.Array: + return decoderOfArray(ctx, typ) + case reflect.Slice: + return decoderOfSlice(ctx, typ) + case reflect.Map: + return decoderOfMap(ctx, typ) + case reflect.Ptr: + return decoderOfOptional(ctx, typ) + default: + return &lazyErrorDecoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())} + } +} + +func (cfg *frozenConfig) EncoderOf(typ reflect2.Type) ValEncoder { + cacheKey := typ.RType() + encoder := cfg.getEncoderFromCache(cacheKey) + if encoder != nil { + return encoder + } + ctx := &ctx{ + frozenConfig: cfg, + prefix: "", + decoders: map[reflect2.Type]ValDecoder{}, + encoders: map[reflect2.Type]ValEncoder{}, + } + encoder = encoderOfType(ctx, typ) + if typ.LikePtr() { + encoder = &onePtrEncoder{encoder} + } + cfg.addEncoderToCache(cacheKey, encoder) + return encoder +} + +type onePtrEncoder struct { + encoder ValEncoder +} + +func (encoder *onePtrEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr)) +} + +func (encoder *onePtrEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.encoder.Encode(unsafe.Pointer(&ptr), stream) +} + +func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder { + encoder := getTypeEncoderFromExtension(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfType(ctx, typ) + for _, extension := range extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder) + for _, extension := range ctx.extraExtensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + return encoder +} + +func createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder { + encoder := ctx.encoders[typ] + if encoder != nil { + return encoder + } + placeholder := &placeholderEncoder{} + ctx.encoders[typ] = placeholder + encoder = _createEncoderOfType(ctx, typ) + placeholder.encoder = encoder + return encoder +} +func _createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder { + encoder := createEncoderOfJsonRawMessage(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfJsonNumber(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfMarshaler(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfAny(ctx, typ) + if encoder != nil { + return encoder + } + encoder = createEncoderOfNative(ctx, typ) + if encoder != nil { + return encoder + } + kind := typ.Kind() + switch kind { + case reflect.Interface: + return &dynamicEncoder{typ} + case reflect.Struct: + return encoderOfStruct(ctx, typ) + case reflect.Array: + return encoderOfArray(ctx, typ) + case reflect.Slice: + return encoderOfSlice(ctx, typ) + case reflect.Map: + return encoderOfMap(ctx, typ) + case reflect.Ptr: + return encoderOfOptional(ctx, typ) + default: + return &lazyErrorEncoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())} + } +} + +type lazyErrorDecoder struct { + err error +} + +func (decoder *lazyErrorDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.WhatIsNext() != NilValue { + if iter.Error == nil { + iter.Error = decoder.err + } + } else { + iter.Skip() + } +} + +type lazyErrorEncoder struct { + err error +} + +func (encoder 
*lazyErrorEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if ptr == nil { + stream.WriteNil() + } else if stream.Error == nil { + stream.Error = encoder.err + } +} + +func (encoder *lazyErrorEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type placeholderDecoder struct { + decoder ValDecoder +} + +func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.decoder.Decode(ptr, iter) +} + +type placeholderEncoder struct { + encoder ValEncoder +} + +func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.encoder.Encode(ptr, stream) +} + +func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.encoder.IsEmpty(ptr) +} diff --git a/vendor/github.com/json-iterator/go/reflect_array.go b/vendor/github.com/json-iterator/go/reflect_array.go new file mode 100644 index 0000000000..13a0b7b087 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_array.go @@ -0,0 +1,104 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "io" + "unsafe" +) + +func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder { + arrayType := typ.(*reflect2.UnsafeArrayType) + decoder := decoderOfType(ctx.append("[arrayElem]"), arrayType.Elem()) + return &arrayDecoder{arrayType, decoder} +} + +func encoderOfArray(ctx *ctx, typ reflect2.Type) ValEncoder { + arrayType := typ.(*reflect2.UnsafeArrayType) + if arrayType.Len() == 0 { + return emptyArrayEncoder{} + } + encoder := encoderOfType(ctx.append("[arrayElem]"), arrayType.Elem()) + return &arrayEncoder{arrayType, encoder} +} + +type emptyArrayEncoder struct{} + +func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteEmptyArray() +} + +func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return true +} + +type arrayEncoder struct { + arrayType *reflect2.UnsafeArrayType + elemEncoder ValEncoder +} + +func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteArrayStart() + elemPtr := unsafe.Pointer(ptr) + encoder.elemEncoder.Encode(elemPtr, stream) + for i := 1; i < encoder.arrayType.Len(); i++ { + stream.WriteMore() + elemPtr = encoder.arrayType.UnsafeGetIndex(ptr, i) + encoder.elemEncoder.Encode(elemPtr, stream) + } + stream.WriteArrayEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error()) + } +} + +func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type arrayDecoder struct { + arrayType *reflect2.UnsafeArrayType + elemDecoder ValDecoder +} + +func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.doDecode(ptr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error()) + } +} + +func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) { + c := iter.nextToken() + arrayType := decoder.arrayType + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return + } + if c != '[' { + iter.ReportError("decode array", "expect [ or n, but found "+string([]byte{c})) + return + } + c = iter.nextToken() + if c == ']' { + return + } + iter.unreadByte() + elemPtr := arrayType.UnsafeGetIndex(ptr, 0) + decoder.elemDecoder.Decode(elemPtr, iter) + length := 1 + for c = iter.nextToken(); c == ','; c = iter.nextToken() { + if length >= arrayType.Len() { + iter.Skip() + continue + } + idx := length + length += 1 + elemPtr = 
arrayType.UnsafeGetIndex(ptr, idx) + decoder.elemDecoder.Decode(elemPtr, iter) + } + if c != ']' { + iter.ReportError("decode array", "expect ], but found "+string([]byte{c})) + return + } +} diff --git a/vendor/github.com/json-iterator/go/reflect_dynamic.go b/vendor/github.com/json-iterator/go/reflect_dynamic.go new file mode 100644 index 0000000000..8b6bc8b433 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_dynamic.go @@ -0,0 +1,70 @@ +package jsoniter + +import ( + "github.com/modern-go/reflect2" + "reflect" + "unsafe" +) + +type dynamicEncoder struct { + valType reflect2.Type +} + +func (encoder *dynamicEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := encoder.valType.UnsafeIndirect(ptr) + stream.WriteVal(obj) +} + +func (encoder *dynamicEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.valType.UnsafeIndirect(ptr) == nil +} + +type efaceDecoder struct { +} + +func (decoder *efaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + pObj := (*interface{})(ptr) + obj := *pObj + if obj == nil { + *pObj = iter.Read() + return + } + typ := reflect2.TypeOf(obj) + if typ.Kind() != reflect.Ptr { + *pObj = iter.Read() + return + } + ptrType := typ.(*reflect2.UnsafePtrType) + ptrElemType := ptrType.Elem() + if iter.WhatIsNext() == NilValue { + if ptrElemType.Kind() != reflect.Ptr { + iter.skipFourBytes('n', 'u', 'l', 'l') + *pObj = nil + return + } + } + if reflect2.IsNil(obj) { + obj := ptrElemType.New() + iter.ReadVal(obj) + *pObj = obj + return + } + iter.ReadVal(obj) +} + +type ifaceDecoder struct { + valType *reflect2.UnsafeIFaceType +} + +func (decoder *ifaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + decoder.valType.UnsafeSet(ptr, decoder.valType.UnsafeNew()) + return + } + obj := decoder.valType.UnsafeIndirect(ptr) + if reflect2.IsNil(obj) { + iter.ReportError("decode non empty interface", "can not unmarshal into nil") + return + } + iter.ReadVal(obj) +} diff --git a/vendor/github.com/json-iterator/go/reflect_extension.go b/vendor/github.com/json-iterator/go/reflect_extension.go new file mode 100644 index 0000000000..74a97bfe5a --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_extension.go @@ -0,0 +1,483 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "reflect" + "sort" + "strings" + "unicode" + "unsafe" +) + +var typeDecoders = map[string]ValDecoder{} +var fieldDecoders = map[string]ValDecoder{} +var typeEncoders = map[string]ValEncoder{} +var fieldEncoders = map[string]ValEncoder{} +var extensions = []Extension{} + +// StructDescriptor describe how should we encode/decode the struct +type StructDescriptor struct { + Type reflect2.Type + Fields []*Binding +} + +// GetField get one field from the descriptor by its name. +// Can not use map here to keep field orders. +func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding { + for _, binding := range structDescriptor.Fields { + if binding.Field.Name() == fieldName { + return binding + } + } + return nil +} + +// Binding describe how should we encode/decode the struct field +type Binding struct { + levels []int + Field reflect2.StructField + FromNames []string + ToNames []string + Encoder ValEncoder + Decoder ValDecoder +} + +// Extension the one for all SPI. Customize encoding/decoding by specifying alternate encoder/decoder. +// Can also rename fields by UpdateStructDescriptor. 
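+// Illustrative sketch (editorial addition, not upstream jsoniter code): an
+// extension typically embeds DummyExtension and overrides only what it needs,
+// for example renaming every field to upper case before being registered via
+// RegisterExtension:
+//
+//	type upperCaseFields struct {
+//		DummyExtension
+//	}
+//
+//	func (ext *upperCaseFields) UpdateStructDescriptor(desc *StructDescriptor) {
+//		for _, binding := range desc.Fields {
+//			for i, name := range binding.ToNames {
+//				binding.ToNames[i] = strings.ToUpper(name)
+//			}
+//			for i, name := range binding.FromNames {
+//				binding.FromNames[i] = strings.ToUpper(name)
+//			}
+//		}
+//	}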
+type Extension interface { + UpdateStructDescriptor(structDescriptor *StructDescriptor) + CreateMapKeyDecoder(typ reflect2.Type) ValDecoder + CreateMapKeyEncoder(typ reflect2.Type) ValEncoder + CreateDecoder(typ reflect2.Type) ValDecoder + CreateEncoder(typ reflect2.Type) ValEncoder + DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder + DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder +} + +// DummyExtension embed this type get dummy implementation for all methods of Extension +type DummyExtension struct { +} + +// UpdateStructDescriptor No-op +func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) { +} + +// CreateMapKeyDecoder No-op +func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateMapKeyEncoder No-op +func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// CreateDecoder No-op +func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateEncoder No-op +func (extension *DummyExtension) CreateEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// DecorateDecoder No-op +func (extension *DummyExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder { + return decoder +} + +// DecorateEncoder No-op +func (extension *DummyExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder { + return encoder +} + +type EncoderExtension map[reflect2.Type]ValEncoder + +// UpdateStructDescriptor No-op +func (extension EncoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) { +} + +// CreateDecoder No-op +func (extension EncoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateEncoder get encoder from map +func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder { + return extension[typ] +} + +// CreateMapKeyDecoder No-op +func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateMapKeyEncoder No-op +func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// DecorateDecoder No-op +func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder { + return decoder +} + +// DecorateEncoder No-op +func (extension EncoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder { + return encoder +} + +type DecoderExtension map[reflect2.Type]ValDecoder + +// UpdateStructDescriptor No-op +func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) { +} + +// CreateMapKeyDecoder No-op +func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder { + return nil +} + +// CreateMapKeyEncoder No-op +func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// CreateDecoder get decoder from map +func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder { + return extension[typ] +} + +// CreateEncoder No-op +func (extension DecoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder { + return nil +} + +// DecorateDecoder No-op +func (extension DecoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder { + return decoder +} + +// DecorateEncoder No-op +func (extension DecoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder { + return encoder 
+} + +type funcDecoder struct { + fun DecoderFunc +} + +func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.fun(ptr, iter) +} + +type funcEncoder struct { + fun EncoderFunc + isEmptyFunc func(ptr unsafe.Pointer) bool +} + +func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.fun(ptr, stream) +} + +func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool { + if encoder.isEmptyFunc == nil { + return false + } + return encoder.isEmptyFunc(ptr) +} + +// DecoderFunc the function form of TypeDecoder +type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator) + +// EncoderFunc the function form of TypeEncoder +type EncoderFunc func(ptr unsafe.Pointer, stream *Stream) + +// RegisterTypeDecoderFunc register TypeDecoder for a type with function +func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) { + typeDecoders[typ] = &funcDecoder{fun} +} + +// RegisterTypeDecoder register TypeDecoder for a typ +func RegisterTypeDecoder(typ string, decoder ValDecoder) { + typeDecoders[typ] = decoder +} + +// RegisterFieldDecoderFunc register TypeDecoder for a struct field with function +func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) { + RegisterFieldDecoder(typ, field, &funcDecoder{fun}) +} + +// RegisterFieldDecoder register TypeDecoder for a struct field +func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) { + fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder +} + +// RegisterTypeEncoderFunc register TypeEncoder for a type with encode/isEmpty function +func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) { + typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc} +} + +// RegisterTypeEncoder register TypeEncoder for a type +func RegisterTypeEncoder(typ string, encoder ValEncoder) { + typeEncoders[typ] = encoder +} + +// RegisterFieldEncoderFunc register TypeEncoder for a struct field with encode/isEmpty function +func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) { + RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc}) +} + +// RegisterFieldEncoder register TypeEncoder for a struct field +func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) { + fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder +} + +// RegisterExtension register extension +func RegisterExtension(extension Extension) { + extensions = append(extensions, extension) +} + +func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder { + decoder := _getTypeDecoderFromExtension(ctx, typ) + if decoder != nil { + for _, extension := range extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder) + for _, extension := range ctx.extraExtensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + } + return decoder +} +func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder { + for _, extension := range extensions { + decoder := extension.CreateDecoder(typ) + if decoder != nil { + return decoder + } + } + decoder := ctx.decoderExtension.CreateDecoder(typ) + if decoder != nil { + return decoder + } + for _, extension := range ctx.extraExtensions { + decoder := extension.CreateDecoder(typ) + if decoder != nil { + return decoder + } + } + typeName := typ.String() + decoder = typeDecoders[typeName] + if decoder != nil { + return decoder + } + if typ.Kind() == reflect.Ptr { + ptrType := 
typ.(*reflect2.UnsafePtrType) + decoder := typeDecoders[ptrType.Elem().String()] + if decoder != nil { + return &OptionalDecoder{ptrType.Elem(), decoder} + } + } + return nil +} + +func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder { + encoder := _getTypeEncoderFromExtension(ctx, typ) + if encoder != nil { + for _, extension := range extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder) + for _, extension := range ctx.extraExtensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + } + return encoder +} + +func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder { + for _, extension := range extensions { + encoder := extension.CreateEncoder(typ) + if encoder != nil { + return encoder + } + } + encoder := ctx.encoderExtension.CreateEncoder(typ) + if encoder != nil { + return encoder + } + for _, extension := range ctx.extraExtensions { + encoder := extension.CreateEncoder(typ) + if encoder != nil { + return encoder + } + } + typeName := typ.String() + encoder = typeEncoders[typeName] + if encoder != nil { + return encoder + } + if typ.Kind() == reflect.Ptr { + typePtr := typ.(*reflect2.UnsafePtrType) + encoder := typeEncoders[typePtr.Elem().String()] + if encoder != nil { + return &OptionalEncoder{encoder} + } + } + return nil +} + +func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor { + structType := typ.(*reflect2.UnsafeStructType) + embeddedBindings := []*Binding{} + bindings := []*Binding{} + for i := 0; i < structType.NumField(); i++ { + field := structType.Field(i) + tag, hastag := field.Tag().Lookup(ctx.getTagKey()) + if ctx.onlyTaggedField && !hastag && !field.Anonymous() { + continue + } + if tag == "-" || field.Name() == "_" { + continue + } + tagParts := strings.Split(tag, ",") + if field.Anonymous() && (tag == "" || tagParts[0] == "") { + if field.Type().Kind() == reflect.Struct { + structDescriptor := describeStruct(ctx, field.Type()) + for _, binding := range structDescriptor.Fields { + binding.levels = append([]int{i}, binding.levels...) + omitempty := binding.Encoder.(*structFieldEncoder).omitempty + binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty} + binding.Decoder = &structFieldDecoder{field, binding.Decoder} + embeddedBindings = append(embeddedBindings, binding) + } + continue + } else if field.Type().Kind() == reflect.Ptr { + ptrType := field.Type().(*reflect2.UnsafePtrType) + if ptrType.Elem().Kind() == reflect.Struct { + structDescriptor := describeStruct(ctx, ptrType.Elem()) + for _, binding := range structDescriptor.Fields { + binding.levels = append([]int{i}, binding.levels...) 
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty + binding.Encoder = &dereferenceEncoder{binding.Encoder} + binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty} + binding.Decoder = &dereferenceDecoder{ptrType.Elem(), binding.Decoder} + binding.Decoder = &structFieldDecoder{field, binding.Decoder} + embeddedBindings = append(embeddedBindings, binding) + } + continue + } + } + } + fieldNames := calcFieldNames(field.Name(), tagParts[0], tag) + fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name()) + decoder := fieldDecoders[fieldCacheKey] + if decoder == nil { + decoder = decoderOfType(ctx.append(field.Name()), field.Type()) + } + encoder := fieldEncoders[fieldCacheKey] + if encoder == nil { + encoder = encoderOfType(ctx.append(field.Name()), field.Type()) + } + binding := &Binding{ + Field: field, + FromNames: fieldNames, + ToNames: fieldNames, + Decoder: decoder, + Encoder: encoder, + } + binding.levels = []int{i} + bindings = append(bindings, binding) + } + return createStructDescriptor(ctx, typ, bindings, embeddedBindings) +} +func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor { + structDescriptor := &StructDescriptor{ + Type: typ, + Fields: bindings, + } + for _, extension := range extensions { + extension.UpdateStructDescriptor(structDescriptor) + } + ctx.encoderExtension.UpdateStructDescriptor(structDescriptor) + ctx.decoderExtension.UpdateStructDescriptor(structDescriptor) + for _, extension := range ctx.extraExtensions { + extension.UpdateStructDescriptor(structDescriptor) + } + processTags(structDescriptor, ctx.frozenConfig) + // merge normal & embedded bindings & sort with original order + allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...)) + sort.Sort(allBindings) + structDescriptor.Fields = allBindings + return structDescriptor +} + +type sortableBindings []*Binding + +func (bindings sortableBindings) Len() int { + return len(bindings) +} + +func (bindings sortableBindings) Less(i, j int) bool { + left := bindings[i].levels + right := bindings[j].levels + k := 0 + for { + if left[k] < right[k] { + return true + } else if left[k] > right[k] { + return false + } + k++ + } +} + +func (bindings sortableBindings) Swap(i, j int) { + bindings[i], bindings[j] = bindings[j], bindings[i] +} + +func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) { + for _, binding := range structDescriptor.Fields { + shouldOmitEmpty := false + tagParts := strings.Split(binding.Field.Tag().Get(cfg.getTagKey()), ",") + for _, tagPart := range tagParts[1:] { + if tagPart == "omitempty" { + shouldOmitEmpty = true + } else if tagPart == "string" { + if binding.Field.Type().Kind() == reflect.String { + binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg} + binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg} + } else { + binding.Decoder = &stringModeNumberDecoder{binding.Decoder} + binding.Encoder = &stringModeNumberEncoder{binding.Encoder} + } + } + } + binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder} + binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty} + } +} + +func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string { + // ignore? + if wholeTag == "-" { + return []string{} + } + // rename? 
+ var fieldNames []string + if tagProvidedFieldName == "" { + fieldNames = []string{originalFieldName} + } else { + fieldNames = []string{tagProvidedFieldName} + } + // private? + isNotExported := unicode.IsLower(rune(originalFieldName[0])) || originalFieldName[0] == '_' + if isNotExported { + fieldNames = []string{} + } + return fieldNames +} diff --git a/vendor/github.com/json-iterator/go/reflect_json_number.go b/vendor/github.com/json-iterator/go/reflect_json_number.go new file mode 100644 index 0000000000..98d45c1ec2 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_json_number.go @@ -0,0 +1,112 @@ +package jsoniter + +import ( + "encoding/json" + "github.com/modern-go/reflect2" + "strconv" + "unsafe" +) + +type Number string + +// String returns the literal text of the number. +func (n Number) String() string { return string(n) } + +// Float64 returns the number as a float64. +func (n Number) Float64() (float64, error) { + return strconv.ParseFloat(string(n), 64) +} + +// Int64 returns the number as an int64. +func (n Number) Int64() (int64, error) { + return strconv.ParseInt(string(n), 10, 64) +} + +func CastJsonNumber(val interface{}) (string, bool) { + switch typedVal := val.(type) { + case json.Number: + return string(typedVal), true + case Number: + return string(typedVal), true + } + return "", false +} + +var jsonNumberType = reflect2.TypeOfPtr((*json.Number)(nil)).Elem() +var jsoniterNumberType = reflect2.TypeOfPtr((*Number)(nil)).Elem() + +func createDecoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValDecoder { + if typ.AssignableTo(jsonNumberType) { + return &jsonNumberCodec{} + } + if typ.AssignableTo(jsoniterNumberType) { + return &jsoniterNumberCodec{} + } + return nil +} + +func createEncoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ.AssignableTo(jsonNumberType) { + return &jsonNumberCodec{} + } + if typ.AssignableTo(jsoniterNumberType) { + return &jsoniterNumberCodec{} + } + return nil +} + +type jsonNumberCodec struct { +} + +func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + switch iter.WhatIsNext() { + case StringValue: + *((*json.Number)(ptr)) = json.Number(iter.ReadString()) + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + *((*json.Number)(ptr)) = "" + default: + *((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString())) + } +} + +func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + number := *((*json.Number)(ptr)) + if len(number) == 0 { + stream.writeByte('0') + } else { + stream.WriteRaw(string(number)) + } +} + +func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*json.Number)(ptr))) == 0 +} + +type jsoniterNumberCodec struct { +} + +func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + switch iter.WhatIsNext() { + case StringValue: + *((*Number)(ptr)) = Number(iter.ReadString()) + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + *((*Number)(ptr)) = "" + default: + *((*Number)(ptr)) = Number([]byte(iter.readNumberAsString())) + } +} + +func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + number := *((*Number)(ptr)) + if len(number) == 0 { + stream.writeByte('0') + } else { + stream.WriteRaw(string(number)) + } +} + +func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*Number)(ptr))) == 0 +} diff --git a/vendor/github.com/json-iterator/go/reflect_json_raw_message.go b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go 
new file mode 100644 index 0000000000..eba434f2f1 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go @@ -0,0 +1,76 @@ +package jsoniter + +import ( + "encoding/json" + "github.com/modern-go/reflect2" + "unsafe" +) + +var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem() +var jsoniterRawMessageType = reflect2.TypeOfPtr((*RawMessage)(nil)).Elem() + +func createEncoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ == jsonRawMessageType { + return &jsonRawMessageCodec{} + } + if typ == jsoniterRawMessageType { + return &jsoniterRawMessageCodec{} + } + return nil +} + +func createDecoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValDecoder { + if typ == jsonRawMessageType { + return &jsonRawMessageCodec{} + } + if typ == jsoniterRawMessageType { + return &jsoniterRawMessageCodec{} + } + return nil +} + +type jsonRawMessageCodec struct { +} + +func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + *((*json.RawMessage)(ptr)) = nil + } else { + *((*json.RawMessage)(ptr)) = iter.SkipAndReturnBytes() + } +} + +func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*json.RawMessage)(ptr)) == nil { + stream.WriteNil() + } else { + stream.WriteRaw(string(*((*json.RawMessage)(ptr)))) + } +} + +func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*json.RawMessage)(ptr))) == 0 +} + +type jsoniterRawMessageCodec struct { +} + +func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + *((*RawMessage)(ptr)) = nil + } else { + *((*RawMessage)(ptr)) = iter.SkipAndReturnBytes() + } +} + +func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*RawMessage)(ptr)) == nil { + stream.WriteNil() + } else { + stream.WriteRaw(string(*((*RawMessage)(ptr)))) + } +} + +func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*RawMessage)(ptr))) == 0 +} diff --git a/vendor/github.com/json-iterator/go/reflect_map.go b/vendor/github.com/json-iterator/go/reflect_map.go new file mode 100644 index 0000000000..5829671301 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_map.go @@ -0,0 +1,346 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "io" + "reflect" + "sort" + "unsafe" +) + +func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder { + mapType := typ.(*reflect2.UnsafeMapType) + keyDecoder := decoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()) + elemDecoder := decoderOfType(ctx.append("[mapElem]"), mapType.Elem()) + return &mapDecoder{ + mapType: mapType, + keyType: mapType.Key(), + elemType: mapType.Elem(), + keyDecoder: keyDecoder, + elemDecoder: elemDecoder, + } +} + +func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder { + mapType := typ.(*reflect2.UnsafeMapType) + if ctx.sortMapKeys { + return &sortKeysMapEncoder{ + mapType: mapType, + keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()), + elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()), + } + } + return &mapEncoder{ + mapType: mapType, + keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()), + elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()), + } +} + +func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder { + decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ) + if decoder != nil { + return decoder + } + for _, extension := range 
ctx.extraExtensions { + decoder := extension.CreateMapKeyDecoder(typ) + if decoder != nil { + return decoder + } + } + + ptrType := reflect2.PtrTo(typ) + if ptrType.Implements(unmarshalerType) { + return &referenceDecoder{ + &unmarshalerDecoder{ + valType: ptrType, + }, + } + } + if typ.Implements(unmarshalerType) { + return &unmarshalerDecoder{ + valType: typ, + } + } + if ptrType.Implements(textUnmarshalerType) { + return &referenceDecoder{ + &textUnmarshalerDecoder{ + valType: ptrType, + }, + } + } + if typ.Implements(textUnmarshalerType) { + return &textUnmarshalerDecoder{ + valType: typ, + } + } + + switch typ.Kind() { + case reflect.String: + return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String)) + case reflect.Bool, + reflect.Uint8, reflect.Int8, + reflect.Uint16, reflect.Int16, + reflect.Uint32, reflect.Int32, + reflect.Uint64, reflect.Int64, + reflect.Uint, reflect.Int, + reflect.Float32, reflect.Float64, + reflect.Uintptr: + typ = reflect2.DefaultTypeOfKind(typ.Kind()) + return &numericMapKeyDecoder{decoderOfType(ctx, typ)} + default: + return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)} + } +} + +func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder { + encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ) + if encoder != nil { + return encoder + } + for _, extension := range ctx.extraExtensions { + encoder := extension.CreateMapKeyEncoder(typ) + if encoder != nil { + return encoder + } + } + + if typ == textMarshalerType { + return &directTextMarshalerEncoder{ + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + } + } + if typ.Implements(textMarshalerType) { + return &textMarshalerEncoder{ + valType: typ, + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + } + } + + switch typ.Kind() { + case reflect.String: + return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String)) + case reflect.Bool, + reflect.Uint8, reflect.Int8, + reflect.Uint16, reflect.Int16, + reflect.Uint32, reflect.Int32, + reflect.Uint64, reflect.Int64, + reflect.Uint, reflect.Int, + reflect.Float32, reflect.Float64, + reflect.Uintptr: + typ = reflect2.DefaultTypeOfKind(typ.Kind()) + return &numericMapKeyEncoder{encoderOfType(ctx, typ)} + default: + if typ.Kind() == reflect.Interface { + return &dynamicMapKeyEncoder{ctx, typ} + } + return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)} + } +} + +type mapDecoder struct { + mapType *reflect2.UnsafeMapType + keyType reflect2.Type + elemType reflect2.Type + keyDecoder ValDecoder + elemDecoder ValDecoder +} + +func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + mapType := decoder.mapType + c := iter.nextToken() + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + *(*unsafe.Pointer)(ptr) = nil + mapType.UnsafeSet(ptr, mapType.UnsafeNew()) + return + } + if mapType.UnsafeIsNil(ptr) { + mapType.UnsafeSet(ptr, mapType.UnsafeMakeMap(0)) + } + if c != '{' { + iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c})) + return + } + c = iter.nextToken() + if c == '}' { + return + } + iter.unreadByte() + key := decoder.keyType.UnsafeNew() + decoder.keyDecoder.Decode(key, iter) + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return + } + elem := decoder.elemType.UnsafeNew() + decoder.elemDecoder.Decode(elem, iter) + decoder.mapType.UnsafeSetIndex(ptr, key, elem) + for c = iter.nextToken(); c == ','; c = iter.nextToken() { + key := decoder.keyType.UnsafeNew() + 
decoder.keyDecoder.Decode(key, iter) + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return + } + elem := decoder.elemType.UnsafeNew() + decoder.elemDecoder.Decode(elem, iter) + decoder.mapType.UnsafeSetIndex(ptr, key, elem) + } + if c != '}' { + iter.ReportError("ReadMapCB", `expect }, but found `+string([]byte{c})) + } +} + +type numericMapKeyDecoder struct { + decoder ValDecoder +} + +func (decoder *numericMapKeyDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + c := iter.nextToken() + if c != '"' { + iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c})) + return + } + decoder.decoder.Decode(ptr, iter) + c = iter.nextToken() + if c != '"' { + iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c})) + return + } +} + +type numericMapKeyEncoder struct { + encoder ValEncoder +} + +func (encoder *numericMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.writeByte('"') + encoder.encoder.Encode(ptr, stream) + stream.writeByte('"') +} + +func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type dynamicMapKeyEncoder struct { + ctx *ctx + valType reflect2.Type +} + +func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := encoder.valType.UnsafeIndirect(ptr) + encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream) +} + +func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool { + obj := encoder.valType.UnsafeIndirect(ptr) + return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj)) +} + +type mapEncoder struct { + mapType *reflect2.UnsafeMapType + keyEncoder ValEncoder + elemEncoder ValEncoder +} + +func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *(*unsafe.Pointer)(ptr) == nil { + stream.WriteNil() + return + } + stream.WriteObjectStart() + iter := encoder.mapType.UnsafeIterate(ptr) + for i := 0; iter.HasNext(); i++ { + if i != 0 { + stream.WriteMore() + } + key, elem := iter.UnsafeNext() + encoder.keyEncoder.Encode(key, stream) + if stream.indention > 0 { + stream.writeTwoBytes(byte(':'), byte(' ')) + } else { + stream.writeByte(':') + } + encoder.elemEncoder.Encode(elem, stream) + } + stream.WriteObjectEnd() +} + +func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + iter := encoder.mapType.UnsafeIterate(ptr) + return !iter.HasNext() +} + +type sortKeysMapEncoder struct { + mapType *reflect2.UnsafeMapType + keyEncoder ValEncoder + elemEncoder ValEncoder +} + +func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *(*unsafe.Pointer)(ptr) == nil { + stream.WriteNil() + return + } + stream.WriteObjectStart() + mapIter := encoder.mapType.UnsafeIterate(ptr) + subStream := stream.cfg.BorrowStream(nil) + subStream.Attachment = stream.Attachment + subIter := stream.cfg.BorrowIterator(nil) + keyValues := encodedKeyValues{} + for mapIter.HasNext() { + key, elem := mapIter.UnsafeNext() + subStreamIndex := subStream.Buffered() + encoder.keyEncoder.Encode(key, subStream) + if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil { + stream.Error = subStream.Error + } + encodedKey := subStream.Buffer()[subStreamIndex:] + subIter.ResetBytes(encodedKey) + decodedKey := subIter.ReadString() + if stream.indention > 0 { + subStream.writeTwoBytes(byte(':'), byte(' ')) + } else { + subStream.writeByte(':') + } + 
encoder.elemEncoder.Encode(elem, subStream) + keyValues = append(keyValues, encodedKV{ + key: decodedKey, + keyValue: subStream.Buffer()[subStreamIndex:], + }) + } + sort.Sort(keyValues) + for i, keyValue := range keyValues { + if i != 0 { + stream.WriteMore() + } + stream.Write(keyValue.keyValue) + } + if subStream.Error != nil && stream.Error == nil { + stream.Error = subStream.Error + } + stream.WriteObjectEnd() + stream.cfg.ReturnStream(subStream) + stream.cfg.ReturnIterator(subIter) +} + +func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + iter := encoder.mapType.UnsafeIterate(ptr) + return !iter.HasNext() +} + +type encodedKeyValues []encodedKV + +type encodedKV struct { + key string + keyValue []byte +} + +func (sv encodedKeyValues) Len() int { return len(sv) } +func (sv encodedKeyValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] } +func (sv encodedKeyValues) Less(i, j int) bool { return sv[i].key < sv[j].key } diff --git a/vendor/github.com/json-iterator/go/reflect_marshaler.go b/vendor/github.com/json-iterator/go/reflect_marshaler.go new file mode 100644 index 0000000000..3e21f37567 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_marshaler.go @@ -0,0 +1,225 @@ +package jsoniter + +import ( + "encoding" + "encoding/json" + "unsafe" + + "github.com/modern-go/reflect2" +) + +var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem() +var unmarshalerType = reflect2.TypeOfPtr((*json.Unmarshaler)(nil)).Elem() +var textMarshalerType = reflect2.TypeOfPtr((*encoding.TextMarshaler)(nil)).Elem() +var textUnmarshalerType = reflect2.TypeOfPtr((*encoding.TextUnmarshaler)(nil)).Elem() + +func createDecoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValDecoder { + ptrType := reflect2.PtrTo(typ) + if ptrType.Implements(unmarshalerType) { + return &referenceDecoder{ + &unmarshalerDecoder{ptrType}, + } + } + if ptrType.Implements(textUnmarshalerType) { + return &referenceDecoder{ + &textUnmarshalerDecoder{ptrType}, + } + } + return nil +} + +func createEncoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ == marshalerType { + checkIsEmpty := createCheckIsEmpty(ctx, typ) + var encoder ValEncoder = &directMarshalerEncoder{ + checkIsEmpty: checkIsEmpty, + } + return encoder + } + if typ.Implements(marshalerType) { + checkIsEmpty := createCheckIsEmpty(ctx, typ) + var encoder ValEncoder = &marshalerEncoder{ + valType: typ, + checkIsEmpty: checkIsEmpty, + } + return encoder + } + ptrType := reflect2.PtrTo(typ) + if ctx.prefix != "" && ptrType.Implements(marshalerType) { + checkIsEmpty := createCheckIsEmpty(ctx, ptrType) + var encoder ValEncoder = &marshalerEncoder{ + valType: ptrType, + checkIsEmpty: checkIsEmpty, + } + return &referenceEncoder{encoder} + } + if typ == textMarshalerType { + checkIsEmpty := createCheckIsEmpty(ctx, typ) + var encoder ValEncoder = &directTextMarshalerEncoder{ + checkIsEmpty: checkIsEmpty, + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + } + return encoder + } + if typ.Implements(textMarshalerType) { + checkIsEmpty := createCheckIsEmpty(ctx, typ) + var encoder ValEncoder = &textMarshalerEncoder{ + valType: typ, + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + checkIsEmpty: checkIsEmpty, + } + return encoder + } + // if prefix is empty, the type is the root type + if ctx.prefix != "" && ptrType.Implements(textMarshalerType) { + checkIsEmpty := createCheckIsEmpty(ctx, ptrType) + var encoder ValEncoder = &textMarshalerEncoder{ + valType: ptrType, + stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")), + 
checkIsEmpty: checkIsEmpty, + } + return &referenceEncoder{encoder} + } + return nil +} + +type marshalerEncoder struct { + checkIsEmpty checkIsEmpty + valType reflect2.Type +} + +func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := encoder.valType.UnsafeIndirect(ptr) + if encoder.valType.IsNullable() && reflect2.IsNil(obj) { + stream.WriteNil() + return + } + marshaler := obj.(json.Marshaler) + bytes, err := marshaler.MarshalJSON() + if err != nil { + stream.Error = err + } else { + // html escape was already done by jsoniter + // but the extra '\n' should be trimed + l := len(bytes) + if l > 0 && bytes[l-1] == '\n' { + bytes = bytes[:l-1] + } + stream.Write(bytes) + } +} + +func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type directMarshalerEncoder struct { + checkIsEmpty checkIsEmpty +} + +func (encoder *directMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + marshaler := *(*json.Marshaler)(ptr) + if marshaler == nil { + stream.WriteNil() + return + } + bytes, err := marshaler.MarshalJSON() + if err != nil { + stream.Error = err + } else { + stream.Write(bytes) + } +} + +func (encoder *directMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type textMarshalerEncoder struct { + valType reflect2.Type + stringEncoder ValEncoder + checkIsEmpty checkIsEmpty +} + +func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + obj := encoder.valType.UnsafeIndirect(ptr) + if encoder.valType.IsNullable() && reflect2.IsNil(obj) { + stream.WriteNil() + return + } + marshaler := (obj).(encoding.TextMarshaler) + bytes, err := marshaler.MarshalText() + if err != nil { + stream.Error = err + } else { + str := string(bytes) + encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream) + } +} + +func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type directTextMarshalerEncoder struct { + stringEncoder ValEncoder + checkIsEmpty checkIsEmpty +} + +func (encoder *directTextMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + marshaler := *(*encoding.TextMarshaler)(ptr) + if marshaler == nil { + stream.WriteNil() + return + } + bytes, err := marshaler.MarshalText() + if err != nil { + stream.Error = err + } else { + str := string(bytes) + encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream) + } +} + +func (encoder *directTextMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type unmarshalerDecoder struct { + valType reflect2.Type +} + +func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + valType := decoder.valType + obj := valType.UnsafeIndirect(ptr) + unmarshaler := obj.(json.Unmarshaler) + iter.nextToken() + iter.unreadByte() // skip spaces + bytes := iter.SkipAndReturnBytes() + err := unmarshaler.UnmarshalJSON(bytes) + if err != nil { + iter.ReportError("unmarshalerDecoder", err.Error()) + } +} + +type textUnmarshalerDecoder struct { + valType reflect2.Type +} + +func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + valType := decoder.valType + obj := valType.UnsafeIndirect(ptr) + if reflect2.IsNil(obj) { + ptrType := valType.(*reflect2.UnsafePtrType) + elemType := ptrType.Elem() + elem := elemType.UnsafeNew() + ptrType.UnsafeSet(ptr, unsafe.Pointer(&elem)) + obj = valType.UnsafeIndirect(ptr) + } + unmarshaler := 
(obj).(encoding.TextUnmarshaler) + str := iter.ReadString() + err := unmarshaler.UnmarshalText([]byte(str)) + if err != nil { + iter.ReportError("textUnmarshalerDecoder", err.Error()) + } +} diff --git a/vendor/github.com/json-iterator/go/reflect_native.go b/vendor/github.com/json-iterator/go/reflect_native.go new file mode 100644 index 0000000000..f88722d14d --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_native.go @@ -0,0 +1,453 @@ +package jsoniter + +import ( + "encoding/base64" + "reflect" + "strconv" + "unsafe" + + "github.com/modern-go/reflect2" +) + +const ptrSize = 32 << uintptr(^uintptr(0)>>63) + +func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder { + if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 { + sliceDecoder := decoderOfSlice(ctx, typ) + return &base64Codec{sliceDecoder: sliceDecoder} + } + typeName := typ.String() + kind := typ.Kind() + switch kind { + case reflect.String: + if typeName != "string" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem()) + } + return &stringCodec{} + case reflect.Int: + if typeName != "int" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem()) + } + if strconv.IntSize == 32 { + return &int32Codec{} + } + return &int64Codec{} + case reflect.Int8: + if typeName != "int8" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem()) + } + return &int8Codec{} + case reflect.Int16: + if typeName != "int16" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem()) + } + return &int16Codec{} + case reflect.Int32: + if typeName != "int32" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem()) + } + return &int32Codec{} + case reflect.Int64: + if typeName != "int64" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem()) + } + return &int64Codec{} + case reflect.Uint: + if typeName != "uint" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem()) + } + if strconv.IntSize == 32 { + return &uint32Codec{} + } + return &uint64Codec{} + case reflect.Uint8: + if typeName != "uint8" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem()) + } + return &uint8Codec{} + case reflect.Uint16: + if typeName != "uint16" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem()) + } + return &uint16Codec{} + case reflect.Uint32: + if typeName != "uint32" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem()) + } + return &uint32Codec{} + case reflect.Uintptr: + if typeName != "uintptr" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem()) + } + if ptrSize == 32 { + return &uint32Codec{} + } + return &uint64Codec{} + case reflect.Uint64: + if typeName != "uint64" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem()) + } + return &uint64Codec{} + case reflect.Float32: + if typeName != "float32" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem()) + } + return &float32Codec{} + case reflect.Float64: + if typeName != "float64" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem()) + } + return &float64Codec{} + case reflect.Bool: + if typeName != "bool" { + return encoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem()) + } + return &boolCodec{} + } + return nil +} + +func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder { + if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 { + sliceDecoder := decoderOfSlice(ctx, 
typ) + return &base64Codec{sliceDecoder: sliceDecoder} + } + typeName := typ.String() + switch typ.Kind() { + case reflect.String: + if typeName != "string" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem()) + } + return &stringCodec{} + case reflect.Int: + if typeName != "int" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem()) + } + if strconv.IntSize == 32 { + return &int32Codec{} + } + return &int64Codec{} + case reflect.Int8: + if typeName != "int8" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem()) + } + return &int8Codec{} + case reflect.Int16: + if typeName != "int16" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem()) + } + return &int16Codec{} + case reflect.Int32: + if typeName != "int32" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem()) + } + return &int32Codec{} + case reflect.Int64: + if typeName != "int64" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem()) + } + return &int64Codec{} + case reflect.Uint: + if typeName != "uint" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem()) + } + if strconv.IntSize == 32 { + return &uint32Codec{} + } + return &uint64Codec{} + case reflect.Uint8: + if typeName != "uint8" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem()) + } + return &uint8Codec{} + case reflect.Uint16: + if typeName != "uint16" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem()) + } + return &uint16Codec{} + case reflect.Uint32: + if typeName != "uint32" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem()) + } + return &uint32Codec{} + case reflect.Uintptr: + if typeName != "uintptr" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem()) + } + if ptrSize == 32 { + return &uint32Codec{} + } + return &uint64Codec{} + case reflect.Uint64: + if typeName != "uint64" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem()) + } + return &uint64Codec{} + case reflect.Float32: + if typeName != "float32" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem()) + } + return &float32Codec{} + case reflect.Float64: + if typeName != "float64" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem()) + } + return &float64Codec{} + case reflect.Bool: + if typeName != "bool" { + return decoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem()) + } + return &boolCodec{} + } + return nil +} + +type stringCodec struct { +} + +func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*string)(ptr)) = iter.ReadString() +} + +func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + str := *((*string)(ptr)) + stream.WriteString(str) +} + +func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*string)(ptr)) == "" +} + +type int8Codec struct { +} + +func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int8)(ptr)) = iter.ReadInt8() + } +} + +func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt8(*((*int8)(ptr))) +} + +func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int8)(ptr)) == 0 +} + +type int16Codec struct { +} + +func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int16)(ptr)) = iter.ReadInt16() + } +} + +func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt16(*((*int16)(ptr))) +} + +func 
(codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int16)(ptr)) == 0 +} + +type int32Codec struct { +} + +func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int32)(ptr)) = iter.ReadInt32() + } +} + +func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt32(*((*int32)(ptr))) +} + +func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int32)(ptr)) == 0 +} + +type int64Codec struct { +} + +func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int64)(ptr)) = iter.ReadInt64() + } +} + +func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt64(*((*int64)(ptr))) +} + +func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int64)(ptr)) == 0 +} + +type uint8Codec struct { +} + +func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint8)(ptr)) = iter.ReadUint8() + } +} + +func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint8(*((*uint8)(ptr))) +} + +func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint8)(ptr)) == 0 +} + +type uint16Codec struct { +} + +func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint16)(ptr)) = iter.ReadUint16() + } +} + +func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint16(*((*uint16)(ptr))) +} + +func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint16)(ptr)) == 0 +} + +type uint32Codec struct { +} + +func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint32)(ptr)) = iter.ReadUint32() + } +} + +func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint32(*((*uint32)(ptr))) +} + +func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint32)(ptr)) == 0 +} + +type uint64Codec struct { +} + +func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint64)(ptr)) = iter.ReadUint64() + } +} + +func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint64(*((*uint64)(ptr))) +} + +func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint64)(ptr)) == 0 +} + +type float32Codec struct { +} + +func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*float32)(ptr)) = iter.ReadFloat32() + } +} + +func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat32(*((*float32)(ptr))) +} + +func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float32)(ptr)) == 0 +} + +type float64Codec struct { +} + +func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*float64)(ptr)) = iter.ReadFloat64() + } +} + +func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat64(*((*float64)(ptr))) +} + +func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float64)(ptr)) == 0 +} + +type boolCodec struct { +} + +func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*bool)(ptr)) = iter.ReadBool() + } +} + +func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteBool(*((*bool)(ptr))) +} + +func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool 
{ + return !(*((*bool)(ptr))) +} + +type base64Codec struct { + sliceType *reflect2.UnsafeSliceType + sliceDecoder ValDecoder +} + +func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + codec.sliceType.UnsafeSetNil(ptr) + return + } + switch iter.WhatIsNext() { + case StringValue: + src := iter.ReadString() + dst, err := base64.StdEncoding.DecodeString(src) + if err != nil { + iter.ReportError("decode base64", err.Error()) + } else { + codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst)) + } + case ArrayValue: + codec.sliceDecoder.Decode(ptr, iter) + default: + iter.ReportError("base64Codec", "invalid input") + } +} + +func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + if codec.sliceType.UnsafeIsNil(ptr) { + stream.WriteNil() + return + } + src := *((*[]byte)(ptr)) + encoding := base64.StdEncoding + stream.writeByte('"') + if len(src) != 0 { + size := encoding.EncodedLen(len(src)) + buf := make([]byte, size) + encoding.Encode(buf, src) + stream.buf = append(stream.buf, buf...) + } + stream.writeByte('"') +} + +func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*[]byte)(ptr))) == 0 +} diff --git a/vendor/github.com/json-iterator/go/reflect_optional.go b/vendor/github.com/json-iterator/go/reflect_optional.go new file mode 100644 index 0000000000..fa71f47489 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_optional.go @@ -0,0 +1,129 @@ +package jsoniter + +import ( + "github.com/modern-go/reflect2" + "unsafe" +) + +func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder { + ptrType := typ.(*reflect2.UnsafePtrType) + elemType := ptrType.Elem() + decoder := decoderOfType(ctx, elemType) + return &OptionalDecoder{elemType, decoder} +} + +func encoderOfOptional(ctx *ctx, typ reflect2.Type) ValEncoder { + ptrType := typ.(*reflect2.UnsafePtrType) + elemType := ptrType.Elem() + elemEncoder := encoderOfType(ctx, elemType) + encoder := &OptionalEncoder{elemEncoder} + return encoder +} + +type OptionalDecoder struct { + ValueType reflect2.Type + ValueDecoder ValDecoder +} + +func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + *((*unsafe.Pointer)(ptr)) = nil + } else { + if *((*unsafe.Pointer)(ptr)) == nil { + //pointer to null, we have to allocate memory to hold the value + newPtr := decoder.ValueType.UnsafeNew() + decoder.ValueDecoder.Decode(newPtr, iter) + *((*unsafe.Pointer)(ptr)) = newPtr + } else { + //reuse existing instance + decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter) + } + } +} + +type dereferenceDecoder struct { + // only to deference a pointer + valueType reflect2.Type + valueDecoder ValDecoder +} + +func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if *((*unsafe.Pointer)(ptr)) == nil { + //pointer to null, we have to allocate memory to hold the value + newPtr := decoder.valueType.UnsafeNew() + decoder.valueDecoder.Decode(newPtr, iter) + *((*unsafe.Pointer)(ptr)) = newPtr + } else { + //reuse existing instance + decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter) + } +} + +type OptionalEncoder struct { + ValueEncoder ValEncoder +} + +func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*unsafe.Pointer)(ptr)) == nil { + stream.WriteNil() + } else { + encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream) + } +} + +func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*unsafe.Pointer)(ptr)) == nil +} + +type 
dereferenceEncoder struct { + ValueEncoder ValEncoder +} + +func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*unsafe.Pointer)(ptr)) == nil { + stream.WriteNil() + } else { + encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream) + } +} + +func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool { + dePtr := *((*unsafe.Pointer)(ptr)) + if dePtr == nil { + return true + } + return encoder.ValueEncoder.IsEmpty(dePtr) +} + +func (encoder *dereferenceEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool { + deReferenced := *((*unsafe.Pointer)(ptr)) + if deReferenced == nil { + return true + } + isEmbeddedPtrNil, converted := encoder.ValueEncoder.(IsEmbeddedPtrNil) + if !converted { + return false + } + fieldPtr := unsafe.Pointer(deReferenced) + return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr) +} + +type referenceEncoder struct { + encoder ValEncoder +} + +func (encoder *referenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.encoder.Encode(unsafe.Pointer(&ptr), stream) +} + +func (encoder *referenceEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr)) +} + +type referenceDecoder struct { + decoder ValDecoder +} + +func (decoder *referenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.decoder.Decode(unsafe.Pointer(&ptr), iter) +} diff --git a/vendor/github.com/json-iterator/go/reflect_slice.go b/vendor/github.com/json-iterator/go/reflect_slice.go new file mode 100644 index 0000000000..9441d79df3 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_slice.go @@ -0,0 +1,99 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "io" + "unsafe" +) + +func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder { + sliceType := typ.(*reflect2.UnsafeSliceType) + decoder := decoderOfType(ctx.append("[sliceElem]"), sliceType.Elem()) + return &sliceDecoder{sliceType, decoder} +} + +func encoderOfSlice(ctx *ctx, typ reflect2.Type) ValEncoder { + sliceType := typ.(*reflect2.UnsafeSliceType) + encoder := encoderOfType(ctx.append("[sliceElem]"), sliceType.Elem()) + return &sliceEncoder{sliceType, encoder} +} + +type sliceEncoder struct { + sliceType *reflect2.UnsafeSliceType + elemEncoder ValEncoder +} + +func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if encoder.sliceType.UnsafeIsNil(ptr) { + stream.WriteNil() + return + } + length := encoder.sliceType.UnsafeLengthOf(ptr) + if length == 0 { + stream.WriteEmptyArray() + return + } + stream.WriteArrayStart() + encoder.elemEncoder.Encode(encoder.sliceType.UnsafeGetIndex(ptr, 0), stream) + for i := 1; i < length; i++ { + stream.WriteMore() + elemPtr := encoder.sliceType.UnsafeGetIndex(ptr, i) + encoder.elemEncoder.Encode(elemPtr, stream) + } + stream.WriteArrayEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error()) + } +} + +func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.sliceType.UnsafeLengthOf(ptr) == 0 +} + +type sliceDecoder struct { + sliceType *reflect2.UnsafeSliceType + elemDecoder ValDecoder +} + +func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.doDecode(ptr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error()) + } +} + +func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) { + c := iter.nextToken() + sliceType := 
decoder.sliceType + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + sliceType.UnsafeSetNil(ptr) + return + } + if c != '[' { + iter.ReportError("decode slice", "expect [ or n, but found "+string([]byte{c})) + return + } + c = iter.nextToken() + if c == ']' { + sliceType.UnsafeSet(ptr, sliceType.UnsafeMakeSlice(0, 0)) + return + } + iter.unreadByte() + sliceType.UnsafeGrow(ptr, 1) + elemPtr := sliceType.UnsafeGetIndex(ptr, 0) + decoder.elemDecoder.Decode(elemPtr, iter) + length := 1 + for c = iter.nextToken(); c == ','; c = iter.nextToken() { + idx := length + length += 1 + sliceType.UnsafeGrow(ptr, length) + elemPtr = sliceType.UnsafeGetIndex(ptr, idx) + decoder.elemDecoder.Decode(elemPtr, iter) + } + if c != ']' { + iter.ReportError("decode slice", "expect ], but found "+string([]byte{c})) + return + } +} diff --git a/vendor/github.com/json-iterator/go/reflect_struct_decoder.go b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go new file mode 100644 index 0000000000..92ae912dc2 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go @@ -0,0 +1,1097 @@ +package jsoniter + +import ( + "fmt" + "io" + "strings" + "unsafe" + + "github.com/modern-go/reflect2" +) + +func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder { + bindings := map[string]*Binding{} + structDescriptor := describeStruct(ctx, typ) + for _, binding := range structDescriptor.Fields { + for _, fromName := range binding.FromNames { + old := bindings[fromName] + if old == nil { + bindings[fromName] = binding + continue + } + ignoreOld, ignoreNew := resolveConflictBinding(ctx.frozenConfig, old, binding) + if ignoreOld { + delete(bindings, fromName) + } + if !ignoreNew { + bindings[fromName] = binding + } + } + } + fields := map[string]*structFieldDecoder{} + for k, binding := range bindings { + fields[k] = binding.Decoder.(*structFieldDecoder) + } + + if !ctx.caseSensitive() { + for k, binding := range bindings { + if _, found := fields[strings.ToLower(k)]; !found { + fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder) + } + } + } + + return createStructDecoder(ctx, typ, fields) +} + +func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structFieldDecoder) ValDecoder { + if ctx.disallowUnknownFields { + return &generalStructDecoder{typ: typ, fields: fields, disallowUnknownFields: true} + } + knownHash := map[int64]struct{}{ + 0: {}, + } + + switch len(fields) { + case 0: + return &skipObjectDecoder{typ} + case 1: + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder} + } + case 2: + var fieldHash1 int64 + var fieldHash2 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldHash1 == 0 { + fieldHash1 = fieldHash + fieldDecoder1 = fieldDecoder + } else { + fieldHash2 = fieldHash + fieldDecoder2 = fieldDecoder + } + } + return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2} + case 3: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldDecoder1 *structFieldDecoder + 
var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } + } + return &threeFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3} + case 4: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } + } + return &fourFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4} + case 5: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } + } + return &fiveFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5} + case 6: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } 
else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } + } + return &sixFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6} + case 7: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldName7 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } + } + return &sevenFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7} + case 8: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldName7 int64 + var fieldName8 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else { + fieldName8 = fieldHash + fieldDecoder8 = 
fieldDecoder + } + } + return &eightFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, + fieldName8, fieldDecoder8} + case 9: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldName7 int64 + var fieldName8 int64 + var fieldName9 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + var fieldDecoder9 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else if fieldName8 == 0 { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } else { + fieldName9 = fieldHash + fieldDecoder9 = fieldDecoder + } + } + return &nineFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, + fieldName8, fieldDecoder8, + fieldName9, fieldDecoder9} + case 10: + var fieldName1 int64 + var fieldName2 int64 + var fieldName3 int64 + var fieldName4 int64 + var fieldName5 int64 + var fieldName6 int64 + var fieldName7 int64 + var fieldName8 int64 + var fieldName9 int64 + var fieldName10 int64 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + var fieldDecoder9 *structFieldDecoder + var fieldDecoder10 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName, ctx.caseSensitive()) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields, false} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = 
fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else if fieldName8 == 0 { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } else if fieldName9 == 0 { + fieldName9 = fieldHash + fieldDecoder9 = fieldDecoder + } else { + fieldName10 = fieldHash + fieldDecoder10 = fieldDecoder + } + } + return &tenFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, + fieldName8, fieldDecoder8, + fieldName9, fieldDecoder9, + fieldName10, fieldDecoder10} + } + return &generalStructDecoder{typ, fields, false} +} + +type generalStructDecoder struct { + typ reflect2.Type + fields map[string]*structFieldDecoder + disallowUnknownFields bool +} + +func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + var c byte + for c = ','; c == ','; c = iter.nextToken() { + decoder.decodeOneField(ptr, iter) + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + if c != '}' { + iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c})) + } + iter.decrementDepth() +} + +func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) { + var field string + var fieldDecoder *structFieldDecoder + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes := iter.ReadStringAsSlice() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + fieldDecoder = decoder.fields[field] + if fieldDecoder == nil && !iter.cfg.caseSensitive { + fieldDecoder = decoder.fields[strings.ToLower(field)] + } + } else { + field = iter.ReadString() + fieldDecoder = decoder.fields[field] + if fieldDecoder == nil && !iter.cfg.caseSensitive { + fieldDecoder = decoder.fields[strings.ToLower(field)] + } + } + if fieldDecoder == nil { + if decoder.disallowUnknownFields { + msg := "found unknown field: " + field + iter.ReportError("ReadObject", msg) + } + c := iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + iter.Skip() + return + } + c := iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + fieldDecoder.Decode(ptr, iter) +} + +type skipObjectDecoder struct { + typ reflect2.Type +} + +func (decoder *skipObjectDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + valueType := iter.WhatIsNext() + if valueType != ObjectValue && valueType != NilValue { + iter.ReportError("skipObjectDecoder", "expect object or null") + return + } + iter.Skip() +} + +type oneFieldStructDecoder struct { + typ reflect2.Type + fieldHash int64 + fieldDecoder *structFieldDecoder +} + +func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + if iter.readFieldHash() == decoder.fieldHash { + decoder.fieldDecoder.Decode(ptr, iter) + } else { + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type twoFieldsStructDecoder 
struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder +} + +func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type threeFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder +} + +func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type fourFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder +} + +func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type fiveFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder +} + +func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case 
decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type sixFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder +} + +func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type sevenFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder + fieldHash7 int64 + fieldDecoder7 *structFieldDecoder +} + +func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type eightFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder + fieldHash7 int64 + fieldDecoder7 *structFieldDecoder + fieldHash8 int64 + fieldDecoder8 *structFieldDecoder +} + +func (decoder 
*eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type nineFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder + fieldHash7 int64 + fieldDecoder7 *structFieldDecoder + fieldHash8 int64 + fieldDecoder8 *structFieldDecoder + fieldHash9 int64 + fieldDecoder9 *structFieldDecoder +} + +func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + case decoder.fieldHash9: + decoder.fieldDecoder9.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type tenFieldsStructDecoder struct { + typ reflect2.Type + fieldHash1 int64 + fieldDecoder1 *structFieldDecoder + fieldHash2 int64 + fieldDecoder2 *structFieldDecoder + fieldHash3 int64 + fieldDecoder3 *structFieldDecoder + fieldHash4 int64 + fieldDecoder4 *structFieldDecoder + fieldHash5 int64 + fieldDecoder5 *structFieldDecoder + fieldHash6 int64 + fieldDecoder6 *structFieldDecoder + fieldHash7 int64 + fieldDecoder7 *structFieldDecoder + fieldHash8 int64 + fieldDecoder8 *structFieldDecoder + fieldHash9 int64 + fieldDecoder9 *structFieldDecoder + fieldHash10 int64 + fieldDecoder10 *structFieldDecoder +} + +func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + if !iter.incrementDepth() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + 
decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + case decoder.fieldHash9: + decoder.fieldDecoder9.Decode(ptr, iter) + case decoder.fieldHash10: + decoder.fieldDecoder10.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } + iter.decrementDepth() +} + +type structFieldDecoder struct { + field reflect2.StructField + fieldDecoder ValDecoder +} + +func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + fieldPtr := decoder.field.UnsafeGet(ptr) + decoder.fieldDecoder.Decode(fieldPtr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%s: %s", decoder.field.Name(), iter.Error.Error()) + } +} + +type stringModeStringDecoder struct { + elemDecoder ValDecoder + cfg *frozenConfig +} + +func (decoder *stringModeStringDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.elemDecoder.Decode(ptr, iter) + str := *((*string)(ptr)) + tempIter := decoder.cfg.BorrowIterator([]byte(str)) + defer decoder.cfg.ReturnIterator(tempIter) + *((*string)(ptr)) = tempIter.ReadString() +} + +type stringModeNumberDecoder struct { + elemDecoder ValDecoder +} + +func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.WhatIsNext() == NilValue { + decoder.elemDecoder.Decode(ptr, iter) + return + } + + c := iter.nextToken() + if c != '"' { + iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c})) + return + } + decoder.elemDecoder.Decode(ptr, iter) + if iter.Error != nil { + return + } + c = iter.readByte() + if c != '"' { + iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c})) + return + } +} diff --git a/vendor/github.com/json-iterator/go/reflect_struct_encoder.go b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go new file mode 100644 index 0000000000..152e3ef5a9 --- /dev/null +++ b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go @@ -0,0 +1,211 @@ +package jsoniter + +import ( + "fmt" + "github.com/modern-go/reflect2" + "io" + "reflect" + "unsafe" +) + +func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder { + type bindingTo struct { + binding *Binding + toName string + ignored bool + } + orderedBindings := []*bindingTo{} + structDescriptor := describeStruct(ctx, typ) + for _, binding := range structDescriptor.Fields { + for _, toName := range binding.ToNames { + new := &bindingTo{ + binding: binding, + toName: toName, + } + for _, old := range orderedBindings { + if old.toName != toName { + continue + } + old.ignored, new.ignored = resolveConflictBinding(ctx.frozenConfig, old.binding, new.binding) + } + orderedBindings = append(orderedBindings, new) + } + } + if len(orderedBindings) == 0 { + return &emptyStructEncoder{} + } + finalOrderedFields := []structFieldTo{} + for _, bindingTo := range orderedBindings { + if !bindingTo.ignored { + finalOrderedFields = 
append(finalOrderedFields, structFieldTo{ + encoder: bindingTo.binding.Encoder.(*structFieldEncoder), + toName: bindingTo.toName, + }) + } + } + return &structEncoder{typ, finalOrderedFields} +} + +func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty { + encoder := createEncoderOfNative(ctx, typ) + if encoder != nil { + return encoder + } + kind := typ.Kind() + switch kind { + case reflect.Interface: + return &dynamicEncoder{typ} + case reflect.Struct: + return &structEncoder{typ: typ} + case reflect.Array: + return &arrayEncoder{} + case reflect.Slice: + return &sliceEncoder{} + case reflect.Map: + return encoderOfMap(ctx, typ) + case reflect.Ptr: + return &OptionalEncoder{} + default: + return &lazyErrorEncoder{err: fmt.Errorf("unsupported type: %v", typ)} + } +} + +func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) { + newTagged := new.Field.Tag().Get(cfg.getTagKey()) != "" + oldTagged := old.Field.Tag().Get(cfg.getTagKey()) != "" + if newTagged { + if oldTagged { + if len(old.levels) > len(new.levels) { + return true, false + } else if len(new.levels) > len(old.levels) { + return false, true + } else { + return true, true + } + } else { + return true, false + } + } else { + if oldTagged { + return true, false + } + if len(old.levels) > len(new.levels) { + return true, false + } else if len(new.levels) > len(old.levels) { + return false, true + } else { + return true, true + } + } +} + +type structFieldEncoder struct { + field reflect2.StructField + fieldEncoder ValEncoder + omitempty bool +} + +func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + fieldPtr := encoder.field.UnsafeGet(ptr) + encoder.fieldEncoder.Encode(fieldPtr, stream) + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%s: %s", encoder.field.Name(), stream.Error.Error()) + } +} + +func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool { + fieldPtr := encoder.field.UnsafeGet(ptr) + return encoder.fieldEncoder.IsEmpty(fieldPtr) +} + +func (encoder *structFieldEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool { + isEmbeddedPtrNil, converted := encoder.fieldEncoder.(IsEmbeddedPtrNil) + if !converted { + return false + } + fieldPtr := encoder.field.UnsafeGet(ptr) + return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr) +} + +type IsEmbeddedPtrNil interface { + IsEmbeddedPtrNil(ptr unsafe.Pointer) bool +} + +type structEncoder struct { + typ reflect2.Type + fields []structFieldTo +} + +type structFieldTo struct { + encoder *structFieldEncoder + toName string +} + +func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteObjectStart() + isNotFirst := false + for _, field := range encoder.fields { + if field.encoder.omitempty && field.encoder.IsEmpty(ptr) { + continue + } + if field.encoder.IsEmbeddedPtrNil(ptr) { + continue + } + if isNotFirst { + stream.WriteMore() + } + stream.WriteObjectField(field.toName) + field.encoder.Encode(ptr, stream) + isNotFirst = true + } + stream.WriteObjectEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error()) + } +} + +func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type emptyStructEncoder struct { +} + +func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteEmptyObject() +} + +func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type 
stringModeNumberEncoder struct { + elemEncoder ValEncoder +} + +func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.writeByte('"') + encoder.elemEncoder.Encode(ptr, stream) + stream.writeByte('"') +} + +func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.elemEncoder.IsEmpty(ptr) +} + +type stringModeStringEncoder struct { + elemEncoder ValEncoder + cfg *frozenConfig +} + +func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + tempStream := encoder.cfg.BorrowStream(nil) + tempStream.Attachment = stream.Attachment + defer encoder.cfg.ReturnStream(tempStream) + encoder.elemEncoder.Encode(ptr, tempStream) + stream.WriteString(string(tempStream.Buffer())) +} + +func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.elemEncoder.IsEmpty(ptr) +} diff --git a/vendor/github.com/json-iterator/go/stream.go b/vendor/github.com/json-iterator/go/stream.go new file mode 100644 index 0000000000..23d8a3ad6b --- /dev/null +++ b/vendor/github.com/json-iterator/go/stream.go @@ -0,0 +1,210 @@ +package jsoniter + +import ( + "io" +) + +// stream is a io.Writer like object, with JSON specific write functions. +// Error is not returned as return value, but stored as Error member on this stream instance. +type Stream struct { + cfg *frozenConfig + out io.Writer + buf []byte + Error error + indention int + Attachment interface{} // open for customized encoder +} + +// NewStream create new stream instance. +// cfg can be jsoniter.ConfigDefault. +// out can be nil if write to internal buffer. +// bufSize is the initial size for the internal buffer in bytes. +func NewStream(cfg API, out io.Writer, bufSize int) *Stream { + return &Stream{ + cfg: cfg.(*frozenConfig), + out: out, + buf: make([]byte, 0, bufSize), + Error: nil, + indention: 0, + } +} + +// Pool returns a pool can provide more stream with same configuration +func (stream *Stream) Pool() StreamPool { + return stream.cfg +} + +// Reset reuse this stream instance by assign a new writer +func (stream *Stream) Reset(out io.Writer) { + stream.out = out + stream.buf = stream.buf[:0] +} + +// Available returns how many bytes are unused in the buffer. +func (stream *Stream) Available() int { + return cap(stream.buf) - len(stream.buf) +} + +// Buffered returns the number of bytes that have been written into the current buffer. +func (stream *Stream) Buffered() int { + return len(stream.buf) +} + +// Buffer if writer is nil, use this method to take the result +func (stream *Stream) Buffer() []byte { + return stream.buf +} + +// SetBuffer allows to append to the internal buffer directly +func (stream *Stream) SetBuffer(buf []byte) { + stream.buf = buf +} + +// Write writes the contents of p into the buffer. +// It returns the number of bytes written. +// If nn < len(p), it also returns an error explaining +// why the write is short. +func (stream *Stream) Write(p []byte) (nn int, err error) { + stream.buf = append(stream.buf, p...) + if stream.out != nil { + nn, err = stream.out.Write(stream.buf) + stream.buf = stream.buf[nn:] + return + } + return len(p), nil +} + +// WriteByte writes a single byte. 
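
The struct encoder vendored above honors the standard `json` struct-tag options: `omitempty` is handled through structFieldEncoder.IsEmpty, and the `,string` modifier is handled by stringModeNumberEncoder/stringModeStringEncoder, so marshaling behaves like encoding/json. A minimal sketch of that behavior, using a hypothetical Container type that is not part of this repository:

    package main

    import (
    	"fmt"

    	jsoniter "github.com/json-iterator/go"
    )

    // Container is a hypothetical type used only for illustration.
    type Container struct {
    	ID    string   `json:"id"`
    	Names []string `json:"names,omitempty"` // dropped when empty (structFieldEncoder.IsEmpty)
    	Size  int64    `json:"size,string"`     // quoted by stringModeNumberEncoder
    }

    func main() {
    	json := jsoniter.ConfigCompatibleWithStandardLibrary
    	out, err := json.Marshal(Container{ID: "abc", Size: 42})
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println(string(out)) // {"id":"abc","size":"42"}
    }

ConfigCompatibleWithStandardLibrary mirrors encoding/json's field ordering and escaping, which is presumably the behavior callers expect after the generated ffjson marshalers are removed.
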
+func (stream *Stream) writeByte(c byte) { + stream.buf = append(stream.buf, c) +} + +func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) { + stream.buf = append(stream.buf, c1, c2) +} + +func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) { + stream.buf = append(stream.buf, c1, c2, c3) +} + +func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) { + stream.buf = append(stream.buf, c1, c2, c3, c4) +} + +func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) { + stream.buf = append(stream.buf, c1, c2, c3, c4, c5) +} + +// Flush writes any buffered data to the underlying io.Writer. +func (stream *Stream) Flush() error { + if stream.out == nil { + return nil + } + if stream.Error != nil { + return stream.Error + } + _, err := stream.out.Write(stream.buf) + if err != nil { + if stream.Error == nil { + stream.Error = err + } + return err + } + stream.buf = stream.buf[:0] + return nil +} + +// WriteRaw write string out without quotes, just like []byte +func (stream *Stream) WriteRaw(s string) { + stream.buf = append(stream.buf, s...) +} + +// WriteNil write null to stream +func (stream *Stream) WriteNil() { + stream.writeFourBytes('n', 'u', 'l', 'l') +} + +// WriteTrue write true to stream +func (stream *Stream) WriteTrue() { + stream.writeFourBytes('t', 'r', 'u', 'e') +} + +// WriteFalse write false to stream +func (stream *Stream) WriteFalse() { + stream.writeFiveBytes('f', 'a', 'l', 's', 'e') +} + +// WriteBool write true or false into stream +func (stream *Stream) WriteBool(val bool) { + if val { + stream.WriteTrue() + } else { + stream.WriteFalse() + } +} + +// WriteObjectStart write { with possible indention +func (stream *Stream) WriteObjectStart() { + stream.indention += stream.cfg.indentionStep + stream.writeByte('{') + stream.writeIndention(0) +} + +// WriteObjectField write "field": with possible indention +func (stream *Stream) WriteObjectField(field string) { + stream.WriteString(field) + if stream.indention > 0 { + stream.writeTwoBytes(':', ' ') + } else { + stream.writeByte(':') + } +} + +// WriteObjectEnd write } with possible indention +func (stream *Stream) WriteObjectEnd() { + stream.writeIndention(stream.cfg.indentionStep) + stream.indention -= stream.cfg.indentionStep + stream.writeByte('}') +} + +// WriteEmptyObject write {} +func (stream *Stream) WriteEmptyObject() { + stream.writeByte('{') + stream.writeByte('}') +} + +// WriteMore write , with possible indention +func (stream *Stream) WriteMore() { + stream.writeByte(',') + stream.writeIndention(0) +} + +// WriteArrayStart write [ with possible indention +func (stream *Stream) WriteArrayStart() { + stream.indention += stream.cfg.indentionStep + stream.writeByte('[') + stream.writeIndention(0) +} + +// WriteEmptyArray write [] +func (stream *Stream) WriteEmptyArray() { + stream.writeTwoBytes('[', ']') +} + +// WriteArrayEnd write ] with possible indention +func (stream *Stream) WriteArrayEnd() { + stream.writeIndention(stream.cfg.indentionStep) + stream.indention -= stream.cfg.indentionStep + stream.writeByte(']') +} + +func (stream *Stream) writeIndention(delta int) { + if stream.indention == 0 { + return + } + stream.writeByte('\n') + toWrite := stream.indention - delta + for i := 0; i < toWrite; i++ { + stream.buf = append(stream.buf, ' ') + } +} diff --git a/vendor/github.com/json-iterator/go/stream_float.go b/vendor/github.com/json-iterator/go/stream_float.go new file mode 100644 index 0000000000..826aa594ac --- /dev/null +++ 
b/vendor/github.com/json-iterator/go/stream_float.go @@ -0,0 +1,111 @@ +package jsoniter + +import ( + "fmt" + "math" + "strconv" +) + +var pow10 []uint64 + +func init() { + pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000} +} + +// WriteFloat32 write float32 to stream +func (stream *Stream) WriteFloat32(val float32) { + if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) { + stream.Error = fmt.Errorf("unsupported value: %f", val) + return + } + abs := math.Abs(float64(val)) + fmt := byte('f') + // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. + if abs != 0 { + if float32(abs) < 1e-6 || float32(abs) >= 1e21 { + fmt = 'e' + } + } + stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32) +} + +// WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster +func (stream *Stream) WriteFloat32Lossy(val float32) { + if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) { + stream.Error = fmt.Errorf("unsupported value: %f", val) + return + } + if val < 0 { + stream.writeByte('-') + val = -val + } + if val > 0x4ffffff { + stream.WriteFloat32(val) + return + } + precision := 6 + exp := uint64(1000000) // 6 + lval := uint64(float64(val)*float64(exp) + 0.5) + stream.WriteUint64(lval / exp) + fval := lval % exp + if fval == 0 { + return + } + stream.writeByte('.') + for p := precision - 1; p > 0 && fval < pow10[p]; p-- { + stream.writeByte('0') + } + stream.WriteUint64(fval) + for stream.buf[len(stream.buf)-1] == '0' { + stream.buf = stream.buf[:len(stream.buf)-1] + } +} + +// WriteFloat64 write float64 to stream +func (stream *Stream) WriteFloat64(val float64) { + if math.IsInf(val, 0) || math.IsNaN(val) { + stream.Error = fmt.Errorf("unsupported value: %f", val) + return + } + abs := math.Abs(val) + fmt := byte('f') + // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. 
+ if abs != 0 { + if abs < 1e-6 || abs >= 1e21 { + fmt = 'e' + } + } + stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 64) +} + +// WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster +func (stream *Stream) WriteFloat64Lossy(val float64) { + if math.IsInf(val, 0) || math.IsNaN(val) { + stream.Error = fmt.Errorf("unsupported value: %f", val) + return + } + if val < 0 { + stream.writeByte('-') + val = -val + } + if val > 0x4ffffff { + stream.WriteFloat64(val) + return + } + precision := 6 + exp := uint64(1000000) // 6 + lval := uint64(val*float64(exp) + 0.5) + stream.WriteUint64(lval / exp) + fval := lval % exp + if fval == 0 { + return + } + stream.writeByte('.') + for p := precision - 1; p > 0 && fval < pow10[p]; p-- { + stream.writeByte('0') + } + stream.WriteUint64(fval) + for stream.buf[len(stream.buf)-1] == '0' { + stream.buf = stream.buf[:len(stream.buf)-1] + } +} diff --git a/vendor/github.com/json-iterator/go/stream_int.go b/vendor/github.com/json-iterator/go/stream_int.go new file mode 100644 index 0000000000..d1059ee4c2 --- /dev/null +++ b/vendor/github.com/json-iterator/go/stream_int.go @@ -0,0 +1,190 @@ +package jsoniter + +var digits []uint32 + +func init() { + digits = make([]uint32, 1000) + for i := uint32(0); i < 1000; i++ { + digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0' + if i < 10 { + digits[i] += 2 << 24 + } else if i < 100 { + digits[i] += 1 << 24 + } + } +} + +func writeFirstBuf(space []byte, v uint32) []byte { + start := v >> 24 + if start == 0 { + space = append(space, byte(v>>16), byte(v>>8)) + } else if start == 1 { + space = append(space, byte(v>>8)) + } + space = append(space, byte(v)) + return space +} + +func writeBuf(buf []byte, v uint32) []byte { + return append(buf, byte(v>>16), byte(v>>8), byte(v)) +} + +// WriteUint8 write uint8 to stream +func (stream *Stream) WriteUint8(val uint8) { + stream.buf = writeFirstBuf(stream.buf, digits[val]) +} + +// WriteInt8 write int8 to stream +func (stream *Stream) WriteInt8(nval int8) { + var val uint8 + if nval < 0 { + val = uint8(-nval) + stream.buf = append(stream.buf, '-') + } else { + val = uint8(nval) + } + stream.buf = writeFirstBuf(stream.buf, digits[val]) +} + +// WriteUint16 write uint16 to stream +func (stream *Stream) WriteUint16(val uint16) { + q1 := val / 1000 + if q1 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[val]) + return + } + r1 := val - q1*1000 + stream.buf = writeFirstBuf(stream.buf, digits[q1]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return +} + +// WriteInt16 write int16 to stream +func (stream *Stream) WriteInt16(nval int16) { + var val uint16 + if nval < 0 { + val = uint16(-nval) + stream.buf = append(stream.buf, '-') + } else { + val = uint16(nval) + } + stream.WriteUint16(val) +} + +// WriteUint32 write uint32 to stream +func (stream *Stream) WriteUint32(val uint32) { + q1 := val / 1000 + if q1 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[val]) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q1]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q2]) + } else { + r3 := q2 - q3*1000 + stream.buf = append(stream.buf, byte(q3+'0')) + stream.buf = writeBuf(stream.buf, digits[r3]) + } + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) +} + +// 
WriteInt32 write int32 to stream +func (stream *Stream) WriteInt32(nval int32) { + var val uint32 + if nval < 0 { + val = uint32(-nval) + stream.buf = append(stream.buf, '-') + } else { + val = uint32(nval) + } + stream.WriteUint32(val) +} + +// WriteUint64 write uint64 to stream +func (stream *Stream) WriteUint64(val uint64) { + q1 := val / 1000 + if q1 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[val]) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q1]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q2]) + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r3 := q2 - q3*1000 + q4 := q3 / 1000 + if q4 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q3]) + stream.buf = writeBuf(stream.buf, digits[r3]) + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r4 := q3 - q4*1000 + q5 := q4 / 1000 + if q5 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q4]) + stream.buf = writeBuf(stream.buf, digits[r4]) + stream.buf = writeBuf(stream.buf, digits[r3]) + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) + return + } + r5 := q4 - q5*1000 + q6 := q5 / 1000 + if q6 == 0 { + stream.buf = writeFirstBuf(stream.buf, digits[q5]) + } else { + stream.buf = writeFirstBuf(stream.buf, digits[q6]) + r6 := q5 - q6*1000 + stream.buf = writeBuf(stream.buf, digits[r6]) + } + stream.buf = writeBuf(stream.buf, digits[r5]) + stream.buf = writeBuf(stream.buf, digits[r4]) + stream.buf = writeBuf(stream.buf, digits[r3]) + stream.buf = writeBuf(stream.buf, digits[r2]) + stream.buf = writeBuf(stream.buf, digits[r1]) +} + +// WriteInt64 write int64 to stream +func (stream *Stream) WriteInt64(nval int64) { + var val uint64 + if nval < 0 { + val = uint64(-nval) + stream.buf = append(stream.buf, '-') + } else { + val = uint64(nval) + } + stream.WriteUint64(val) +} + +// WriteInt write int to stream +func (stream *Stream) WriteInt(val int) { + stream.WriteInt64(int64(val)) +} + +// WriteUint write uint to stream +func (stream *Stream) WriteUint(val uint) { + stream.WriteUint64(uint64(val)) +} diff --git a/vendor/github.com/json-iterator/go/stream_str.go b/vendor/github.com/json-iterator/go/stream_str.go new file mode 100644 index 0000000000..54c2ba0b3a --- /dev/null +++ b/vendor/github.com/json-iterator/go/stream_str.go @@ -0,0 +1,372 @@ +package jsoniter + +import ( + "unicode/utf8" +) + +// htmlSafeSet holds the value true if the ASCII character with the given +// array position can be safely represented inside a JSON string, embedded +// inside of HTML
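
The Stream type collects these write helpers over a single append-only buffer, so JSON can be produced incrementally without reflection. A small sketch of direct Stream use with the default configuration (illustrative only, not part of the patch):

    package main

    import (
    	"fmt"

    	jsoniter "github.com/json-iterator/go"
    )

    func main() {
    	// Borrow a pooled Stream with no underlying writer; the output
    	// stays in the internal buffer until read back via Buffer().
    	stream := jsoniter.ConfigDefault.BorrowStream(nil)
    	defer jsoniter.ConfigDefault.ReturnStream(stream)

    	stream.WriteObjectStart()
    	stream.WriteObjectField("size")
    	stream.WriteFloat64Lossy(1.5) // fast path: at most 6 fractional digits
    	stream.WriteMore()
    	stream.WriteObjectField("count")
    	stream.WriteInt(3)
    	stream.WriteObjectEnd()

    	fmt.Println(string(stream.Buffer())) // {"size":1.5,"count":3}
    }

With ConfigDefault the indention step is zero, so the indention-aware helpers (WriteObjectStart, WriteMore, WriteObjectEnd) emit compact JSON; an indented configuration would insert newlines and spaces instead.
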