Mirror of https://github.com/kubernetes-sigs/descheduler.git (synced 2026-01-26 05:14:13 +01:00)

Commit: bump(*): Remove codecgen dependency
vendor/github.com/ugorji/go/.travis.yml (generated, vendored): 36 lines deleted
@@ -1,36 +0,0 @@
language: go
sudo: false
go:
- master
- 1.12.x
- 1.11.x
- 1.10.x
- 1.9.x
# go testing suite support, which we use, was introduced in go 1.7
# testing.TB, TB.Helper() was introduced in go 1.9
# Consequently, tests will only compile for go 1.9+
script:
- go test -tags "alltests" -run Suite -coverprofile coverage.txt github.com/ugorji/go/codec
- go test -tags "alltests safe" -run Suite -coverprofile coverage.safe.txt github.com/ugorji/go/codec
- go test -tags "alltests codecgen" -run Suite -coverprofile coverage.codecgen.txt github.com/ugorji/go/codec
# we use if block below, so that we still return with success even if skipped.
# Previously, we used [[...]] && go test ..., which returned exit code 0 and broke the build.
- |
  if [[ "${TRAVIS_GO_VERSION}" == "1.9.x" ]]; then
    GOARCH=386 go test -tags "alltests" -run Suite -coverprofile coverage.386.txt github.com/ugorji/go/codec
  fi
- |
  if [[ "${TRAVIS_GO_VERSION}" == "1.10.x" ]]; then
    GOARCH=386 go test -tags "alltests safe" -run Suite -coverprofile coverage.386.safe.txt github.com/ugorji/go/codec
  fi
- |
  if [[ "${TRAVIS_GO_VERSION}" == "1.11.x" ]]; then
    GOARCH=386 go test -tags "alltests codecgen" -run Suite -coverprofile coverage.386.codecgen.txt github.com/ugorji/go/codec
  fi
- |
  if [[ "${TRAVIS_GO_VERSION}" == "1.12.x" ]]; then
    echo "XXXX RACE" && go test "-race" -tags "alltests" -run Suite -coverprofile coverage.race.txt github.com/ugorji/go/codec;
  fi

after_success:
- bash <(curl -s https://codecov.io/bash)
vendor/github.com/ugorji/go/FAQ.md (generated, vendored): 49 lines deleted
@@ -1,49 +0,0 @@
# FAQ

## Managing Binary Size

This package adds some size to any binary that depends on it.
This is because we include an auto-generated file: `fast-path.generated.go`
to help with performance when encoding/decoding slices and maps of
built in numeric, boolean, string and interface{} types.

Prior to 2019-05-16, this package could add about `11MB` to the size of
your binaries. We have now trimmed that in half, and the package
contributes about `5.5MB`. This compares favorably to other packages like
`json-iterator/go` `(3.3MB)` and `net/http` `(3.5MB)`.

Furthermore, you can bypass building `fast-path.generated.go`, by building
(or running tests and benchmarks) with the tag: `notfastpath`.

    go install -tags notfastpath
    go build -tags notfastpath
    go test -tags notfastpath

With the tag `notfastpath`, we trim that size to about `2.8MB`.

Be aware that, at least in our representative microbenchmarks for cbor (for example),
passing `notfastpath` tag causes significant performance loss (about 33%).
*YMMV*.

## Resolving Module Issues

Prior to v1.1.5, go-codec unknowingly introduced some headaches for its
users while introducing module support. We tried to make
`github.com/ugorji/go/codec` a module. At that time, multi-repository
module support was weak, so we reverted and made `github.com/ugorji/go/`
the module. However, folks previously used go-codec in module mode
before it formally supported modules. Eventually, different established packages
had go.mod files contain various real and pseudo versions of go-codec
which causes `go` to barf with `ambiguous import` error.

To resolve this, from v1.1.5 and up, we use a requirements cycle between
modules `github.com/ugorji/go/codec` and `github.com/ugorji/go/`,
tagging them with parallel consistent tags (`codec/vX.Y.Z and vX.Y.Z`)
to the same commit.

Fixing `ambiguous import` failure is now as simple as running

```
go get -u github.com/ugorji/go/codec@latest
```
vendor/github.com/ugorji/go/LICENSE (generated, vendored): 22 lines deleted
@@ -1,22 +0,0 @@
The MIT License (MIT)

Copyright (c) 2012-2015 Ugorji Nwoke.
All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
vendor/github.com/ugorji/go/README.md (generated, vendored): 299 lines deleted
@@ -1,299 +0,0 @@
[Sourcegraph](https://sourcegraph.com/github.com/ugorji/go/-/tree/codec?badge)
[Build Status](https://travis-ci.org/ugorji/go)
[Codecov](https://codecov.io/gh/ugorji/go)
[GoDoc](http://godoc.org/github.com/ugorji/go/codec)
[Go Report Card](https://goreportcard.com/report/github.com/ugorji/go/codec)
[License](https://raw.githubusercontent.com/ugorji/go/master/LICENSE)

# go-codec

This repository contains the `go-codec` library.

To install:

```
go get github.com/ugorji/go/codec
```

# Package Documentation

Package codec provides a High Performance, Feature-Rich Idiomatic Go 1.4+
codec/encoding library for binc, msgpack, cbor, json.

Supported Serialization formats are:

- msgpack: https://github.com/msgpack/msgpack
- binc: http://github.com/ugorji/binc
- cbor: http://cbor.io http://tools.ietf.org/html/rfc7049
- json: http://json.org http://tools.ietf.org/html/rfc7159
- simple:

This package will carefully use 'package unsafe' for performance reasons in
specific places. You can build without unsafe use by passing the safe or
appengine tag i.e. 'go install -tags=safe ...'.

For detailed usage information, read the primer at
http://ugorji.net/blog/go-codec-primer .

The idiomatic Go support is as seen in other encoding packages in the
standard library (ie json, xml, gob, etc).

Rich Feature Set includes:

- Simple but extremely powerful and feature-rich API
- Support for go 1.4 and above, while selectively using newer APIs for later releases
- Excellent code coverage ( > 90% )
- Very High Performance.
  Our extensive benchmarks show us outperforming Gob, Json, Bson, etc by 2-4X.
- Careful selected use of 'unsafe' for targeted performance gains.
- 100% safe mode supported, where 'unsafe' is not used at all.
- Lock-free (sans mutex) concurrency for scaling to 100's of cores
- In-place updates during decode, with option to zero value in maps and slices prior to decode
- Coerce types where appropriate
  e.g. decode an int in the stream into a float, decode numbers from formatted strings, etc
- Corner Cases:
  Overflows, nil maps/slices, nil values in streams are handled correctly
- Standard field renaming via tags
- Support for omitting empty fields during an encoding
- Encoding from any value and decoding into pointer to any value
  (struct, slice, map, primitives, pointers, interface{}, etc)
- Extensions to support efficient encoding/decoding of any named types
- Support encoding.(Binary|Text)(M|Unm)arshaler interfaces
- Support IsZero() bool to determine if a value is a zero value.
  Analogous to time.Time.IsZero() bool.
- Decoding without a schema (into a interface{}).
  Includes Options to configure what specific map or slice type to use
  when decoding an encoded list or map into a nil interface{}
- Mapping a non-interface type to an interface, so we can decode appropriately
  into any interface type with a correctly configured non-interface value.
- Encode a struct as an array, and decode struct from an array in the data stream
- Option to encode struct keys as numbers (instead of strings)
  (to support structured streams with fields encoded as numeric codes)
- Comprehensive support for anonymous fields
- Fast (no-reflection) encoding/decoding of common maps and slices
- Code-generation for faster performance, supported in go 1.6+
- Support binary (e.g. messagepack, cbor) and text (e.g. json) formats
- Support indefinite-length formats to enable true streaming
  (for formats which support it e.g. json, cbor)
- Support canonical encoding, where a value is ALWAYS encoded as same sequence of bytes.
  This mostly applies to maps, where iteration order is non-deterministic.
- NIL in data stream decoded as zero value
- Never silently skip data when decoding.
  User decides whether to return an error or silently skip data when keys or indexes
  in the data stream do not map to fields in the struct.
- Detect and error when encoding a cyclic reference (instead of stack overflow shutdown)
- Encode/Decode from/to chan types (for iterative streaming support)
- Drop-in replacement for encoding/json. `json:` key in struct tag supported.
- Provides a RPC Server and Client Codec for net/rpc communication protocol.
- Handle unique idiosyncrasies of codecs e.g.
  - For messagepack, configure how ambiguities in handling raw bytes are resolved
  - For messagepack, provide rpc server/client codec to support
    msgpack-rpc protocol defined at:
    https://github.com/msgpack-rpc/msgpack-rpc/blob/master/spec.md

## Extension Support

Users can register a function to handle the encoding or decoding of their
custom types.

There are no restrictions on what the custom type can be. Some examples:

```go
type BisSet []int
type BitSet64 uint64
type UUID string
type MyStructWithUnexportedFields struct { a int; b bool; c []int; }
type GifImage struct { ... }
```

As an illustration, MyStructWithUnexportedFields would normally be encoded
as an empty map because it has no exported fields, while UUID would be
encoded as a string. However, with extension support, you can encode any of
these however you like.

There is also seamless support provided for registering an extension (with a
tag) but letting the encoding mechanism default to the standard way.
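
For concreteness, here is a minimal sketch (not part of the original README) of registering an extension for a custom type. It encodes a UUID as a plain string via an InterfaceExt registered on a CborHandle; the `uuidExt` type and the tag value 78 are purely illustrative, and the method names follow the package godoc of this vintage, so treat exact signatures as approximate.

```go
// Hypothetical example: a UUID custom type encoded as a plain string.
type UUID string

type uuidExt struct{}

// ConvertExt turns the UUID into a simple value the codec already understands.
func (uuidExt) ConvertExt(v interface{}) interface{} {
	switch x := v.(type) {
	case UUID:
		return string(x)
	case *UUID:
		return string(*x)
	}
	return nil
}

// UpdateExt rebuilds the UUID from the decoded simple value.
func (uuidExt) UpdateExt(dst interface{}, src interface{}) {
	*(dst.(*UUID)) = UUID(src.(string))
}

// registration and use (78 is an arbitrary example tag):
//   var ch codec.CborHandle
//   err := ch.SetInterfaceExt(reflect.TypeOf(UUID("")), 78, uuidExt{})
//   var b []byte
//   err = codec.NewEncoderBytes(&b, &ch).Encode(UUID("some-id"))
```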

## Custom Encoding and Decoding

This package maintains symmetry in the encoding and decoding halfs. We
determine how to encode or decode by walking this decision tree

- is there an extension registered for the type?
- is type a codec.Selfer?
- is format binary, and is type a encoding.BinaryMarshaler and BinaryUnmarshaler?
- is format specifically json, and is type a encoding/json.Marshaler and Unmarshaler?
- is format text-based, and type an encoding.TextMarshaler and TextUnmarshaler?
- else we use a pair of functions based on the "kind" of the type e.g. map, slice, int64, etc

This symmetry is important to reduce chances of issues happening because the
encoding and decoding sides are out of sync e.g. decoded via very specific
encoding.TextUnmarshaler but encoded via kind-specific generalized mode.

Consequently, if a type only defines one-half of the symmetry (e.g. it
implements UnmarshalJSON() but not MarshalJSON() ), then that type doesn't
satisfy the check and we will continue walking down the decision tree.
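
As a hedged illustration of the codec.Selfer branch above (not in the original README): a type that implements both halves of Selfer is picked by the same branch on encode and decode, in every format. The Celsius type is a stand-in; the CodecEncodeSelf/CodecDecodeSelf method names are the ones this library documents for Selfer.

```go
// Hypothetical sketch: a Selfer implementation that encodes as a plain number.
type Celsius float64

func (c *Celsius) CodecEncodeSelf(e *codec.Encoder) {
	e.MustEncode(float64(*c)) // write the underlying float64
}

func (c *Celsius) CodecDecodeSelf(d *codec.Decoder) {
	var f float64
	d.MustDecode(&f) // read it back and convert
	*c = Celsius(f)
}
```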

## RPC

RPC Client and Server Codecs are implemented, so the codecs can be used with
the standard net/rpc package.

## Usage

The Handle is SAFE for concurrent READ, but NOT SAFE for concurrent
modification.

The Encoder and Decoder are NOT safe for concurrent use.

Consequently, the usage model is basically:

- Create and initialize the Handle before any use.
  Once created, DO NOT modify it.
- Multiple Encoders or Decoders can now use the Handle concurrently.
  They only read information off the Handle (never write).
- However, each Encoder or Decoder MUST not be used concurrently
- To re-use an Encoder/Decoder, call Reset(...) on it first.
  This allows you use state maintained on the Encoder/Decoder.

Sample usage model:

```go
// create and configure Handle
var (
  bh codec.BincHandle
  mh codec.MsgpackHandle
  ch codec.CborHandle
)

mh.MapType = reflect.TypeOf(map[string]interface{}(nil))

// configure extensions
// e.g. for msgpack, define functions and enable Time support for tag 1
// mh.SetExt(reflect.TypeOf(time.Time{}), 1, myExt)

// create and use decoder/encoder
var (
  r io.Reader
  w io.Writer
  b []byte
  h = &bh // or mh to use msgpack
)

dec = codec.NewDecoder(r, h)
dec = codec.NewDecoderBytes(b, h)
err = dec.Decode(&v)

enc = codec.NewEncoder(w, h)
enc = codec.NewEncoderBytes(&b, h)
err = enc.Encode(v)

//RPC Server
go func() {
  for {
    conn, err := listener.Accept()
    rpcCodec := codec.GoRpc.ServerCodec(conn, h)
    //OR rpcCodec := codec.MsgpackSpecRpc.ServerCodec(conn, h)
    rpc.ServeCodec(rpcCodec)
  }
}()

//RPC Communication (client side)
conn, err = net.Dial("tcp", "localhost:5555")
rpcCodec := codec.GoRpc.ClientCodec(conn, h)
//OR rpcCodec := codec.MsgpackSpecRpc.ClientCodec(conn, h)
client := rpc.NewClientWithCodec(rpcCodec)
```
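
The sample above does not show the Reset(...) re-use path mentioned in the bullets. Here is a hedged sketch (not from the original README) of re-using one Encoder/Decoder pair across iterations; the ResetBytes method names are as documented for Encoder and Decoder in this library, and the map payload is purely illustrative.

```go
// Re-use one encoder and one decoder across many values.
var (
  jh  codec.JsonHandle
  buf []byte
)

enc := codec.NewEncoderBytes(&buf, &jh)
dec := codec.NewDecoderBytes(nil, &jh)

for _, v := range []map[string]int{{"a": 1}, {"b": 2}} {
  buf = buf[:0]
  enc.ResetBytes(&buf) // rewind and reuse the encoder's internal state
  if err := enc.Encode(v); err != nil {
    panic(err)
  }

  var out map[string]int
  dec.ResetBytes(buf) // point the decoder at the freshly encoded bytes
  if err := dec.Decode(&out); err != nil {
    panic(err)
  }
}
```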

## Running Tests

To run tests, use the following:

```
go test
```

To run the full suite of tests, use the following:

```
go test -tags alltests -run Suite
```

You can run the tag 'safe' to run tests or build in safe mode. e.g.

```
go test -tags safe -run Json
go test -tags "alltests safe" -run Suite
```

## Running Benchmarks

```
cd bench
go test -bench . -benchmem -benchtime 1s
```

Please see http://github.com/ugorji/go-codec-bench .

## Caveats

Struct fields matching the following are ignored during encoding and
decoding

- struct tag value set to -
- func, complex numbers, unsafe pointers
- unexported and not embedded
- unexported and embedded and not struct kind
- unexported and embedded pointers (from go1.10)

Every other field in a struct will be encoded/decoded.

Embedded fields are encoded as if they exist in the top-level struct, with
some caveats. See Encode documentation.
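
As a small illustrative sketch of the caveats list (not from the original README), the struct below marks which fields take part in encoding; the Record type and its tag values are hypothetical, using the codec struct-tag form this library documents.

```go
// Which fields of a struct are encoded/decoded, per the caveats above.
type Record struct {
	Name    string `codec:"name"`            // encoded, renamed to "name"
	Notes   string `codec:"notes,omitempty"` // encoded, omitted when empty
	Skipped string `codec:"-"`               // tag value "-": ignored
	Hook    func()                           // func kind: ignored
	secret  string                           // unexported, not embedded: ignored
}
```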

## Exported Package API

```go
const CborStreamBytes byte = 0x5f ...
const GenVersion = 16
var SelfExt = &extFailWrapper{}
var GoRpc goRpc
var MsgpackSpecRpc msgpackSpecRpc
func GenHelperDecoder(d *Decoder) (gd genHelperDecoder, dd genHelperDecDriver)
func GenHelperEncoder(e *Encoder) (ge genHelperEncoder, ee genHelperEncDriver)
type BasicHandle struct{ ... }
type BincHandle struct{ ... }
type BytesExt interface{ ... }
type CborHandle struct{ ... }
type DecodeOptions struct{ ... }
type Decoder struct{ ... }
    func NewDecoder(r io.Reader, h Handle) *Decoder
    func NewDecoderBytes(in []byte, h Handle) *Decoder
type EncodeOptions struct{ ... }
type Encoder struct{ ... }
    func NewEncoder(w io.Writer, h Handle) *Encoder
    func NewEncoderBytes(out *[]byte, h Handle) *Encoder
type Ext interface{ ... }
type Handle interface{ ... }
type InterfaceExt interface{ ... }
type JsonHandle struct{ ... }
type MapBySlice interface{ ... }
type MissingFielder interface{ ... }
type MsgpackHandle struct{ ... }
type MsgpackSpecRpcMultiArgs []interface{}
type RPCOptions struct{ ... }
type Raw []byte
type RawExt struct{ ... }
type Rpc interface{ ... }
type Selfer interface{ ... }
type SimpleHandle struct{ ... }
type TypeInfos struct{ ... }
    func NewTypeInfos(tags []string) *TypeInfos
```
vendor/github.com/ugorji/go/codec/0_importpath.go (generated, vendored): 7 lines deleted
@@ -1,7 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

package codec // import "github.com/ugorji/go/codec"

// This establishes that this package must be imported as github.com/ugorji/go/codec.
// It makes forking easier, and plays well with pre-module releases of go.
vendor/github.com/ugorji/go/codec/bench/.gitignore (generated, vendored): 2 lines deleted
@@ -1,2 +0,0 @@
*_generated_*.*
vendor/github.com/ugorji/go/codec/bench/README.md (generated, vendored): 153 lines deleted
@@ -1,153 +0,0 @@
|
||||
# go-codec-bench
|
||||
|
||||
This is a comparison of different binary and text encodings.
|
||||
|
||||
We compare the codecs provided by github.com/ugorji/go/codec package,
|
||||
against other libraries:
|
||||
|
||||
[github.com/ugorji/go/codec](http://github.com/ugorji/go) provides:
|
||||
|
||||
- msgpack: [http://github.com/msgpack/msgpack]
|
||||
- binc: [http://github.com/ugorji/binc]
|
||||
- cbor: [http://cbor.io] [http://tools.ietf.org/html/rfc7049]
|
||||
- simple:
|
||||
- json: [http://json.org] [http://tools.ietf.org/html/rfc7159]
|
||||
|
||||
Other codecs compared include:
|
||||
|
||||
- [gopkg.in/vmihailenco/msgpack.v2](http://gopkg.in/vmihailenco/msgpack.v2)
|
||||
- [gopkg.in/mgo.v2/bson](http://gopkg.in/mgo.v2/bson)
|
||||
- [github.com/davecgh/go-xdr/xdr2](https://godoc.org/github.com/davecgh/go-xdr/xdr)
|
||||
- [github.com/Sereal/Sereal/Go/sereal](https://godoc.org/github.com/Sereal/Sereal/Go/sereal)
|
||||
- [code.google.com/p/cbor/go](http://code.google.com/p/cbor/go)
|
||||
- [github.com/tinylib/msgp](http://github.com/tinylib/msgp)
|
||||
- [github.com/tinylib/msgp](http://godoc.org/github.com/tinylib/msgp)
|
||||
- [github.com/pquerna/ffjson/ffjson](http://godoc.org/github.com/pquerna/ffjson/ffjson)
|
||||
- [bitbucket.org/bodhisnarkva/cbor/go](http://godoc.org/bitbucket.org/bodhisnarkva/cbor/go)
|
||||
- [github.com/json-iterator/go](http://godoc.org/github.com/json-iterator/go)
|
||||
- [github.com/mailru/easyjson](http://godoc.org/github.com/mailru/easyjson)
|
||||
|
||||
# Data
|
||||
|
||||
The data being serialized is a `TestStruc` randomly generated values.
|
||||
See https://github.com/ugorji/go-codec-bench/blob/master/codec/values_test.go for the
|
||||
definition of the TestStruc.
|
||||
|
||||
# Run Benchmarks
|
||||
|
||||
See https://github.com/ugorji/go-codec-bench/blob/master/codec/bench.sh
|
||||
for how to download the external libraries which we benchmark against,
|
||||
generate the files for the types when needed,
|
||||
and run the suite of tests.
|
||||
|
||||
The 3 suite of benchmarks are
|
||||
|
||||
- CodecSuite
|
||||
- XSuite
|
||||
- CodecXSuite
|
||||
|
||||
```
|
||||
# Note that `bench.sh` may be in the codec sub-directory, and should be run from there.
|
||||
|
||||
# download the code and all its dependencies
|
||||
./bench.sh -d
|
||||
|
||||
# code-generate files needed for benchmarks against ffjson, easyjson, msgp, etc
|
||||
./bench.sh -c
|
||||
|
||||
# run the full suite of tests
|
||||
./bench.sh -s
|
||||
|
||||
# Below, see how to just run some specific suite of tests, knowing the right tags and flags ...
|
||||
# See bench.sh for different iterations
|
||||
|
||||
# Run suite of tests in default mode (selectively using unsafe in specific areas)
|
||||
go test -tags "alltests x" -bench "CodecXSuite" -benchmem
|
||||
# Run suite of tests in safe mode (no usage of unsafe)
|
||||
go test -tags "alltests x safe" -bench "CodecXSuite" -benchmem
|
||||
# Run suite of tests in codecgen mode, including all tests which are generated (msgp, ffjson, etc)
|
||||
go test -tags "alltests x generated" -bench "CodecXGenSuite" -benchmem
|
||||
|
||||
```
|
||||
|
||||
# Issues
|
||||
|
||||
The following issues are seen currently (11/20/2014):
|
||||
|
||||
- _code.google.com/p/cbor/go_ fails on encoding and decoding the test struct
|
||||
- _github.com/davecgh/go-xdr/xdr2_ fails on encoding and decoding the test struct
|
||||
- _github.com/Sereal/Sereal/Go/sereal_ fails on decoding the serialized test struct
|
||||
|
||||
# Representative Benchmark Results
|
||||
|
||||
Please see the [benchmarking blog post for detailed representative results](http://ugorji.net/blog/benchmarking-serialization-in-go).
|
||||
|
||||
A snapshot of some results on my 2016 MacBook Pro is below.
|
||||
**Note: errors are truncated, and lines re-arranged, for readability**.
|
||||
|
||||
Below are results of running the entire suite on 2017-11-20 (ie running ./bench.sh -s).
|
||||
|
||||
What you should notice:
|
||||
|
||||
- Results get better with codecgen, showing about 20-50% performance improvement.
|
||||
Users should carefully weigh the performance improvements against the
|
||||
usability and binary-size increases, as performance is already extremely good
|
||||
without the codecgen path.
|
||||
|
||||
See https://github.com/ugorji/go-codec-bench/blob/master/bench.out.txt for latest run of bench.sh as of 2017-11-20
|
||||
|
||||
* snippet of bench.out.txt, running without codecgen *
|
||||
```
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Msgpack____Encode-8 10000 183961 ns/op 10224 B/op 75 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Binc_______Encode-8 10000 206362 ns/op 12551 B/op 80 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Simple_____Encode-8 10000 193966 ns/op 10224 B/op 75 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Cbor_______Encode-8 10000 192666 ns/op 10224 B/op 75 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Json_______Encode-8 3000 475767 ns/op 10352 B/op 75 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Std_Json___Encode-8 3000 525223 ns/op 256049 B/op 835 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Gob________Encode-8 5000 270550 ns/op 333548 B/op 959 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__JsonIter___Encode-8 3000 478130 ns/op 183552 B/op 3262 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Bson_______Encode-8 2000 747360 ns/op 715539 B/op 5629 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__VMsgpack___Encode-8 2000 637388 ns/op 320385 B/op 542 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Sereal_____Encode-8 5000 361369 ns/op 294541 B/op 4286 allocs/op
|
||||
-------------------------------
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Msgpack____Decode-8 5000 370340 ns/op 120352 B/op 1210 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Binc_______Decode-8 3000 443650 ns/op 126144 B/op 1263 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Simple_____Decode-8 3000 381155 ns/op 120352 B/op 1210 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Cbor_______Decode-8 5000 370754 ns/op 120352 B/op 1210 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Json_______Decode-8 2000 719658 ns/op 159289 B/op 1478 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Std_Json___Decode-8 1000 2204258 ns/op 276336 B/op 6959 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Gob________Decode-8 5000 383884 ns/op 256684 B/op 3261 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__JsonIter___Decode-8 2000 907079 ns/op 301520 B/op 7769 allocs/op
|
||||
BenchmarkCodecXSuite/options-false.../Benchmark__Bson_______Decode-8 2000 1146851 ns/op 373121 B/op 15703 allocs/op
|
||||
```
|
||||
|
||||
* snippet of bench.out.txt, running with codecgen *
|
||||
```
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Msgpack____Encode-8 10000 124729 ns/op 6224 B/op 7 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Binc_______Encode-8 10000 119745 ns/op 6256 B/op 7 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Simple_____Encode-8 10000 132501 ns/op 6224 B/op 7 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Cbor_______Encode-8 10000 129706 ns/op 6224 B/op 7 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Json_______Encode-8 3000 436958 ns/op 6352 B/op 7 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Std_Json___Encode-8 3000 539884 ns/op 256049 B/op 835 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Gob________Encode-8 5000 270663 ns/op 333548 B/op 959 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__JsonIter___Encode-8 3000 476215 ns/op 183552 B/op 3262 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Bson_______Encode-8 2000 741688 ns/op 715539 B/op 5629 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__VMsgpack___Encode-8 2000 649516 ns/op 320385 B/op 542 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Msgp_______Encode-8 30000 57573 ns/op 0 B/op 0 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Easyjson___Encode-8 5000 366701 ns/op 92762 B/op 14 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Ffjson_____Encode-8 3000 568665 ns/op 219803 B/op 1569 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Sereal_____Encode-8 5000 365595 ns/op 296303 B/op 4285 allocs/op
|
||||
-------------------------------
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Msgpack____Decode-8 10000 244013 ns/op 131912 B/op 1112 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Binc_______Decode-8 5000 280478 ns/op 131944 B/op 1112 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Simple_____Decode-8 5000 247863 ns/op 131912 B/op 1112 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Cbor_______Decode-8 10000 244624 ns/op 131912 B/op 1112 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Json_______Decode-8 3000 571572 ns/op 170824 B/op 1376 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Std_Json___Decode-8 1000 2224320 ns/op 276337 B/op 6959 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Gob________Decode-8 5000 387137 ns/op 256683 B/op 3261 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__JsonIter___Decode-8 2000 913324 ns/op 301472 B/op 7769 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Bson_______Decode-8 2000 1139852 ns/op 373121 B/op 15703 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Msgp_______Decode-8 10000 124270 ns/op 112688 B/op 1058 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Easyjson___Decode-8 3000 521070 ns/op 184176 B/op 1371 allocs/op
|
||||
BenchmarkCodecXGenSuite/options-false.../Benchmark__Ffjson_____Decode-8 2000 970256 ns/op 161798 B/op 1927 allocs/op
|
||||
```
|
||||
vendor/github.com/ugorji/go/codec/bench/bench.sh (generated, vendored): 232 lines deleted
@@ -1,232 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# download the code and all its dependencies
|
||||
_go_get() {
|
||||
go get -u \
|
||||
"github.com/ugorji/go/codec" "github.com/ugorji/go/codec"/codecgen \
|
||||
github.com/tinylib/msgp/msgp github.com/tinylib/msgp \
|
||||
github.com/pquerna/ffjson/ffjson github.com/pquerna/ffjson \
|
||||
github.com/Sereal/Sereal/Go/sereal \
|
||||
bitbucket.org/bodhisnarkva/cbor/go \
|
||||
github.com/davecgh/go-xdr/xdr2 \
|
||||
gopkg.in/mgo.v2/bson \
|
||||
gopkg.in/vmihailenco/msgpack.v2 \
|
||||
github.com/json-iterator/go \
|
||||
go.mongodb.org/mongo-driver/bson \
|
||||
github.com/mailru/easyjson/...
|
||||
}
|
||||
|
||||
# add generated tag to the top of each file
|
||||
_prependbt() {
|
||||
cat > ${2} <<EOF
|
||||
// +build generated
|
||||
|
||||
EOF
|
||||
cat ${1} >> ${2}
|
||||
rm -f ${1}
|
||||
}
|
||||
|
||||
# To run the full suite of benchmarks, including executing against the external frameworks
|
||||
# listed above, you MUST first run code generation for the frameworks that support it.
|
||||
#
|
||||
# If you want to run the benchmarks against code generated values.
|
||||
# Then first generate the code generated values from values_test.go named typed.
|
||||
# we cannot normally read a _test.go file, so temporarily copy it into a readable file.
|
||||
_gen() {
|
||||
local zsfx="_generated_test.go"
|
||||
# local z=`pwd`
|
||||
# z=${z%%/src/*}
|
||||
# Note: ensure you run the codecgen for this codebase
|
||||
cp values_test.go v.go &&
|
||||
echo "codecgen ..." &&
|
||||
codecgen -nx -rt codecgen -t 'codecgen generated' -o values_codecgen${zsfx} -d 19780 v.go &&
|
||||
echo "msgp ... " &&
|
||||
msgp -unexported -tests=false -o=m9.go -file=v.go &&
|
||||
_prependbt m9.go values_msgp${zsfx} &&
|
||||
echo "easyjson ... " &&
|
||||
easyjson -all -no_std_marshalers -omit_empty -output_filename e9.go v.go &&
|
||||
_prependbt e9.go values_easyjson${zsfx} &&
|
||||
echo "ffjson ... " &&
|
||||
ffjson -force-regenerate -reset-fields -w f9.go v.go &&
|
||||
_prependbt f9.go values_ffjson${zsfx} &&
|
||||
sed -i '' -e 's+ MarshalJSON(+ _MarshalJSON(+g' values_ffjson${zsfx} &&
|
||||
sed -i '' -e 's+ UnmarshalJSON(+ _UnmarshalJSON(+g' values_ffjson${zsfx} &&
|
||||
rm -f easyjson-bootstrap*.go ffjson-inception* &&
|
||||
rm -f v.go &&
|
||||
echo "... DONE"
|
||||
}
|
||||
|
||||
# run the full suite of tests
|
||||
#
|
||||
# Basically, its a sequence of
|
||||
# go test -tags "alltests x safe codecgen generated" -bench "CodecSuite or AllSuite or XSuite" -benchmem
|
||||
#
|
||||
|
||||
_suite_tests() {
|
||||
if [[ "${do_x}" = "1" ]]; then
|
||||
printf "\n==== X Baseline ====\n"
|
||||
go test "${zargs[@]}" -tags x -v
|
||||
else
|
||||
printf "\n==== Baseline ====\n"
|
||||
go test "${zargs[@]}" -v
|
||||
fi
|
||||
if [[ "${do_x}" = "1" ]]; then
|
||||
printf "\n==== X Generated ====\n"
|
||||
go test "${zargs[@]}" -tags "x generated" -v
|
||||
else
|
||||
printf "\n==== Generated ====\n"
|
||||
go test "${zargs[@]}" -tags "generated" -v
|
||||
fi
|
||||
}
|
||||
|
||||
_suite_tests_strip_file_line() {
|
||||
# sed -e 's/^\([^a-zA-Z0-9]\+\)[a-zA-Z0-9_]\+\.go:[0-9]\+:/\1/'
|
||||
sed -e 's/[a-zA-Z0-9_]*.go:[0-9]*://g'
|
||||
}
|
||||
|
||||
_suite_any() {
|
||||
local x="$1"
|
||||
local g="$2"
|
||||
local b="$3"
|
||||
shift; shift; shift
|
||||
local a=( "" "safe" "notfastpath" "notfastpath safe" "codecgen" "codecgen safe")
|
||||
if [[ "$g" = "g" ]]; then a=( "generated" "generated safe"); fi
|
||||
for i in "${a[@]}"; do
|
||||
echo ">>>> bench TAGS: 'alltests $x $i' SUITE: $b"
|
||||
go test "${zargs[@]}" -tags "alltests $x $i" -bench "$b" -benchmem "$@"
|
||||
done
|
||||
}
|
||||
|
||||
# _suite() {
|
||||
# local t="alltests x"
|
||||
# local a=( "" "safe" "notfastpath" "notfastpath safe" "codecgen" "codecgen safe")
|
||||
# for i in "${a[@]}"
|
||||
# do
|
||||
# echo ">>>> bench TAGS: '$t $i' SUITE: BenchmarkCodecXSuite"
|
||||
# go test "${zargs[@]}" -tags "$t $i" -bench BenchmarkCodecXSuite -benchmem "$@"
|
||||
# done
|
||||
# }
|
||||
|
||||
# _suite_gen() {
|
||||
# local t="alltests x"
|
||||
# local b=( "generated" "generated safe")
|
||||
# for i in "${b[@]}"
|
||||
# do
|
||||
# echo ">>>> bench TAGS: '$t $i' SUITE: BenchmarkCodecXGenSuite"
|
||||
# go test "${zargs[@]}" -tags "$t $i" -bench BenchmarkCodecXGenSuite -benchmem "$@"
|
||||
# done
|
||||
# }
|
||||
|
||||
# _suite_json() {
|
||||
# local t="alltests x"
|
||||
# local a=( "" "safe" "notfastpath" "notfastpath safe" "codecgen" "codecgen safe")
|
||||
# for i in "${a[@]}"
|
||||
# do
|
||||
# echo ">>>> bench TAGS: '$t $i' SUITE: BenchmarkCodecQuickAllJsonSuite"
|
||||
# go test "${zargs[@]}" -tags "$t $i" -bench BenchmarkCodecQuickAllJsonSuite -benchmem "$@"
|
||||
# done
|
||||
# }
|
||||
|
||||
# _suite_very_quick_json() {
|
||||
# # Quickly get numbers for json, stdjson, jsoniter and json (codecgen)"
|
||||
# echo ">>>> very quick json bench"
|
||||
# go test "${zargs[@]}" -tags "alltests x" -bench "__(Json|Std_Json|JsonIter)__" -benchmem "$@"
|
||||
# echo
|
||||
# go test "${zargs[@]}" -tags "alltests codecgen" -bench "__Json____" -benchmem "$@"
|
||||
# }
|
||||
|
||||
_suite_very_quick_json_via_suite() {
|
||||
# Quickly get numbers for json, stdjson, jsoniter and json (codecgen)"
|
||||
echo ">>>> very quick json bench"
|
||||
local prefix="BenchmarkCodecVeryQuickAllJsonSuite/json-all-bd1......../"
|
||||
go test "${zargs[@]}" -tags "alltests x" -bench BenchmarkCodecVeryQuickAllJsonSuite -benchmem "$@" |
|
||||
sed -e "s+^$prefix++"
|
||||
echo "---- CODECGEN RESULTS ----"
|
||||
go test "${zargs[@]}" -tags "x generated" -bench "__(Json|Easyjson)__" -benchmem "$@"
|
||||
}
|
||||
|
||||
_suite_very_quick_json_non_suite() {
|
||||
# Quickly get numbers for json, stdjson, jsoniter and json (codecgen)"
|
||||
echo ">>>> very quick json bench"
|
||||
for j in "En" "De"; do
|
||||
echo "---- codecgen ----"
|
||||
# go test "${zargs[@]}" -tags "generated" -bench "__(Json|Easyjson)__.*${j}" -benchmem "$@"
|
||||
go test "${zargs[@]}" -tags "x generated" -bench "__(Json|Easyjson)__.*${j}" -benchmem "$@"
|
||||
echo "---- no codecgen ----"
|
||||
# go test "${zargs[@]}" -tags "" -bench "__(Json|Std_Json|JsonIter)__.*${j}" -benchmem "$@"
|
||||
go test "${zargs[@]}" -tags "x" -bench "__(Json|Std_Json|JsonIter)__.*${j}" -benchmem "$@"
|
||||
echo
|
||||
done
|
||||
}
|
||||
|
||||
_suite_very_quick_json_only_profile() {
|
||||
local a="Json"
|
||||
case "$1" in
|
||||
Json|Cbor|Msgpack|Simple|Binc) a="${1}"; shift ;;
|
||||
esac
|
||||
local b="${1}"
|
||||
go test "${zargs[@]}" -tags "alltests" -bench "__${a}__.*${b}" \
|
||||
-benchmem -benchtime 4s \
|
||||
-cpuprofile cpu.out -memprofile mem.out -memprofilerate 1
|
||||
}
|
||||
|
||||
_suite_trim_output() {
|
||||
grep -v -E "^(goos:|goarch:|pkg:|PASS|ok|=== RUN|--- PASS)"
|
||||
}
|
||||
|
||||
_usage() {
|
||||
printf "usage: bench.sh -[dcbsgjqp] for \n"
|
||||
printf "\t-d download\n"
|
||||
printf "\t-c code-generate\n"
|
||||
printf "\t-tx tests (show stats for each format and whether encoded == decoded); if x, do external also\n"
|
||||
printf "\t-sgx run test suite for codec; if g, use generated files; if x, do external also\n"
|
||||
printf "\t-jqp run test suite for [json, json-quick, json-profile]\n"
|
||||
}
|
||||
|
||||
_main() {
|
||||
if [[ "$1" == "" || "$1" == "-h" || "$1" == "-?" ]]
|
||||
then
|
||||
_usage
|
||||
return 1
|
||||
fi
|
||||
local zargs=("-count" "1")
|
||||
local args=()
|
||||
local do_x="0"
|
||||
local do_g="0"
|
||||
while getopts "dcbsjqptxklg" flag
|
||||
do
|
||||
case "$flag" in
|
||||
d|c|b|s|j|q|p|t|x|k|l|g) args+=( "$flag" ) ;;
|
||||
*) _usage; return 1 ;;
|
||||
esac
|
||||
done
|
||||
shift "$((OPTIND-1))"
|
||||
|
||||
[[ " ${args[*]} " == *"x"* ]] && do_x="1"
|
||||
[[ " ${args[*]} " == *"g"* ]] && do_g="1"
|
||||
[[ " ${args[*]} " == *"k"* ]] && zargs+=("-gcflags" "all=-B")
|
||||
[[ " ${args[*]} " == *"l"* ]] && zargs+=("-gcflags" "all=-l=4")
|
||||
[[ " ${args[*]} " == *"d"* ]] && _go_get "$@"
|
||||
[[ " ${args[*]} " == *"c"* ]] && _gen "$@"
|
||||
|
||||
[[ " ${args[*]} " == *"s"* && "${do_x}" == 0 && "${do_g}" == 0 ]] && _suite_any - - BenchmarkCodecSuite "$@" | _suite_trim_output
|
||||
[[ " ${args[*]} " == *"s"* && "${do_x}" == 0 && "${do_g}" == 1 ]] && _suite_any - g BenchmarkCodecSuite "$@" | _suite_trim_output
|
||||
[[ " ${args[*]} " == *"s"* && "${do_x}" == 1 && "${do_g}" == 0 ]] && _suite_any x - BenchmarkCodecXSuite "$@" | _suite_trim_output
|
||||
[[ " ${args[*]} " == *"s"* && "${do_x}" == 1 && "${do_g}" == 1 ]] && _suite_any x g BenchmarkCodecXGenSuite "$@" | _suite_trim_output
|
||||
|
||||
[[ " ${args[*]} " == *"j"* ]] && _suite_any x - BenchmarkCodecQuickAllJsonSuite "$@" | _suite_trim_output
|
||||
[[ " ${args[*]} " == *"q"* ]] && _suite_very_quick_json_non_suite "$@" | _suite_trim_output
|
||||
[[ " ${args[*]} " == *"p"* ]] && _suite_very_quick_json_only_profile "$@" | _suite_trim_output
|
||||
[[ " ${args[*]} " == *"t"* ]] && _suite_tests "$@" | _suite_trim_output | _suite_tests_strip_file_line
|
||||
|
||||
true
|
||||
# shift $((OPTIND-1))
|
||||
}
|
||||
|
||||
if [ "." = `dirname $0` ]
|
||||
then
|
||||
_main "$@"
|
||||
else
|
||||
echo "bench.sh must be run from the directory it resides in"
|
||||
_usage
|
||||
fi
|
||||
vendor/github.com/ugorji/go/codec/bench/bench_test.go (generated, vendored): 275 lines deleted
@@ -1,275 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// bench_test is the "helper" file for all benchmarking tests.
|
||||
//
|
||||
// There are also benchmarks which depend on just codec and the stdlib,
|
||||
// and benchmarks which depend on external libraries.
|
||||
// It is an explicit goal that you can run benchmarks without external
|
||||
// dependencies (which is why the 'x' build tag was explicitly introduced).
|
||||
//
|
||||
// There are 2 ways of running tests:
|
||||
// - generated
|
||||
// - not generated
|
||||
//
|
||||
// Consequently, we have 4 groups:
|
||||
// - codec_bench (gen, !gen)
|
||||
// - stdlib_bench (!gen only)
|
||||
// - x_bench (!gen only)
|
||||
// - x_bench_gen (gen only)
|
||||
//
|
||||
// We also have 4 matching suite files.
|
||||
// - z_all_bench (rename later to z_all_codec_bench???)
|
||||
// - z_all_stdlib_bench
|
||||
// - z_all_x_bench
|
||||
// - z_all_x_bench_gen
|
||||
//
|
||||
// Finally, we have a single test (TestBenchInit) that
|
||||
// will log information about whether each format can
|
||||
// encode or not, how long to encode (unscientifically),
|
||||
// and the encode size.
|
||||
//
|
||||
// This test MUST be run always, as it calls init() internally
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"runtime"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Sample way to run:
|
||||
// go test -bi -bv -bd=1 -benchmem -bench=.
|
||||
|
||||
const (
|
||||
benchUnscientificRes = true
|
||||
benchVerify = true
|
||||
benchRecover = true
|
||||
benchShowJsonOnError = true
|
||||
)
|
||||
|
||||
var (
|
||||
benchTs *TestStruc
|
||||
approxSize int
|
||||
benchCheckers []benchChecker
|
||||
)
|
||||
|
||||
type benchEncFn func(interface{}, []byte) ([]byte, error)
|
||||
type benchDecFn func([]byte, interface{}) error
|
||||
type benchIntfFn func() interface{}
|
||||
|
||||
type benchChecker struct {
|
||||
name string
|
||||
encodefn benchEncFn
|
||||
decodefn benchDecFn
|
||||
}
|
||||
|
||||
func init() {
|
||||
testPreInitFns = append(testPreInitFns, benchPreInit)
|
||||
// testPostInitFns = append(testPostInitFns, codecbenchPostInit)
|
||||
}
|
||||
|
||||
func benchPreInit() {
|
||||
benchTs = newTestStruc(testDepth, testNumRepeatString, true, !testSkipIntf, testMapStringKeyOnly)
|
||||
approxSize = approxDataSize(reflect.ValueOf(benchTs)) * 3 / 2 // multiply by 1.5 to appease msgp, and prevent alloc
|
||||
// bytesLen := 1024 * 4 * (testDepth + 1) * (testDepth + 1)
|
||||
// if bytesLen < approxSize {
|
||||
// bytesLen = approxSize
|
||||
// }
|
||||
}
|
||||
|
||||
func benchReinit() {
|
||||
benchCheckers = nil
|
||||
}
|
||||
|
||||
func benchmarkDivider() {
|
||||
// logTv(nil, "-------------------------------\n")
|
||||
println()
|
||||
}
|
||||
|
||||
// func Test0(t *testing.T) {
|
||||
// testOnce.Do(testInitAll)
|
||||
// }
|
||||
|
||||
func TestBenchInit(t *testing.T) {
|
||||
testOnce.Do(testInitAll)
|
||||
if !testing.Verbose() {
|
||||
return
|
||||
}
|
||||
// t.Logf("..............................................")
|
||||
t.Logf("BENCHMARK INIT: %v", time.Now())
|
||||
// t.Logf("To run full benchmark comparing encodings, use: \"go test -bench=.\"")
|
||||
t.Logf("Benchmark: ")
|
||||
t.Logf("\tStruct recursive Depth: %d", testDepth)
|
||||
if approxSize > 0 {
|
||||
t.Logf("\tApproxDeepSize Of benchmark Struct: %d bytes", approxSize)
|
||||
}
|
||||
if benchUnscientificRes {
|
||||
t.Logf("Benchmark One-Pass Run (with Unscientific Encode/Decode times): ")
|
||||
} else {
|
||||
t.Logf("Benchmark One-Pass Run:")
|
||||
}
|
||||
for _, bc := range benchCheckers {
|
||||
doBenchCheck(t, bc.name, bc.encodefn, bc.decodefn)
|
||||
}
|
||||
if testVerbose {
|
||||
t.Logf("..............................................")
|
||||
t.Logf("<<<<====>>>> depth: %v, ts: %#v\n", testDepth, benchTs)
|
||||
}
|
||||
runtime.GC()
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
}
|
||||
|
||||
var vBenchTs = TestStruc{}
|
||||
|
||||
func fnBenchNewTs() interface{} {
|
||||
vBenchTs = TestStruc{}
|
||||
return &vBenchTs
|
||||
// return new(TestStruc)
|
||||
}
|
||||
|
||||
// const benchCheckDoDeepEqual = false
|
||||
|
||||
func benchRecoverPanic(t *testing.B) {
|
||||
if benchRecover {
|
||||
if r := recover(); r != nil {
|
||||
t.Logf("(recovered) panic: %v\n", r)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchRecoverPanicT(t *testing.T) {
|
||||
if benchRecover {
|
||||
if r := recover(); r != nil {
|
||||
t.Logf("(recovered) panic: %v\n", r)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func doBenchCheck(t *testing.T, name string, encfn benchEncFn, decfn benchDecFn) {
|
||||
// if benchUnscientificRes {
|
||||
// t.Logf("-------------- %s ----------------", name)
|
||||
// }
|
||||
defer benchRecoverPanicT(t)
|
||||
runtime.GC()
|
||||
tnow := time.Now()
|
||||
buf, err := encfn(benchTs, nil)
|
||||
if err != nil {
|
||||
t.Logf("\t%10s: **** Error encoding benchTs: %v", name, err)
|
||||
return
|
||||
}
|
||||
encDur := time.Since(tnow)
|
||||
encLen := len(buf)
|
||||
runtime.GC()
|
||||
if !benchUnscientificRes {
|
||||
t.Logf("\t%10s: len: %d bytes\n", name, encLen)
|
||||
return
|
||||
}
|
||||
tnow = time.Now()
|
||||
var ts2 TestStruc
|
||||
if err = decfn(buf, &ts2); err != nil {
|
||||
t.Logf("\t%10s: **** Error decoding into new TestStruc: %v", name, err)
|
||||
return
|
||||
}
|
||||
decDur := time.Since(tnow)
|
||||
// if benchCheckDoDeepEqual {
|
||||
if benchVerify {
|
||||
err = deepEqual(benchTs, &ts2)
|
||||
if err == nil {
|
||||
t.Logf("\t%10s: len: %d bytes,\t encode: %v,\t decode: %v,\tencoded == decoded", name, encLen, encDur, decDur)
|
||||
} else {
|
||||
t.Logf("\t%10s: len: %d bytes,\t encode: %v,\t decode: %v,\tencoded != decoded: %v", name, encLen, encDur, decDur, err)
|
||||
// if benchShowJsonOnError && strings.Contains(name, "json") {
|
||||
// fmt.Printf("\n\n%s\n\n", buf)
|
||||
// //fmt.Printf("\n\n%#v\n\n", benchTs)
|
||||
// //fmt.Printf("\n\n%#v\n\n", &ts2)
|
||||
// return
|
||||
// }
|
||||
|
||||
// if strings.Contains(name, "json") {
|
||||
// println(">>>>>")
|
||||
// f1, _ := os.Create("1.out")
|
||||
// f2, _ := os.Create("2.out")
|
||||
// f3, _ := os.Create("3.json")
|
||||
// buf3, _ := json.MarshalIndent(&ts2, "", "\t")
|
||||
// spew.Config.SortKeys = true
|
||||
// spew.Config.SpewKeys = true
|
||||
// println("^^^^^^^^^^^^^^")
|
||||
// spew.Fdump(f1, benchTs)
|
||||
// println("^^^^^^^^^^^^^^")
|
||||
// spew.Fdump(f2, &ts2)
|
||||
// println("^^^^^^^^^^^^^^")
|
||||
// f3.Write(buf3)
|
||||
// f1.Close()
|
||||
// f2.Close()
|
||||
// f3.Close()
|
||||
// }
|
||||
// t.Logf("\t: err: %v,\n benchTs: %#v\n\n, ts2: %#v\n\n", err, benchTs, ts2) // TODO: remove
|
||||
// t.Logf("BenchVerify: Error comparing en|decoded TestStruc: %v", err)
|
||||
// return
|
||||
// t.Logf("BenchVerify: Error comparing benchTs: %v\n--------\n%v\n--------\n%v", err, benchTs, ts2)
|
||||
// if strings.Contains(name, "json") {
|
||||
// t.Logf("\n\tDECODED FROM\n--------\n%s", buf)
|
||||
// }
|
||||
}
|
||||
} else {
|
||||
t.Logf("\t%10s: len: %d bytes,\t encode: %v,\t decode: %v", name, encLen, encDur, decDur)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func fnBenchmarkEncode(b *testing.B, encName string, ts interface{}, encfn benchEncFn) {
|
||||
defer benchRecoverPanic(b)
|
||||
testOnce.Do(testInitAll)
|
||||
var err error
|
||||
bs := make([]byte, 0, approxSize)
|
||||
runtime.GC()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err = encfn(ts, bs); err != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
b.Logf("Error encoding benchTs: %s: %v", encName, err)
|
||||
b.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func fnBenchmarkDecode(b *testing.B, encName string, ts interface{},
|
||||
encfn benchEncFn, decfn benchDecFn, newfn benchIntfFn,
|
||||
) {
|
||||
defer benchRecoverPanic(b)
|
||||
testOnce.Do(testInitAll)
|
||||
bs := make([]byte, 0, approxSize)
|
||||
buf, err := encfn(ts, bs)
|
||||
if err != nil {
|
||||
b.Logf("Error encoding benchTs: %s: %v", encName, err)
|
||||
b.FailNow()
|
||||
}
|
||||
// if false && benchVerify { // do not do benchVerify during decode
|
||||
// // ts2 := newfn()
|
||||
// ts1 := ts.(*TestStruc)
|
||||
// ts2 := new(TestStruc)
|
||||
// if err = decfn(buf, ts2); err != nil {
|
||||
// failT(b, "BenchVerify: Error decoding benchTs: %s: %v", encName, err)
|
||||
// }
|
||||
// if err = deepEqual(ts1, ts2); err != nil {
|
||||
// failT(b, "BenchVerify: Error comparing benchTs: %s: %v", encName, err)
|
||||
// }
|
||||
// }
|
||||
runtime.GC()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
ts = newfn()
|
||||
if err = decfn(buf, ts); err != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
b.Logf("Error decoding into new TestStruc: %s: %v", encName, err)
|
||||
b.FailNow()
|
||||
}
|
||||
}
|
||||
vendor/github.com/ugorji/go/codec/bench/codec_bench_test.go (generated, vendored): 109 lines deleted
@@ -1,109 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func init() {
|
||||
testPreInitFns = append(testPreInitFns, codecBenchPreInit)
|
||||
// testPostInitFns = append(testPostInitFns, codecbenchPostInit)
|
||||
}
|
||||
|
||||
func codecBenchPreInit() {
|
||||
benchCheckers = append(benchCheckers,
|
||||
benchChecker{"msgpack", fnMsgpackEncodeFn, fnMsgpackDecodeFn},
|
||||
benchChecker{"binc", fnBincEncodeFn, fnBincDecodeFn},
|
||||
benchChecker{"simple", fnSimpleEncodeFn, fnSimpleDecodeFn},
|
||||
benchChecker{"cbor", fnCborEncodeFn, fnCborDecodeFn},
|
||||
benchChecker{"json", fnJsonEncodeFn, fnJsonDecodeFn},
|
||||
)
|
||||
}
|
||||
|
||||
// ------------ tests below
|
||||
|
||||
func fnMsgpackEncodeFn(ts interface{}, bsIn []byte) (bs []byte, err error) {
|
||||
return sTestCodecEncode(ts, bsIn, fnBenchmarkByteBuf, testMsgpackH, &testMsgpackH.BasicHandle)
|
||||
}
|
||||
|
||||
func fnMsgpackDecodeFn(buf []byte, ts interface{}) error {
|
||||
return sTestCodecDecode(buf, ts, testMsgpackH, &testMsgpackH.BasicHandle)
|
||||
}
|
||||
|
||||
func fnBincEncodeFn(ts interface{}, bsIn []byte) (bs []byte, err error) {
|
||||
return sTestCodecEncode(ts, bsIn, fnBenchmarkByteBuf, testBincH, &testBincH.BasicHandle)
|
||||
}
|
||||
|
||||
func fnBincDecodeFn(buf []byte, ts interface{}) error {
|
||||
return sTestCodecDecode(buf, ts, testBincH, &testBincH.BasicHandle)
|
||||
}
|
||||
|
||||
func fnSimpleEncodeFn(ts interface{}, bsIn []byte) (bs []byte, err error) {
|
||||
return sTestCodecEncode(ts, bsIn, fnBenchmarkByteBuf, testSimpleH, &testSimpleH.BasicHandle)
|
||||
}
|
||||
|
||||
func fnSimpleDecodeFn(buf []byte, ts interface{}) error {
|
||||
return sTestCodecDecode(buf, ts, testSimpleH, &testSimpleH.BasicHandle)
|
||||
}
|
||||
|
||||
func fnCborEncodeFn(ts interface{}, bsIn []byte) (bs []byte, err error) {
|
||||
return sTestCodecEncode(ts, bsIn, fnBenchmarkByteBuf, testCborH, &testCborH.BasicHandle)
|
||||
}
|
||||
|
||||
func fnCborDecodeFn(buf []byte, ts interface{}) error {
|
||||
return sTestCodecDecode(buf, ts, testCborH, &testCborH.BasicHandle)
|
||||
}
|
||||
|
||||
func fnJsonEncodeFn(ts interface{}, bsIn []byte) (bs []byte, err error) {
|
||||
return sTestCodecEncode(ts, bsIn, fnBenchmarkByteBuf, testJsonH, &testJsonH.BasicHandle)
|
||||
}
|
||||
|
||||
func fnJsonDecodeFn(buf []byte, ts interface{}) error {
|
||||
return sTestCodecDecode(buf, ts, testJsonH, &testJsonH.BasicHandle)
|
||||
}
|
||||
|
||||
// ----------- ENCODE ------------------
|
||||
|
||||
func Benchmark__Msgpack____Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "msgpack", benchTs, fnMsgpackEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Binc_______Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "binc", benchTs, fnBincEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Simple_____Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "simple", benchTs, fnSimpleEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Cbor_______Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "cbor", benchTs, fnCborEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Json_______Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "json", benchTs, fnJsonEncodeFn)
|
||||
}
|
||||
|
||||
// ----------- DECODE ------------------
|
||||
|
||||
func Benchmark__Msgpack____Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "msgpack", benchTs, fnMsgpackEncodeFn, fnMsgpackDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Binc_______Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "binc", benchTs, fnBincEncodeFn, fnBincDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Simple_____Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "simple", benchTs, fnSimpleEncodeFn, fnSimpleDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Cbor_______Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "cbor", benchTs, fnCborEncodeFn, fnCborDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Json_______Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "json", benchTs, fnJsonEncodeFn, fnJsonDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
vendor/github.com/ugorji/go/codec/bench/doc.go (generated, vendored): 14 lines deleted
@@ -1,14 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

/*
Package codec provides a
High Performance, Feature-Rich Idiomatic Go 1.4+ codec/encoding library
for binc, msgpack, cbor, json.

Here, we have the benchmark files comparing against other encoding libraries.

See README.md for more details.
*/
package codec
vendor/github.com/ugorji/go/codec/bench/shared_test.go (generated, vendored): 349 lines deleted
@@ -1,349 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// This file sets up the variables used, including testInitFns.
|
||||
// Each file should add initialization that should be performed
|
||||
// after flags are parsed.
|
||||
//
|
||||
// init is a multi-step process:
|
||||
// - setup vars (handled by init functions in each file)
|
||||
// - parse flags
|
||||
// - setup derived vars (handled by pre-init registered functions - registered in init function)
|
||||
// - post init (handled by post-init registered functions - registered in init function)
|
||||
// This way, no one has to manage carefully control the initialization
|
||||
// using file names, etc.
|
||||
//
|
||||
// Tests which require external dependencies need the -tag=x parameter.
|
||||
// They should be run as:
|
||||
// go test -tags=x -run=. <other parameters ...>
|
||||
// Benchmarks should also take this parameter, to include the sereal, xdr, etc.
|
||||
// To run against codecgen, etc, make sure you pass extra parameters.
|
||||
// Example usage:
|
||||
// go test "-tags=x codecgen" -bench=. <other parameters ...>
|
||||
//
|
||||
// To fully test everything:
|
||||
// go test -tags=x -benchtime=100ms -tv -bg -bi -brw -bu -v -run=. -bench=.
|
||||
|
||||
// Handling flags
|
||||
// codec_test.go will define a set of global flags for testing, including:
|
||||
// - Use Reset
|
||||
// - Use IO reader/writer (vs direct bytes)
|
||||
// - Set Canonical
|
||||
// - Set InternStrings
|
||||
// - Use Symbols
|
||||
//
|
||||
// This way, we can test them all by running same set of tests with a different
|
||||
// set of flags.
|
||||
//
|
||||
// Following this, all the benchmarks will utilize flags set by codec_test.go
|
||||
// and will not redefine these "global" flags.
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"sync"
|
||||
)
|
||||
|
||||
import . "github.com/ugorji/go/codec"
|
||||
|
||||
type testHED struct {
|
||||
H Handle
|
||||
E *Encoder
|
||||
D *Decoder
|
||||
}
|
||||
|
||||
type ioReaderWrapper struct {
|
||||
r io.Reader
|
||||
}
|
||||
|
||||
func (x ioReaderWrapper) Read(p []byte) (n int, err error) {
|
||||
return x.r.Read(p)
|
||||
}
|
||||
|
||||
type ioWriterWrapper struct {
|
||||
w io.Writer
|
||||
}
|
||||
|
||||
func (x ioWriterWrapper) Write(p []byte) (n int, err error) {
|
||||
return x.w.Write(p)
|
||||
}
|
||||
|
||||
var (
|
||||
// testNoopH = NoopHandle(8)
|
||||
testMsgpackH = &MsgpackHandle{}
|
||||
testBincH = &BincHandle{}
|
||||
testSimpleH = &SimpleHandle{}
|
||||
testCborH = &CborHandle{}
|
||||
testJsonH = &JsonHandle{}
|
||||
|
||||
testHandles []Handle
|
||||
testPreInitFns []func()
|
||||
testPostInitFns []func()
|
||||
|
||||
testOnce sync.Once
|
||||
|
||||
testHEDs []testHED
|
||||
)
|
||||
|
||||
// flag variables used by tests (and bench)
|
||||
var (
|
||||
testVerbose bool
|
||||
|
||||
//depth of 0 maps to ~400bytes json-encoded string, 1 maps to ~1400 bytes, etc
|
||||
//For depth>1, we likely trigger stack growth for encoders, making benchmarking unreliable.
|
||||
testDepth int
|
||||
|
||||
testMaxInitLen int
|
||||
|
||||
testInitDebug bool
|
||||
testUseReset bool
|
||||
testSkipIntf bool
|
||||
testUseMust bool
|
||||
|
||||
testUseIoEncDec int
|
||||
testUseIoWrapper bool
|
||||
|
||||
testNumRepeatString int
|
||||
|
||||
testRpcBufsize int
|
||||
testMapStringKeyOnly bool
|
||||
)
|
||||
|
||||
// variables that are not flags, but which can configure the handles
|
||||
var (
|
||||
testEncodeOptions EncodeOptions
|
||||
testDecodeOptions DecodeOptions
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetOutput(ioutil.Discard) // don't allow things to log to standard out/err
|
||||
testHEDs = make([]testHED, 0, 32)
|
||||
testHandles = append(testHandles,
|
||||
// testNoopH,
|
||||
testMsgpackH, testBincH, testSimpleH, testCborH, testJsonH)
|
||||
// JSON should do HTMLCharsAsIs by default
|
||||
testJsonH.HTMLCharsAsIs = true
|
||||
// set ExplicitRelease on each handle
|
||||
testMsgpackH.ExplicitRelease = true
|
||||
testBincH.ExplicitRelease = true
|
||||
testSimpleH.ExplicitRelease = true
|
||||
testCborH.ExplicitRelease = true
|
||||
testJsonH.ExplicitRelease = true
|
||||
|
||||
testInitFlags()
|
||||
benchInitFlags()
|
||||
}
|
||||
|
||||
func testInitFlags() {
|
||||
// delete(testDecOpts.ExtFuncs, timeTyp)
|
||||
flag.BoolVar(&testVerbose, "tv", false, "Text Extra Verbose Logging if -v if set")
|
||||
flag.BoolVar(&testInitDebug, "tg", false, "Test Init Debug")
|
||||
flag.IntVar(&testUseIoEncDec, "ti", -1, "Use IO Reader/Writer for Marshal/Unmarshal ie >= 0")
|
||||
flag.BoolVar(&testUseIoWrapper, "tiw", false, "Wrap the IO Reader/Writer with a base pass-through reader/writer")
|
||||
|
||||
flag.BoolVar(&testSkipIntf, "tf", false, "Skip Interfaces")
|
||||
flag.BoolVar(&testUseReset, "tr", false, "Use Reset")
|
||||
flag.IntVar(&testNumRepeatString, "trs", 8, "Create string variables by repeating a string N times")
|
||||
flag.BoolVar(&testUseMust, "tm", true, "Use Must(En|De)code")
|
||||
|
||||
flag.IntVar(&testMaxInitLen, "tx", 0, "Max Init Len")
|
||||
|
||||
flag.IntVar(&testDepth, "tsd", 0, "Test Struc Depth")
|
||||
flag.BoolVar(&testMapStringKeyOnly, "tsk", false, "use maps with string keys only")
|
||||
}
|
||||
|
||||
func benchInitFlags() {
|
||||
// flags reproduced here for compatibility (duplicate some in testInitFlags)
|
||||
flag.BoolVar(&testMapStringKeyOnly, "bs", false, "use maps with string keys only")
|
||||
flag.IntVar(&testDepth, "bd", 1, "Bench Depth")
|
||||
}
|
||||
|
||||
func testHEDGet(h Handle) *testHED {
|
||||
for i := range testHEDs {
|
||||
v := &testHEDs[i]
|
||||
if v.H == h {
|
||||
return v
|
||||
}
|
||||
}
|
||||
testHEDs = append(testHEDs, testHED{h, NewEncoder(nil, h), NewDecoder(nil, h)})
|
||||
return &testHEDs[len(testHEDs)-1]
|
||||
}
|
||||
|
||||
func testReinit() {
|
||||
testOnce = sync.Once{}
|
||||
testHEDs = nil
|
||||
}
|
||||
|
||||
func testInitAll() {
|
||||
// only parse it once.
|
||||
if !flag.Parsed() {
|
||||
flag.Parse()
|
||||
}
|
||||
for _, f := range testPreInitFns {
|
||||
f()
|
||||
}
|
||||
for _, f := range testPostInitFns {
|
||||
f()
|
||||
}
|
||||
}
|
||||
|
||||
func sTestCodecEncode(ts interface{}, bsIn []byte, fn func([]byte) *bytes.Buffer,
|
||||
h Handle, bh *BasicHandle) (bs []byte, err error) {
|
||||
// bs = make([]byte, 0, approxSize)
|
||||
var e *Encoder
|
||||
var buf *bytes.Buffer
|
||||
if testUseReset {
|
||||
e = testHEDGet(h).E
|
||||
} else {
|
||||
e = NewEncoder(nil, h)
|
||||
}
|
||||
var oldWriteBufferSize int
|
||||
if testUseIoEncDec >= 0 {
|
||||
buf = fn(bsIn)
|
||||
// set the encode options for using a buffer
|
||||
oldWriteBufferSize = bh.WriterBufferSize
|
||||
bh.WriterBufferSize = testUseIoEncDec
|
||||
if testUseIoWrapper {
|
||||
e.Reset(ioWriterWrapper{buf})
|
||||
} else {
|
||||
e.Reset(buf)
|
||||
}
|
||||
} else {
|
||||
bs = bsIn
|
||||
e.ResetBytes(&bs)
|
||||
}
|
||||
if testUseMust {
|
||||
e.MustEncode(ts)
|
||||
} else {
|
||||
err = e.Encode(ts)
|
||||
}
|
||||
if testUseIoEncDec >= 0 {
|
||||
bs = buf.Bytes()
|
||||
bh.WriterBufferSize = oldWriteBufferSize
|
||||
}
|
||||
if !testUseReset {
|
||||
e.Release()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func sTestCodecDecode(bs []byte, ts interface{}, h Handle, bh *BasicHandle) (err error) {
|
||||
var d *Decoder
|
||||
// var buf *bytes.Reader
|
||||
if testUseReset {
|
||||
d = testHEDGet(h).D
|
||||
} else {
|
||||
d = NewDecoder(nil, h)
|
||||
}
|
||||
var oldReadBufferSize int
|
||||
if testUseIoEncDec >= 0 {
|
||||
buf := bytes.NewReader(bs)
|
||||
oldReadBufferSize = bh.ReaderBufferSize
|
||||
bh.ReaderBufferSize = testUseIoEncDec
|
||||
if testUseIoWrapper {
|
||||
d.Reset(ioReaderWrapper{buf})
|
||||
} else {
|
||||
d.Reset(buf)
|
||||
}
|
||||
} else {
|
||||
d.ResetBytes(bs)
|
||||
}
|
||||
if testUseMust {
|
||||
d.MustDecode(ts)
|
||||
} else {
|
||||
err = d.Decode(ts)
|
||||
}
|
||||
if testUseIoEncDec >= 0 {
|
||||
bh.ReaderBufferSize = oldReadBufferSize
|
||||
}
|
||||
if !testUseReset {
|
||||
d.Release()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// // --- functions below are used by both benchmarks and tests
|
||||
|
||||
// // log message only when testVerbose = true (ie go test ... -- -tv).
|
||||
// //
|
||||
// // These are for informational messages that do not necessarily
|
||||
// // help with diagnosing a failure, or which are too large.
|
||||
// func logTv(x interface{}, format string, args ...interface{}) {
|
||||
// if !testVerbose {
|
||||
// return
|
||||
// }
|
||||
// if t, ok := x.(testing.TB); ok { // only available from go 1.9
|
||||
// t.Helper()
|
||||
// }
|
||||
// logT(x, format, args...)
|
||||
// }
|
||||
|
||||
// // logT logs messages when running as go test -v
|
||||
// //
|
||||
// // Use it for diagnostic messages that help diagnose failure,
|
||||
// // and when the output is not too long ie shorter than like 100 characters.
|
||||
// //
|
||||
// // In general, any logT followed by failT should call this.
|
||||
// func logT(x interface{}, format string, args ...interface{}) {
|
||||
// if x == nil {
|
||||
// if len(format) == 0 || format[len(format)-1] != '\n' {
|
||||
// format = format + "\n"
|
||||
// }
|
||||
// fmt.Printf(format, args...)
|
||||
// return
|
||||
// }
|
||||
// if t, ok := x.(testing.TB); ok { // only available from go 1.9
|
||||
// t.Helper()
|
||||
// t.Logf(format, args...)
|
||||
// }
|
||||
// }
|
||||
|
||||
// func failTv(x testing.TB, args ...interface{}) {
|
||||
// x.Helper()
|
||||
// if testVerbose {
|
||||
// failTMsg(x, args...)
|
||||
// }
|
||||
// x.FailNow()
|
||||
// }
|
||||
|
||||
// func failT(x testing.TB, args ...interface{}) {
|
||||
// x.Helper()
|
||||
// failTMsg(x, args...)
|
||||
// x.FailNow()
|
||||
// }
|
||||
|
||||
// func failTMsg(x testing.TB, args ...interface{}) {
|
||||
// x.Helper()
|
||||
// if len(args) > 0 {
|
||||
// if format, ok := args[0].(string); ok {
|
||||
// logT(x, format, args[1:]...)
|
||||
// } else if len(args) == 1 {
|
||||
// logT(x, "%v", args[0])
|
||||
// } else {
|
||||
// logT(x, "%v", args)
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// --- functions below are used only by benchmarks alone
|
||||
|
||||
func fnBenchmarkByteBuf(bsIn []byte) (buf *bytes.Buffer) {
|
||||
// var buf bytes.Buffer
|
||||
// buf.Grow(approxSize)
|
||||
buf = bytes.NewBuffer(bsIn)
|
||||
buf.Truncate(0)
|
||||
return
|
||||
}
|
||||
|
||||
// func benchFnCodecEncode(ts interface{}, bsIn []byte, h Handle) (bs []byte, err error) {
|
||||
// return testCodecEncode(ts, bsIn, fnBenchmarkByteBuf, h)
|
||||
// }
|
||||
|
||||
// func benchFnCodecDecode(bs []byte, ts interface{}, h Handle) (err error) {
|
||||
// return testCodecDecode(bs, ts, h)
|
||||
// }
|
||||
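The pre-init/post-init registration scheme described in the header comments of shared_test.go above can be seen in miniature in the sketch below. It is an illustrative, self-contained reconstruction rather than part of the vendored sources; the names simply mirror the file.

package codec

import (
	"flag"
	"strconv"
)

var (
	testDepth       int      // set directly when flags are parsed
	testDepthLabel  string   // derived value, only valid after parsing
	testPreInitFns  []func() // setup of derived vars, registered in init()
	testPostInitFns []func()
)

func init() {
	// step 1: set up vars and register flags
	flag.IntVar(&testDepth, "tsd", 0, "Test Struc Depth")
	// step 3 (deferred): derive values only once flags carry their final values
	testPreInitFns = append(testPreInitFns, func() {
		testDepthLabel = "bd" + strconv.Itoa(testDepth)
	})
}

// testInitAll performs step 2 (parse flags) exactly once, then runs the
// registered pre-init and post-init functions in registration order, so no
// file has to control initialization ordering via file names.
func testInitAll() {
	if !flag.Parsed() {
		flag.Parse()
	}
	for _, f := range testPreInitFns {
		f()
	}
	for _, f := range testPostInitFns {
		f()
	}
}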
93
vendor/github.com/ugorji/go/codec/bench/stdlib_bench_test.go
generated
vendored
@@ -1,93 +0,0 @@
// +build !generated

// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

package codec

import (
	"bytes"
	"encoding/gob"
	"encoding/json"
	"encoding/xml"
	"testing"
)

func init() {
	testPreInitFns = append(testPreInitFns, stdlibBenchPreInit)
	// testPostInitFns = append(testPostInitFns, codecbenchPostInit)
}

func stdlibBenchPreInit() {
	benchCheckers = append(benchCheckers,
		benchChecker{"std-json", fnStdJsonEncodeFn, fnStdJsonDecodeFn},
		benchChecker{"gob", fnGobEncodeFn, fnGobDecodeFn},
		benchChecker{"std-xml", fnStdXmlEncodeFn, fnStdXmlDecodeFn},
	)
}

// ------------ tests below

func fnGobEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
	buf := fnBenchmarkByteBuf(bsIn)
	err := gob.NewEncoder(buf).Encode(ts)
	return buf.Bytes(), err
}

func fnGobDecodeFn(buf []byte, ts interface{}) error {
	return gob.NewDecoder(bytes.NewReader(buf)).Decode(ts)
}

func fnStdXmlEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
	buf := fnBenchmarkByteBuf(bsIn)
	err := xml.NewEncoder(buf).Encode(ts)
	return buf.Bytes(), err
}

func fnStdXmlDecodeFn(buf []byte, ts interface{}) error {
	return xml.NewDecoder(bytes.NewReader(buf)).Decode(ts)
}

func fnStdJsonEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
	if testUseIoEncDec >= 0 {
		buf := fnBenchmarkByteBuf(bsIn)
		err := json.NewEncoder(buf).Encode(ts)
		return buf.Bytes(), err
	}
	return json.Marshal(ts)
}

func fnStdJsonDecodeFn(buf []byte, ts interface{}) error {
	if testUseIoEncDec >= 0 {
		return json.NewDecoder(bytes.NewReader(buf)).Decode(ts)
	}
	return json.Unmarshal(buf, ts)
}

// ----------- ENCODE ------------------

func Benchmark__Std_Json___Encode(b *testing.B) {
	fnBenchmarkEncode(b, "std-json", benchTs, fnStdJsonEncodeFn)
}

func Benchmark__Gob________Encode(b *testing.B) {
	fnBenchmarkEncode(b, "gob", benchTs, fnGobEncodeFn)
}

func Benchmark__Std_Xml____Encode(b *testing.B) {
	fnBenchmarkEncode(b, "std-xml", benchTs, fnStdXmlEncodeFn)
}

// ----------- DECODE ------------------

func Benchmark__Std_Json___Decode(b *testing.B) {
	fnBenchmarkDecode(b, "std-json", benchTs, fnStdJsonEncodeFn, fnStdJsonDecodeFn, fnBenchNewTs)
}

func Benchmark__Gob________Decode(b *testing.B) {
	fnBenchmarkDecode(b, "gob", benchTs, fnGobEncodeFn, fnGobDecodeFn, fnBenchNewTs)
}

func Benchmark__Std_Xml____Decode(b *testing.B) {
	fnBenchmarkDecode(b, "std-xml", benchTs, fnStdXmlEncodeFn, fnStdXmlDecodeFn, fnBenchNewTs)
}
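Each fnXxxEncodeFn/fnXxxDecodeFn pair above follows the same two signatures, which is what lets benchCheckers treat every library uniformly (the benchChecker plumbing itself lives in a bench file not shown in this diff). The sketch below is an illustrative, self-contained way to round-trip check such a pair; it is not the library's own verifier.

package codec

import (
	"encoding/json"
	"fmt"
	"reflect"
)

type encodeFn func(ts interface{}, bsIn []byte) ([]byte, error)
type decodeFn func(buf []byte, ts interface{}) error

// roundTrip encodes in, decodes into out (a pointer), and compares the result.
func roundTrip(name string, enc encodeFn, dec decodeFn, in, out interface{}) error {
	bs, err := enc(in, nil)
	if err != nil {
		return fmt.Errorf("%s: encode: %v", name, err)
	}
	if err = dec(bs, out); err != nil {
		return fmt.Errorf("%s: decode: %v", name, err)
	}
	if !reflect.DeepEqual(in, reflect.ValueOf(out).Elem().Interface()) {
		return fmt.Errorf("%s: round-trip mismatch", name)
	}
	return nil
}

func checkStdJson() error {
	in := map[string]int{"a": 1}
	var out map[string]int
	enc := func(ts interface{}, _ []byte) ([]byte, error) { return json.Marshal(ts) }
	dec := func(buf []byte, ts interface{}) error { return json.Unmarshal(buf, ts) }
	return roundTrip("std-json", enc, dec, in, &out)
}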
1
vendor/github.com/ugorji/go/codec/bench/values_test.go
generated
vendored
@@ -1 +0,0 @@
../values_test.go
125
vendor/github.com/ugorji/go/codec/bench/x_bench_gen_test.go
generated
vendored
@@ -1,125 +0,0 @@
|
||||
// +build x
|
||||
// +build generated
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/mailru/easyjson"
|
||||
"github.com/pquerna/ffjson/ffjson"
|
||||
"github.com/tinylib/msgp/msgp"
|
||||
)
|
||||
|
||||
/*
To update all these, use:
   go get -u github.com/tinylib/msgp/msgp github.com/tinylib/msgp \
     github.com/pquerna/ffjson/ffjson github.com/pquerna/ffjson \
     github.com/mailru/easyjson/...

Known Issues with external libraries:
- msgp io.R/W support doesn't work. It throws an error.

*/

func init() {
|
||||
testPreInitFns = append(testPreInitFns, benchXGenPreInit)
|
||||
}
|
||||
|
||||
func benchXGenPreInit() {
|
||||
benchCheckers = append(benchCheckers,
|
||||
benchChecker{"msgp", fnMsgpEncodeFn, fnMsgpDecodeFn},
|
||||
benchChecker{"easyjson", fnEasyjsonEncodeFn, fnEasyjsonDecodeFn},
|
||||
benchChecker{"ffjson", fnFfjsonEncodeFn, fnFfjsonDecodeFn},
|
||||
)
|
||||
}
|
||||
|
||||
func fnEasyjsonEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
|
||||
ts2, ok := ts.(easyjson.Marshaler)
|
||||
if !ok || ts2 == nil {
|
||||
return nil, errors.New("easyjson: input is not a easyjson.Marshaler")
|
||||
}
|
||||
if testUseIoEncDec >= 0 {
|
||||
buf := bytes.NewBuffer(bsIn[:0]) // new(bytes.Buffer)
|
||||
_, err := easyjson.MarshalToWriter(ts2, buf)
|
||||
return buf.Bytes(), err
|
||||
}
|
||||
return easyjson.Marshal(ts2)
|
||||
// return ts.(json.Marshaler).MarshalJSON()
|
||||
}
|
||||
|
||||
func fnEasyjsonDecodeFn(buf []byte, ts interface{}) error {
|
||||
ts2, ok := ts.(easyjson.Unmarshaler)
|
||||
if !ok {
|
||||
return errors.New("easyjson: input is not a easyjson.Unmarshaler")
|
||||
}
|
||||
if testUseIoEncDec >= 0 {
|
||||
return easyjson.UnmarshalFromReader(bytes.NewReader(buf), ts2)
|
||||
}
|
||||
return easyjson.Unmarshal(buf, ts2)
|
||||
// return ts.(json.Unmarshaler).UnmarshalJSON(buf)
|
||||
}
|
||||
|
||||
func fnFfjsonEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
|
||||
return ffjson.Marshal(ts)
|
||||
// return ts.(json.Marshaler).MarshalJSON()
|
||||
}
|
||||
|
||||
func fnFfjsonDecodeFn(buf []byte, ts interface{}) error {
|
||||
return ffjson.Unmarshal(buf, ts)
|
||||
// return ts.(json.Unmarshaler).UnmarshalJSON(buf)
|
||||
}
|
||||
|
||||
func fnMsgpEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
|
||||
if _, ok := ts.(msgp.Encodable); !ok {
|
||||
return nil, fmt.Errorf("msgp: input of type %T is not a msgp.Encodable", ts)
|
||||
}
|
||||
if testUseIoEncDec >= 0 {
|
||||
buf := fnBenchmarkByteBuf(bsIn)
|
||||
err := ts.(msgp.Encodable).EncodeMsg(msgp.NewWriter(buf))
|
||||
return buf.Bytes(), err
|
||||
}
|
||||
return ts.(msgp.Marshaler).MarshalMsg(bsIn[:0]) // msgp appends to slice.
|
||||
}
|
||||
|
||||
func fnMsgpDecodeFn(buf []byte, ts interface{}) (err error) {
|
||||
if _, ok := ts.(msgp.Decodable); !ok {
|
||||
return fmt.Errorf("msgp: input of type %T is not a msgp.Decodable", ts)
|
||||
}
|
||||
if testUseIoEncDec >= 0 {
|
||||
err = ts.(msgp.Decodable).DecodeMsg(msgp.NewReader(bytes.NewReader(buf)))
|
||||
return
|
||||
}
|
||||
_, err = ts.(msgp.Unmarshaler).UnmarshalMsg(buf)
|
||||
return
|
||||
}
|
||||
|
||||
func Benchmark__Msgp_______Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "msgp", benchTs, fnMsgpEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Msgp_______Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "msgp", benchTs, fnMsgpEncodeFn, fnMsgpDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Easyjson___Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "easyjson", benchTs, fnEasyjsonEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Easyjson___Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "easyjson", benchTs, fnEasyjsonEncodeFn, fnEasyjsonDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Ffjson_____Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "ffjson", benchTs, fnFfjsonEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Ffjson_____Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "ffjson", benchTs, fnFfjsonEncodeFn, fnFfjsonDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
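The adapters above guard each call with a type assertion, so a clear error is returned when the benchmark value was not generated for that library. The same pattern is sketched below in a self-contained form, with json.Marshaler standing in for the generated msgp/easyjson/ffjson interfaces (the function name is hypothetical).

package codec

import (
	"encoding/json"
	"fmt"
)

// fnAssertingEncodeFn refuses values that do not implement the expected
// marshaler interface, instead of panicking inside the encoding library.
func fnAssertingEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
	m, ok := ts.(json.Marshaler)
	if !ok {
		return nil, fmt.Errorf("input of type %T is not a json.Marshaler", ts)
	}
	return m.MarshalJSON()
}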
190
vendor/github.com/ugorji/go/codec/bench/x_bench_test.go
generated
vendored
@@ -1,190 +0,0 @@
|
||||
// +build x
|
||||
// +build !generated
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
gcbor "bitbucket.org/bodhisnarkva/cbor/go" // gcbor "code.google.com/p/cbor/go"
|
||||
"github.com/Sereal/Sereal/Go/sereal"
|
||||
xdr "github.com/davecgh/go-xdr/xdr2"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
"go.mongodb.org/mongo-driver/bson" // "github.com/mongodb/mongo-go-driver/bson"
|
||||
mgobson "gopkg.in/mgo.v2/bson" //"labix.org/v2/mgo/bson"
|
||||
vmsgpack "gopkg.in/vmihailenco/msgpack.v2" //"github.com/vmihailenco/msgpack"
|
||||
)
|
||||
|
||||
/*
To update all these, use:
   go get -u github.com/tinylib/msgp/msgp github.com/tinylib/msgp \
     github.com/pquerna/ffjson/ffjson github.com/pquerna/ffjson \
     github.com/Sereal/Sereal/Go/sereal \
     bitbucket.org/bodhisnarkva/cbor/go \
     github.com/davecgh/go-xdr/xdr2 \
     gopkg.in/mgo.v2/bson \
     gopkg.in/vmihailenco/msgpack.v2 \
     github.com/json-iterator/go \
     github.com/mailru/easyjson/...

Known Issues with external libraries:
- msgp io.R/W support doesn't work. It throws an error.

*/

func init() {
|
||||
testPreInitFns = append(testPreInitFns, benchXPreInit)
|
||||
_ = bson.NewDecoder
|
||||
}
|
||||
|
||||
func benchXPreInit() {
|
||||
benchCheckers = append(benchCheckers,
|
||||
benchChecker{"json-iter", fnJsonIterEncodeFn, fnJsonIterDecodeFn},
|
||||
benchChecker{"v-msgpack", fnVMsgpackEncodeFn, fnVMsgpackDecodeFn},
|
||||
benchChecker{"bson", fnBsonEncodeFn, fnBsonDecodeFn},
|
||||
benchChecker{"mgobson", fnMgobsonEncodeFn, fnMgobsonDecodeFn},
|
||||
// place codecs with issues at the end, so as not to make results too ugly
|
||||
benchChecker{"gcbor", fnGcborEncodeFn, fnGcborDecodeFn}, // this logs fat ugly message, but we log.SetOutput(ioutil.Discard)
|
||||
benchChecker{"xdr", fnXdrEncodeFn, fnXdrDecodeFn},
|
||||
benchChecker{"sereal", fnSerealEncodeFn, fnSerealDecodeFn},
|
||||
)
|
||||
}
|
||||
|
||||
func fnVMsgpackEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
|
||||
if testUseIoEncDec >= 0 {
|
||||
buf := bytes.NewBuffer(bsIn[:0]) // new(bytes.Buffer)
|
||||
err := vmsgpack.NewEncoder(buf).Encode(ts)
|
||||
return buf.Bytes(), err
|
||||
}
|
||||
return vmsgpack.Marshal(ts)
|
||||
}
|
||||
|
||||
func fnVMsgpackDecodeFn(buf []byte, ts interface{}) error {
|
||||
if testUseIoEncDec >= 0 {
|
||||
return vmsgpack.NewDecoder(bytes.NewReader(buf)).Decode(ts)
|
||||
}
|
||||
return vmsgpack.Unmarshal(buf, ts)
|
||||
}
|
||||
|
||||
func fnBsonEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
|
||||
return bson.Marshal(ts)
|
||||
}
|
||||
|
||||
func fnBsonDecodeFn(buf []byte, ts interface{}) error {
|
||||
return bson.Unmarshal(buf, ts)
|
||||
}
|
||||
|
||||
func fnMgobsonEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
|
||||
return mgobson.Marshal(ts)
|
||||
}
|
||||
|
||||
func fnMgobsonDecodeFn(buf []byte, ts interface{}) error {
|
||||
return mgobson.Unmarshal(buf, ts)
|
||||
}
|
||||
|
||||
func fnJsonIterEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
|
||||
if testUseIoEncDec >= 0 {
|
||||
buf := bytes.NewBuffer(bsIn[:0]) // new(bytes.Buffer)
|
||||
err := jsoniter.NewEncoder(buf).Encode(ts)
|
||||
return buf.Bytes(), err
|
||||
}
|
||||
return jsoniter.Marshal(ts)
|
||||
}
|
||||
|
||||
func fnJsonIterDecodeFn(buf []byte, ts interface{}) error {
|
||||
if testUseIoEncDec >= 0 {
|
||||
return jsoniter.NewDecoder(bytes.NewReader(buf)).Decode(ts)
|
||||
}
|
||||
return jsoniter.Unmarshal(buf, ts)
|
||||
}
|
||||
|
||||
func fnXdrEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
|
||||
buf := fnBenchmarkByteBuf(bsIn)
|
||||
i, err := xdr.Marshal(buf, ts)
|
||||
return buf.Bytes()[:i], err
|
||||
}
|
||||
|
||||
func fnXdrDecodeFn(buf []byte, ts interface{}) error {
|
||||
_, err := xdr.Unmarshal(bytes.NewReader(buf), ts)
|
||||
return err
|
||||
}
|
||||
|
||||
func fnSerealEncodeFn(ts interface{}, bsIn []byte) ([]byte, error) {
|
||||
return sereal.Marshal(ts)
|
||||
}
|
||||
|
||||
func fnSerealDecodeFn(buf []byte, ts interface{}) error {
|
||||
return sereal.Unmarshal(buf, ts)
|
||||
}
|
||||
|
||||
func fnGcborEncodeFn(ts interface{}, bsIn []byte) (bs []byte, err error) {
|
||||
buf := fnBenchmarkByteBuf(bsIn)
|
||||
err = gcbor.NewEncoder(buf).Encode(ts)
|
||||
return buf.Bytes(), err
|
||||
}
|
||||
|
||||
func fnGcborDecodeFn(buf []byte, ts interface{}) error {
|
||||
return gcbor.NewDecoder(bytes.NewReader(buf)).Decode(ts)
|
||||
}
|
||||
|
||||
func Benchmark__JsonIter___Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "jsoniter", benchTs, fnJsonIterEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__JsonIter___Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "jsoniter", benchTs, fnJsonIterEncodeFn, fnJsonIterDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
// Place codecs with issues at the bottom, so as not to make results look too ugly.
|
||||
|
||||
func Benchmark__Mgobson____Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "mgobson", benchTs, fnMgobsonEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Mgobson____Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "mgobson", benchTs, fnMgobsonEncodeFn, fnMgobsonDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Bson_______Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "bson", benchTs, fnBsonEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Bson_______Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "bson", benchTs, fnBsonEncodeFn, fnBsonDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__VMsgpack___Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "v-msgpack", benchTs, fnVMsgpackEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__VMsgpack___Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "v-msgpack", benchTs, fnVMsgpackEncodeFn, fnVMsgpackDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Gcbor______Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "gcbor", benchTs, fnGcborEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Gcbor______Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "gcbor", benchTs, fnGcborEncodeFn, fnGcborDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Xdr________Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "xdr", benchTs, fnXdrEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Xdr________Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "xdr", benchTs, fnXdrEncodeFn, fnXdrDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
|
||||
func Benchmark__Sereal_____Encode(b *testing.B) {
|
||||
fnBenchmarkEncode(b, "sereal", benchTs, fnSerealEncodeFn)
|
||||
}
|
||||
|
||||
func Benchmark__Sereal_____Decode(b *testing.B) {
|
||||
fnBenchmarkDecode(b, "sereal", benchTs, fnSerealEncodeFn, fnSerealDecodeFn, fnBenchNewTs)
|
||||
}
|
||||
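Most of the encode adapters above switch on testUseIoEncDec to exercise either a streaming encoder over a reused bytes.Buffer or a one-shot Marshal call. Below is a minimal, self-contained sketch of that dual path, using encoding/json as the stand-in codec; the helper name is hypothetical.

package codec

import (
	"bytes"
	"encoding/json"
)

func encodeStreamOrBytes(useIO bool, ts interface{}, bsIn []byte) ([]byte, error) {
	if useIO {
		// reuse the backing array of bsIn, mirroring fnBenchmarkByteBuf above
		buf := bytes.NewBuffer(bsIn[:0])
		err := json.NewEncoder(buf).Encode(ts)
		return buf.Bytes(), err
	}
	return json.Marshal(ts)
}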
207
vendor/github.com/ugorji/go/codec/bench/z_all_bench_test.go
generated
vendored
@@ -1,207 +0,0 @@
|
||||
// +build alltests
|
||||
// +build go1.7
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// see notes in z_all_test.go
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"sync"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
import . "github.com/ugorji/go/codec"
|
||||
|
||||
var benchmarkGroupOnce sync.Once
|
||||
|
||||
var benchmarkGroupSave struct {
|
||||
testUseIoEncDec int
|
||||
testUseReset bool
|
||||
|
||||
testDepth int
|
||||
testMapStringKeyOnly bool
|
||||
}
|
||||
|
||||
func benchmarkGroupInitAll() {
|
||||
testInitAll() // calls flag.Parse
|
||||
benchmarkGroupSave.testUseIoEncDec = testUseIoEncDec
|
||||
benchmarkGroupSave.testUseReset = testUseReset
|
||||
|
||||
benchmarkGroupSave.testDepth = testDepth
|
||||
benchmarkGroupSave.testMapStringKeyOnly = testMapStringKeyOnly
|
||||
}
|
||||
|
||||
func benchmarkGroupReset() {
|
||||
testUseIoEncDec = benchmarkGroupSave.testUseIoEncDec
|
||||
testUseReset = benchmarkGroupSave.testUseReset
|
||||
|
||||
testDepth = benchmarkGroupSave.testDepth
|
||||
testMapStringKeyOnly = benchmarkGroupSave.testMapStringKeyOnly
|
||||
|
||||
testDecodeOptions.ZeroCopy = true
|
||||
}
|
||||
|
||||
func benchmarkOneFn(fns []func(*testing.B)) func(*testing.B) {
|
||||
switch len(fns) {
|
||||
case 0:
|
||||
return nil
|
||||
case 1:
|
||||
return fns[0]
|
||||
default:
|
||||
return func(t *testing.B) {
|
||||
for _, f := range fns {
|
||||
f(t)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkSuiteNoop(b *testing.B) {
|
||||
testOnce.Do(testInitAll)
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkSuite(t *testing.B, fns ...func(t *testing.B)) {
|
||||
benchmarkGroupOnce.Do(benchmarkGroupInitAll)
|
||||
|
||||
f := benchmarkOneFn(fns)
|
||||
// find . -name "*_test.go" | xargs grep -e 'flag.' | cut -d '&' -f 2 | cut -d ',' -f 1 | grep -e '^bench'
|
||||
|
||||
testReinit() // so flag.Parse() is called first, and never called again
|
||||
benchReinit()
|
||||
|
||||
testDecodeOptions = DecodeOptions{}
|
||||
testEncodeOptions = EncodeOptions{}
|
||||
|
||||
benchmarkGroupReset()
|
||||
|
||||
testReinit()
|
||||
benchReinit()
|
||||
t.Run("options-false...", f)
|
||||
|
||||
benchmarkGroupReset()
|
||||
|
||||
testUseIoEncDec = 1024
|
||||
testReinit()
|
||||
benchReinit()
|
||||
t.Run("use-bufio-!bytes", f)
|
||||
|
||||
benchmarkGroupReset()
|
||||
|
||||
testUseReset = true
|
||||
testReinit()
|
||||
benchReinit()
|
||||
t.Run("reset-enc-dec...", f)
|
||||
|
||||
benchmarkGroupReset()
|
||||
}
|
||||
|
||||
func benchmarkVeryQuickSuite(t *testing.B, name string, fns ...func(t *testing.B)) {
|
||||
benchmarkDivider()
|
||||
benchmarkGroupOnce.Do(benchmarkGroupInitAll)
|
||||
benchmarkGroupReset()
|
||||
|
||||
// bd=1 2 | ti=-1, 1024 |
|
||||
|
||||
testUseIoEncDec = -1
|
||||
// testDepth = depth
|
||||
testReinit()
|
||||
benchReinit()
|
||||
|
||||
t.Run(name+"-bd"+strconv.Itoa(testDepth)+"........", benchmarkOneFn(fns))
|
||||
benchmarkGroupReset()
|
||||
}
|
||||
|
||||
func benchmarkQuickSuite(t *testing.B, name string, fns ...func(t *testing.B)) {
|
||||
benchmarkVeryQuickSuite(t, name, fns...)
|
||||
|
||||
// encoded size of TestStruc is between 20K and 30K for bd=1 // consider buffer=1024 * 16 * testDepth
|
||||
testUseIoEncDec = 1024 // (value of defEncByteBufSize): use smaller buffer, and more flushes - it's ok.
|
||||
// testDepth = depth
|
||||
testReinit()
|
||||
benchReinit()
|
||||
t.Run(name+"-bd"+strconv.Itoa(testDepth)+"-buf"+strconv.Itoa(testUseIoEncDec), benchmarkOneFn(fns))
|
||||
|
||||
testUseIoEncDec = 0
|
||||
// testDepth = depth
|
||||
testReinit()
|
||||
benchReinit()
|
||||
t.Run(name+"-bd"+strconv.Itoa(testDepth)+"-io.....", benchmarkOneFn(fns))
|
||||
|
||||
benchmarkGroupReset()
|
||||
}
|
||||
|
||||
/*
|
||||
z='bench_test.go'
|
||||
find . -name "$z" | xargs grep -e '^func Benchmark.*Encode' | \
|
||||
cut -d '(' -f 1 | cut -d ' ' -f 2 | \
|
||||
while read f; do echo "t.Run(\"$f\", $f)"; done &&
|
||||
echo &&
|
||||
find . -name "$z" | xargs grep -e '^func Benchmark.*Decode' | \
|
||||
cut -d '(' -f 1 | cut -d ' ' -f 2 | \
|
||||
while read f; do echo "t.Run(\"$f\", $f)"; done
|
||||
*/
|
||||
|
||||
func benchmarkCodecGroup(t *testing.B) {
|
||||
benchmarkDivider()
|
||||
t.Run("Benchmark__Msgpack____Encode", Benchmark__Msgpack____Encode)
|
||||
t.Run("Benchmark__Binc_______Encode", Benchmark__Binc_______Encode)
|
||||
t.Run("Benchmark__Simple_____Encode", Benchmark__Simple_____Encode)
|
||||
t.Run("Benchmark__Cbor_______Encode", Benchmark__Cbor_______Encode)
|
||||
t.Run("Benchmark__Json_______Encode", Benchmark__Json_______Encode)
|
||||
benchmarkDivider()
|
||||
t.Run("Benchmark__Msgpack____Decode", Benchmark__Msgpack____Decode)
|
||||
t.Run("Benchmark__Binc_______Decode", Benchmark__Binc_______Decode)
|
||||
t.Run("Benchmark__Simple_____Decode", Benchmark__Simple_____Decode)
|
||||
t.Run("Benchmark__Cbor_______Decode", Benchmark__Cbor_______Decode)
|
||||
t.Run("Benchmark__Json_______Decode", Benchmark__Json_______Decode)
|
||||
}
|
||||
|
||||
func BenchmarkCodecSuite(t *testing.B) { benchmarkSuite(t, benchmarkCodecGroup) }
|
||||
|
||||
func benchmarkJsonEncodeGroup(t *testing.B) {
|
||||
t.Run("Benchmark__Json_______Encode", Benchmark__Json_______Encode)
|
||||
}
|
||||
|
||||
func benchmarkJsonDecodeGroup(t *testing.B) {
|
||||
t.Run("Benchmark__Json_______Decode", Benchmark__Json_______Decode)
|
||||
}
|
||||
|
||||
func benchmarkCborEncodeGroup(t *testing.B) {
|
||||
t.Run("Benchmark__Cbor_______Encode", Benchmark__Cbor_______Encode)
|
||||
}
|
||||
|
||||
func benchmarkCborDecodeGroup(t *testing.B) {
|
||||
t.Run("Benchmark__Cbor_______Decode", Benchmark__Cbor_______Decode)
|
||||
}
|
||||
|
||||
func BenchmarkCodecQuickSuite(t *testing.B) {
|
||||
benchmarkQuickSuite(t, "cbor", benchmarkCborEncodeGroup)
|
||||
benchmarkQuickSuite(t, "cbor", benchmarkCborDecodeGroup)
|
||||
benchmarkQuickSuite(t, "json", benchmarkJsonEncodeGroup)
|
||||
benchmarkQuickSuite(t, "json", benchmarkJsonDecodeGroup)
|
||||
|
||||
// depths := [...]int{1, 4}
|
||||
// for _, d := range depths {
|
||||
// benchmarkQuickSuite(t, d, benchmarkJsonEncodeGroup)
|
||||
// benchmarkQuickSuite(t, d, benchmarkJsonDecodeGroup)
|
||||
// }
|
||||
|
||||
// benchmarkQuickSuite(t, 1, benchmarkJsonEncodeGroup)
|
||||
// benchmarkQuickSuite(t, 4, benchmarkJsonEncodeGroup)
|
||||
// benchmarkQuickSuite(t, 1, benchmarkJsonDecodeGroup)
|
||||
// benchmarkQuickSuite(t, 4, benchmarkJsonDecodeGroup)
|
||||
|
||||
// benchmarkQuickSuite(t, 1, benchmarkJsonEncodeGroup, benchmarkJsonDecodeGroup)
|
||||
// benchmarkQuickSuite(t, 4, benchmarkJsonEncodeGroup, benchmarkJsonDecodeGroup)
|
||||
// benchmarkQuickSuite(t, benchmarkJsonEncodeGroup)
|
||||
// benchmarkQuickSuite(t, benchmarkJsonDecodeGroup)
|
||||
}
|
||||
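benchmarkOneFn and the suite functions above fold several func(*testing.B) values into one, then rerun the same group under different global settings as named sub-benchmarks. The sketch below is a self-contained illustration of that composition pattern, not part of the vendored sources.

package codec

import "testing"

// combine folds several benchmark functions into one func(*testing.B),
// mirroring what benchmarkOneFn does above.
func combine(fns ...func(*testing.B)) func(*testing.B) {
	if len(fns) == 1 {
		return fns[0]
	}
	return func(b *testing.B) {
		for _, f := range fns {
			f(b)
		}
	}
}

func BenchmarkGroupedSketch(b *testing.B) {
	group := combine(
		func(b *testing.B) { b.Run("encode", func(b *testing.B) {}) },
		func(b *testing.B) { b.Run("decode", func(b *testing.B) {}) },
	)
	// the same group, run under two differently named configurations
	b.Run("options-false", group)
	b.Run("use-bufio", group)
}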
23
vendor/github.com/ugorji/go/codec/bench/z_all_stdlib_bench_test.go
generated
vendored
@@ -1,23 +0,0 @@
// +build alltests
// +build go1.7
// +build !generated

// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

package codec

import "testing"

func benchmarkStdlibGroup(t *testing.B) {
	benchmarkDivider()
	t.Run("Benchmark__Std_Json___Encode", Benchmark__Std_Json___Encode)
	t.Run("Benchmark__Gob________Encode", Benchmark__Gob________Encode)
	// t.Run("Benchmark__Std_Xml____Encode", Benchmark__Std_Xml____Encode)
	benchmarkDivider()
	t.Run("Benchmark__Std_Json___Decode", Benchmark__Std_Json___Decode)
	t.Run("Benchmark__Gob________Decode", Benchmark__Gob________Decode)
	// t.Run("Benchmark__Std_Xml____Decode", Benchmark__Std_Xml____Decode)
}

func BenchmarkStdlibSuite(t *testing.B) { benchmarkSuite(t, benchmarkStdlibGroup) }
37
vendor/github.com/ugorji/go/codec/bench/z_all_x_bench_gen_test.go
generated
vendored
@@ -1,37 +0,0 @@
// +build alltests
// +build go1.7
// +build x
// +build generated

// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

package codec

// see notes in z_all_bench_test.go

import "testing"

func benchmarkCodecXGenGroup(t *testing.B) {
	benchmarkDivider()
	t.Run("Benchmark__Msgpack____Encode", Benchmark__Msgpack____Encode)
	t.Run("Benchmark__Binc_______Encode", Benchmark__Binc_______Encode)
	t.Run("Benchmark__Simple_____Encode", Benchmark__Simple_____Encode)
	t.Run("Benchmark__Cbor_______Encode", Benchmark__Cbor_______Encode)
	t.Run("Benchmark__Json_______Encode", Benchmark__Json_______Encode)
	t.Run("Benchmark__Msgp_______Encode", Benchmark__Msgp_______Encode)
	t.Run("Benchmark__Easyjson___Encode", Benchmark__Easyjson___Encode)
	t.Run("Benchmark__Ffjson_____Encode", Benchmark__Ffjson_____Encode)

	benchmarkDivider()
	t.Run("Benchmark__Msgpack____Decode", Benchmark__Msgpack____Decode)
	t.Run("Benchmark__Binc_______Decode", Benchmark__Binc_______Decode)
	t.Run("Benchmark__Simple_____Decode", Benchmark__Simple_____Decode)
	t.Run("Benchmark__Cbor_______Decode", Benchmark__Cbor_______Decode)
	t.Run("Benchmark__Json_______Decode", Benchmark__Json_______Decode)
	t.Run("Benchmark__Msgp_______Decode", Benchmark__Msgp_______Decode)
	t.Run("Benchmark__Easyjson___Decode", Benchmark__Easyjson___Decode)
	t.Run("Benchmark__Ffjson_____Decode", Benchmark__Ffjson_____Decode)
}

func BenchmarkCodecXGenSuite(t *testing.B) { benchmarkSuite(t, benchmarkCodecXGenGroup) }
128
vendor/github.com/ugorji/go/codec/bench/z_all_x_bench_test.go
generated
vendored
@@ -1,128 +0,0 @@
|
||||
// +build alltests
|
||||
// +build go1.7
|
||||
// +build x
|
||||
// +build !generated
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// see notes in z_all_bench_test.go
|
||||
|
||||
import "testing"
|
||||
|
||||
// Note: The following cannot parse TestStruc effectively,
|
||||
// even with changes to remove arrays and minimize integer size to fit into int64 space.
|
||||
//
|
||||
// So we exclude them, listed below:
|
||||
// encode: gcbor, xdr
|
||||
// decode: gcbor, vmsgpack, xdr, sereal
|
||||
|
||||
func benchmarkXGroup(t *testing.B) {
|
||||
benchmarkDivider()
|
||||
t.Run("Benchmark__JsonIter___Encode", Benchmark__JsonIter___Encode)
|
||||
t.Run("Benchmark__Bson_______Encode", Benchmark__Bson_______Encode)
|
||||
t.Run("Benchmark__Mgobson____Encode", Benchmark__Mgobson____Encode)
|
||||
t.Run("Benchmark__VMsgpack___Encode", Benchmark__VMsgpack___Encode)
|
||||
// t.Run("Benchmark__Gcbor______Encode", Benchmark__Gcbor______Encode)
|
||||
// t.Run("Benchmark__Xdr________Encode", Benchmark__Xdr________Encode)
|
||||
t.Run("Benchmark__Sereal_____Encode", Benchmark__Sereal_____Encode)
|
||||
|
||||
benchmarkDivider()
|
||||
t.Run("Benchmark__JsonIter___Decode", Benchmark__JsonIter___Decode)
|
||||
t.Run("Benchmark__Bson_______Decode", Benchmark__Bson_______Decode)
|
||||
t.Run("Benchmark__Mgobson____Decode", Benchmark__Mgobson____Decode)
|
||||
// t.Run("Benchmark__VMsgpack___Decode", Benchmark__VMsgpack___Decode)
|
||||
// t.Run("Benchmark__Gcbor______Decode", Benchmark__Gcbor______Decode)
|
||||
// t.Run("Benchmark__Xdr________Decode", Benchmark__Xdr________Decode)
|
||||
// t.Run("Benchmark__Sereal_____Decode", Benchmark__Sereal_____Decode)
|
||||
}
|
||||
|
||||
func benchmarkCodecXGroup(t *testing.B) {
|
||||
benchmarkDivider()
|
||||
t.Run("Benchmark__Msgpack____Encode", Benchmark__Msgpack____Encode)
|
||||
t.Run("Benchmark__Binc_______Encode", Benchmark__Binc_______Encode)
|
||||
t.Run("Benchmark__Simple_____Encode", Benchmark__Simple_____Encode)
|
||||
t.Run("Benchmark__Cbor_______Encode", Benchmark__Cbor_______Encode)
|
||||
t.Run("Benchmark__Json_______Encode", Benchmark__Json_______Encode)
|
||||
t.Run("Benchmark__Std_Json___Encode", Benchmark__Std_Json___Encode)
|
||||
t.Run("Benchmark__Gob________Encode", Benchmark__Gob________Encode)
|
||||
// t.Run("Benchmark__Std_Xml____Encode", Benchmark__Std_Xml____Encode)
|
||||
t.Run("Benchmark__JsonIter___Encode", Benchmark__JsonIter___Encode)
|
||||
t.Run("Benchmark__Bson_______Encode", Benchmark__Bson_______Encode)
|
||||
t.Run("Benchmark__Mgobson____Encode", Benchmark__Mgobson____Encode)
|
||||
t.Run("Benchmark__VMsgpack___Encode", Benchmark__VMsgpack___Encode)
|
||||
// t.Run("Benchmark__Gcbor______Encode", Benchmark__Gcbor______Encode)
|
||||
// t.Run("Benchmark__Xdr________Encode", Benchmark__Xdr________Encode)
|
||||
t.Run("Benchmark__Sereal_____Encode", Benchmark__Sereal_____Encode)
|
||||
|
||||
benchmarkDivider()
|
||||
t.Run("Benchmark__Msgpack____Decode", Benchmark__Msgpack____Decode)
|
||||
t.Run("Benchmark__Binc_______Decode", Benchmark__Binc_______Decode)
|
||||
t.Run("Benchmark__Simple_____Decode", Benchmark__Simple_____Decode)
|
||||
t.Run("Benchmark__Cbor_______Decode", Benchmark__Cbor_______Decode)
|
||||
t.Run("Benchmark__Json_______Decode", Benchmark__Json_______Decode)
|
||||
t.Run("Benchmark__Std_Json___Decode", Benchmark__Std_Json___Decode)
|
||||
t.Run("Benchmark__Gob________Decode", Benchmark__Gob________Decode)
|
||||
// t.Run("Benchmark__Std_Xml____Decode", Benchmark__Std_Xml____Decode)
|
||||
t.Run("Benchmark__JsonIter___Decode", Benchmark__JsonIter___Decode)
|
||||
t.Run("Benchmark__Bson_______Decode", Benchmark__Bson_______Decode)
|
||||
t.Run("Benchmark__Mgobson____Decode", Benchmark__Mgobson____Decode)
|
||||
// t.Run("Benchmark__VMsgpack___Decode", Benchmark__VMsgpack___Decode)
|
||||
// t.Run("Benchmark__Gcbor______Decode", Benchmark__Gcbor______Decode)
|
||||
// t.Run("Benchmark__Xdr________Decode", Benchmark__Xdr________Decode)
|
||||
// t.Run("Benchmark__Sereal_____Decode", Benchmark__Sereal_____Decode)
|
||||
}
|
||||
|
||||
var benchmarkXSkipMsg = `>>>> Skipping - these cannot (en|de)code TestStruc - encode (gcbor, xdr, xml), decode (gcbor, vmsgpack, xdr, sereal, xml)`
|
||||
|
||||
func BenchmarkXSuite(t *testing.B) {
|
||||
println(benchmarkXSkipMsg)
|
||||
benchmarkSuite(t, benchmarkXGroup)
|
||||
}
|
||||
|
||||
func BenchmarkCodecXSuite(t *testing.B) {
|
||||
println(benchmarkXSkipMsg)
|
||||
benchmarkSuite(t, benchmarkCodecXGroup)
|
||||
}
|
||||
|
||||
func benchmarkAllJsonEncodeGroup(t *testing.B) {
|
||||
benchmarkDivider()
|
||||
t.Run("Benchmark__Json_______Encode", Benchmark__Json_______Encode)
|
||||
t.Run("Benchmark__Std_Json___Encode", Benchmark__Std_Json___Encode)
|
||||
t.Run("Benchmark__JsonIter___Encode", Benchmark__JsonIter___Encode)
|
||||
}
|
||||
|
||||
func benchmarkAllJsonDecodeGroup(t *testing.B) {
|
||||
benchmarkDivider()
|
||||
t.Run("Benchmark__Json_______Decode", Benchmark__Json_______Decode)
|
||||
t.Run("Benchmark__Std_Json___Decode", Benchmark__Std_Json___Decode)
|
||||
t.Run("Benchmark__JsonIter___Decode", Benchmark__JsonIter___Decode)
|
||||
}
|
||||
|
||||
func BenchmarkCodecVeryQuickAllJsonSuite(t *testing.B) {
|
||||
benchmarkVeryQuickSuite(t, "json-all", benchmarkAllJsonEncodeGroup, benchmarkAllJsonDecodeGroup)
|
||||
}
|
||||
|
||||
func BenchmarkCodecQuickAllJsonSuite(t *testing.B) {
|
||||
benchmarkQuickSuite(t, "json-all", benchmarkAllJsonEncodeGroup, benchmarkAllJsonDecodeGroup)
|
||||
// benchmarkQuickSuite(t, "json-all", benchmarkAllJsonEncodeGroup)
|
||||
// benchmarkQuickSuite(t, "json-all", benchmarkAllJsonDecodeGroup)
|
||||
|
||||
// depths := [...]int{1, 4}
|
||||
// for _, d := range depths {
|
||||
// benchmarkQuickSuite(t, d, benchmarkAllJsonEncodeGroup)
|
||||
// benchmarkQuickSuite(t, d, benchmarkAllJsonDecodeGroup)
|
||||
// }
|
||||
|
||||
// benchmarkQuickSuite(t, 1, benchmarkAllJsonEncodeGroup)
|
||||
// benchmarkQuickSuite(t, 4, benchmarkAllJsonEncodeGroup)
|
||||
// benchmarkQuickSuite(t, 1, benchmarkAllJsonDecodeGroup)
|
||||
// benchmarkQuickSuite(t, 4, benchmarkAllJsonDecodeGroup)
|
||||
|
||||
// benchmarkQuickSuite(t, 1, benchmarkAllJsonEncodeGroup, benchmarkAllJsonDecodeGroup)
|
||||
// benchmarkQuickSuite(t, 4, benchmarkAllJsonEncodeGroup, benchmarkAllJsonDecodeGroup)
|
||||
// benchmarkQuickSuite(t, benchmarkAllJsonEncodeGroup)
|
||||
// benchmarkQuickSuite(t, benchmarkAllJsonDecodeGroup)
|
||||
}
|
||||
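All of the z_all_* files above are compiled in only when the right combination of build tags is supplied (alltests, go1.7, x, and generated or !generated). As a reminder of how the pre-Go 1.17 // +build lines combine (separate lines AND together, space-separated terms on one line OR together, comma-separated terms AND together), an equivalent single-line header for the file above could be written as:

// +build alltests,go1.7,x,!generated

package codec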
1255
vendor/github.com/ugorji/go/codec/binc.go
generated
vendored
File diff suppressed because it is too large
272
vendor/github.com/ugorji/go/codec/build.sh
generated
vendored
@@ -1,272 +0,0 @@
#!/bin/bash

# Run all the different permutations of all the tests and other things.
# This helps ensure that nothing gets broken.

_tests() {
|
||||
local vet="" # TODO: make it off
|
||||
local gover=$( go version | cut -f 3 -d ' ' )
|
||||
# note that codecgen requires fastpath, so you cannot do "codecgen notfastpath"
|
||||
local a=( "" "safe" "notfastpath" "notfastpath safe" "codecgen" "codecgen safe" )
|
||||
for i in "${a[@]}"
|
||||
do
|
||||
echo ">>>> TAGS: $i"
|
||||
local i2=${i:-default}
|
||||
case $gover in
|
||||
go1.[0-6]*) go test ${zargs[*]} -tags "$i" "$@" ;;
|
||||
*) go vet -printfuncs "errorf" "$@" &&
|
||||
go test ${zargs[*]} -vet "$vet" -tags "alltests $i" -run "Suite" -coverprofile "${i2// /-}.cov.out" "$@" ;;
|
||||
esac
|
||||
if [[ "$?" != 0 ]]; then return 1; fi
|
||||
done
|
||||
echo "++++++++ TEST SUITES ALL PASSED ++++++++"
|
||||
}
|
||||
|
||||
|
||||
# is a generation needed?
|
||||
_ng() {
|
||||
local a="$1"
|
||||
if [[ ! -e "$a" ]]; then echo 1; return; fi
|
||||
for i in `ls -1 *.go.tmpl gen.go values_test.go`
|
||||
do
|
||||
if [[ "$a" -ot "$i" ]]; then echo 1; return; fi
|
||||
done
|
||||
}
|
||||
|
||||
_prependbt() {
|
||||
cat > ${2} <<EOF
|
||||
// +build generated
|
||||
|
||||
EOF
|
||||
cat ${1} >> ${2}
|
||||
rm -f ${1}
|
||||
}
|
||||
|
||||
# _build generates fast-path.go and gen-helper.go.
|
||||
_build() {
|
||||
if ! [[ "${zforce}" || $(_ng "fast-path.generated.go") || $(_ng "gen-helper.generated.go") || $(_ng "gen.generated.go") ]]; then return 0; fi
|
||||
|
||||
if [ "${zbak}" ]; then
|
||||
_zts=`date '+%m%d%Y_%H%M%S'`
|
||||
_gg=".generated.go"
|
||||
[ -e "gen-helper${_gg}" ] && mv gen-helper${_gg} gen-helper${_gg}__${_zts}.bak
|
||||
[ -e "fast-path${_gg}" ] && mv fast-path${_gg} fast-path${_gg}__${_zts}.bak
|
||||
[ -e "gen${_gg}" ] && mv gen${_gg} gen${_gg}__${_zts}.bak
|
||||
fi
|
||||
rm -f gen-helper.generated.go fast-path.generated.go gen.generated.go \
|
||||
*safe.generated.go *_generated_test.go *.generated_ffjson_expose.go
|
||||
|
||||
cat > gen.generated.go <<EOF
|
||||
// +build codecgen.exec
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// DO NOT EDIT. THIS FILE IS AUTO-GENERATED FROM gen-dec-(map|array).go.tmpl
|
||||
|
||||
const genDecMapTmpl = \`
|
||||
EOF
|
||||
cat >> gen.generated.go < gen-dec-map.go.tmpl
|
||||
cat >> gen.generated.go <<EOF
|
||||
\`
|
||||
|
||||
const genDecListTmpl = \`
|
||||
EOF
|
||||
cat >> gen.generated.go < gen-dec-array.go.tmpl
|
||||
cat >> gen.generated.go <<EOF
|
||||
\`
|
||||
|
||||
const genEncChanTmpl = \`
|
||||
EOF
|
||||
cat >> gen.generated.go < gen-enc-chan.go.tmpl
|
||||
cat >> gen.generated.go <<EOF
|
||||
\`
|
||||
EOF
|
||||
cat > gen-from-tmpl.codec.generated.go <<EOF
|
||||
package codec
|
||||
import "io"
|
||||
func GenInternalGoFile(r io.Reader, w io.Writer) error {
|
||||
return genInternalGoFile(r, w)
|
||||
}
|
||||
EOF
|
||||
cat > gen-from-tmpl.generated.go <<EOF
|
||||
//+build ignore
|
||||
|
||||
package main
|
||||
|
||||
import "${zpkg}"
|
||||
import "os"
|
||||
|
||||
func run(fnameIn, fnameOut string) {
|
||||
println("____ " + fnameIn + " --> " + fnameOut + " ______")
|
||||
fin, err := os.Open(fnameIn)
|
||||
if err != nil { panic(err) }
|
||||
defer fin.Close()
|
||||
fout, err := os.Create(fnameOut)
|
||||
if err != nil { panic(err) }
|
||||
defer fout.Close()
|
||||
err = codec.GenInternalGoFile(fin, fout)
|
||||
if err != nil { panic(err) }
|
||||
}
|
||||
|
||||
func main() {
|
||||
run("fast-path.go.tmpl", "fast-path.generated.go")
|
||||
run("gen-helper.go.tmpl", "gen-helper.generated.go")
|
||||
run("mammoth-test.go.tmpl", "mammoth_generated_test.go")
|
||||
run("mammoth2-test.go.tmpl", "mammoth2_generated_test.go")
|
||||
// run("sort-slice.go.tmpl", "sort-slice.generated.go")
|
||||
}
|
||||
EOF
|
||||
|
||||
sed -e 's+// __DO_NOT_REMOVE__NEEDED_FOR_REPLACING__IMPORT_PATH__FOR_CODEC_BENCH__+import . "github.com/ugorji/go/codec"+' \
|
||||
shared_test.go > bench/shared_test.go
|
||||
|
||||
# explicitly return 0 if this passes, else return 1
|
||||
go run -tags "prebuild" prebuild.go || return 1
|
||||
go run -tags "notfastpath safe codecgen.exec" gen-from-tmpl.generated.go || return 1
|
||||
rm -f gen-from-tmpl.*generated.go
|
||||
return 0
|
||||
}
|
||||
|
||||
_codegenerators() {
|
||||
local c5="_generated_test.go"
|
||||
local c7="$PWD/codecgen"
|
||||
local c8="$c7/__codecgen"
|
||||
local c9="codecgen-scratch.go"
|
||||
|
||||
if ! [[ $zforce || $(_ng "values_codecgen${c5}") ]]; then return 0; fi
|
||||
|
||||
# Note: ensure you run the codecgen for this codebase/directory i.e. ./codecgen/codecgen
|
||||
true &&
|
||||
echo "codecgen ... " &&
|
||||
if [[ $zforce || ! -f "$c8" || "$c7/gen.go" -nt "$c8" ]]; then
|
||||
echo "rebuilding codecgen ... " && ( cd codecgen && go build -o $c8 ${zargs[*]} . )
|
||||
fi &&
|
||||
$c8 -rt codecgen -t 'codecgen generated' -o values_codecgen${c5} -d 19780 $zfin $zfin2 &&
|
||||
cp mammoth2_generated_test.go $c9 &&
|
||||
$c8 -t 'codecgen,!notfastpath generated,!notfastpath' -o mammoth2_codecgen${c5} -d 19781 mammoth2_generated_test.go &&
|
||||
rm -f $c9 &&
|
||||
echo "generators done!"
|
||||
}
|
||||
|
||||
_prebuild() {
|
||||
echo "prebuild: zforce: $zforce"
|
||||
local d="$PWD"
|
||||
local zfin="test_values.generated.go"
|
||||
local zfin2="test_values_flex.generated.go"
|
||||
local zpkg="github.com/ugorji/go/codec"
|
||||
# zpkg=${d##*/src/}
|
||||
# zgobase=${d%%/src/*}
|
||||
# rm -f *_generated_test.go
|
||||
rm -f codecgen-*.go &&
|
||||
_build &&
|
||||
cp $d/values_test.go $d/$zfin &&
|
||||
cp $d/values_flex_test.go $d/$zfin2 &&
|
||||
_codegenerators &&
|
||||
if [[ "$(type -t _codegenerators_external )" = "function" ]]; then _codegenerators_external ; fi &&
|
||||
if [[ $zforce ]]; then go install ${zargs[*]} .; fi &&
|
||||
echo "prebuild done successfully"
|
||||
rm -f $d/$zfin $d/$zfin2
|
||||
# unset zfin zfin2 zpkg
|
||||
}
|
||||
|
||||
_make() {
|
||||
local makeforce=${zforce}
|
||||
zforce=1
|
||||
(cd codecgen && go install ${zargs[*]} .) && _prebuild && go install ${zargs[*]} .
|
||||
zforce=${makeforce}
|
||||
}
|
||||
|
||||
_clean() {
|
||||
rm -f gen-from-tmpl.*generated.go \
|
||||
codecgen-*.go \
|
||||
test_values.generated.go test_values_flex.generated.go
|
||||
}
|
||||
|
||||
_release() {
|
||||
local reply
|
||||
read -p "Pre-release validation takes a few minutes and MUST be run from within GOPATH/src. Confirm y/n? " -n 1 -r reply
|
||||
echo
|
||||
if [[ ! $reply =~ ^[Yy]$ ]]; then return 1; fi
|
||||
|
||||
# expects GOROOT, GOROOT_BOOTSTRAP to have been set.
|
||||
if [[ -z "${GOROOT// }" || -z "${GOROOT_BOOTSTRAP// }" ]]; then return 1; fi
|
||||
# (cd $GOROOT && git checkout -f master && git pull && git reset --hard)
|
||||
(cd $GOROOT && git pull)
|
||||
local f=`pwd`/make.release.out
|
||||
cat > $f <<EOF
|
||||
========== `date` ===========
|
||||
EOF
|
||||
# # go 1.6 and below kept giving memory errors on Mac OS X during SDK build or go run execution,
|
||||
# # that is fine, as we only explicitly test the last 3 releases and tip (2 years).
|
||||
local makeforce=${zforce}
|
||||
zforce=1
|
||||
for i in 1.10 1.11 1.12 master
|
||||
do
|
||||
echo "*********** $i ***********" >>$f
|
||||
if [[ "$i" != "master" ]]; then i="release-branch.go$i"; fi
|
||||
(false ||
|
||||
(echo "===== BUILDING GO SDK for branch: $i ... =====" &&
|
||||
cd $GOROOT &&
|
||||
git checkout -f $i && git reset --hard && git clean -f . &&
|
||||
cd src && ./make.bash >>$f 2>&1 && sleep 1 ) ) &&
|
||||
echo "===== GO SDK BUILD DONE =====" &&
|
||||
_prebuild &&
|
||||
echo "===== PREBUILD DONE with exit: $? =====" &&
|
||||
_tests "$@"
|
||||
if [[ "$?" != 0 ]]; then return 1; fi
|
||||
done
|
||||
zforce=${makeforce}
|
||||
echo "++++++++ RELEASE TEST SUITES ALL PASSED ++++++++"
|
||||
}
|
||||
|
||||
_usage() {
|
||||
cat <<EOF
|
||||
primary usage: $0
|
||||
-[tmpfnld] -> [tests, make, prebuild (force), inlining diagnostics, mid-stack inlining, race detector]
|
||||
-v -> verbose
|
||||
EOF
|
||||
if [[ "$(type -t _usage_run)" = "function" ]]; then _usage_run ; fi
|
||||
}
|
||||
|
||||
_main() {
|
||||
if [[ -z "$1" ]]; then _usage; return 1; fi
|
||||
local x
|
||||
local zforce
|
||||
local zargs=()
|
||||
local zverbose=()
|
||||
local zbenchflags=""
|
||||
OPTIND=1
|
||||
while getopts ":ctmnrgpfvlyzdb:" flag
|
||||
do
|
||||
case "x$flag" in
|
||||
'xf') zforce=1 ;;
|
||||
'xv') zverbose+=(1) ;;
|
||||
'xl') zargs+=("-gcflags"); zargs+=("-l=4") ;;
|
||||
'xn') zargs+=("-gcflags"); zargs+=("-m=2") ;;
|
||||
'xd') zargs+=("-race") ;;
|
||||
'xb') x='b'; zbenchflags=${OPTARG} ;;
|
||||
x\?) _usage; return 1 ;;
|
||||
*) x=$flag ;;
|
||||
esac
|
||||
done
|
||||
shift $((OPTIND-1))
|
||||
# echo ">>>> _main: extra args: $@"
|
||||
case "x$x" in
|
||||
'xt') _tests "$@" ;;
|
||||
'xm') _make "$@" ;;
|
||||
'xr') _release "$@" ;;
|
||||
'xg') _go ;;
|
||||
'xp') _prebuild "$@" ;;
|
||||
'xc') _clean "$@" ;;
|
||||
'xy') _analyze_extra "$@" ;;
|
||||
'xz') _analyze "$@" ;;
|
||||
'xb') _bench "$@" ;;
|
||||
esac
|
||||
# unset zforce zargs zbenchflags
|
||||
}
|
||||
|
||||
[ "." = `dirname $0` ] && _main "$@"
|
||||
|
||||
884
vendor/github.com/ugorji/go/codec/cbor.go
generated
vendored
@@ -1,884 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

package codec

import (
	"math"
	"time"
)

// major
|
||||
const (
|
||||
cborMajorUint byte = iota
|
||||
cborMajorNegInt
|
||||
cborMajorBytes
|
||||
cborMajorString
|
||||
cborMajorArray
|
||||
cborMajorMap
|
||||
cborMajorTag
|
||||
cborMajorSimpleOrFloat
|
||||
)
|
||||
|
||||
// simple
|
||||
const (
|
||||
cborBdFalse byte = 0xf4 + iota
|
||||
cborBdTrue
|
||||
cborBdNil
|
||||
cborBdUndefined
|
||||
cborBdExt
|
||||
cborBdFloat16
|
||||
cborBdFloat32
|
||||
cborBdFloat64
|
||||
)
|
||||
|
||||
// indefinite
|
||||
const (
|
||||
cborBdIndefiniteBytes byte = 0x5f
|
||||
cborBdIndefiniteString byte = 0x7f
|
||||
cborBdIndefiniteArray byte = 0x9f
|
||||
cborBdIndefiniteMap byte = 0xbf
|
||||
cborBdBreak byte = 0xff
|
||||
)
|
||||
|
||||
// These define some in-stream descriptors for
|
||||
// manual encoding e.g. when doing explicit indefinite-length
|
||||
const (
|
||||
CborStreamBytes byte = 0x5f
|
||||
CborStreamString byte = 0x7f
|
||||
CborStreamArray byte = 0x9f
|
||||
CborStreamMap byte = 0xbf
|
||||
CborStreamBreak byte = 0xff
|
||||
)
|
||||
|
||||
// base values
|
||||
const (
|
||||
cborBaseUint byte = 0x00
|
||||
cborBaseNegInt byte = 0x20
|
||||
cborBaseBytes byte = 0x40
|
||||
cborBaseString byte = 0x60
|
||||
cborBaseArray byte = 0x80
|
||||
cborBaseMap byte = 0xa0
|
||||
cborBaseTag byte = 0xc0
|
||||
cborBaseSimple byte = 0xe0
|
||||
)
|
||||
|
||||
// const (
|
||||
// cborSelfDesrTag byte = 0xd9
|
||||
// cborSelfDesrTag2 byte = 0xd9
|
||||
// cborSelfDesrTag3 byte = 0xf7
|
||||
// )
|
||||
|
||||
var (
|
||||
cbordescSimpleNames = map[byte]string{
|
||||
cborBdNil: "nil",
|
||||
cborBdFalse: "false",
|
||||
cborBdTrue: "true",
|
||||
cborBdFloat16: "float",
|
||||
cborBdFloat32: "float",
|
||||
cborBdFloat64: "float",
|
||||
cborBdIndefiniteBytes: "bytes*",
|
||||
cborBdIndefiniteString: "string*",
|
||||
cborBdIndefiniteArray: "array*",
|
||||
cborBdIndefiniteMap: "map*",
|
||||
}
|
||||
cbordescMajorNames = map[byte]string{
|
||||
cborMajorUint: "(u)int",
|
||||
cborMajorNegInt: "int",
|
||||
cborMajorBytes: "bytes",
|
||||
cborMajorString: "string",
|
||||
cborMajorArray: "array",
|
||||
cborMajorMap: "map",
|
||||
cborMajorTag: "tag",
|
||||
cborMajorSimpleOrFloat: "simple",
|
||||
}
|
||||
)
|
||||
|
||||
func cbordesc(bd byte) (s string) {
|
||||
bm := bd >> 5
|
||||
if bm == cborMajorSimpleOrFloat {
|
||||
s = cbordescSimpleNames[bd]
|
||||
if s == "" {
|
||||
s = "unknown(simple)"
|
||||
}
|
||||
} else {
|
||||
s = cbordescMajorNames[bm]
|
||||
if s == "" {
|
||||
s = "unknown"
|
||||
}
|
||||
}
|
||||
return
|
||||
|
||||
// switch bd >> 5 {
|
||||
// case cborMajorUint:
|
||||
// return "(u)int"
|
||||
// case cborMajorNegInt:
|
||||
// return "int"
|
||||
// case cborMajorBytes:
|
||||
// return "bytes"
|
||||
// case cborMajorString:
|
||||
// return "string"
|
||||
// case cborMajorArray:
|
||||
// return "array"
|
||||
// case cborMajorMap:
|
||||
// return "map"
|
||||
// case cborMajorTag:
|
||||
// return "tag"
|
||||
// case cborMajorSimpleOrFloat: // default
|
||||
// switch bd {
|
||||
// case cborBdNil:
|
||||
// return "nil"
|
||||
// case cborBdFalse:
|
||||
// return "false"
|
||||
// case cborBdTrue:
|
||||
// return "true"
|
||||
// case cborBdFloat16, cborBdFloat32, cborBdFloat64:
|
||||
// return "float"
|
||||
// case cborBdIndefiniteBytes:
|
||||
// return "bytes*"
|
||||
// case cborBdIndefiniteString:
|
||||
// return "string*"
|
||||
// case cborBdIndefiniteArray:
|
||||
// return "array*"
|
||||
// case cborBdIndefiniteMap:
|
||||
// return "map*"
|
||||
// default:
|
||||
// return "unknown(simple)"
|
||||
// }
|
||||
// }
|
||||
// return "unknown"
|
||||
}
|
||||
|
||||
// -------------------
|
||||
|
||||
type cborEncDriver struct {
|
||||
noBuiltInTypes
|
||||
encDriverNoopContainerWriter
|
||||
h *CborHandle
|
||||
x [8]byte
|
||||
_ [6]uint64 // padding
|
||||
e Encoder
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) encoder() *Encoder {
|
||||
return &e.e
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeNil() {
|
||||
e.e.encWr.writen1(cborBdNil)
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeBool(b bool) {
|
||||
if b {
|
||||
e.e.encWr.writen1(cborBdTrue)
|
||||
} else {
|
||||
e.e.encWr.writen1(cborBdFalse)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeFloat32(f float32) {
|
||||
e.e.encWr.writen1(cborBdFloat32)
|
||||
bigenHelper{e.x[:4], e.e.w()}.writeUint32(math.Float32bits(f))
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeFloat64(f float64) {
|
||||
e.e.encWr.writen1(cborBdFloat64)
|
||||
bigenHelper{e.x[:8], e.e.w()}.writeUint64(math.Float64bits(f))
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) encUint(v uint64, bd byte) {
|
||||
if v <= 0x17 {
|
||||
e.e.encWr.writen1(byte(v) + bd)
|
||||
} else if v <= math.MaxUint8 {
|
||||
e.e.encWr.writen2(bd+0x18, uint8(v))
|
||||
} else if v <= math.MaxUint16 {
|
||||
e.e.encWr.writen1(bd + 0x19)
|
||||
bigenHelper{e.x[:2], e.e.w()}.writeUint16(uint16(v))
|
||||
} else if v <= math.MaxUint32 {
|
||||
e.e.encWr.writen1(bd + 0x1a)
|
||||
bigenHelper{e.x[:4], e.e.w()}.writeUint32(uint32(v))
|
||||
} else { // if v <= math.MaxUint64 {
|
||||
e.e.encWr.writen1(bd + 0x1b)
|
||||
bigenHelper{e.x[:8], e.e.w()}.writeUint64(v)
|
||||
}
|
||||
}
|
||||
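// Illustrative sketch (not part of the original file): encUint above writes the
// CBOR initial byte plus "additional information". Values up to 0x17 are packed
// directly into the initial byte; 0x18, 0x19, 0x1a and 0x1b signal that a
// 1, 2, 4 or 8 byte big-endian value follows. The same header layout, built
// into a byte slice for clarity; the function name is hypothetical:
func cborUintHeaderSketch(v uint64, base byte) []byte {
	switch {
	case v <= 0x17:
		return []byte{base + byte(v)}
	case v <= 0xff:
		return []byte{base + 0x18, byte(v)}
	case v <= 0xffff:
		return []byte{base + 0x19, byte(v >> 8), byte(v)}
	case v <= 0xffffffff:
		return []byte{base + 0x1a, byte(v >> 24), byte(v >> 16), byte(v >> 8), byte(v)}
	default:
		return []byte{base + 0x1b, byte(v >> 56), byte(v >> 48), byte(v >> 40), byte(v >> 32),
			byte(v >> 24), byte(v >> 16), byte(v >> 8), byte(v)}
	}
}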
|
||||
func (e *cborEncDriver) EncodeInt(v int64) {
|
||||
if v < 0 {
|
||||
e.encUint(uint64(-1-v), cborBaseNegInt)
|
||||
} else {
|
||||
e.encUint(uint64(v), cborBaseUint)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeUint(v uint64) {
|
||||
e.encUint(v, cborBaseUint)
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) encLen(bd byte, length int) {
|
||||
e.encUint(uint64(length), bd)
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeTime(t time.Time) {
|
||||
if t.IsZero() {
|
||||
e.EncodeNil()
|
||||
} else if e.h.TimeRFC3339 {
|
||||
e.encUint(0, cborBaseTag)
|
||||
e.encStringBytesS(cborBaseString, t.Format(time.RFC3339Nano))
|
||||
} else {
|
||||
e.encUint(1, cborBaseTag)
|
||||
t = t.UTC().Round(time.Microsecond)
|
||||
sec, nsec := t.Unix(), uint64(t.Nanosecond())
|
||||
if nsec == 0 {
|
||||
e.EncodeInt(sec)
|
||||
} else {
|
||||
e.EncodeFloat64(float64(sec) + float64(nsec)/1e9)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeExt(rv interface{}, xtag uint64, ext Ext) {
|
||||
e.encUint(uint64(xtag), cborBaseTag)
|
||||
if ext == SelfExt {
|
||||
rv2 := baseRV(rv)
|
||||
e.e.encodeValue(rv2, e.h.fnNoExt(rv2.Type()))
|
||||
} else if v := ext.ConvertExt(rv); v == nil {
|
||||
e.EncodeNil()
|
||||
} else {
|
||||
e.e.encode(v)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeRawExt(re *RawExt) {
|
||||
e.encUint(uint64(re.Tag), cborBaseTag)
|
||||
// only encodes re.Value (never re.Data)
|
||||
if re.Value != nil {
|
||||
e.e.encode(re.Value)
|
||||
} else {
|
||||
e.EncodeNil()
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) WriteArrayStart(length int) {
|
||||
if e.h.IndefiniteLength {
|
||||
e.e.encWr.writen1(cborBdIndefiniteArray)
|
||||
} else {
|
||||
e.encLen(cborBaseArray, length)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) WriteMapStart(length int) {
|
||||
if e.h.IndefiniteLength {
|
||||
e.e.encWr.writen1(cborBdIndefiniteMap)
|
||||
} else {
|
||||
e.encLen(cborBaseMap, length)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) WriteMapEnd() {
|
||||
if e.h.IndefiniteLength {
|
||||
e.e.encWr.writen1(cborBdBreak)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) WriteArrayEnd() {
|
||||
if e.h.IndefiniteLength {
|
||||
e.e.encWr.writen1(cborBdBreak)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeString(v string) {
|
||||
if e.h.StringToRaw {
|
||||
e.EncodeStringBytesRaw(bytesView(v))
|
||||
return
|
||||
}
|
||||
e.encStringBytesS(cborBaseString, v)
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) EncodeStringBytesRaw(v []byte) {
|
||||
if v == nil {
|
||||
e.EncodeNil()
|
||||
} else {
|
||||
e.encStringBytesS(cborBaseBytes, stringView(v))
|
||||
}
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) encStringBytesS(bb byte, v string) {
|
||||
if e.h.IndefiniteLength {
|
||||
if bb == cborBaseBytes {
|
||||
e.e.encWr.writen1(cborBdIndefiniteBytes)
|
||||
} else {
|
||||
e.e.encWr.writen1(cborBdIndefiniteString)
|
||||
}
|
||||
var vlen uint = uint(len(v))
|
||||
blen := vlen / 4
|
||||
if blen == 0 {
|
||||
blen = 64
|
||||
} else if blen > 1024 {
|
||||
blen = 1024
|
||||
}
|
||||
for i := uint(0); i < vlen; {
|
||||
var v2 string
|
||||
i2 := i + blen
|
||||
if i2 >= i && i2 < vlen {
|
||||
v2 = v[i:i2]
|
||||
} else {
|
||||
v2 = v[i:]
|
||||
}
|
||||
e.encLen(bb, len(v2))
|
||||
e.e.encWr.writestr(v2)
|
||||
i = i2
|
||||
}
|
||||
e.e.encWr.writen1(cborBdBreak)
|
||||
} else {
|
||||
e.encLen(bb, len(v))
|
||||
e.e.encWr.writestr(v)
|
||||
}
|
||||
}
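// Illustration of the chunking above: with IndefiniteLength set, a 4000-byte string
// is written as an indefinite-length string of 1000-byte chunks (len/4 per chunk,
// capped at 1024 bytes), each with its own length head, terminated by the break
// byte (0xff).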
|
||||
|
||||
// ----------------------
|
||||
|
||||
type cborDecDriver struct {
|
||||
decDriverNoopContainerReader
|
||||
h *CborHandle
|
||||
bdRead bool
|
||||
bd byte
|
||||
st bool // skip tags
|
||||
fnil bool // found nil
|
||||
noBuiltInTypes
|
||||
_ [6]uint64 // padding cache-aligned
|
||||
d Decoder
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) decoder() *Decoder {
|
||||
return &d.d
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) readNextBd() {
|
||||
d.bd = d.d.decRd.readn1()
|
||||
d.bdRead = true
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) advanceNil() (null bool) {
|
||||
d.fnil = false
|
||||
if !d.bdRead {
|
||||
d.readNextBd()
|
||||
}
|
||||
if d.bd == cborBdNil || d.bd == cborBdUndefined {
|
||||
d.bdRead = false
|
||||
d.fnil = true
|
||||
null = true
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// skipTags is called to skip any tags in the stream.
|
||||
//
|
||||
// Since any value can be tagged, skipTags should be called
// before any value is decoded.
|
||||
//
|
||||
// By definition, skipTags should not be called before
|
||||
// checking for break, or nil or undefined.
|
||||
func (d *cborDecDriver) skipTags() {
|
||||
for d.bd>>5 == cborMajorTag {
|
||||
d.decUint()
|
||||
d.bd = d.d.decRd.readn1()
|
||||
}
|
||||
}
|
||||
|
||||
// func (d *cborDecDriver) uncacheRead() {
|
||||
// if d.bdRead {
|
||||
// d.d.decRd.unreadn1()
|
||||
// d.bdRead = false
|
||||
// }
|
||||
// }
|
||||
|
||||
func (d *cborDecDriver) ContainerType() (vt valueType) {
|
||||
d.fnil = false
|
||||
if !d.bdRead {
|
||||
d.readNextBd()
|
||||
}
|
||||
if d.st {
|
||||
d.skipTags()
|
||||
}
|
||||
if d.bd == cborBdNil {
|
||||
d.bdRead = false // always consume nil after seeing it in container type
|
||||
d.fnil = true
|
||||
return valueTypeNil
|
||||
} else if d.bd == cborBdIndefiniteBytes || (d.bd>>5 == cborMajorBytes) {
|
||||
return valueTypeBytes
|
||||
} else if d.bd == cborBdIndefiniteString || (d.bd>>5 == cborMajorString) {
|
||||
return valueTypeString
|
||||
} else if d.bd == cborBdIndefiniteArray || (d.bd>>5 == cborMajorArray) {
|
||||
return valueTypeArray
|
||||
} else if d.bd == cborBdIndefiniteMap || (d.bd>>5 == cborMajorMap) {
|
||||
return valueTypeMap
|
||||
}
|
||||
return valueTypeUnset
|
||||
}
|
||||
|
||||
// func (d *cborDecDriver) Nil() bool {
|
||||
// return d.fnil
|
||||
// }
|
||||
|
||||
func (d *cborDecDriver) TryNil() bool {
|
||||
return d.advanceNil()
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) CheckBreak() (v bool) {
|
||||
if !d.bdRead {
|
||||
d.readNextBd()
|
||||
}
|
||||
if d.bd == cborBdBreak {
|
||||
d.bdRead = false
|
||||
v = true
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) decUint() (ui uint64) {
|
||||
v := d.bd & 0x1f
|
||||
if v <= 0x17 {
|
||||
ui = uint64(v)
|
||||
} else {
|
||||
if v == 0x18 {
|
||||
ui = uint64(d.d.decRd.readn1())
|
||||
} else if v == 0x19 {
|
||||
ui = uint64(bigen.Uint16(d.d.decRd.readx(2)))
|
||||
} else if v == 0x1a {
|
||||
ui = uint64(bigen.Uint32(d.d.decRd.readx(4)))
|
||||
} else if v == 0x1b {
|
||||
ui = uint64(bigen.Uint64(d.d.decRd.readx(8)))
|
||||
} else {
|
||||
d.d.errorf("invalid descriptor decoding uint: %x/%s", d.bd, cbordesc(d.bd))
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) decCheckInteger() (neg bool) {
|
||||
if d.st {
|
||||
d.skipTags()
|
||||
}
|
||||
major := d.bd >> 5
|
||||
if major == cborMajorUint {
|
||||
} else if major == cborMajorNegInt {
|
||||
neg = true
|
||||
} else {
|
||||
d.d.errorf("invalid integer; got major %v from descriptor %x/%s, expected %v or %v",
|
||||
major, d.bd, cbordesc(d.bd), cborMajorUint, cborMajorNegInt)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func cborDecInt64(ui uint64, neg bool) (i int64) {
|
||||
// check if this number can be converted to an int without overflow
|
||||
if neg {
|
||||
i = -(chkOvf.SignedIntV(ui + 1))
|
||||
} else {
|
||||
i = chkOvf.SignedIntV(ui)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) decLen() int {
|
||||
return int(d.decUint())
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) decAppendIndefiniteBytes(bs []byte) []byte {
|
||||
d.bdRead = false
|
||||
for !d.CheckBreak() {
|
||||
if major := d.bd >> 5; major != cborMajorBytes && major != cborMajorString {
|
||||
d.d.errorf("invalid indefinite string/bytes; got major %v, expected %x/%s",
|
||||
major, d.bd, cbordesc(d.bd))
|
||||
}
|
||||
n := uint(d.decLen())
|
||||
oldLen := uint(len(bs))
|
||||
newLen := oldLen + n
|
||||
if newLen > uint(cap(bs)) {
|
||||
bs2 := make([]byte, newLen, 2*uint(cap(bs))+n)
|
||||
copy(bs2, bs)
|
||||
bs = bs2
|
||||
} else {
|
||||
bs = bs[:newLen]
|
||||
}
|
||||
d.d.decRd.readb(bs[oldLen:newLen])
|
||||
// bs = append(bs, d.d.decRd.readn()...)
|
||||
d.bdRead = false
|
||||
}
|
||||
d.bdRead = false
|
||||
return bs
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) DecodeInt64() (i int64) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
neg := d.decCheckInteger()
|
||||
ui := d.decUint()
|
||||
d.bdRead = false
|
||||
return cborDecInt64(ui, neg)
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) DecodeUint64() (ui uint64) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
if d.decCheckInteger() {
|
||||
d.d.errorf("cannot assign negative signed value to unsigned type")
|
||||
}
|
||||
ui = d.decUint()
|
||||
d.bdRead = false
|
||||
return
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) DecodeFloat64() (f float64) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
if d.st {
|
||||
d.skipTags()
|
||||
}
|
||||
switch d.bd {
|
||||
case cborBdFloat16:
|
||||
f = float64(math.Float32frombits(halfFloatToFloatBits(bigen.Uint16(d.d.decRd.readx(2)))))
|
||||
case cborBdFloat32:
|
||||
f = float64(math.Float32frombits(bigen.Uint32(d.d.decRd.readx(4))))
|
||||
case cborBdFloat64:
|
||||
f = math.Float64frombits(bigen.Uint64(d.d.decRd.readx(8)))
|
||||
default:
|
||||
major := d.bd >> 5
|
||||
if major == cborMajorUint {
|
||||
f = float64(cborDecInt64(d.decUint(), false))
|
||||
} else if major == cborMajorNegInt {
|
||||
f = float64(cborDecInt64(d.decUint(), true))
|
||||
} else {
|
||||
d.d.errorf("invalid float descriptor; got %d/%s, expected float16/32/64 or (-)int",
|
||||
d.bd, cbordesc(d.bd))
|
||||
}
|
||||
}
|
||||
d.bdRead = false
|
||||
return
|
||||
}
|
||||
|
||||
// bool can be decoded from bool only (single byte).
|
||||
func (d *cborDecDriver) DecodeBool() (b bool) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
if d.st {
|
||||
d.skipTags()
|
||||
}
|
||||
if d.bd == cborBdTrue {
|
||||
b = true
|
||||
} else if d.bd == cborBdFalse {
|
||||
} else {
|
||||
d.d.errorf("not bool - %s %x/%s", msgBadDesc, d.bd, cbordesc(d.bd))
|
||||
return
|
||||
}
|
||||
d.bdRead = false
|
||||
return
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) ReadMapStart() (length int) {
|
||||
if d.advanceNil() {
|
||||
return decContainerLenNil
|
||||
}
|
||||
if d.st {
|
||||
d.skipTags()
|
||||
}
|
||||
d.bdRead = false
|
||||
if d.bd == cborBdIndefiniteMap {
|
||||
return decContainerLenUnknown
|
||||
}
|
||||
if d.bd>>5 != cborMajorMap {
|
||||
d.d.errorf("error reading map; got major type: %x, expected %x/%s",
|
||||
d.bd>>5, cborMajorMap, cbordesc(d.bd))
|
||||
}
|
||||
return d.decLen()
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) ReadArrayStart() (length int) {
|
||||
if d.advanceNil() {
|
||||
return decContainerLenNil
|
||||
}
|
||||
if d.st {
|
||||
d.skipTags()
|
||||
}
|
||||
d.bdRead = false
|
||||
if d.bd == cborBdIndefiniteArray {
|
||||
return decContainerLenUnknown
|
||||
}
|
||||
if d.bd>>5 != cborMajorArray {
|
||||
d.d.errorf("invalid array; got major type: %x, expect: %x/%s",
|
||||
d.bd>>5, cborMajorArray, cbordesc(d.bd))
|
||||
}
|
||||
return d.decLen()
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) DecodeBytes(bs []byte, zerocopy bool) (bsOut []byte) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
if d.st {
|
||||
d.skipTags()
|
||||
}
|
||||
if d.bd == cborBdIndefiniteBytes || d.bd == cborBdIndefiniteString {
|
||||
d.bdRead = false
|
||||
if bs == nil {
|
||||
if zerocopy {
|
||||
return d.decAppendIndefiniteBytes(d.d.b[:0])
|
||||
}
|
||||
return d.decAppendIndefiniteBytes(zeroByteSlice)
|
||||
}
|
||||
return d.decAppendIndefiniteBytes(bs[:0])
|
||||
}
|
||||
if d.bd == cborBdIndefiniteArray {
|
||||
d.bdRead = false
|
||||
if zerocopy && len(bs) == 0 {
|
||||
bs = d.d.b[:]
|
||||
}
|
||||
if bs == nil {
|
||||
bs = []byte{}
|
||||
} else {
|
||||
bs = bs[:0]
|
||||
}
|
||||
for !d.CheckBreak() {
|
||||
bs = append(bs, uint8(chkOvf.UintV(d.DecodeUint64(), 8)))
|
||||
}
|
||||
return bs
|
||||
}
|
||||
if d.bd>>5 == cborMajorArray {
|
||||
d.bdRead = false
|
||||
if zerocopy && len(bs) == 0 {
|
||||
bs = d.d.b[:]
|
||||
}
|
||||
slen := d.decLen()
|
||||
bs = usableByteSlice(bs, slen)
|
||||
for i := 0; i < len(bs); i++ {
|
||||
bs[i] = uint8(chkOvf.UintV(d.DecodeUint64(), 8))
|
||||
}
|
||||
return bs
|
||||
}
|
||||
clen := d.decLen()
|
||||
d.bdRead = false
|
||||
if d.d.bytes && (zerocopy || d.h.ZeroCopy) {
|
||||
return d.d.decRd.rb.readx(uint(clen))
|
||||
}
|
||||
if zerocopy && len(bs) == 0 {
|
||||
bs = d.d.b[:]
|
||||
}
|
||||
return decByteSlice(d.d.r(), clen, d.h.MaxInitLen, bs)
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) DecodeStringAsBytes() (s []byte) {
|
||||
return d.DecodeBytes(d.d.b[:], true)
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) DecodeTime() (t time.Time) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
if d.bd>>5 != cborMajorTag {
|
||||
d.d.errorf("error reading tag; expected major type: %x, got: %x", cborMajorTag, d.bd>>5)
|
||||
}
|
||||
xtag := d.decUint()
|
||||
d.bdRead = false
|
||||
return d.decodeTime(xtag)
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) decodeTime(xtag uint64) (t time.Time) {
|
||||
switch xtag {
|
||||
case 0:
|
||||
var err error
|
||||
if t, err = time.Parse(time.RFC3339, stringView(d.DecodeStringAsBytes())); err != nil {
|
||||
d.d.errorv(err)
|
||||
}
|
||||
case 1:
|
||||
f1, f2 := math.Modf(d.DecodeFloat64())
|
||||
t = time.Unix(int64(f1), int64(f2*1e9))
|
||||
default:
|
||||
d.d.errorf("invalid tag for time.Time - expecting 0 or 1, got 0x%x", xtag)
|
||||
}
|
||||
t = t.UTC().Round(time.Microsecond)
|
||||
return
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) DecodeExt(rv interface{}, xtag uint64, ext Ext) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
if d.bd>>5 != cborMajorTag {
|
||||
d.d.errorf("error reading tag; expected major type: %x, got: %x", cborMajorTag, d.bd>>5)
|
||||
}
|
||||
realxtag := d.decUint()
|
||||
d.bdRead = false
|
||||
if ext == nil {
|
||||
re := rv.(*RawExt)
|
||||
re.Tag = realxtag
|
||||
d.d.decode(&re.Value)
|
||||
} else if xtag != realxtag {
|
||||
d.d.errorf("Wrong extension tag. Got %b. Expecting: %v", realxtag, xtag)
|
||||
return
|
||||
} else if ext == SelfExt {
|
||||
rv2 := baseRV(rv)
|
||||
d.d.decodeValue(rv2, d.h.fnNoExt(rv2.Type()))
|
||||
} else {
|
||||
d.d.interfaceExtConvertAndDecode(rv, ext)
|
||||
}
|
||||
d.bdRead = false
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) DecodeNaked() {
|
||||
if !d.bdRead {
|
||||
d.readNextBd()
|
||||
}
|
||||
|
||||
d.fnil = false
|
||||
n := d.d.naked()
|
||||
var decodeFurther bool
|
||||
|
||||
switch d.bd >> 5 {
|
||||
case cborMajorUint:
|
||||
if d.h.SignedInteger {
|
||||
n.v = valueTypeInt
|
||||
n.i = d.DecodeInt64()
|
||||
} else {
|
||||
n.v = valueTypeUint
|
||||
n.u = d.DecodeUint64()
|
||||
}
|
||||
case cborMajorNegInt:
|
||||
n.v = valueTypeInt
|
||||
n.i = d.DecodeInt64()
|
||||
case cborMajorBytes:
|
||||
fauxUnionReadRawBytes(d, &d.d, n, d.h.RawToString)
|
||||
case cborMajorString:
|
||||
n.v = valueTypeString
|
||||
n.s = string(d.DecodeStringAsBytes())
|
||||
case cborMajorArray:
|
||||
n.v = valueTypeArray
|
||||
decodeFurther = true
|
||||
case cborMajorMap:
|
||||
n.v = valueTypeMap
|
||||
decodeFurther = true
|
||||
case cborMajorTag:
|
||||
n.v = valueTypeExt
|
||||
n.u = d.decUint()
|
||||
n.l = nil
|
||||
if n.u == 0 || n.u == 1 {
|
||||
d.bdRead = false
|
||||
n.v = valueTypeTime
|
||||
n.t = d.decodeTime(n.u)
|
||||
} else if d.st && d.h.getExtForTag(n.u) == nil {
|
||||
// d.skipTags() // no need to call this - tags already skipped
|
||||
d.bdRead = false
|
||||
d.DecodeNaked()
|
||||
return // return when done (as true recursive function)
|
||||
}
|
||||
case cborMajorSimpleOrFloat:
|
||||
switch d.bd {
|
||||
case cborBdNil, cborBdUndefined:
|
||||
n.v = valueTypeNil
|
||||
d.fnil = true
|
||||
case cborBdFalse:
|
||||
n.v = valueTypeBool
|
||||
n.b = false
|
||||
case cborBdTrue:
|
||||
n.v = valueTypeBool
|
||||
n.b = true
|
||||
case cborBdFloat16, cborBdFloat32, cborBdFloat64:
|
||||
n.v = valueTypeFloat
|
||||
n.f = d.DecodeFloat64()
|
||||
case cborBdIndefiniteBytes:
|
||||
fauxUnionReadRawBytes(d, &d.d, n, d.h.RawToString)
|
||||
case cborBdIndefiniteString:
|
||||
n.v = valueTypeString
|
||||
n.s = string(d.DecodeStringAsBytes())
|
||||
case cborBdIndefiniteArray:
|
||||
n.v = valueTypeArray
|
||||
decodeFurther = true
|
||||
case cborBdIndefiniteMap:
|
||||
n.v = valueTypeMap
|
||||
decodeFurther = true
|
||||
default:
|
||||
d.d.errorf("decodeNaked: Unrecognized d.bd: 0x%x", d.bd)
|
||||
}
|
||||
default: // should never happen
|
||||
d.d.errorf("decodeNaked: Unrecognized d.bd: 0x%x", d.bd)
|
||||
}
|
||||
if !decodeFurther {
|
||||
d.bdRead = false
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------
|
||||
|
||||
// CborHandle is a Handle for the CBOR encoding format,
// defined at http://tools.ietf.org/html/rfc7049 and documented further at http://cbor.io .
//
// CBOR is comprehensively supported, including support for:
// - indefinite-length arrays/maps/bytes/strings
// - (extension) tags in range 0..0xffff (0 .. 65535)
// - half, single and double-precision floats
// - all numbers (1, 2, 4 and 8-byte signed and unsigned integers)
// - nil, true, false, ...
// - arrays and maps, bytes and text strings
//
// None of the optional extensions (with tags) defined in the spec are supported out-of-the-box.
// Users can implement them as needed (using SetExt), including spec-documented ones:
// - timestamp, BigNum, BigFloat, Decimals,
// - Encoded Text (e.g. URL, regexp, base64, MIME Message), etc.
type CborHandle struct {
binaryEncodingType
// noElemSeparators
BasicHandle

// IndefiniteLength=true means that we encode using indefinite lengths.
IndefiniteLength bool

// TimeRFC3339 says to encode time.Time using RFC3339 format.
// If unset, we encode time.Time using seconds past epoch.
TimeRFC3339 bool

// SkipUnexpectedTags says to skip over any tags for which extensions are
// not defined. This is in keeping with the cbor spec on "Optional Tagging of Items".
//
// Furthermore, this allows the skipping over of the Self-Describing Tag 0xd9d9f7.
SkipUnexpectedTags bool

_ [7]uint64 // padding (cache-aligned)
}
|
||||
|
||||
// Name returns the name of the handle: cbor
|
||||
func (h *CborHandle) Name() string { return "cbor" }
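
// A minimal usage sketch for the handle above (illustrative only; it simply
// combines the CborHandle options with the Encoder/Decoder constructors that
// this package documents in doc.go):
//
//	var ch CborHandle
//	ch.TimeRFC3339 = true        // time.Time as tag 0 + RFC3339 string
//	ch.SkipUnexpectedTags = true // ignore tags with no registered extension
//
//	var b []byte
//	if err := NewEncoderBytes(&b, &ch).Encode(map[string]int{"a": 1}); err != nil {
//		// handle error
//	}
//	var v interface{}
//	if err := NewDecoderBytes(b, &ch).Decode(&v); err != nil {
//		// handle error
//	}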
|
||||
|
||||
func (h *CborHandle) newEncDriver() encDriver {
|
||||
var e = &cborEncDriver{h: h}
|
||||
e.e.e = e
|
||||
e.e.init(h)
|
||||
e.reset()
|
||||
return e
|
||||
}
|
||||
|
||||
func (h *CborHandle) newDecDriver() decDriver {
|
||||
d := &cborDecDriver{h: h, st: h.SkipUnexpectedTags}
|
||||
d.d.d = d
|
||||
d.d.cbor = true
|
||||
d.d.init(h)
|
||||
d.reset()
|
||||
return d
|
||||
}
|
||||
|
||||
func (e *cborEncDriver) reset() {
|
||||
}
|
||||
|
||||
func (d *cborDecDriver) reset() {
|
||||
d.bd = 0
|
||||
d.bdRead = false
|
||||
d.fnil = false
|
||||
d.st = d.h.SkipUnexpectedTags
|
||||
}
|
||||
|
||||
var _ decDriver = (*cborDecDriver)(nil)
|
||||
var _ encDriver = (*cborEncDriver)(nil)
|
||||
368
vendor/github.com/ugorji/go/codec/cbor_test.go
generated
vendored
@@ -1,368 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/hex"
|
||||
"math"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCborIndefiniteLength(t *testing.T) {
|
||||
oldMapType := testCborH.MapType
|
||||
defer func() {
|
||||
testCborH.MapType = oldMapType
|
||||
}()
|
||||
testCborH.MapType = testMapStrIntfTyp
|
||||
// var (
|
||||
// M1 map[string][]byte
|
||||
// M2 map[uint64]bool
|
||||
// L1 []interface{}
|
||||
// S1 []string
|
||||
// B1 []byte
|
||||
// )
|
||||
var v, vv interface{}
|
||||
// define it (v), encode it using indefinite lengths, decode it (vv), compare v to vv
|
||||
v = map[string]interface{}{
|
||||
"one-byte-key": []byte{1, 2, 3, 4, 5, 6},
|
||||
"two-string-key": "two-value",
|
||||
"three-list-key": []interface{}{true, false, uint64(1), int64(-1)},
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
// buf.Reset()
|
||||
e := NewEncoder(&buf, testCborH)
|
||||
buf.WriteByte(cborBdIndefiniteMap)
|
||||
//----
|
||||
buf.WriteByte(cborBdIndefiniteString)
|
||||
e.MustEncode("one-")
|
||||
e.MustEncode("byte-")
|
||||
e.MustEncode("key")
|
||||
buf.WriteByte(cborBdBreak)
|
||||
|
||||
buf.WriteByte(cborBdIndefiniteBytes)
|
||||
e.MustEncode([]byte{1, 2, 3})
|
||||
e.MustEncode([]byte{4, 5, 6})
|
||||
buf.WriteByte(cborBdBreak)
|
||||
|
||||
//----
|
||||
buf.WriteByte(cborBdIndefiniteString)
|
||||
e.MustEncode("two-")
|
||||
e.MustEncode("string-")
|
||||
e.MustEncode("key")
|
||||
buf.WriteByte(cborBdBreak)
|
||||
|
||||
buf.WriteByte(cborBdIndefiniteString)
|
||||
e.MustEncode([]byte("two-")) // encode as bytes, to check robustness of code
|
||||
e.MustEncode([]byte("value"))
|
||||
buf.WriteByte(cborBdBreak)
|
||||
|
||||
//----
|
||||
buf.WriteByte(cborBdIndefiniteString)
|
||||
e.MustEncode("three-")
|
||||
e.MustEncode("list-")
|
||||
e.MustEncode("key")
|
||||
buf.WriteByte(cborBdBreak)
|
||||
|
||||
buf.WriteByte(cborBdIndefiniteArray)
|
||||
e.MustEncode(true)
|
||||
e.MustEncode(false)
|
||||
e.MustEncode(uint64(1))
|
||||
e.MustEncode(int64(-1))
|
||||
buf.WriteByte(cborBdBreak)
|
||||
|
||||
buf.WriteByte(cborBdBreak) // close map
|
||||
|
||||
NewDecoderBytes(buf.Bytes(), testCborH).MustDecode(&vv)
|
||||
if err := deepEqual(v, vv); err != nil {
|
||||
t.Logf("-------- Before and After marshal do not match: Error: %v", err)
|
||||
if testVerbose {
|
||||
t.Logf(" ....... GOLDEN: (%T) %#v", v, v)
|
||||
t.Logf(" ....... DECODED: (%T) %#v", vv, vv)
|
||||
}
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
type testCborGolden struct {
|
||||
Base64 string `codec:"cbor"`
|
||||
Hex string `codec:"hex"`
|
||||
Roundtrip bool `codec:"roundtrip"`
|
||||
Decoded interface{} `codec:"decoded"`
|
||||
Diagnostic string `codec:"diagnostic"`
|
||||
Skip bool `codec:"skip"`
|
||||
}
|
||||
|
||||
// Some tests are skipped because they include numbers outside the range of int64/uint64
|
||||
func TestCborGoldens(t *testing.T) {
|
||||
oldMapType := testCborH.MapType
|
||||
defer func() {
|
||||
testCborH.MapType = oldMapType
|
||||
}()
|
||||
testCborH.MapType = testMapStrIntfTyp
|
||||
// decode test-cbor-goldens.json into a list of []*testCborGolden
|
||||
// for each one,
|
||||
// - decode hex into []byte bs
|
||||
// - decode bs into interface{} v
|
||||
// - compare both using deepequal
|
||||
// - for any miss, record it
|
||||
var gs []*testCborGolden
|
||||
f, err := os.Open("test-cbor-goldens.json")
|
||||
if err != nil {
|
||||
t.Logf("error opening test-cbor-goldens.json: %v", err)
|
||||
t.FailNow()
|
||||
}
|
||||
defer f.Close()
|
||||
jh := new(JsonHandle)
|
||||
jh.MapType = testMapStrIntfTyp
|
||||
// d := NewDecoder(f, jh)
|
||||
d := NewDecoder(bufio.NewReader(f), jh)
|
||||
// err = d.Decode(&gs)
|
||||
d.MustDecode(&gs)
|
||||
if err != nil {
|
||||
t.Logf("error json decoding test-cbor-goldens.json: %v", err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
tagregex := regexp.MustCompile(`[\d]+\(.+?\)`)
|
||||
hexregex := regexp.MustCompile(`h'([0-9a-fA-F]*)'`)
|
||||
for i, g := range gs {
|
||||
// fmt.Printf("%v, skip: %v, isTag: %v, %s\n", i, g.Skip, tagregex.MatchString(g.Diagnostic), g.Diagnostic)
|
||||
// skip tags or simple or those with prefix, as we can't verify them.
|
||||
if g.Skip || strings.HasPrefix(g.Diagnostic, "simple(") || tagregex.MatchString(g.Diagnostic) {
|
||||
// fmt.Printf("%v: skipped\n", i)
|
||||
if testVerbose {
|
||||
t.Logf("[%v] skipping because skip=true OR unsupported simple value or Tag Value", i)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// println("++++++++++++", i, "g.Diagnostic", g.Diagnostic)
|
||||
if hexregex.MatchString(g.Diagnostic) {
|
||||
// println(i, "g.Diagnostic matched hex")
|
||||
if s2 := g.Diagnostic[2 : len(g.Diagnostic)-1]; s2 == "" {
|
||||
g.Decoded = zeroByteSlice
|
||||
} else if bs2, err2 := hex.DecodeString(s2); err2 == nil {
|
||||
g.Decoded = bs2
|
||||
}
|
||||
// fmt.Printf("%v: hex: %v\n", i, g.Decoded)
|
||||
}
|
||||
bs, err := hex.DecodeString(g.Hex)
|
||||
if err != nil {
|
||||
t.Logf("[%v] error hex decoding %s [%v]: %v", i, g.Hex, g.Hex, err)
|
||||
t.FailNow()
|
||||
}
|
||||
var v interface{}
|
||||
NewDecoderBytes(bs, testCborH).MustDecode(&v)
|
||||
if _, ok := v.(RawExt); ok {
|
||||
continue
|
||||
}
|
||||
// check the diagnostics to compare
|
||||
switch g.Diagnostic {
|
||||
case "Infinity":
|
||||
b := math.IsInf(v.(float64), 1)
|
||||
testCborError(t, i, math.Inf(1), v, nil, &b)
|
||||
case "-Infinity":
|
||||
b := math.IsInf(v.(float64), -1)
|
||||
testCborError(t, i, math.Inf(-1), v, nil, &b)
|
||||
case "NaN":
|
||||
// println(i, "checking NaN")
|
||||
b := math.IsNaN(v.(float64))
|
||||
testCborError(t, i, math.NaN(), v, nil, &b)
|
||||
case "undefined":
|
||||
b := v == nil
|
||||
testCborError(t, i, nil, v, nil, &b)
|
||||
default:
|
||||
v0 := g.Decoded
|
||||
// testCborCoerceJsonNumber(rv4i(&v0))
|
||||
testCborError(t, i, v0, v, deepEqual(v0, v), nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testCborError(t *testing.T, i int, v0, v1 interface{}, err error, equal *bool) {
|
||||
if err == nil && equal == nil {
|
||||
// fmt.Printf("%v testCborError passed (err and equal nil)\n", i)
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
t.Logf("[%v] deepEqual error: %v", i, err)
|
||||
if testVerbose {
|
||||
t.Logf(" ....... GOLDEN: (%T) %#v", v0, v0)
|
||||
t.Logf(" ....... DECODED: (%T) %#v", v1, v1)
|
||||
}
|
||||
t.FailNow()
|
||||
}
|
||||
if equal != nil && !*equal {
|
||||
t.Logf("[%v] values not equal", i)
|
||||
if testVerbose {
|
||||
t.Logf(" ....... GOLDEN: (%T) %#v", v0, v0)
|
||||
t.Logf(" ....... DECODED: (%T) %#v", v1, v1)
|
||||
}
|
||||
t.FailNow()
|
||||
}
|
||||
// fmt.Printf("%v testCborError passed (checks passed)\n", i)
|
||||
}
|
||||
|
||||
func TestCborHalfFloat(t *testing.T) {
|
||||
m := map[uint16]float64{
|
||||
// using examples from
|
||||
// https://en.wikipedia.org/wiki/Half-precision_floating-point_format
|
||||
0x3c00: 1,
|
||||
0x3c01: 1 + math.Pow(2, -10),
|
||||
0xc000: -2,
|
||||
0x7bff: 65504,
|
||||
0x0400: math.Pow(2, -14),
|
||||
0x03ff: math.Pow(2, -14) - math.Pow(2, -24),
|
||||
0x0001: math.Pow(2, -24),
|
||||
0x0000: 0,
|
||||
0x8000: -0.0,
|
||||
}
|
||||
var ba [3]byte
|
||||
ba[0] = cborBdFloat16
|
||||
var res float64
|
||||
for k, v := range m {
|
||||
res = 0
|
||||
bigen.PutUint16(ba[1:], k)
|
||||
testUnmarshalErr(&res, ba[:3], testCborH, t, "-")
|
||||
if res == v {
|
||||
if testVerbose {
|
||||
t.Logf("equal floats: from %x %b, %v", k, k, v)
|
||||
}
|
||||
} else {
|
||||
t.Logf("unequal floats: from %x %b, %v != %v", k, k, res, v)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCborSkipTags(t *testing.T) {
|
||||
type Tcbortags struct {
|
||||
A string
|
||||
M map[string]interface{}
|
||||
// A []interface{}
|
||||
}
|
||||
var b8 [8]byte
|
||||
var w bytesEncAppender
|
||||
w.b = []byte{}
|
||||
|
||||
// To make it easier,
|
||||
// - use tags between math.MaxUint8 and math.MaxUint16 (incl SelfDesc)
|
||||
// - use 1 char strings for key names
|
||||
// - use 3-6 char strings for map keys
|
||||
// - use integers that fit in 2 bytes (between 0x20 and 0xff)
|
||||
|
||||
var tags = [...]uint64{math.MaxUint8 * 2, math.MaxUint8 * 8, 55799, math.MaxUint16 / 2}
|
||||
var tagIdx int
|
||||
var doAddTag bool
|
||||
addTagFn8To16 := func() {
|
||||
if !doAddTag {
|
||||
return
|
||||
}
|
||||
// writes a tag between MaxUint8 and MaxUint16 (culled from cborEncDriver.encUint)
|
||||
w.writen1(cborBaseTag + 0x19)
|
||||
// bigenHelper.writeUint16
|
||||
bigen.PutUint16(b8[:2], uint16(tags[tagIdx%len(tags)]))
|
||||
w.writeb(b8[:2])
|
||||
tagIdx++
|
||||
}
|
||||
|
||||
var v Tcbortags
|
||||
v.A = "cbor"
|
||||
v.M = make(map[string]interface{})
|
||||
v.M["111"] = uint64(111)
|
||||
v.M["111.11"] = 111.11
|
||||
v.M["true"] = true
|
||||
// v.A = append(v.A, 222, 22.22, "true")
|
||||
|
||||
// make stream manually (interspacing tags around it)
|
||||
// WriteMapStart - e.encLen(cborBaseMap, length) - encUint(length, bd)
|
||||
// EncodeStringEnc - e.encStringBytesS(cborBaseString, v)
|
||||
|
||||
fnEncode := func() {
|
||||
w.b = w.b[:0]
|
||||
addTagFn8To16()
|
||||
// write v (Tcbortags, with 2 encoded fields = map with 2 entries)
w.writen1(2 + cborBaseMap) // 2 fields = 2 entries
|
||||
// write v.A
|
||||
var s = "A"
|
||||
w.writen1(byte(len(s)) + cborBaseString)
|
||||
w.writestr(s)
|
||||
w.writen1(byte(len(v.A)) + cborBaseString)
|
||||
w.writestr(v.A)
|
||||
//w.writen1(0)
|
||||
|
||||
addTagFn8To16()
|
||||
s = "M"
|
||||
w.writen1(byte(len(s)) + cborBaseString)
|
||||
w.writestr(s)
|
||||
|
||||
addTagFn8To16()
|
||||
w.writen1(byte(len(v.M)) + cborBaseMap)
|
||||
|
||||
addTagFn8To16()
|
||||
s = "111"
|
||||
w.writen1(byte(len(s)) + cborBaseString)
|
||||
w.writestr(s)
|
||||
w.writen2(cborBaseUint+0x18, uint8(111))
|
||||
|
||||
addTagFn8To16()
|
||||
s = "111.11"
|
||||
w.writen1(byte(len(s)) + cborBaseString)
|
||||
w.writestr(s)
|
||||
w.writen1(cborBdFloat64)
|
||||
bigen.PutUint64(b8[:8], math.Float64bits(111.11))
|
||||
w.writeb(b8[:8])
|
||||
|
||||
addTagFn8To16()
|
||||
s = "true"
|
||||
w.writen1(byte(len(s)) + cborBaseString)
|
||||
w.writestr(s)
|
||||
w.writen1(cborBdTrue)
|
||||
}
|
||||
|
||||
var h CborHandle
|
||||
h.SkipUnexpectedTags = true
|
||||
h.Canonical = true
|
||||
|
||||
var gold []byte
|
||||
NewEncoderBytes(&gold, &h).MustEncode(v)
|
||||
// xdebug2f("encoded: gold: %v", gold)
|
||||
|
||||
// w.b is the encoded bytes
|
||||
var v2 Tcbortags
|
||||
doAddTag = false
|
||||
fnEncode()
|
||||
// xdebug2f("manual: no-tags: %v", w.b)
|
||||
|
||||
testDeepEqualErr(gold, w.b, t, "cbor-skip-tags--bytes---")
|
||||
NewDecoderBytes(w.b, &h).MustDecode(&v2)
|
||||
testDeepEqualErr(v, v2, t, "cbor-skip-tags--no-tags-")
|
||||
|
||||
var v3 Tcbortags
|
||||
doAddTag = true
|
||||
fnEncode()
|
||||
// xdebug2f("manual: has-tags: %v", w.b)
|
||||
NewDecoderBytes(w.b, &h).MustDecode(&v3)
|
||||
testDeepEqualErr(v, v3, t, "cbor-skip-tags--has-tags")
|
||||
|
||||
// Github 300 - tests naked path
|
||||
{
|
||||
expected := []interface{}{"x", uint64(0x0)}
|
||||
toDecode := []byte{0x82, 0x61, 0x78, 0x00}
|
||||
|
||||
var raw interface{}
|
||||
|
||||
NewDecoderBytes(toDecode, &h).MustDecode(&raw)
|
||||
testDeepEqualErr(expected, raw, t, "cbor-skip-tags--gh-300---no-skips")
|
||||
|
||||
toDecode = []byte{0xd9, 0xd9, 0xf7, 0x82, 0x61, 0x78, 0x00}
|
||||
raw = nil
|
||||
NewDecoderBytes(toDecode, &h).MustDecode(&raw)
|
||||
testDeepEqualErr(expected, raw, t, "cbor-skip-tags--gh-300--has-skips")
|
||||
}
|
||||
}
|
||||
4178
vendor/github.com/ugorji/go/codec/codec_test.go
generated
vendored
File diff suppressed because it is too large
13
vendor/github.com/ugorji/go/codec/codecgen.go
generated
vendored
@@ -1,13 +0,0 @@
|
||||
// +build codecgen generated
|
||||
|
||||
package codec
|
||||
|
||||
// this file is here, to set the codecgen variable to true
|
||||
// when the build tag codecgen is set.
|
||||
//
|
||||
// this allows us do specific things e.g. skip missing fields tests,
|
||||
// when running in codecgen mode.
|
||||
|
||||
func init() {
|
||||
codecgen = true
|
||||
}
|
||||
37
vendor/github.com/ugorji/go/codec/codecgen/README.md
generated
vendored
@@ -1,37 +0,0 @@
|
||||
# codecgen tool

codecgen is given a list of *.go files to parse and an output file (fout);
it creates an output file __file.go__ which
contains `codec.Selfer` implementations for the named types found
in the parsed files.

Using codecgen is very straightforward.

**Download and install the tool**

`go get -u github.com/ugorji/go/codec/codecgen`

**Run the tool on your files**

The command line format is:

`codecgen [options] (-o outfile) (infile ...)`

```sh
% codecgen -?
Usage of codecgen:
  -c="github.com/ugorji/go/codec": codec path
  -o="": out file
  -r=".*": regex for type name to match
  -nr="": regex for type name to exclude
  -rt="": tags for go run
  -t="": build tag to put in file
  -u=false: Use unsafe, e.g. to avoid unnecessary allocation on []byte->string
  -x=false: keep temp file

% codecgen -o values_codecgen.go values.go values2.go moretypedefs.go
```

Please see the [blog article](http://ugorji.net/blog/go-codecgen)
for more information on how to use the tool.
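
A common way to keep the generated file up to date (illustrative; adjust the file
and package names to your project) is to drive codecgen from `go generate`:

```go
//go:generate codecgen -o values_codecgen.go values.go values2.go

package mypkg
```

Running `go generate ./...` then re-runs codecgen and rewrites the `codec.Selfer`
implementations for the listed files.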
|
||||
|
||||
383
vendor/github.com/ugorji/go/codec/codecgen/gen.go
generated
vendored
@@ -1,383 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// codecgen generates static implementations of the encoder and decoder functions
|
||||
// for a given type, bypassing reflection, and giving some performance benefits in terms of
|
||||
// wall and cpu time, and memory usage.
|
||||
//
|
||||
// Benchmarks (as of Dec 2018) show that codecgen gives about
|
||||
//
|
||||
// - for binary formats (cbor, etc): 25% on encoding and 30% on decoding to/from []byte
|
||||
// - for text formats (json, etc): 15% on encoding and 25% on decoding to/from []byte
|
||||
//
|
||||
// Note that (as of Dec 2018) codecgen completely ignores
|
||||
//
|
||||
// - MissingFielder interface
|
||||
// (if your type implements it, codecgen ignores it)
|
||||
// - decode option PreferArrayOverSlice
|
||||
// (we cannot dynamically create non-static arrays without reflection)
|
||||
//
|
||||
// In explicit package terms: codecgen generates codec.Selfer implementations for a set of types.
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"math/rand"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
)
|
||||
|
||||
const genCodecPkg = "codec1978" // keep this in sync with codec.genCodecPkg
|
||||
|
||||
const genFrunMainTmpl = `//+build ignore
|
||||
|
||||
// Code generated - temporary main package for codecgen - DO NOT EDIT.
|
||||
|
||||
package main
|
||||
{{ if .Types }}import "{{ .ImportPath }}"{{ end }}
|
||||
func main() {
|
||||
{{ $.PackageName }}.CodecGenTempWrite{{ .RandString }}()
|
||||
}
|
||||
`
|
||||
|
||||
// const genFrunPkgTmpl = `//+build codecgen
|
||||
const genFrunPkgTmpl = `
|
||||
|
||||
// Code generated - temporary package for codecgen - DO NOT EDIT.
|
||||
|
||||
package {{ $.PackageName }}
|
||||
|
||||
import (
|
||||
{{ if not .CodecPkgFiles }}{{ .CodecPkgName }} "{{ .CodecImportPath }}"{{ end }}
|
||||
"os"
|
||||
"reflect"
|
||||
"bytes"
|
||||
"strings"
|
||||
"go/format"
|
||||
)
|
||||
|
||||
func CodecGenTempWrite{{ .RandString }}() {
|
||||
os.Remove("{{ .OutFile }}")
|
||||
fout, err := os.Create("{{ .OutFile }}")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer fout.Close()
|
||||
|
||||
var typs []reflect.Type
|
||||
var typ reflect.Type
|
||||
var numfields int
|
||||
{{ range $index, $element := .Types }}
|
||||
var t{{ $index }} {{ . }}
|
||||
typ = reflect.TypeOf(t{{ $index }})
|
||||
typs = append(typs, typ)
|
||||
if typ.Kind() == reflect.Struct { numfields += typ.NumField() } else { numfields += 1 }
|
||||
{{ end }}
|
||||
|
||||
// println("initializing {{ .OutFile }}, buf size: {{ .AllFilesSize }}*16",
|
||||
// {{ .AllFilesSize }}*16, "num fields: ", numfields)
|
||||
var out = bytes.NewBuffer(make([]byte, 0, numfields*1024)) // {{ .AllFilesSize }}*16
|
||||
{{ if not .CodecPkgFiles }}{{ .CodecPkgName }}.{{ end }}Gen(out,
|
||||
"{{ .BuildTag }}", "{{ .PackageName }}", "{{ .RandString }}", {{ .NoExtensions }},
|
||||
{{ if not .CodecPkgFiles }}{{ .CodecPkgName }}.{{ end }}NewTypeInfos(strings.Split("{{ .StructTags }}", ",")),
|
||||
typs...)
|
||||
|
||||
bout, err := format.Source(out.Bytes())
|
||||
// println("... lengths: before formatting: ", len(out.Bytes()), ", after formatting", len(bout))
|
||||
if err != nil {
|
||||
fout.Write(out.Bytes())
|
||||
panic(err)
|
||||
}
|
||||
fout.Write(bout)
|
||||
}
|
||||
|
||||
`
|
||||
|
||||
// Generate is given a list of *.go files to parse, and an output file (fout).
|
||||
//
|
||||
// It finds all types T in the files, and it creates 2 tmp files (frun).
|
||||
// - main package file passed to 'go run'
|
||||
// - package level file which calls *genRunner.Selfer to write Selfer impls for each T.
|
||||
// We use a package level file so that it can reference unexported types in the package being worked on.
|
||||
// Tool then executes: "go run __frun__" which creates fout.
|
||||
// fout contains Codec(En|De)codeSelf implementations for every type T.
|
||||
//
|
||||
func Generate(outfile, buildTag, codecPkgPath string,
|
||||
uid int64,
|
||||
goRunTag string, st string,
|
||||
regexName, notRegexName *regexp.Regexp,
|
||||
deleteTempFile, noExtensions bool,
|
||||
infiles ...string) (err error) {
|
||||
// For each file, grab AST, find each type, and write a call to it.
|
||||
if len(infiles) == 0 {
|
||||
return
|
||||
}
|
||||
if codecPkgPath == "" {
|
||||
return errors.New("codec package path cannot be blank")
|
||||
}
|
||||
if outfile == "" {
|
||||
return errors.New("outfile cannot be blank")
|
||||
}
|
||||
if uid < 0 {
|
||||
uid = -uid
|
||||
} else if uid == 0 {
|
||||
rr := rand.New(rand.NewSource(time.Now().UnixNano()))
|
||||
uid = 101 + rr.Int63n(9777)
|
||||
}
|
||||
// We have to parse dir for package, before opening the temp file for writing (else ImportDir fails).
|
||||
// Also, ImportDir(...) must take an absolute path.
|
||||
lastdir := filepath.Dir(outfile)
|
||||
absdir, err := filepath.Abs(lastdir)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
importPath, err := pkgPath(absdir)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
type tmplT struct {
|
||||
CodecPkgName string
|
||||
CodecImportPath string
|
||||
ImportPath string
|
||||
OutFile string
|
||||
PackageName string
|
||||
RandString string
|
||||
BuildTag string
|
||||
StructTags string
|
||||
Types []string
|
||||
AllFilesSize int64
|
||||
CodecPkgFiles bool
|
||||
NoExtensions bool
|
||||
}
|
||||
tv := tmplT{
|
||||
CodecPkgName: genCodecPkg,
|
||||
OutFile: outfile,
|
||||
CodecImportPath: codecPkgPath,
|
||||
BuildTag: buildTag,
|
||||
RandString: strconv.FormatInt(uid, 10),
|
||||
StructTags: st,
|
||||
NoExtensions: noExtensions,
|
||||
}
|
||||
tv.ImportPath = importPath
|
||||
if tv.ImportPath == tv.CodecImportPath {
|
||||
tv.CodecPkgFiles = true
|
||||
tv.CodecPkgName = "codec"
|
||||
} else {
|
||||
// HACK: always handle vendoring. It should be typically on in go 1.6, 1.7
|
||||
tv.ImportPath = stripVendor(tv.ImportPath)
|
||||
}
|
||||
astfiles := make([]*ast.File, len(infiles))
|
||||
var fi os.FileInfo
|
||||
for i, infile := range infiles {
|
||||
if filepath.Dir(infile) != lastdir {
|
||||
err = errors.New("all input files must all be in same directory as output file")
|
||||
return
|
||||
}
|
||||
if fi, err = os.Stat(infile); err != nil {
|
||||
return
|
||||
}
|
||||
tv.AllFilesSize += fi.Size()
|
||||
|
||||
fset := token.NewFileSet()
|
||||
astfiles[i], err = parser.ParseFile(fset, infile, nil, 0)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if i == 0 {
|
||||
tv.PackageName = astfiles[i].Name.Name
|
||||
if tv.PackageName == "main" {
|
||||
// codecgen cannot be run on types in the 'main' package.
|
||||
// A temporary 'main' package must be created, and should reference the fully built
|
||||
// package containing the types.
|
||||
// Also, the temporary main package will conflict with the main package which already has a main method.
|
||||
err = errors.New("codecgen cannot be run on types in the 'main' package")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// keep track of types with selfer methods
|
||||
// selferMethods := []string{"CodecEncodeSelf", "CodecDecodeSelf"}
|
||||
selferEncTyps := make(map[string]bool)
|
||||
selferDecTyps := make(map[string]bool)
|
||||
for _, f := range astfiles {
|
||||
for _, d := range f.Decls {
|
||||
// if fd, ok := d.(*ast.FuncDecl); ok && fd.Recv != nil && fd.Recv.NumFields() == 1 {
|
||||
if fd, ok := d.(*ast.FuncDecl); ok && fd.Recv != nil && len(fd.Recv.List) == 1 {
|
||||
recvType := fd.Recv.List[0].Type
|
||||
if ptr, ok := recvType.(*ast.StarExpr); ok {
|
||||
recvType = ptr.X
|
||||
}
|
||||
if id, ok := recvType.(*ast.Ident); ok {
|
||||
switch fd.Name.Name {
|
||||
case "CodecEncodeSelf":
|
||||
selferEncTyps[id.Name] = true
|
||||
case "CodecDecodeSelf":
|
||||
selferDecTyps[id.Name] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// now find types
|
||||
for _, f := range astfiles {
|
||||
for _, d := range f.Decls {
|
||||
if gd, ok := d.(*ast.GenDecl); ok {
|
||||
for _, dd := range gd.Specs {
|
||||
if td, ok := dd.(*ast.TypeSpec); ok {
|
||||
// if len(td.Name.Name) == 0 || td.Name.Name[0] > 'Z' || td.Name.Name[0] < 'A' {
|
||||
if len(td.Name.Name) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// only generate for:
|
||||
// struct: StructType
|
||||
// primitives (numbers, bool, string): Ident
|
||||
// map: MapType
|
||||
// slice, array: ArrayType
|
||||
// chan: ChanType
|
||||
// do not generate:
|
||||
// FuncType, InterfaceType, StarExpr (ptr), etc
|
||||
//
|
||||
// We generate for all these types (not just structs), because they may be a field
|
||||
// in another struct which doesn't have codecgen run on it, and it will be nice
|
||||
// to take advantage of the fact that the type is a Selfer.
|
||||
switch td.Type.(type) {
|
||||
case *ast.StructType, *ast.Ident, *ast.MapType, *ast.ArrayType, *ast.ChanType:
|
||||
// only add to tv.Types iff
|
||||
// - it matches per the -r parameter
|
||||
// - it doesn't match per the -nr parameter
|
||||
// - it doesn't have any of the Selfer methods in the file
|
||||
if regexName.FindStringIndex(td.Name.Name) != nil &&
|
||||
notRegexName.FindStringIndex(td.Name.Name) == nil &&
|
||||
!selferEncTyps[td.Name.Name] &&
|
||||
!selferDecTyps[td.Name.Name] {
|
||||
tv.Types = append(tv.Types, td.Name.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(tv.Types) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// we cannot use ioutil.TempFile, because we cannot guarantee the file suffix (.go).
|
||||
// Also, we cannot create file in temp directory,
|
||||
// because go run will not work (as it needs to see the types here).
|
||||
// Consequently, create the temp file in the current directory, and remove when done.
|
||||
|
||||
// frun, err = ioutil.TempFile("", "codecgen-")
|
||||
// frunName := filepath.Join(os.TempDir(), "codecgen-"+strconv.FormatInt(time.Now().UnixNano(), 10)+".go")
|
||||
|
||||
frunMainName := filepath.Join(lastdir, "codecgen-main-"+tv.RandString+".generated.go")
|
||||
frunPkgName := filepath.Join(lastdir, "codecgen-pkg-"+tv.RandString+".generated.go")
|
||||
|
||||
// var frunMain, frunPkg *os.File
|
||||
if _, err = gen1(frunMainName, genFrunMainTmpl, &tv); err != nil {
|
||||
return
|
||||
}
|
||||
if _, err = gen1(frunPkgName, genFrunPkgTmpl, &tv); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// remove outfile, so "go run ..." will not think that types in outfile already exist.
|
||||
os.Remove(outfile)
|
||||
|
||||
// execute go run frun
|
||||
cmd := exec.Command("go", "run", "-tags", "codecgen.exec safe "+goRunTag, frunMainName) //, frunPkg.Name())
|
||||
cmd.Dir = lastdir
|
||||
var buf bytes.Buffer
|
||||
cmd.Stdout = &buf
|
||||
cmd.Stderr = &buf
|
||||
if err = cmd.Run(); err != nil {
|
||||
err = fmt.Errorf("error running 'go run %s': %v, console: %s",
|
||||
frunMainName, err, buf.Bytes())
|
||||
return
|
||||
}
|
||||
os.Stdout.Write(buf.Bytes())
|
||||
|
||||
// only delete these files if codecgen ran successfully.
|
||||
// if unsuccessful, these files are here for diagnosis.
|
||||
if deleteTempFile {
|
||||
os.Remove(frunMainName)
|
||||
os.Remove(frunPkgName)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func gen1(frunName, tmplStr string, tv interface{}) (frun *os.File, err error) {
|
||||
os.Remove(frunName)
|
||||
if frun, err = os.Create(frunName); err != nil {
|
||||
return
|
||||
}
|
||||
defer frun.Close()
|
||||
|
||||
t := template.New("")
|
||||
if t, err = t.Parse(tmplStr); err != nil {
|
||||
return
|
||||
}
|
||||
bw := bufio.NewWriter(frun)
|
||||
if err = t.Execute(bw, tv); err != nil {
|
||||
bw.Flush()
|
||||
return
|
||||
}
|
||||
if err = bw.Flush(); err != nil {
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// copied from ../gen.go (keep in sync).
|
||||
func stripVendor(s string) string {
|
||||
// HACK: Misbehaviour occurs in go 1.5. May have to re-visit this later.
|
||||
// if s contains /vendor/ OR startsWith vendor/, then return everything after it.
|
||||
const vendorStart = "vendor/"
|
||||
const vendorInline = "/vendor/"
|
||||
if i := strings.LastIndex(s, vendorInline); i >= 0 {
|
||||
s = s[i+len(vendorInline):]
|
||||
} else if strings.HasPrefix(s, vendorStart) {
|
||||
s = s[len(vendorStart):]
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func main() {
|
||||
o := flag.String("o", "", "out file")
|
||||
c := flag.String("c", genCodecPath, "codec path")
|
||||
t := flag.String("t", "", "build tag to put in file")
|
||||
r := flag.String("r", ".*", "regex for type name to match")
|
||||
nr := flag.String("nr", "^$", "regex for type name to exclude")
|
||||
rt := flag.String("rt", "", "tags for go run")
|
||||
st := flag.String("st", "codec,json", "struct tag keys to introspect")
|
||||
x := flag.Bool("x", false, "keep temp file")
|
||||
_ = flag.Bool("u", false, "Allow unsafe use. ***IGNORED*** - kept for backwards compatibility: ")
|
||||
d := flag.Int64("d", 0, "random identifier for use in generated code")
|
||||
nx := flag.Bool("nx", false, "do not support extensions - support of extensions may cause extra allocation")
|
||||
|
||||
flag.Parse()
|
||||
err := Generate(*o, *t, *c, *d, *rt, *st,
|
||||
regexp.MustCompile(*r), regexp.MustCompile(*nr), !*x, *nx, flag.Args()...)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "codecgen error: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
5
vendor/github.com/ugorji/go/codec/codecgen/go.mod
generated
vendored
@@ -1,5 +0,0 @@
|
||||
module github.com/ugorji/go/codec/codecgen
|
||||
|
||||
require (
|
||||
github.com/ugorji/go/codec v1.1.7
|
||||
)
|
||||
24
vendor/github.com/ugorji/go/codec/codecgen/goversion_pkgpath_gte_go111.go
generated
vendored
@@ -1,24 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// +build go1.11
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"golang.org/x/tools/go/packages"
|
||||
)
|
||||
|
||||
func pkgPath(dir string) (string, error) {
|
||||
pkgs, err := packages.Load(&packages.Config{Dir: dir}, ".")
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(pkgs) != 1 {
|
||||
return "", fmt.Errorf("Could not read package (%d package found)", len(pkgs))
|
||||
}
|
||||
pkg := pkgs[0]
|
||||
return pkg.PkgPath, nil
|
||||
}
|
||||
18
vendor/github.com/ugorji/go/codec/codecgen/goversion_pkgpath_lt_go111.go
generated
vendored
@@ -1,18 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// +build !go1.11
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/build"
|
||||
)
|
||||
|
||||
func pkgPath(dir string) (string, error) {
|
||||
pkg, err := build.Default.ImportDir(dir, build.AllowBinary)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return pkg.ImportPath, nil
|
||||
}
|
||||
6
vendor/github.com/ugorji/go/codec/codecgen/z.go
generated
vendored
@@ -1,6 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
const genCodecPath = "github.com/ugorji/go/codec"
|
||||
2039
vendor/github.com/ugorji/go/codec/decode.go
generated
vendored
File diff suppressed because it is too large
226
vendor/github.com/ugorji/go/codec/doc.go
generated
vendored
@@ -1,226 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
/*
|
||||
Package codec provides a
|
||||
High Performance, Feature-Rich Idiomatic Go 1.4+ codec/encoding library
|
||||
for binc, msgpack, cbor, json.
|
||||
|
||||
Supported Serialization formats are:
|
||||
|
||||
- msgpack: https://github.com/msgpack/msgpack
|
||||
- binc: http://github.com/ugorji/binc
|
||||
- cbor: http://cbor.io http://tools.ietf.org/html/rfc7049
|
||||
- json: http://json.org http://tools.ietf.org/html/rfc7159
|
||||
- simple:
|
||||
|
||||
This package will carefully use 'package unsafe' for performance reasons in specific places.
|
||||
You can build without unsafe use by passing the safe or appengine tag
|
||||
i.e. 'go install -tags=safe ...'.
|
||||
|
||||
For detailed usage information, read the primer at http://ugorji.net/blog/go-codec-primer .
|
||||
|
||||
The idiomatic Go support is as seen in other encoding packages in
|
||||
the standard library (ie json, xml, gob, etc).
|
||||
|
||||
Rich Feature Set includes:
|
||||
|
||||
- Simple but extremely powerful and feature-rich API
|
||||
- Support for go 1.4 and above, while selectively using newer APIs for later releases
|
||||
- Excellent code coverage ( > 90% )
|
||||
- Very High Performance.
|
||||
Our extensive benchmarks show us outperforming Gob, Json, Bson, etc by 2-4X.
|
||||
- Carefully selected use of 'unsafe' for targeted performance gains.
|
||||
- 100% safe mode supported, where 'unsafe' is not used at all.
|
||||
- Lock-free (sans mutex) concurrency for scaling to 100's of cores
|
||||
- In-place updates during decode, with option to zero value in maps and slices prior to decode
|
||||
- Coerce types where appropriate
|
||||
e.g. decode an int in the stream into a float, decode numbers from formatted strings, etc
|
||||
- Corner Cases:
|
||||
Overflows, nil maps/slices, nil values in streams are handled correctly
|
||||
- Standard field renaming via tags
|
||||
- Support for omitting empty fields during an encoding
|
||||
- Encoding from any value and decoding into pointer to any value
|
||||
(struct, slice, map, primitives, pointers, interface{}, etc)
|
||||
- Extensions to support efficient encoding/decoding of any named types
|
||||
- Support encoding.(Binary|Text)(M|Unm)arshaler interfaces
|
||||
- Support IsZero() bool to determine if a value is a zero value.
|
||||
Analogous to time.Time.IsZero() bool.
|
||||
- Decoding without a schema (into an interface{}).
|
||||
Includes Options to configure what specific map or slice type to use
|
||||
when decoding an encoded list or map into a nil interface{}
|
||||
- Mapping a non-interface type to an interface, so we can decode appropriately
|
||||
into any interface type with a correctly configured non-interface value.
|
||||
- Encode a struct as an array, and decode struct from an array in the data stream
|
||||
- Option to encode struct keys as numbers (instead of strings)
|
||||
(to support structured streams with fields encoded as numeric codes)
|
||||
- Comprehensive support for anonymous fields
|
||||
- Fast (no-reflection) encoding/decoding of common maps and slices
|
||||
- Code-generation for faster performance, supported in go 1.6+
|
||||
- Support binary (e.g. messagepack, cbor) and text (e.g. json) formats
|
||||
- Support indefinite-length formats to enable true streaming
|
||||
(for formats which support it e.g. json, cbor)
|
||||
- Support canonical encoding, where a value is ALWAYS encoded as same sequence of bytes.
|
||||
This mostly applies to maps, where iteration order is non-deterministic.
|
||||
- NIL in data stream decoded as zero value
|
||||
- Never silently skip data when decoding.
|
||||
User decides whether to return an error or silently skip data when keys or indexes
|
||||
in the data stream do not map to fields in the struct.
|
||||
- Detect and error when encoding a cyclic reference (instead of stack overflow shutdown)
|
||||
- Encode/Decode from/to chan types (for iterative streaming support)
|
||||
- Drop-in replacement for encoding/json. `json:` key in struct tag supported.
|
||||
- Provides a RPC Server and Client Codec for net/rpc communication protocol.
|
||||
- Handle unique idiosyncrasies of codecs e.g.
|
||||
- For messagepack, configure how ambiguities in handling raw bytes are resolved
|
||||
- For messagepack, provide rpc server/client codec to support
|
||||
msgpack-rpc protocol defined at:
|
||||
https://github.com/msgpack-rpc/msgpack-rpc/blob/master/spec.md
|
||||
|
||||
Extension Support
|
||||
|
||||
Users can register a function to handle the encoding or decoding of
|
||||
their custom types.
|
||||
|
||||
There are no restrictions on what the custom type can be. Some examples:
|
||||
|
||||
type BisSet []int
|
||||
type BitSet64 uint64
|
||||
type UUID string
|
||||
type MyStructWithUnexportedFields struct { a int; b bool; c []int; }
|
||||
type GifImage struct { ... }
|
||||
|
||||
As an illustration, MyStructWithUnexportedFields would normally be
|
||||
encoded as an empty map because it has no exported fields, while UUID
|
||||
would be encoded as a string. However, with extension support, you can
|
||||
encode any of these however you like.
|
||||
|
||||
There is also seamless support provided for registering an extension (with a tag)
|
||||
but letting the encoding mechanism default to the standard way.
|
||||
|
||||
Custom Encoding and Decoding
|
||||
|
||||
This package maintains symmetry in the encoding and decoding halves.
|
||||
We determine how to encode or decode by walking this decision tree
|
||||
|
||||
- is there an extension registered for the type?
|
||||
- is type a codec.Selfer?
|
||||
- is format binary, and is type an encoding.BinaryMarshaler and BinaryUnmarshaler?
- is format specifically json, and is type an encoding/json.Marshaler and Unmarshaler?
|
||||
- is format text-based, and type an encoding.TextMarshaler and TextUnmarshaler?
|
||||
- else we use a pair of functions based on the "kind" of the type e.g. map, slice, int64, etc
|
||||
|
||||
This symmetry is important to reduce chances of issues happening because the
|
||||
encoding and decoding sides are out of sync e.g. decoded via very specific
|
||||
encoding.TextUnmarshaler but encoded via kind-specific generalized mode.
|
||||
|
||||
Consequently, if a type only defines one-half of the symmetry
|
||||
(e.g. it implements UnmarshalJSON() but not MarshalJSON() ),
|
||||
then that type doesn't satisfy the check and we will continue walking down the
|
||||
decision tree.
|
||||
|
||||
RPC
|
||||
|
||||
RPC Client and Server Codecs are implemented, so the codecs can be used
|
||||
with the standard net/rpc package.
|
||||
|
||||
Usage
|
||||
|
||||
The Handle is SAFE for concurrent READ, but NOT SAFE for concurrent modification.
|
||||
|
||||
The Encoder and Decoder are NOT safe for concurrent use.
|
||||
|
||||
Consequently, the usage model is basically:
|
||||
|
||||
- Create and initialize the Handle before any use.
|
||||
Once created, DO NOT modify it.
|
||||
- Multiple Encoders or Decoders can now use the Handle concurrently.
|
||||
They only read information off the Handle (never write).
|
||||
- However, each Encoder or Decoder MUST not be used concurrently
|
||||
- To re-use an Encoder/Decoder, call Reset(...) on it first.
|
||||
This allows you to reuse state maintained on the Encoder/Decoder.
|
||||
|
||||
Sample usage model:
|
||||
|
||||
    // create and configure Handle
    var (
        bh codec.BincHandle
        mh codec.MsgpackHandle
        ch codec.CborHandle
    )

    mh.MapType = reflect.TypeOf(map[string]interface{}(nil))

    // configure extensions
    // e.g. for msgpack, define functions and enable Time support for tag 1
    // mh.SetExt(reflect.TypeOf(time.Time{}), 1, myExt)

    // create and use decoder/encoder
    var (
        r io.Reader
        w io.Writer
        b []byte
        h = &bh // or mh to use msgpack
    )

    dec = codec.NewDecoder(r, h)
    dec = codec.NewDecoderBytes(b, h)
    err = dec.Decode(&v)

    enc = codec.NewEncoder(w, h)
    enc = codec.NewEncoderBytes(&b, h)
    err = enc.Encode(v)

    //RPC Server
    go func() {
        for {
            conn, err := listener.Accept()
            rpcCodec := codec.GoRpc.ServerCodec(conn, h)
            //OR rpcCodec := codec.MsgpackSpecRpc.ServerCodec(conn, h)
            rpc.ServeCodec(rpcCodec)
        }
    }()

    //RPC Communication (client side)
    conn, err = net.Dial("tcp", "localhost:5555")
    rpcCodec := codec.GoRpc.ClientCodec(conn, h)
    //OR rpcCodec := codec.MsgpackSpecRpc.ClientCodec(conn, h)
    client := rpc.NewClientWithCodec(rpcCodec)

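A minimal, self-contained round trip built from the skeleton above (an
illustrative sketch; it assumes the import path github.com/ugorji/go/codec and
the ResetBytes methods for reuse):

    package main

    import (
        "fmt"

        "github.com/ugorji/go/codec"
    )

    func main() {
        var mh codec.MsgpackHandle

        in := map[string]int{"a": 1, "b": 2}
        var b []byte

        // encode the map into the byte slice
        enc := codec.NewEncoderBytes(&b, &mh)
        if err := enc.Encode(in); err != nil {
            panic(err)
        }

        // decode it back out
        var out map[string]int
        dec := codec.NewDecoderBytes(b, &mh)
        if err := dec.Decode(&out); err != nil {
            panic(err)
        }
        fmt.Println(out) // map[a:1 b:2]

        // reuse the same Encoder for another value via ResetBytes
        b = b[:0]
        enc.ResetBytes(&b)
        _ = enc.Encode([]string{"x", "y"})
    }
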
Running Tests

To run tests, use the following:

    go test

To run the full suite of tests, use the following:

    go test -tags alltests -run Suite

You can use the tag 'safe' to run tests or build in safe mode, e.g.

    go test -tags safe -run Json
    go test -tags "alltests safe" -run Suite

Running Benchmarks

    cd bench
    go test -bench . -benchmem -benchtime 1s

Please see http://github.com/ugorji/go-codec-bench .

Caveats

Struct fields matching the following are ignored during encoding and decoding:

  - struct tag value set to -
  - func, complex numbers, unsafe pointers
  - unexported and not embedded
  - unexported and embedded and not struct kind
  - unexported and embedded pointers (from go1.10)

Every other field in a struct will be encoded/decoded.

Embedded fields are encoded as if they exist in the top-level struct,
with some caveats. See Encode documentation.

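A quick sketch of the rules above (illustrative, and assuming the codec struct
tag key is used for the ignore marker):

    type record struct {
        Name    string                // encoded/decoded normally
        Age     int    `codec:"-"`    // ignored: struct tag value set to -
        Done    func()                // ignored: func field
        secret  string                // ignored: unexported and not embedded
        Pending complex128            // ignored: complex number
    }
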
*/
package codec
1376
vendor/github.com/ugorji/go/codec/encode.go
generated
vendored
1376
vendor/github.com/ugorji/go/codec/encode.go
generated
vendored
File diff suppressed because it is too large
8938
vendor/github.com/ugorji/go/codec/fast-path.generated.go
generated
vendored
8938
vendor/github.com/ugorji/go/codec/fast-path.generated.go
generated
vendored
File diff suppressed because it is too large
491
vendor/github.com/ugorji/go/codec/fast-path.go.tmpl
generated
vendored
491
vendor/github.com/ugorji/go/codec/fast-path.go.tmpl
generated
vendored
@@ -1,491 +0,0 @@
|
||||
// +build !notfastpath
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// Code generated from fast-path.go.tmpl - DO NOT EDIT.
|
||||
|
||||
package codec
|
||||
|
||||
// Fast path functions try to create a fast path encode or decode implementation
|
||||
// for common maps and slices.
|
||||
//
|
||||
// We define the functions and register them in this single file
|
||||
// so as not to pollute the encode.go and decode.go, and create a dependency in there.
|
||||
// This file can be omitted without causing a build failure.
|
||||
//
|
||||
// The advantage of fast paths is:
|
||||
// - Many calls bypass reflection altogether
|
||||
//
|
||||
// Currently supported:
|
||||
// - slice of all builtin types (numeric, bool, string, []byte)
|
||||
// - maps of builtin types to builtin or interface{} type, EXCEPT FOR
|
||||
// keys of type uintptr, int8/16/32, uint16/32, float32/64, bool, interface{}
|
||||
// AND values of type int8/16/32, uint16/32
|
||||
// This should provide adequate "typical" implementations.
|
||||
//
|
||||
// Note that fast track decode functions must handle values for which an address cannot be obtained.
|
||||
// For example:
|
||||
// m2 := map[string]int{}
|
||||
// p2 := []interface{}{m2}
|
||||
// decoding into p2 will bomb if fast track functions do not treat it as unaddressable.
|
||||
//
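// Illustrative note (not in the original source): encoding a []string or a
// map[string]int hits one of the generated fast-path functions, while a
// map[int8]string (int8 keys are excluded above) falls back to the general
// reflection-driven path.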
|
||||
|
||||
{{/*
|
||||
fastpathEncMapStringUint64R (called by fastpath...switch)
|
||||
EncMapStringUint64V (called by codecgen)
|
||||
|
||||
fastpathEncSliceBoolR: (called by fastpath...switch) (checks f.ti.mbs and calls one of them below)
|
||||
EncSliceBoolV (also called by codecgen)
|
||||
EncAsMapSliceBoolV (delegate when mapbyslice=true)
|
||||
|
||||
fastpathDecSliceIntfR (called by fastpath...switch) (calls Y or N below depending on if it can be updated)
|
||||
DecSliceIntfX (called by codecgen) (calls Y below)
|
||||
DecSliceIntfY (delegate when slice CAN be updated)
|
||||
DecSliceIntfN (delegate when slice CANNOT be updated e.g. from array or non-addressable slice)
|
||||
|
||||
fastpathDecMap...R (called by fastpath...switch) (calls L or X? below)
|
||||
DecMap...X (called by codecgen)
|
||||
DecMap...L (delegated to by both above)
|
||||
*/ -}}
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sort"
|
||||
)
|
||||
|
||||
const fastpathEnabled = true
|
||||
|
||||
const fastpathMapBySliceErrMsg = "mapBySlice requires even slice length, but got %v"
|
||||
|
||||
type fastpathT struct {}
|
||||
|
||||
var fastpathTV fastpathT
|
||||
|
||||
type fastpathE struct {
|
||||
rtid uintptr
|
||||
rt reflect.Type
|
||||
encfn func(*Encoder, *codecFnInfo, reflect.Value)
|
||||
decfn func(*Decoder, *codecFnInfo, reflect.Value)
|
||||
}
|
||||
|
||||
type fastpathA [{{ .FastpathLen }}]fastpathE
|
||||
|
||||
func (x *fastpathA) index(rtid uintptr) int {
|
||||
// use binary search to grab the index (adapted from sort/search.go)
|
||||
// Note: we use goto (instead of for loop) so this can be inlined.
|
||||
// h, i, j := 0, 0, len(x)
|
||||
var h, i uint
|
||||
var j = uint(len(x))
|
||||
LOOP:
|
||||
if i < j {
|
||||
h = i + (j-i)/2
|
||||
if x[h].rtid < rtid {
|
||||
i = h + 1
|
||||
} else {
|
||||
j = h
|
||||
}
|
||||
goto LOOP
|
||||
}
|
||||
if i < uint(len(x)) && x[i].rtid == rtid {
|
||||
return int(i)
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
type fastpathAslice []fastpathE
|
||||
|
||||
func (x fastpathAslice) Len() int { return len(x) }
|
||||
func (x fastpathAslice) Less(i, j int) bool { return x[uint(i)].rtid < x[uint(j)].rtid }
|
||||
func (x fastpathAslice) Swap(i, j int) { x[uint(i)], x[uint(j)] = x[uint(j)], x[uint(i)] }
|
||||
|
||||
var fastpathAV fastpathA
|
||||
|
||||
// due to possible initialization loop error, make fastpath in an init()
|
||||
func init() {
|
||||
var i uint = 0
|
||||
fn := func(v interface{},
|
||||
fe func(*Encoder, *codecFnInfo, reflect.Value),
|
||||
fd func(*Decoder, *codecFnInfo, reflect.Value)) {
|
||||
xrt := reflect.TypeOf(v)
|
||||
xptr := rt2id(xrt)
|
||||
fastpathAV[i] = fastpathE{xptr, xrt, fe, fd}
|
||||
i++
|
||||
}
|
||||
{{/* do not register []uint8 in fast-path */}}
|
||||
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}{{if ne .Elem "uint8" -}}
|
||||
fn([]{{ .Elem }}(nil), (*Encoder).{{ .MethodNamePfx "fastpathEnc" false }}R, (*Decoder).{{ .MethodNamePfx "fastpathDec" false }}R)
|
||||
{{end}}{{end}}{{end}}{{end}}
|
||||
|
||||
{{range .Values}}{{if not .Primitive}}{{if .MapKey -}}
|
||||
fn(map[{{ .MapKey }}]{{ .Elem }}(nil), (*Encoder).{{ .MethodNamePfx "fastpathEnc" false }}R, (*Decoder).{{ .MethodNamePfx "fastpathDec" false }}R)
|
||||
{{end}}{{end}}{{end}}
|
||||
|
||||
sort.Sort(fastpathAslice(fastpathAV[:]))
|
||||
}
|
||||
|
||||
// -- encode
|
||||
|
||||
// -- -- fast path type switch
|
||||
func fastpathEncodeTypeSwitch(iv interface{}, e *Encoder) bool {
|
||||
switch v := iv.(type) {
|
||||
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}{{if ne .Elem "uint8" -}}
|
||||
case []{{ .Elem }}:
|
||||
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(v, e)
|
||||
case *[]{{ .Elem }}:
|
||||
if *v == nil {
|
||||
e.e.EncodeNil()
|
||||
} else {
|
||||
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(*v, e)
|
||||
}
|
||||
{{end}}{{end}}{{end}}{{end -}}
|
||||
|
||||
{{range .Values}}{{if not .Primitive}}{{if .MapKey -}}
|
||||
case map[{{ .MapKey }}]{{ .Elem }}:
|
||||
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(v, e)
|
||||
case *map[{{ .MapKey }}]{{ .Elem }}:
|
||||
if *v == nil {
|
||||
e.e.EncodeNil()
|
||||
} else {
|
||||
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(*v, e)
|
||||
}
|
||||
{{end}}{{end}}{{end -}}
|
||||
|
||||
default:
|
||||
_ = v // workaround https://github.com/golang/go/issues/12927 seen in go1.4
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// -- -- fast path functions
|
||||
{{range .Values}}{{if not .Primitive}}{{if not .MapKey -}}
|
||||
func (e *Encoder) {{ .MethodNamePfx "fastpathEnc" false }}R(f *codecFnInfo, rv reflect.Value) {
|
||||
if f.ti.mbs {
|
||||
fastpathTV.{{ .MethodNamePfx "EncAsMap" false }}V(rv2i(rv).([]{{ .Elem }}), e)
|
||||
} else {
|
||||
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(rv2i(rv).([]{{ .Elem }}), e)
|
||||
}
|
||||
}
|
||||
func (fastpathT) {{ .MethodNamePfx "Enc" false }}V(v []{{ .Elem }}, e *Encoder) {
|
||||
{{/* if v == nil { e.e.EncodeNil(); return } */ -}}
|
||||
e.arrayStart(len(v))
|
||||
for j := range v {
|
||||
e.arrayElem()
|
||||
{{ encmd .Elem "v[j]"}}
|
||||
}
|
||||
e.arrayEnd()
|
||||
}
|
||||
func (fastpathT) {{ .MethodNamePfx "EncAsMap" false }}V(v []{{ .Elem }}, e *Encoder) {
|
||||
{{/* if v == nil { e.e.EncodeNil() } else */ -}}
|
||||
if len(v)%2 == 1 {
|
||||
e.errorf(fastpathMapBySliceErrMsg, len(v))
|
||||
} else {
|
||||
e.mapStart(len(v) / 2)
|
||||
for j := range v {
|
||||
if j%2 == 0 {
|
||||
e.mapElemKey()
|
||||
} else {
|
||||
e.mapElemValue()
|
||||
}
|
||||
{{ encmd .Elem "v[j]"}}
|
||||
}
|
||||
e.mapEnd()
|
||||
}
|
||||
}
|
||||
{{end}}{{end}}{{end -}}
|
||||
|
||||
{{range .Values}}{{if not .Primitive}}{{if .MapKey -}}
|
||||
func (e *Encoder) {{ .MethodNamePfx "fastpathEnc" false }}R(f *codecFnInfo, rv reflect.Value) {
|
||||
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(rv2i(rv).(map[{{ .MapKey }}]{{ .Elem }}), e)
|
||||
}
|
||||
func (fastpathT) {{ .MethodNamePfx "Enc" false }}V(v map[{{ .MapKey }}]{{ .Elem }}, e *Encoder) {
|
||||
{{/* if v == nil { e.e.EncodeNil(); return } */ -}}
|
||||
e.mapStart(len(v))
|
||||
if e.h.Canonical { {{/* need to figure out .NoCanonical */}}
|
||||
{{if eq .MapKey "interface{}"}}{{/* out of band */ -}}
|
||||
var mksv []byte = make([]byte, 0, len(v)*16) // temporary byte slice for the encoding
|
||||
e2 := NewEncoderBytes(&mksv, e.hh)
|
||||
v2 := make([]bytesIntf, len(v))
|
||||
var i, l uint {{/* put loop variables outside. seems currently needed for better perf */}}
|
||||
var vp *bytesIntf
|
||||
for k2 := range v {
|
||||
l = uint(len(mksv))
|
||||
e2.MustEncode(k2)
|
||||
vp = &v2[i]
|
||||
vp.v = mksv[l:]
|
||||
vp.i = k2
|
||||
i++
|
||||
}
|
||||
sort.Sort(bytesIntfSlice(v2))
|
||||
for j := range v2 {
|
||||
e.mapElemKey()
|
||||
e.asis(v2[j].v)
|
||||
e.mapElemValue()
|
||||
e.encode(v[v2[j].i])
|
||||
} {{else}}{{ $x := sorttype .MapKey true}}v2 := make([]{{ $x }}, len(v))
|
||||
var i uint
|
||||
for k := range v {
|
||||
v2[i] = {{if eq $x .MapKey}}k{{else}}{{ $x }}(k){{end}}
|
||||
i++
|
||||
}
|
||||
sort.Sort({{ sorttype .MapKey false}}(v2))
|
||||
for _, k2 := range v2 {
|
||||
e.mapElemKey()
|
||||
{{if eq .MapKey "string"}} e.e.EncodeString(k2) {{else}}{{ $y := printf "%s(k2)" .MapKey }}{{if eq $x .MapKey }}{{ $y = "k2" }}{{end}}{{ encmd .MapKey $y }}{{end}}
|
||||
e.mapElemValue()
|
||||
{{ $y := printf "v[%s(k2)]" .MapKey }}{{if eq $x .MapKey }}{{ $y = "v[k2]" }}{{end}}{{ encmd .Elem $y }}
|
||||
} {{end}}
|
||||
} else {
|
||||
for k2, v2 := range v {
|
||||
e.mapElemKey()
|
||||
{{if eq .MapKey "string"}} e.e.EncodeString(k2) {{else}}{{ encmd .MapKey "k2"}}{{end}}
|
||||
e.mapElemValue()
|
||||
{{ encmd .Elem "v2"}}
|
||||
}
|
||||
}
|
||||
e.mapEnd()
|
||||
}
|
||||
{{end}}{{end}}{{end -}}
|
||||
|
||||
// -- decode
|
||||
|
||||
// -- -- fast path type switch
|
||||
func fastpathDecodeTypeSwitch(iv interface{}, d *Decoder) bool {
|
||||
var changed bool
|
||||
var containerLen int
|
||||
switch v := iv.(type) {
|
||||
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}{{if ne .Elem "uint8" -}}
|
||||
case []{{ .Elem }}:
|
||||
fastpathTV.{{ .MethodNamePfx "Dec" false }}N(v, d)
|
||||
case *[]{{ .Elem }}:
|
||||
var v2 []{{ .Elem }}
|
||||
if v2, changed = fastpathTV.{{ .MethodNamePfx "Dec" false }}Y(*v, d); changed {
|
||||
*v = v2
|
||||
}
|
||||
{{end}}{{end}}{{end}}{{end -}}
|
||||
{{range .Values}}{{if not .Primitive}}{{if .MapKey }}{{/*
|
||||
// maps only change if nil, and in that case, there's no point copying
|
||||
*/ -}}
|
||||
case map[{{ .MapKey }}]{{ .Elem }}:
|
||||
containerLen = d.mapStart()
|
||||
if containerLen != decContainerLenNil {
|
||||
if containerLen != 0 {
|
||||
fastpathTV.{{ .MethodNamePfx "Dec" false }}L(v, containerLen, d)
|
||||
}
|
||||
d.mapEnd()
|
||||
}
|
||||
case *map[{{ .MapKey }}]{{ .Elem }}:
|
||||
{{/*
|
||||
containerLen = d.mapStart()
|
||||
if containerLen == 0 {
|
||||
d.mapEnd()
|
||||
} else if containerLen == decContainerLenNil {
|
||||
*v = nil
|
||||
} else {
|
||||
if *v == nil {
|
||||
*v = make(map[{{ .MapKey }}]{{ .Elem }}, decInferLen(containerLen, d.h.MaxInitLen, {{ .Size }}))
|
||||
}
|
||||
fastpathTV.{{ .MethodNamePfx "Dec" false }}L(*v, containerLen, d)
|
||||
}
|
||||
// consider delegating fully to X - encoding *map is uncommon, so ok to pay small function call cost
|
||||
*/ -}}
|
||||
fastpathTV.{{ .MethodNamePfx "Dec" false }}X(v, d)
|
||||
{{end}}{{end}}{{end -}}
|
||||
default:
|
||||
_ = v // workaround https://github.com/golang/go/issues/12927 seen in go1.4
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func fastpathDecodeSetZeroTypeSwitch(iv interface{}) bool {
|
||||
switch v := iv.(type) {
|
||||
{{range .Values}}{{if not .Primitive}}{{if not .MapKey -}}
|
||||
case *[]{{ .Elem }}:
|
||||
*v = nil
|
||||
{{end}}{{end}}{{end}}
|
||||
{{range .Values}}{{if not .Primitive}}{{if .MapKey -}}
|
||||
case *map[{{ .MapKey }}]{{ .Elem }}:
|
||||
*v = nil
|
||||
{{end}}{{end}}{{end}}
|
||||
default:
|
||||
_ = v // workaround https://github.com/golang/go/issues/12927 seen in go1.4
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// -- -- fast path functions
|
||||
{{range .Values}}{{if not .Primitive}}{{if not .MapKey -}}
|
||||
{{/*
|
||||
Slices can change if they
|
||||
- did not come from an array
|
||||
- are addressable (from a ptr)
|
||||
- are settable (e.g. contained in an interface{})
|
||||
*/}}
|
||||
func (d *Decoder) {{ .MethodNamePfx "fastpathDec" false }}R(f *codecFnInfo, rv reflect.Value) {
|
||||
if f.seq != seqTypeArray && rv.Kind() == reflect.Ptr {
|
||||
vp := rv2i(rv).(*[]{{ .Elem }})
|
||||
if v, changed := fastpathTV.{{ .MethodNamePfx "Dec" false }}Y(*vp, d); changed { *vp = v }
|
||||
} else {
|
||||
fastpathTV.{{ .MethodNamePfx "Dec" false }}N(rv2i(rv).([]{{ .Elem }}), d)
|
||||
}
|
||||
}
|
||||
func (f fastpathT) {{ .MethodNamePfx "Dec" false }}X(vp *[]{{ .Elem }}, d *Decoder) {
|
||||
if v, changed := f.{{ .MethodNamePfx "Dec" false }}Y(*vp, d); changed { *vp = v }
|
||||
}
|
||||
func (fastpathT) {{ .MethodNamePfx "Dec" false }}Y(v []{{ .Elem }}, d *Decoder) (_ []{{ .Elem }}, changed bool) {
|
||||
slh, containerLenS := d.decSliceHelperStart()
|
||||
if slh.IsNil {
|
||||
if v == nil { return }
|
||||
return nil, true
|
||||
}
|
||||
if containerLenS == 0 {
|
||||
if v == nil { v = []{{ .Elem }}{} } else if len(v) != 0 { v = v[:0] }
|
||||
slh.End()
|
||||
return v, true
|
||||
}
|
||||
hasLen := containerLenS > 0
|
||||
var xlen int
|
||||
if hasLen {
|
||||
if containerLenS > cap(v) {
|
||||
xlen = decInferLen(containerLenS, d.h.MaxInitLen, {{ .Size }})
|
||||
if xlen <= cap(v) {
|
||||
v = v[:uint(xlen)]
|
||||
} else {
|
||||
v = make([]{{ .Elem }}, uint(xlen))
|
||||
}
|
||||
changed = true
|
||||
} else if containerLenS != len(v) {
|
||||
v = v[:containerLenS]
|
||||
changed = true
|
||||
}
|
||||
}
|
||||
var j int
|
||||
for j = 0; (hasLen && j < containerLenS) || !(hasLen || d.checkBreak()); j++ {
|
||||
if j == 0 && len(v) == 0 {
|
||||
if hasLen {
|
||||
xlen = decInferLen(containerLenS, d.h.MaxInitLen, {{ .Size }})
|
||||
} else {
|
||||
xlen = 8
|
||||
}
|
||||
v = make([]{{ .Elem }}, uint(xlen))
|
||||
changed = true
|
||||
}
|
||||
{{/* // if indefinite, etc, then expand the slice if necessary */ -}}
|
||||
if j >= len(v) {
|
||||
v = append(v, {{ zerocmd .Elem }})
|
||||
changed = true
|
||||
}
|
||||
slh.ElemContainerState(j)
|
||||
{{ if eq .Elem "interface{}" }}d.decode(&v[uint(j)]){{ else }}v[uint(j)] = {{ decmd .Elem }}{{ end }}
|
||||
}
|
||||
if j < len(v) {
|
||||
v = v[:uint(j)]
|
||||
changed = true
|
||||
} else if j == 0 && v == nil {
|
||||
v = []{{ .Elem }}{}
|
||||
changed = true
|
||||
}
|
||||
slh.End()
|
||||
return v, changed
|
||||
}
|
||||
func (fastpathT) {{ .MethodNamePfx "Dec" false }}N(v []{{ .Elem }}, d *Decoder) {
|
||||
slh, containerLenS := d.decSliceHelperStart()
|
||||
if slh.IsNil {
|
||||
return
|
||||
}
|
||||
if containerLenS == 0 {
|
||||
slh.End()
|
||||
return
|
||||
}
|
||||
hasLen := containerLenS > 0
|
||||
for j := 0; (hasLen && j < containerLenS) || !(hasLen || d.checkBreak()); j++ {
|
||||
{{/* // if indefinite, etc, then expand the slice if necessary */ -}}
|
||||
if j >= len(v) {
|
||||
decArrayCannotExpand(slh, hasLen, len(v), j, containerLenS)
|
||||
return
|
||||
}
|
||||
slh.ElemContainerState(j)
|
||||
{{ if eq .Elem "interface{}" -}}
|
||||
d.decode(&v[uint(j)])
|
||||
{{- else -}}
|
||||
v[uint(j)] = {{ decmd .Elem }}
|
||||
{{- end }}
|
||||
}
|
||||
slh.End()
|
||||
}
|
||||
{{end}}{{end}}{{end -}}
|
||||
|
||||
{{range .Values}}{{if not .Primitive}}{{if .MapKey -}}
|
||||
{{/*
|
||||
Maps can change if they are
|
||||
- addressable (from a ptr)
|
||||
- settable (e.g. contained in an interface{})
|
||||
*/ -}}
|
||||
func (d *Decoder) {{ .MethodNamePfx "fastpathDec" false }}R(f *codecFnInfo, rv reflect.Value) {
|
||||
containerLen := d.mapStart()
|
||||
if containerLen == decContainerLenNil {
|
||||
if rv.Kind() == reflect.Ptr {
|
||||
*(rv2i(rv).(*map[{{ .MapKey }}]{{ .Elem }})) = nil
|
||||
}
|
||||
} else {
|
||||
if rv.Kind() == reflect.Ptr {
|
||||
vp, _ := rv2i(rv).(*map[{{ .MapKey }}]{{ .Elem }})
|
||||
if *vp == nil {
|
||||
*vp = make(map[{{ .MapKey }}]{{ .Elem }}, decInferLen(containerLen, d.h.MaxInitLen, {{ .Size }}))
|
||||
}
|
||||
if containerLen != 0 {
|
||||
fastpathTV.{{ .MethodNamePfx "Dec" false }}L(*vp, containerLen, d)
|
||||
}
|
||||
} else if containerLen != 0 {
|
||||
fastpathTV.{{ .MethodNamePfx "Dec" false }}L(rv2i(rv).(map[{{ .MapKey }}]{{ .Elem }}), containerLen, d)
|
||||
}
|
||||
d.mapEnd()
|
||||
}
|
||||
}
|
||||
func (f fastpathT) {{ .MethodNamePfx "Dec" false }}X(vp *map[{{ .MapKey }}]{{ .Elem }}, d *Decoder) {
|
||||
containerLen := d.mapStart()
|
||||
if containerLen == decContainerLenNil {
|
||||
*vp = nil
|
||||
} else {
|
||||
if *vp == nil {
|
||||
*vp = make(map[{{ .MapKey }}]{{ .Elem }}, decInferLen(containerLen, d.h.MaxInitLen, {{ .Size }}))
|
||||
}
|
||||
if containerLen != 0 {
|
||||
f.{{ .MethodNamePfx "Dec" false }}L(*vp, containerLen, d)
|
||||
}
|
||||
d.mapEnd()
|
||||
}
|
||||
}
|
||||
func (fastpathT) {{ .MethodNamePfx "Dec" false }}L(v map[{{ .MapKey }}]{{ .Elem }}, containerLen int, d *Decoder) {
|
||||
{{/* No need to check if containerLen == decContainerLenNil, as that is checked by R and L above */ -}}
|
||||
{{if eq .Elem "interface{}" }}mapGet := v != nil && !d.h.MapValueReset && !d.h.InterfaceReset
|
||||
{{else if eq .Elem "bytes" "[]byte" }}mapGet := v != nil && !d.h.MapValueReset
|
||||
{{end -}}
|
||||
var mk {{ .MapKey }}
|
||||
var mv {{ .Elem }}
|
||||
hasLen := containerLen > 0
|
||||
for j := 0; (hasLen && j < containerLen) || !(hasLen || d.checkBreak()); j++ {
|
||||
d.mapElemKey()
|
||||
{{ if eq .MapKey "interface{}" }}mk = nil
|
||||
d.decode(&mk)
|
||||
if bv, bok := mk.([]byte); bok {
|
||||
mk = d.string(bv) {{/* // maps cannot have []byte as key. switch to string. */}}
|
||||
}{{ else }}mk = {{ decmd .MapKey }}{{ end }}
|
||||
d.mapElemValue()
|
||||
{{ if eq .Elem "interface{}" "[]byte" "bytes" -}}
|
||||
if mapGet { mv = v[mk] } else { mv = nil }
|
||||
{{ end -}}
|
||||
{{ if eq .Elem "interface{}" -}}
|
||||
d.decode(&mv)
|
||||
{{ else if eq .Elem "[]byte" "bytes" -}}
|
||||
mv = d.d.DecodeBytes(mv, false)
|
||||
{{ else -}}
|
||||
mv = {{ decmd .Elem }}
|
||||
{{ end -}}
|
||||
if v != nil { v[mk] = mv }
|
||||
}
|
||||
}
|
||||
{{end}}{{end}}{{end}}
|
||||
41
vendor/github.com/ugorji/go/codec/fast-path.not.go
generated
vendored
41
vendor/github.com/ugorji/go/codec/fast-path.not.go
generated
vendored
@@ -1,41 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// +build notfastpath
|
||||
|
||||
package codec
|
||||
|
||||
import "reflect"
|
||||
|
||||
const fastpathEnabled = false
|
||||
|
||||
// The generated fast-path code is very large, and adds a few seconds to the build time.
|
||||
// This causes test execution, execution of small tools which use codec, etc
|
||||
// to take a long time.
|
||||
//
|
||||
// To mitigate, we now support the notfastpath tag.
|
||||
// This tag disables fastpath during build, allowing for faster build, test execution,
|
||||
// short-program runs, etc.
|
||||
|
||||
func fastpathDecodeTypeSwitch(iv interface{}, d *Decoder) bool { return false }
|
||||
func fastpathEncodeTypeSwitch(iv interface{}, e *Encoder) bool { return false }
|
||||
func fastpathEncodeTypeSwitchSlice(iv interface{}, e *Encoder) bool { return false }
|
||||
func fastpathEncodeTypeSwitchMap(iv interface{}, e *Encoder) bool { return false }
|
||||
func fastpathDecodeSetZeroTypeSwitch(iv interface{}) bool { return false }
|
||||
|
||||
type fastpathT struct{}
|
||||
type fastpathE struct {
|
||||
rtid uintptr
|
||||
rt reflect.Type
|
||||
encfn func(*Encoder, *codecFnInfo, reflect.Value)
|
||||
decfn func(*Decoder, *codecFnInfo, reflect.Value)
|
||||
}
|
||||
type fastpathA [0]fastpathE
|
||||
|
||||
func (x fastpathA) index(rtid uintptr) int { return -1 }
|
||||
|
||||
var fastpathAV fastpathA
|
||||
var fastpathTV fastpathT
|
||||
|
||||
// ----
|
||||
type TestMammoth2Wrapper struct{} // to allow testMammoth work in notfastpath mode
|
||||
810
vendor/github.com/ugorji/go/codec/float.go
generated
vendored
810
vendor/github.com/ugorji/go/codec/float.go
generated
vendored
@@ -1,810 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// func parseFloat(b []byte, bitsize int) (f float64, err error) {
|
||||
// if bitsize == 32 {
|
||||
// return parseFloat32(b)
|
||||
// } else {
|
||||
// return parseFloat64(b)
|
||||
// }
|
||||
// }
|
||||
|
||||
func parseFloat32(b []byte) (f float32, err error) {
|
||||
return parseFloat32_custom(b)
|
||||
// return parseFloat32_strconv(b)
|
||||
}
|
||||
|
||||
func parseFloat64(b []byte) (f float64, err error) {
|
||||
return parseFloat64_custom(b)
|
||||
// return parseFloat64_strconv(b)
|
||||
}
|
||||
|
||||
func parseFloat32_strconv(b []byte) (f float32, err error) {
|
||||
f64, err := strconv.ParseFloat(stringView(b), 32)
|
||||
f = float32(f64)
|
||||
return
|
||||
}
|
||||
|
||||
func parseFloat64_strconv(b []byte) (f float64, err error) {
|
||||
return strconv.ParseFloat(stringView(b), 64)
|
||||
}
|
||||
|
||||
// ------ parseFloat custom below --------
|
||||
|
||||
// We assume that a lot of floating point numbers in json files will be
|
||||
// those that are handwritten, and with defined precision (in terms of number
|
||||
// of digits after decimal point), etc.
|
||||
//
|
||||
// We further assume that these can be written in exact format.
|
||||
//
|
||||
// strconv.ParseFloat has some unnecessary overhead which we can do without
|
||||
// for the common case:
|
||||
//
|
||||
// - expensive char-by-char check to see if underscores are in right place
|
||||
// - testing for and skipping underscores
|
||||
// - check if the string matches ignorecase +/- inf, +/- infinity, nan
|
||||
// - support for base 16 (0xFFFF...)
|
||||
//
|
||||
// The functions below will try a fast-path for floats which can be decoded
|
||||
// without any loss of precision, meaning they:
|
||||
//
|
||||
// - the value fits within the significand bits of the 32-bit or 64-bit float
|
||||
// - the exponent fits within the supported exponent range
|
||||
// - there is no truncation (any extra digits are all trailing zeros)
|
||||
//
|
||||
// To figure out what the values are for maxMantDigits, use this idea below:
|
||||
//
|
||||
// 2^23 = 838 8608 (between 10^ 6 and 10^ 7) (significand bits of float32)
|
||||
// 2^32 = 42 9496 7296 (between 10^ 9 and 10^10) (full uint32)
|
||||
// 2^52 = 4503 5996 2737 0496 (between 10^15 and 10^16) (significand bits of float64)
|
||||
// 2^64 = 1844 6744 0737 0955 1616 (between 10^19 and 10^20) (full uint64)
|
||||
//
|
||||
// Note: we only allow for up to what can comfortably fit into the significand
|
||||
// ignoring the exponent, and we only try to parse iff significand fits.
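//
// Worked example (illustrative, not in the original source): parsing "123.45"
// yields mantissa=12345 and exp=-2; both checks pass, so the fast path computes
// 12345 / 10^2 exactly. Something like "1e300", whose exponent is outside the
// exact range, is handed off to strconv.ParseFloat instead.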
|
||||
|
||||
const (
|
||||
thousand = 1000
|
||||
million = thousand * thousand
|
||||
billion = thousand * million
|
||||
trillion = thousand * billion
|
||||
quadrillion = thousand * trillion
|
||||
quintillion = thousand * quadrillion
|
||||
)
|
||||
|
||||
// Exact powers of 10.
|
||||
var uint64pow10 = [...]uint64{
|
||||
1, 10, 100,
|
||||
1 * thousand, 10 * thousand, 100 * thousand,
|
||||
1 * million, 10 * million, 100 * million,
|
||||
1 * billion, 10 * billion, 100 * billion,
|
||||
1 * trillion, 10 * trillion, 100 * trillion,
|
||||
1 * quadrillion, 10 * quadrillion, 100 * quadrillion,
|
||||
1 * quintillion, 10 * quintillion,
|
||||
}
|
||||
var float64pow10 = [...]float64{
|
||||
1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9,
|
||||
1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19,
|
||||
1e20, 1e21, 1e22,
|
||||
}
|
||||
var float32pow10 = [...]float32{
|
||||
1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10,
|
||||
}
|
||||
|
||||
type floatinfo struct {
|
||||
mantbits uint8
|
||||
|
||||
// expbits uint8 // (unused)
|
||||
// bias int16 // (unused)
|
||||
|
||||
is32bit bool
|
||||
exactPow10 int8 // Exact powers of ten are <= 10^N (32: 10, 64: 22)
|
||||
|
||||
exactInts int8 // Exact integers are <= 10^N (for non-float, set to 0)
|
||||
// maxMantDigits int8 // 10^19 fits in uint64, while 10^9 fits in uint32
|
||||
|
||||
mantCutoffIsUint64Cutoff bool
|
||||
|
||||
mantCutoff uint64
|
||||
}
|
||||
|
||||
// var fi32 = floatinfo{23, 8, -127, 10, 7, 9, fUint32Cutoff}
|
||||
// var fi64 = floatinfo{52, 11, -1023, 22, 15, 19, fUint64Cutoff}
|
||||
|
||||
// var fi64u = floatinfo{64, 0, -1023, 19, 0, 19, fUint64Cutoff}
|
||||
// var fi64i = floatinfo{63, 0, -1023, 19, 0, 19, fUint64Cutoff}
|
||||
|
||||
var fi32 = floatinfo{23, true, 10, 7, false, 1<<23 - 1}
|
||||
var fi64 = floatinfo{52, false, 22, 15, false, 1<<52 - 1}
|
||||
|
||||
var fi64u = floatinfo{0, false, 19, 0, true, fUint64Cutoff}
|
||||
var fi64i = floatinfo{0, false, 19, 0, true, fUint64Cutoff}
|
||||
|
||||
const fMaxMultiplierForExactPow10_64 = 1e15
|
||||
const fMaxMultiplierForExactPow10_32 = 1e7
|
||||
|
||||
const fUint64Cutoff = (1<<64-1)/10 + 1
|
||||
const fUint32Cutoff = (1<<32-1)/10 + 1
|
||||
|
||||
const fBase = 10
|
||||
|
||||
func strconvParseErr(b []byte, fn string) error {
|
||||
return &strconv.NumError{
|
||||
Func: fn,
|
||||
Err: strconv.ErrSyntax,
|
||||
Num: string(b),
|
||||
}
|
||||
}
|
||||
|
||||
func parseFloat32_reader(r readFloatResult) (f float32, fail bool) {
|
||||
// parseFloatDebug(b, 32, false, exp, trunc, ok)
|
||||
f = float32(r.mantissa)
|
||||
if r.exp == 0 {
|
||||
} else if r.exp < 0 { // int / 10^k
|
||||
f /= float32pow10[uint8(-r.exp)]
|
||||
} else { // exp > 0
|
||||
if r.exp > fi32.exactPow10 {
|
||||
f *= float32pow10[r.exp-fi32.exactPow10]
|
||||
if f > fMaxMultiplierForExactPow10_32 { // exponent too large - outside range
|
||||
fail = true
|
||||
return // ok = false
|
||||
}
|
||||
f *= float32pow10[fi32.exactPow10]
|
||||
} else {
|
||||
f *= float32pow10[uint8(r.exp)]
|
||||
}
|
||||
}
|
||||
if r.neg {
|
||||
f = -f
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func parseFloat32_custom(b []byte) (f float32, err error) {
|
||||
r := readFloat(b, fi32)
|
||||
if r.bad {
|
||||
return 0, strconvParseErr(b, "ParseFloat")
|
||||
}
|
||||
if r.ok {
|
||||
f, r.bad = parseFloat32_reader(r)
|
||||
if r.bad {
|
||||
goto FALLBACK
|
||||
}
|
||||
return
|
||||
}
|
||||
FALLBACK:
|
||||
return parseFloat32_strconv(b)
|
||||
}
|
||||
|
||||
func parseFloat64_reader(r readFloatResult) (f float64, fail bool) {
|
||||
f = float64(r.mantissa)
|
||||
if r.exp == 0 {
|
||||
} else if r.exp < 0 { // int / 10^k
|
||||
f /= float64pow10[-uint8(r.exp)]
|
||||
} else { // exp > 0
|
||||
if r.exp > fi64.exactPow10 {
|
||||
f *= float64pow10[r.exp-fi64.exactPow10]
|
||||
if f > fMaxMultiplierForExactPow10_64 { // exponent too large - outside range
|
||||
fail = true
|
||||
return
|
||||
}
|
||||
f *= float64pow10[fi64.exactPow10]
|
||||
} else {
|
||||
f *= float64pow10[uint8(r.exp)]
|
||||
}
|
||||
}
|
||||
if r.neg {
|
||||
f = -f
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func parseFloat64_custom(b []byte) (f float64, err error) {
|
||||
r := readFloat(b, fi64)
|
||||
if r.bad {
|
||||
return 0, strconvParseErr(b, "ParseFloat")
|
||||
}
|
||||
if r.ok {
|
||||
f, r.bad = parseFloat64_reader(r)
|
||||
if r.bad {
|
||||
goto FALLBACK
|
||||
}
|
||||
return
|
||||
}
|
||||
FALLBACK:
|
||||
return parseFloat64_strconv(b)
|
||||
}
|
||||
|
||||
func parseUint64_simple(b []byte) (n uint64, ok bool) {
|
||||
var i int
|
||||
var n1 uint64
|
||||
var c uint8
|
||||
LOOP:
|
||||
if i < len(b) {
|
||||
c = b[i]
|
||||
// unsigned integers don't overflow well on multiplication, so check cutoff here
|
||||
// e.g. (maxUint64-5)*10 doesn't overflow well ...
|
||||
// if n >= fUint64Cutoff || !isDigitChar(b[i]) { // if c < '0' || c > '9' {
|
||||
if n >= fUint64Cutoff || c < '0' || c > '9' {
|
||||
return
|
||||
} else if c == '0' {
|
||||
n *= fBase
|
||||
} else {
|
||||
n1 = n
|
||||
n = n*fBase + uint64(c-'0')
|
||||
if n < n1 {
|
||||
return
|
||||
}
|
||||
}
|
||||
i++
|
||||
goto LOOP
|
||||
}
|
||||
ok = true
|
||||
return
|
||||
}
|
||||
|
||||
// func parseUint64_simple(b []byte) (n uint64, ok bool) {
|
||||
// var i int
|
||||
// var n1 uint64
|
||||
// LOOP:
|
||||
// if i < len(b) {
|
||||
// // unsigned integers don't overflow well on multiplication, so check cutoff here
|
||||
// // e.g. (maxUint64-5)*10 doesn't overflow well ...
|
||||
// if n >= fUint64Cutoff || !isDigitChar(b[i]) { // if c < '0' || c > '9' {
|
||||
// return
|
||||
// }
|
||||
// n *= fBase
|
||||
// n1 = n + uint64(b[i]-'0')
|
||||
// if n1 < n {
|
||||
// return
|
||||
// }
|
||||
// n = n1
|
||||
// i++
|
||||
// goto LOOP
|
||||
// }
|
||||
// ok = true
|
||||
// return
|
||||
// }
|
||||
|
||||
// func parseUint64_simple(b []byte) (n uint64, ok bool) {
|
||||
// var i int
|
||||
// var n1 uint64
|
||||
// LOOP:
|
||||
// if i < len(b) {
|
||||
// // unsigned integers don't overflow well on multiplication, so check cutoff here
|
||||
// // e.g. (maxUint64-5)*10 doesn't overflow well ...
|
||||
// if n >= fUint64Cutoff { // if c < '0' || c > '9' {
|
||||
// return
|
||||
// }
|
||||
// n *= fBase
|
||||
// switch b[i] {
|
||||
// case '0':
|
||||
// case '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
// n1 = n
|
||||
// n += uint64(b[i] - '0')
|
||||
// if n < n1 {
|
||||
// return
|
||||
// }
|
||||
// default:
|
||||
// return
|
||||
// }
|
||||
// i++
|
||||
// goto LOOP
|
||||
// }
|
||||
// ok = true
|
||||
// return
|
||||
// }
|
||||
|
||||
// func parseUint64_simple(b []byte) (n uint64, ok bool) {
|
||||
// var i int
|
||||
// var n1 uint64
|
||||
// LOOP:
|
||||
// if i < len(b) {
|
||||
// // unsigned integers don't overflow well on multiplication, so check cutoff here
|
||||
// // e.g. (maxUint64-5)*10 doesn't overflow well ...
|
||||
// if n >= fUint64Cutoff { // if c < '0' || c > '9' {
|
||||
// return
|
||||
// }
|
||||
// switch b[i] {
|
||||
// case '0':
|
||||
// // n = n<<3 + n<<1
|
||||
// n *= 10
|
||||
// case '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
// n1 = n
|
||||
// // n = n<<3 + n<<1 + uint64(b[i]-'0')
|
||||
// n = (n * 10) + uint64(b[i]-'0')
|
||||
// if n < n1 {
|
||||
// return
|
||||
// }
|
||||
// default:
|
||||
// return
|
||||
// }
|
||||
// i++
|
||||
// goto LOOP
|
||||
// }
|
||||
// ok = true
|
||||
// return
|
||||
// }
|
||||
|
||||
func parseUint64_reader(r readFloatResult) (f uint64, fail bool) {
|
||||
f = r.mantissa
|
||||
if r.exp == 0 {
|
||||
} else if r.exp < 0 { // int / 10^k
|
||||
if f%uint64pow10[uint8(-r.exp)] != 0 {
|
||||
fail = true
|
||||
} else {
|
||||
f /= uint64pow10[uint8(-r.exp)]
|
||||
}
|
||||
} else { // exp > 0
|
||||
f *= uint64pow10[uint8(r.exp)]
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func parseInt64_reader(r readFloatResult) (v int64, fail bool) {
|
||||
// xdebugf("parseint64_reader: r: %#v", r)
|
||||
if r.exp == 0 {
|
||||
} else if r.exp < 0 { // int / 10^k
|
||||
if r.mantissa%uint64pow10[uint8(-r.exp)] != 0 {
|
||||
// fail = true
|
||||
return 0, true
|
||||
}
|
||||
r.mantissa /= uint64pow10[uint8(-r.exp)]
|
||||
} else { // exp > 0
|
||||
r.mantissa *= uint64pow10[uint8(r.exp)]
|
||||
}
|
||||
if chkOvf.Uint2Int(r.mantissa, r.neg) {
|
||||
fail = true
|
||||
} else if r.neg {
|
||||
v = -int64(r.mantissa)
|
||||
} else {
|
||||
v = int64(r.mantissa)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// parseNumber will return an integer if only composed of [-]?[0-9]+
|
||||
// Else it will return a float.
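// Illustrative examples (not in the original source): "-12" is stored as
// int64(-12), "12" as uint64(12) (or int64(12) when preferSignedInt is set),
// and "12.5" as float64(12.5).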
|
||||
func parseNumber(b []byte, z *fauxUnion, preferSignedInt bool) (err error) {
|
||||
var ok, neg bool
|
||||
var f uint64
|
||||
|
||||
// var b1 []byte
|
||||
// if b[0] == '-' {
|
||||
// neg = true
|
||||
// b1 = b[1:]
|
||||
// } else {
|
||||
// b1 = b
|
||||
// }
|
||||
// f, ok = parseUint64_simple(b1)
|
||||
|
||||
if len(b) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
if b[0] == '-' {
|
||||
neg = true
|
||||
f, ok = parseUint64_simple(b[1:])
|
||||
} else {
|
||||
f, ok = parseUint64_simple(b)
|
||||
}
|
||||
|
||||
if ok {
|
||||
if neg {
|
||||
z.v = valueTypeInt
|
||||
if chkOvf.Uint2Int(f, neg) {
|
||||
return strconvParseErr(b, "ParseInt")
|
||||
}
|
||||
z.i = -int64(f)
|
||||
} else if preferSignedInt {
|
||||
z.v = valueTypeInt
|
||||
if chkOvf.Uint2Int(f, neg) {
|
||||
return strconvParseErr(b, "ParseInt")
|
||||
}
|
||||
z.i = int64(f)
|
||||
} else {
|
||||
z.v = valueTypeUint
|
||||
z.u = f
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
z.v = valueTypeFloat
|
||||
z.f, err = parseFloat64_custom(b)
|
||||
return
|
||||
}
|
||||
|
||||
type readFloatResult struct {
|
||||
mantissa uint64
|
||||
exp int8
|
||||
neg, sawdot, sawexp, trunc, bad bool
|
||||
ok bool
|
||||
|
||||
_ byte // padding
|
||||
}
|
||||
|
||||
func readFloat(s []byte, y floatinfo) (r readFloatResult) {
|
||||
// defer func() { xdebugf("readFloat: %#v", r) }()
|
||||
|
||||
var i uint // uint, so that we eliminate bounds checking
|
||||
var slen = uint(len(s))
|
||||
if slen == 0 {
|
||||
// read an empty string as the zero value
|
||||
// r.bad = true
|
||||
r.ok = true
|
||||
return
|
||||
}
|
||||
|
||||
if s[0] == '-' {
|
||||
r.neg = true
|
||||
i++
|
||||
}
|
||||
|
||||
// we considered punting early if string has length > maxMantDigits, but this doesn't account
|
||||
// for trailing 0's e.g. 700000000000000000000 can be encoded exactly as it is 7e20
|
||||
|
||||
var nd, ndMant, dp int8
|
||||
var xu uint64
|
||||
|
||||
LOOP:
|
||||
for ; i < slen; i++ {
|
||||
switch s[i] {
|
||||
case '.':
|
||||
if r.sawdot {
|
||||
r.bad = true
|
||||
return
|
||||
}
|
||||
r.sawdot = true
|
||||
dp = nd
|
||||
case 'e', 'E':
|
||||
r.sawexp = true
|
||||
break LOOP
|
||||
case '0':
|
||||
if nd == 0 {
|
||||
dp--
|
||||
continue LOOP
|
||||
}
|
||||
nd++
|
||||
if r.mantissa < y.mantCutoff {
|
||||
r.mantissa *= fBase
|
||||
ndMant++
|
||||
}
|
||||
case '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
nd++
|
||||
if y.mantCutoffIsUint64Cutoff && r.mantissa < fUint64Cutoff {
|
||||
r.mantissa *= fBase
|
||||
xu = r.mantissa + uint64(s[i]-'0')
|
||||
if xu < r.mantissa {
|
||||
r.trunc = true
|
||||
return
|
||||
}
|
||||
r.mantissa = xu
|
||||
} else if r.mantissa < y.mantCutoff {
|
||||
// mantissa = (mantissa << 1) + (mantissa << 3) + uint64(c-'0')
|
||||
r.mantissa = r.mantissa*fBase + uint64(s[i]-'0')
|
||||
} else {
|
||||
r.trunc = true
|
||||
return
|
||||
}
|
||||
ndMant++
|
||||
default:
|
||||
r.bad = true
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if !r.sawdot {
|
||||
dp = nd
|
||||
}
|
||||
|
||||
if r.sawexp {
|
||||
i++
|
||||
if i < slen {
|
||||
var eneg bool
|
||||
if s[i] == '+' {
|
||||
i++
|
||||
} else if s[i] == '-' {
|
||||
i++
|
||||
eneg = true
|
||||
}
|
||||
if i < slen {
|
||||
// for exact match, exponent is 1 or 2 digits (float64: -22 to 37, float32: -1 to 17).
|
||||
// exit quick if exponent is more than 2 digits.
|
||||
if i+2 < slen {
|
||||
return
|
||||
}
|
||||
var e int8
|
||||
if s[i] < '0' || s[i] > '9' { // !isDigitChar(s[i]) { //
|
||||
r.bad = true
|
||||
return
|
||||
}
|
||||
e = int8(s[i] - '0')
|
||||
i++
|
||||
if i < slen {
|
||||
if s[i] < '0' || s[i] > '9' { // !isDigitChar(s[i]) { //
|
||||
r.bad = true
|
||||
return
|
||||
}
|
||||
e = e*fBase + int8(s[i]-'0') // (e << 1) + (e << 3) + int8(s[i]-'0')
|
||||
i++
|
||||
}
|
||||
if eneg {
|
||||
dp -= e
|
||||
} else {
|
||||
dp += e
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if r.mantissa != 0 {
|
||||
r.exp = dp - ndMant
|
||||
// do not set ok=true for cases we cannot handle
|
||||
if r.exp < -y.exactPow10 ||
|
||||
r.exp > y.exactInts+y.exactPow10 ||
|
||||
y.mantbits != 0 && r.mantissa>>y.mantbits != 0 {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
r.ok = true
|
||||
return
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
func parseUint64(b []byte) (f uint64, err error) {
|
||||
if b[0] == '-' {
|
||||
return 0, strconvParseErr(b, "ParseUint")
|
||||
}
|
||||
f, ok := parseUint64_simple(b)
|
||||
if !ok {
|
||||
return parseUint64_custom(b)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func parseInt64(b []byte) (v int64, err error) {
|
||||
// defer func() { xdebug2f("parseInt64: %s, return: %d, %v", b, v, err) }()
|
||||
// xdebugf("parseInt64: %s", b)
|
||||
var ok, neg bool
|
||||
var f uint64
|
||||
var r readFloatResult
|
||||
|
||||
if b[0] == '-' {
|
||||
neg = true
|
||||
b = b[1:]
|
||||
}
|
||||
|
||||
f, ok = parseUint64_simple(b)
|
||||
// xdebugf("parseuint64_simple: %v, %v", f, ok)
|
||||
if ok {
|
||||
if chkOvf.Uint2Int(f, neg) {
|
||||
goto ERROR
|
||||
}
|
||||
if neg {
|
||||
v = -int64(f)
|
||||
} else {
|
||||
v = int64(f)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
r = readFloat(b, fi64i)
|
||||
// xdebugf("readFloat ok: %v", r.ok)
|
||||
if r.ok {
|
||||
r.neg = neg
|
||||
v, r.bad = parseInt64_reader(r)
|
||||
if r.bad {
|
||||
goto ERROR
|
||||
}
|
||||
return
|
||||
}
|
||||
ERROR:
|
||||
err = strconvParseErr(b, "ParseInt")
|
||||
return
|
||||
}
|
||||
|
||||
func parseUint64_custom(b []byte) (f uint64, err error) {
|
||||
r := readFloat(b, fi64u)
|
||||
if r.ok {
|
||||
f, r.bad = parseUint64_reader(r)
|
||||
if r.bad {
|
||||
err = strconvParseErr(b, "ParseUint")
|
||||
}
|
||||
return
|
||||
}
|
||||
err = strconvParseErr(b, "ParseUint")
|
||||
return
|
||||
}
|
||||
|
||||
func parseUint64_simple(b []byte) (n uint64, ok bool) {
|
||||
for _, c := range b {
|
||||
if c < '0' || c > '9' {
|
||||
return
|
||||
}
|
||||
// unsigned integers don't overflow well on multiplication, so check cutoff here
|
||||
// e.g. (maxUint64-5)*10 doesn't overflow well ...
|
||||
if n >= uint64Cutoff {
|
||||
return
|
||||
}
|
||||
n *= 10
|
||||
n1 := n + uint64(c-'0')
|
||||
if n1 < n {
|
||||
return
|
||||
}
|
||||
n = n1
|
||||
}
|
||||
ok = true
|
||||
return
|
||||
}
|
||||
|
||||
func readFloat(s []byte, y floatinfo) (r readFloatResult) {
|
||||
// defer func() { xdebugf("readFloat: %#v", r) }()
|
||||
|
||||
var i uint // uint, so that we eliminate bounds checking
|
||||
var slen = uint(len(s))
|
||||
if slen == 0 {
|
||||
// read an empty string as the zero value
|
||||
// r.bad = true
|
||||
r.ok = true
|
||||
return
|
||||
}
|
||||
|
||||
if s[0] == '-' {
|
||||
r.neg = true
|
||||
i++
|
||||
}
|
||||
|
||||
// we considered punting early if string has length > maxMantDigits, but this doesn't account
|
||||
// for trailing 0's e.g. 700000000000000000000 can be encoded exactly as it is 7e20
|
||||
|
||||
// var mantCutoffIsUint64Cutoff bool
|
||||
|
||||
// var mantCutoff uint64
|
||||
// if y.mantbits != 0 {
|
||||
// mantCutoff = 1<<y.mantbits - 1
|
||||
// } else if y.is32bit {
|
||||
// mantCutoff = fUint32Cutoff
|
||||
// } else {
|
||||
// mantCutoffIsUint64Cutoff = true
|
||||
// mantCutoff = fUint64Cutoff
|
||||
// }
|
||||
|
||||
var nd, ndMant, dp int8
|
||||
var xu uint64
|
||||
|
||||
var c uint8
|
||||
for ; i < slen; i++ {
|
||||
c = s[i]
|
||||
if c == '.' {
|
||||
if r.sawdot {
|
||||
r.bad = true
|
||||
return
|
||||
}
|
||||
r.sawdot = true
|
||||
dp = nd
|
||||
} else if c == 'e' || c == 'E' {
|
||||
r.sawexp = true
|
||||
break
|
||||
} else if c == '0' {
|
||||
if nd == 0 {
|
||||
dp--
|
||||
continue
|
||||
}
|
||||
nd++
|
||||
if r.mantissa < y.mantCutoff {
|
||||
r.mantissa *= fBase
|
||||
ndMant++
|
||||
}
|
||||
} else if isDigitChar(c) { // c >= '1' && c <= '9' { // !(c < '0' || c > '9') { //
|
||||
nd++
|
||||
|
||||
if y.mantCutoffIsUint64Cutoff && r.mantissa < fUint64Cutoff {
|
||||
// mantissa = (mantissa << 1) + (mantissa << 3) + uint64(c-'0')
|
||||
r.mantissa *= fBase
|
||||
xu = r.mantissa + uint64(c-'0')
|
||||
if xu < r.mantissa {
|
||||
r.trunc = true
|
||||
return
|
||||
}
|
||||
r.mantissa = xu
|
||||
ndMant++
|
||||
} else if r.mantissa < y.mantCutoff {
|
||||
// mantissa = (mantissa << 1) + (mantissa << 3) + uint64(c-'0')
|
||||
r.mantissa = r.mantissa*fBase + uint64(c-'0')
|
||||
ndMant++
|
||||
} else {
|
||||
r.trunc = true
|
||||
return
|
||||
}
|
||||
|
||||
// if r.mantissa < y.mantCutoff {
|
||||
// if y.mantCutoffIsUint64Cutoff {
|
||||
// // mantissa = (mantissa << 1) + (mantissa << 3) + uint64(c-'0')
|
||||
// r.mantissa *= fBase
|
||||
// xu = r.mantissa + uint64(c-'0')
|
||||
// if xu < r.mantissa {
|
||||
// r.trunc = true
|
||||
// return
|
||||
// }
|
||||
// r.mantissa = xu
|
||||
// } else {
|
||||
// // mantissa = (mantissa << 1) + (mantissa << 3) + uint64(c-'0')
|
||||
// r.mantissa = r.mantissa*fBase + uint64(c-'0')
|
||||
// }
|
||||
// ndMant++
|
||||
// } else {
|
||||
// r.trunc = true
|
||||
// return
|
||||
// }
|
||||
} else {
|
||||
r.bad = true
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if !r.sawdot {
|
||||
dp = nd
|
||||
}
|
||||
|
||||
if r.sawexp {
|
||||
i++
|
||||
if i < slen {
|
||||
var eneg bool
|
||||
if s[i] == '+' {
|
||||
i++
|
||||
} else if s[i] == '-' {
|
||||
i++
|
||||
eneg = true
|
||||
}
|
||||
if i < slen {
|
||||
// for exact match, exponent is 1 or 2 digits (float64: -22 to 37, float32: -1 to 17).
|
||||
// exit quick if exponent is more than 2 digits.
|
||||
if i+2 < slen {
|
||||
return
|
||||
}
|
||||
var e int8
|
||||
if !isDigitChar(s[i]) { // s[i] < '0' || s[i] > '9' {
|
||||
r.bad = true
|
||||
return
|
||||
}
|
||||
e = int8(s[i] - '0')
|
||||
i++
|
||||
if i < slen {
|
||||
if !isDigitChar(s[i]) { // s[i] < '0' || s[i] > '9' {
|
||||
r.bad = true
|
||||
return
|
||||
}
|
||||
e = e*fBase + int8(s[i]-'0') // (e << 1) + (e << 3) + int8(s[i]-'0')
|
||||
i++
|
||||
}
|
||||
if eneg {
|
||||
dp -= e
|
||||
} else {
|
||||
dp += e
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if r.mantissa != 0 {
|
||||
r.exp = dp - ndMant
|
||||
// do not set ok=true for cases we cannot handle
|
||||
if r.exp < -y.exactPow10 ||
|
||||
r.exp > y.exactInts+y.exactPow10 ||
|
||||
y.mantbits != 0 && r.mantissa>>y.mantbits != 0 {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
r.ok = true
|
||||
return
|
||||
}
|
||||
|
||||
*/
|
||||
90
vendor/github.com/ugorji/go/codec/gen-dec-array.go.tmpl
generated
vendored
90
vendor/github.com/ugorji/go/codec/gen-dec-array.go.tmpl
generated
vendored
@@ -1,90 +0,0 @@
|
||||
{{var "v"}} := {{if not isArray}}*{{end}}{{ .Varname }}
|
||||
{{var "h"}}, {{var "l"}} := z.DecSliceHelperStart() {{/* // helper, containerLenS */}}
|
||||
{{if not isArray -}}
|
||||
var {{var "c"}} bool {{/* // changed */}}
|
||||
_ = {{var "c"}}
|
||||
if {{var "h"}}.IsNil {
|
||||
if {{var "v"}} != nil {
|
||||
{{var "v"}} = nil
|
||||
{{var "c"}} = true
|
||||
}
|
||||
} else {{end -}}
|
||||
if {{var "l"}} == 0 {
|
||||
{{if isSlice -}}
|
||||
if {{var "v"}} == nil {
|
||||
{{var "v"}} = []{{ .Typ }}{}
|
||||
{{var "c"}} = true
|
||||
} else if len({{var "v"}}) != 0 {
|
||||
{{var "v"}} = {{var "v"}}[:0]
|
||||
{{var "c"}} = true
|
||||
} {{else if isChan }}if {{var "v"}} == nil {
|
||||
{{var "v"}} = make({{ .CTyp }}, 0)
|
||||
{{var "c"}} = true
|
||||
}
|
||||
{{end -}}
|
||||
} else {
|
||||
{{var "hl"}} := {{var "l"}} > 0
|
||||
var {{var "rl"}} int
|
||||
_ = {{var "rl"}}
|
||||
{{if isSlice }} if {{var "hl"}} {
|
||||
if {{var "l"}} > cap({{var "v"}}) {
|
||||
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
|
||||
if {{var "rl"}} <= cap({{var "v"}}) {
|
||||
{{var "v"}} = {{var "v"}}[:{{var "rl"}}]
|
||||
} else {
|
||||
{{var "v"}} = make([]{{ .Typ }}, {{var "rl"}})
|
||||
}
|
||||
{{var "c"}} = true
|
||||
} else if {{var "l"}} != len({{var "v"}}) {
|
||||
{{var "v"}} = {{var "v"}}[:{{var "l"}}]
|
||||
{{var "c"}} = true
|
||||
}
|
||||
}
|
||||
{{end -}}
|
||||
var {{var "j"}} int
|
||||
{{/* // var {{var "dn"}} bool */ -}}
|
||||
for {{var "j"}} = 0; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || z.DecCheckBreak()); {{var "j"}}++ { // bounds-check-elimination
|
||||
{{if not isArray}} if {{var "j"}} == 0 && {{var "v"}} == nil {
|
||||
if {{var "hl"}} {
|
||||
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
|
||||
} else {
|
||||
{{var "rl"}} = {{if isSlice}}8{{else if isChan}}64{{end}}
|
||||
}
|
||||
{{var "v"}} = make({{if isSlice}}[]{{ .Typ }}{{else if isChan}}{{.CTyp}}{{end}}, {{var "rl"}})
|
||||
{{var "c"}} = true
|
||||
}
|
||||
{{end -}}
|
||||
{{var "h"}}.ElemContainerState({{var "j"}})
|
||||
{{/* {{var "dn"}} = r.TryDecodeAsNil() */}}{{/* commented out, as decLineVar handles this already each time */ -}}
|
||||
{{if isChan}}{{ $x := printf "%[1]vvcx%[2]v" .TempVar .Rand }}var {{$x}} {{ .Typ }}
|
||||
{{ decLineVar $x -}}
|
||||
{{var "v"}} <- {{ $x }}
|
||||
{{else}}{{/* // if indefinite, etc, then expand the slice if necessary */ -}}
|
||||
var {{var "db"}} bool
|
||||
if {{var "j"}} >= len({{var "v"}}) {
|
||||
{{if isSlice }} {{var "v"}} = append({{var "v"}}, {{ zero }})
|
||||
{{var "c"}} = true
|
||||
{{else}} z.DecArrayCannotExpand(len(v), {{var "j"}}+1); {{var "db"}} = true
|
||||
{{end -}}
|
||||
}
|
||||
if {{var "db"}} {
|
||||
z.DecSwallow()
|
||||
} else {
|
||||
{{ $x := printf "%[1]vv%[2]v[%[1]vj%[2]v]" .TempVar .Rand }}{{ decLineVar $x -}}
|
||||
}
|
||||
{{end -}}
|
||||
}
|
||||
{{if isSlice}} if {{var "j"}} < len({{var "v"}}) {
|
||||
{{var "v"}} = {{var "v"}}[:{{var "j"}}]
|
||||
{{var "c"}} = true
|
||||
} else if {{var "j"}} == 0 && {{var "v"}} == nil {
|
||||
{{var "v"}} = make([]{{ .Typ }}, 0)
|
||||
{{var "c"}} = true
|
||||
}
|
||||
{{end -}}
|
||||
}
|
||||
{{var "h"}}.End()
|
||||
{{if not isArray }}if {{var "c"}} {
|
||||
*{{ .Varname }} = {{var "v"}}
|
||||
}
|
||||
{{end -}}
|
||||
53
vendor/github.com/ugorji/go/codec/gen-dec-map.go.tmpl
generated
vendored
53
vendor/github.com/ugorji/go/codec/gen-dec-map.go.tmpl
generated
vendored
@@ -1,53 +0,0 @@
|
||||
{{var "v"}} := *{{ .Varname }}
|
||||
{{var "l"}} := z.DecReadMapStart()
|
||||
if {{var "l"}} == codecSelferDecContainerLenNil{{xs}} {
|
||||
*{{ .Varname }} = nil
|
||||
} else {
|
||||
if {{var "v"}} == nil {
|
||||
{{var "rl"}} := z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
|
||||
{{var "v"}} = make(map[{{ .KTyp }}]{{ .Typ }}, {{var "rl"}})
|
||||
*{{ .Varname }} = {{var "v"}}
|
||||
}
|
||||
var {{var "mk"}} {{ .KTyp }}
|
||||
var {{var "mv"}} {{ .Typ }}
|
||||
var {{var "mg"}}, {{var "mdn"}} {{if decElemKindPtr}}, {{var "ms"}}, {{var "mok"}}{{end}} bool
|
||||
if z.DecBasicHandle().MapValueReset {
|
||||
{{if decElemKindPtr}}{{var "mg"}} = true
|
||||
{{else if decElemKindIntf}}if !z.DecBasicHandle().InterfaceReset { {{var "mg"}} = true }
|
||||
{{else if not decElemKindImmutable}}{{var "mg"}} = true
|
||||
{{end}} }
|
||||
if {{var "l"}} != 0 {
|
||||
{{var "hl"}} := {{var "l"}} > 0
|
||||
for {{var "j"}} := 0; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || z.DecCheckBreak()); {{var "j"}}++ {
|
||||
z.DecReadMapElemKey()
|
||||
{{ $x := printf "%vmk%v" .TempVar .Rand }}{{ decLineVarK $x -}}
|
||||
{{ if eq .KTyp "interface{}" }}{{/* // special case if a byte array. */ -}}
|
||||
if {{var "bv"}}, {{var "bok"}} := {{var "mk"}}.([]byte); {{var "bok"}} {
|
||||
{{var "mk"}} = string({{var "bv"}})
|
||||
}
|
||||
{{ end -}}
|
||||
{{if decElemKindPtr -}}
|
||||
{{var "ms"}} = true
|
||||
{{end -}}
|
||||
if {{var "mg"}} {
|
||||
{{if decElemKindPtr -}}
|
||||
{{var "mv"}}, {{var "mok"}} = {{var "v"}}[{{var "mk"}}]
|
||||
if {{var "mok"}} {
|
||||
{{var "ms"}} = false
|
||||
}
|
||||
{{else -}}
|
||||
{{var "mv"}} = {{var "v"}}[{{var "mk"}}]
|
||||
{{end -}}
|
||||
} {{if not decElemKindImmutable}}else { {{var "mv"}} = {{decElemZero}} }{{end}}
|
||||
z.DecReadMapElemValue()
|
||||
{{var "mdn"}} = false
|
||||
{{ $x := printf "%vmv%v" .TempVar .Rand }}{{ $y := printf "%vmdn%v" .TempVar .Rand }}{{ decLineVar $x $y -}}
|
||||
if {{var "mdn"}} {
|
||||
if z.DecBasicHandle().DeleteOnNilMapValue { delete({{var "v"}}, {{var "mk"}}) } else { {{var "v"}}[{{var "mk"}}] = {{decElemZero}} }
|
||||
} else if {{if decElemKindPtr}} {{var "ms"}} && {{end}} {{var "v"}} != nil {
|
||||
{{var "v"}}[{{var "mk"}}] = {{var "mv"}}
|
||||
}
|
||||
}
|
||||
} // else len==0: TODO: Should we clear map entries?
|
||||
z.DecReadMapEnd()
|
||||
}
|
||||
27
vendor/github.com/ugorji/go/codec/gen-enc-chan.go.tmpl
generated
vendored
27
vendor/github.com/ugorji/go/codec/gen-enc-chan.go.tmpl
generated
vendored
@@ -1,27 +0,0 @@
|
||||
{{.Label}}:
|
||||
switch timeout{{.Sfx}} := z.EncBasicHandle().ChanRecvTimeout; {
|
||||
case timeout{{.Sfx}} == 0: // only consume available
|
||||
for {
|
||||
select {
|
||||
case b{{.Sfx}} := <-{{.Chan}}:
|
||||
{{ .Slice }} = append({{.Slice}}, b{{.Sfx}})
|
||||
default:
|
||||
break {{.Label}}
|
||||
}
|
||||
}
|
||||
case timeout{{.Sfx}} > 0: // consume until timeout
|
||||
tt{{.Sfx}} := time.NewTimer(timeout{{.Sfx}})
|
||||
for {
|
||||
select {
|
||||
case b{{.Sfx}} := <-{{.Chan}}:
|
||||
{{.Slice}} = append({{.Slice}}, b{{.Sfx}})
|
||||
case <-tt{{.Sfx}}.C:
|
||||
// close(tt.C)
|
||||
break {{.Label}}
|
||||
}
|
||||
}
|
||||
default: // consume until close
|
||||
for b{{.Sfx}} := range {{.Chan}} {
|
||||
{{.Slice}} = append({{.Slice}}, b{{.Sfx}})
|
||||
}
|
||||
}
|
||||
273
vendor/github.com/ugorji/go/codec/gen-helper.generated.go
generated
vendored
273
vendor/github.com/ugorji/go/codec/gen-helper.generated.go
generated
vendored
@@ -1,273 +0,0 @@
|
||||
// comment this out // + build ignore
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// Code generated from gen-helper.go.tmpl - DO NOT EDIT.
|
||||
|
||||
package codec
|
||||
|
||||
import "encoding"
|
||||
|
||||
// GenVersion is the current version of codecgen.
|
||||
const GenVersion = 16
|
||||
|
||||
// This file is used to generate helper code for codecgen.
|
||||
// The values here i.e. genHelper(En|De)coder are not to be used directly by
|
||||
// library users. They WILL change continuously and without notice.
|
||||
|
||||
// GenHelperEncoder is exported so that it can be used externally by codecgen.
|
||||
//
|
||||
// Library users: DO NOT USE IT DIRECTLY. IT WILL CHANGE CONTINUOUSLY WITHOUT NOTICE.
|
||||
func GenHelperEncoder(e *Encoder) (ge genHelperEncoder, ee genHelperEncDriver) {
|
||||
ge = genHelperEncoder{e: e}
|
||||
ee = genHelperEncDriver{encDriver: e.e}
|
||||
return
|
||||
}
|
||||
|
||||
// GenHelperDecoder is exported so that it can be used externally by codecgen.
|
||||
//
|
||||
// Library users: DO NOT USE IT DIRECTLY. IT WILL CHANGE CONTINUOUSLY WITHOUT NOTICE.
|
||||
func GenHelperDecoder(d *Decoder) (gd genHelperDecoder, dd genHelperDecDriver) {
|
||||
gd = genHelperDecoder{d: d}
|
||||
dd = genHelperDecDriver{decDriver: d.d}
|
||||
return
|
||||
}
|
||||
|
||||
type genHelperEncDriver struct {
|
||||
encDriver
|
||||
}
|
||||
|
||||
type genHelperDecDriver struct {
|
||||
decDriver
|
||||
}
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
type genHelperEncoder struct {
	M must
	F fastpathT
	e *Encoder
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
type genHelperDecoder struct {
	C checkOverflow
	F fastpathT
	d *Decoder
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncBasicHandle() *BasicHandle {
	return f.e.h
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncBinary() bool {
	return f.e.be // f.e.hh.isBinaryEncoding()
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) IsJSONHandle() bool {
	return f.e.js
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncFallback(iv interface{}) {
	// f.e.encodeI(iv, false, false)
	f.e.encodeValue(rv4i(iv), nil)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncTextMarshal(iv encoding.TextMarshaler) {
	bs, fnerr := iv.MarshalText()
	f.e.marshalUtf8(bs, fnerr)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncJSONMarshal(iv jsonMarshaler) {
	bs, fnerr := iv.MarshalJSON()
	f.e.marshalAsis(bs, fnerr)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncBinaryMarshal(iv encoding.BinaryMarshaler) {
	bs, fnerr := iv.MarshalBinary()
	f.e.marshalRaw(bs, fnerr)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncRaw(iv Raw) { f.e.rawBytes(iv) }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) I2Rtid(v interface{}) uintptr {
	return i2rtid(v)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) Extension(rtid uintptr) (xfn *extTypeTagFn) {
	return f.e.h.getExt(rtid, true)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncExtension(v interface{}, xfFn *extTypeTagFn) {
	f.e.e.EncodeExt(v, xfFn.tag, xfFn.ext)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) WriteStr(s string) {
	f.e.w().writestr(s)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) BytesView(v string) []byte { return bytesView(v) }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncWriteMapStart(length int) { f.e.mapStart(length) }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncWriteMapEnd() { f.e.mapEnd() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncWriteArrayStart(length int) { f.e.arrayStart(length) }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncWriteArrayEnd() { f.e.arrayEnd() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncWriteArrayElem() { f.e.arrayElem() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncWriteMapElemKey() { f.e.mapElemKey() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperEncoder) EncWriteMapElemValue() { f.e.mapElemValue() }

// ---------------- DECODER FOLLOWS -----------------

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecBasicHandle() *BasicHandle {
	return f.d.h
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecBinary() bool {
	return f.d.be // f.d.hh.isBinaryEncoding()
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecSwallow() { f.d.swallow() }

// // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
// func (f genHelperDecoder) DecScratchBuffer() []byte {
// 	return f.d.b[:]
// }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecScratchArrayBuffer() *[decScratchByteArrayLen]byte {
	return &f.d.b
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecFallback(iv interface{}, chkPtr bool) {
	rv := rv4i(iv)
	if chkPtr {
		f.d.ensureDecodeable(rv)
	}
	f.d.decodeValue(rv, nil)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecSliceHelperStart() (decSliceHelper, int) {
	return f.d.decSliceHelperStart()
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecStructFieldNotFound(index int, name string) {
	f.d.structFieldNotFound(index, name)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecArrayCannotExpand(sliceLen, streamLen int) {
	f.d.arrayCannotExpand(sliceLen, streamLen)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecTextUnmarshal(tm encoding.TextUnmarshaler) {
	if fnerr := tm.UnmarshalText(f.d.d.DecodeStringAsBytes()); fnerr != nil {
		halt.errorv(fnerr)
	}
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecJSONUnmarshal(tm jsonUnmarshaler) {
	// bs := f.dd.DecodeStringAsBytes()
	// grab the bytes to be read, as UnmarshalJSON needs the full JSON so as to unmarshal it itself.
	if fnerr := tm.UnmarshalJSON(f.d.nextValueBytes()); fnerr != nil {
		halt.errorv(fnerr)
	}
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecBinaryUnmarshal(bm encoding.BinaryUnmarshaler) {
	if fnerr := bm.UnmarshalBinary(f.d.d.DecodeBytes(nil, true)); fnerr != nil {
		halt.errorv(fnerr)
	}
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecRaw() []byte { return f.d.rawBytes() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) IsJSONHandle() bool {
	return f.d.js
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) I2Rtid(v interface{}) uintptr {
	return i2rtid(v)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) Extension(rtid uintptr) (xfn *extTypeTagFn) {
	return f.d.h.getExt(rtid, true)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecExtension(v interface{}, xfFn *extTypeTagFn) {
	f.d.d.DecodeExt(v, xfFn.tag, xfFn.ext)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecInferLen(clen, maxlen, unit int) (rvlen int) {
	return decInferLen(clen, maxlen, unit)
}

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) StringView(v []byte) string { return stringView(v) }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecReadMapStart() int { return f.d.mapStart() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecReadMapEnd() { f.d.mapEnd() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecReadArrayStart() int { return f.d.arrayStart() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecReadArrayEnd() { f.d.arrayEnd() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecReadArrayElem() { f.d.arrayElem() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecReadMapElemKey() { f.d.mapElemKey() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecReadMapElemValue() { f.d.mapElemValue() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecDecodeFloat32() float32 { return f.d.decodeFloat32() }

// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
func (f genHelperDecoder) DecCheckBreak() bool { return f.d.checkBreak() }
243
vendor/github.com/ugorji/go/codec/gen-helper.go.tmpl
generated
vendored
@@ -1,243 +0,0 @@
|
||||
// comment this out // + build ignore
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// Code generated from gen-helper.go.tmpl - DO NOT EDIT.
|
||||
|
||||
package codec
|
||||
|
||||
import "encoding"
|
||||
|
||||
// GenVersion is the current version of codecgen.
|
||||
const GenVersion = {{ .Version }}
|
||||
|
||||
// This file is used to generate helper code for codecgen.
|
||||
// The values here i.e. genHelper(En|De)coder are not to be used directly by
|
||||
// library users. They WILL change continuously and without notice.
|
||||
|
||||
{{/*
|
||||
// To help enforce this, we create an unexported type with exported members.
|
||||
// The only way to get the type is via the one exported type that we control (somewhat).
|
||||
//
|
||||
// When static codecs are created for types, they will use this value
|
||||
// to perform encoding or decoding of primitives or known slice or map types.
|
||||
*/ -}}
|
||||
|
||||
// GenHelperEncoder is exported so that it can be used externally by codecgen.
|
||||
//
|
||||
// Library users: DO NOT USE IT DIRECTLY. IT WILL CHANGE CONTINOUSLY WITHOUT NOTICE.
|
||||
func GenHelperEncoder(e *Encoder) (ge genHelperEncoder, ee genHelperEncDriver) {
|
||||
ge = genHelperEncoder{e: e}
|
||||
ee = genHelperEncDriver{encDriver: e.e}
|
||||
return
|
||||
}
|
||||
|
||||
// GenHelperDecoder is exported so that it can be used externally by codecgen.
|
||||
//
|
||||
// Library users: DO NOT USE IT DIRECTLY. IT WILL CHANGE CONTINOUSLY WITHOUT NOTICE.
|
||||
func GenHelperDecoder(d *Decoder) (gd genHelperDecoder, dd genHelperDecDriver) {
|
||||
gd = genHelperDecoder{d: d}
|
||||
dd = genHelperDecDriver{decDriver: d.d}
|
||||
return
|
||||
}
|
||||
|
||||
type genHelperEncDriver struct {
|
||||
encDriver
|
||||
}
|
||||
|
||||
type genHelperDecDriver struct {
|
||||
decDriver
|
||||
}
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
type genHelperEncoder struct {
|
||||
M must
|
||||
F fastpathT
|
||||
e *Encoder
|
||||
}
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
type genHelperDecoder struct {
|
||||
C checkOverflow
|
||||
F fastpathT
|
||||
d *Decoder
|
||||
}
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncBasicHandle() *BasicHandle {
|
||||
return f.e.h
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncBinary() bool {
|
||||
return f.e.be // f.e.hh.isBinaryEncoding()
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) IsJSONHandle() bool {
|
||||
return f.e.js
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncFallback(iv interface{}) {
|
||||
// f.e.encodeI(iv, false, false)
|
||||
f.e.encodeValue(rv4i(iv), nil)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncTextMarshal(iv encoding.TextMarshaler) {
|
||||
bs, fnerr := iv.MarshalText()
|
||||
f.e.marshalUtf8(bs, fnerr)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncJSONMarshal(iv jsonMarshaler) {
|
||||
bs, fnerr := iv.MarshalJSON()
|
||||
f.e.marshalAsis(bs, fnerr)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncBinaryMarshal(iv encoding.BinaryMarshaler) {
|
||||
bs, fnerr := iv.MarshalBinary()
|
||||
f.e.marshalRaw(bs, fnerr)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncRaw(iv Raw) { f.e.rawBytes(iv) }
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) I2Rtid(v interface{}) uintptr {
|
||||
return i2rtid(v)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) Extension(rtid uintptr) (xfn *extTypeTagFn) {
|
||||
return f.e.h.getExt(rtid, true)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncExtension(v interface{}, xfFn *extTypeTagFn) {
|
||||
f.e.e.EncodeExt(v, xfFn.tag, xfFn.ext)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) WriteStr(s string) {
|
||||
f.e.w().writestr(s)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) BytesView(v string) []byte { return bytesView(v) }
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncWriteMapStart(length int) { f.e.mapStart(length) }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncWriteMapEnd() { f.e.mapEnd() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncWriteArrayStart(length int) { f.e.arrayStart(length) }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncWriteArrayEnd() { f.e.arrayEnd() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncWriteArrayElem() { f.e.arrayElem() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncWriteMapElemKey() { f.e.mapElemKey() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperEncoder) EncWriteMapElemValue() { f.e.mapElemValue() }
|
||||
|
||||
// ---------------- DECODER FOLLOWS -----------------
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecBasicHandle() *BasicHandle {
|
||||
return f.d.h
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecBinary() bool {
|
||||
return f.d.be // f.d.hh.isBinaryEncoding()
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecSwallow() { f.d.swallow() }
|
||||
|
||||
// // FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
// func (f genHelperDecoder) DecScratchBuffer() []byte {
|
||||
// return f.d.b[:]
|
||||
// }
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecScratchArrayBuffer() *[decScratchByteArrayLen]byte {
|
||||
return &f.d.b
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecFallback(iv interface{}, chkPtr bool) {
|
||||
rv := rv4i(iv)
|
||||
if chkPtr {
|
||||
f.d.ensureDecodeable(rv)
|
||||
}
|
||||
f.d.decodeValue(rv, nil)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecSliceHelperStart() (decSliceHelper, int) {
|
||||
return f.d.decSliceHelperStart()
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecStructFieldNotFound(index int, name string) {
|
||||
f.d.structFieldNotFound(index, name)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecArrayCannotExpand(sliceLen, streamLen int) {
|
||||
f.d.arrayCannotExpand(sliceLen, streamLen)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecTextUnmarshal(tm encoding.TextUnmarshaler) {
|
||||
if fnerr := tm.UnmarshalText(f.d.d.DecodeStringAsBytes()); fnerr != nil {
|
||||
halt.errorv(fnerr)
|
||||
}
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecJSONUnmarshal(tm jsonUnmarshaler) {
|
||||
// bs := f.dd.DecodeStringAsBytes()
|
||||
// grab the bytes to be read, as UnmarshalJSON needs the full JSON so as to unmarshal it itself.
|
||||
if fnerr := tm.UnmarshalJSON(f.d.nextValueBytes()); fnerr != nil {
|
||||
halt.errorv(fnerr)
|
||||
}
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecBinaryUnmarshal(bm encoding.BinaryUnmarshaler) {
|
||||
if fnerr := bm.UnmarshalBinary(f.d.d.DecodeBytes(nil, true)); fnerr != nil {
|
||||
halt.errorv(fnerr)
|
||||
}
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecRaw() []byte { return f.d.rawBytes() }
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) IsJSONHandle() bool {
|
||||
return f.d.js
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) I2Rtid(v interface{}) uintptr {
|
||||
return i2rtid(v)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) Extension(rtid uintptr) (xfn *extTypeTagFn) {
|
||||
return f.d.h.getExt(rtid, true)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecExtension(v interface{}, xfFn *extTypeTagFn) {
|
||||
f.d.d.DecodeExt(v, xfFn.tag, xfFn.ext)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecInferLen(clen, maxlen, unit int) (rvlen int) {
|
||||
return decInferLen(clen, maxlen, unit)
|
||||
}
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) StringView(v []byte) string { return stringView(v) }
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecReadMapStart() int { return f.d.mapStart() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecReadMapEnd() { f.d.mapEnd() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecReadArrayStart() int { return f.d.arrayStart() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecReadArrayEnd() { f.d.arrayEnd() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecReadArrayElem() { f.d.arrayElem() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecReadMapElemKey() { f.d.mapElemKey() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecReadMapElemValue() { f.d.mapElemValue() }
|
||||
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecDecodeFloat32() float32 { return f.d.decodeFloat32() }
|
||||
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||
func (f genHelperDecoder) DecCheckBreak() bool { return f.d.checkBreak() }
|
||||
|
||||
187
vendor/github.com/ugorji/go/codec/gen.generated.go
generated
vendored
@@ -1,187 +0,0 @@
|
||||
// +build codecgen.exec
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// DO NOT EDIT. THIS FILE IS AUTO-GENERATED FROM gen-dec-(map|array).go.tmpl
|
||||
|
||||
const genDecMapTmpl = `
|
||||
{{var "v"}} := *{{ .Varname }}
|
||||
{{var "l"}} := z.DecReadMapStart()
|
||||
if {{var "l"}} == codecSelferDecContainerLenNil{{xs}} {
|
||||
*{{ .Varname }} = nil
|
||||
} else {
|
||||
if {{var "v"}} == nil {
|
||||
{{var "rl"}} := z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
|
||||
{{var "v"}} = make(map[{{ .KTyp }}]{{ .Typ }}, {{var "rl"}})
|
||||
*{{ .Varname }} = {{var "v"}}
|
||||
}
|
||||
var {{var "mk"}} {{ .KTyp }}
|
||||
var {{var "mv"}} {{ .Typ }}
|
||||
var {{var "mg"}}, {{var "mdn"}} {{if decElemKindPtr}}, {{var "ms"}}, {{var "mok"}}{{end}} bool
|
||||
if z.DecBasicHandle().MapValueReset {
|
||||
{{if decElemKindPtr}}{{var "mg"}} = true
|
||||
{{else if decElemKindIntf}}if !z.DecBasicHandle().InterfaceReset { {{var "mg"}} = true }
|
||||
{{else if not decElemKindImmutable}}{{var "mg"}} = true
|
||||
{{end}} }
|
||||
if {{var "l"}} != 0 {
|
||||
{{var "hl"}} := {{var "l"}} > 0
|
||||
for {{var "j"}} := 0; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || z.DecCheckBreak()); {{var "j"}}++ {
|
||||
z.DecReadMapElemKey()
|
||||
{{ $x := printf "%vmk%v" .TempVar .Rand }}{{ decLineVarK $x -}}
|
||||
{{ if eq .KTyp "interface{}" }}{{/* // special case if a byte array. */ -}}
|
||||
if {{var "bv"}}, {{var "bok"}} := {{var "mk"}}.([]byte); {{var "bok"}} {
|
||||
{{var "mk"}} = string({{var "bv"}})
|
||||
}
|
||||
{{ end -}}
|
||||
{{if decElemKindPtr -}}
|
||||
{{var "ms"}} = true
|
||||
{{end -}}
|
||||
if {{var "mg"}} {
|
||||
{{if decElemKindPtr -}}
|
||||
{{var "mv"}}, {{var "mok"}} = {{var "v"}}[{{var "mk"}}]
|
||||
if {{var "mok"}} {
|
||||
{{var "ms"}} = false
|
||||
}
|
||||
{{else -}}
|
||||
{{var "mv"}} = {{var "v"}}[{{var "mk"}}]
|
||||
{{end -}}
|
||||
} {{if not decElemKindImmutable}}else { {{var "mv"}} = {{decElemZero}} }{{end}}
|
||||
z.DecReadMapElemValue()
|
||||
{{var "mdn"}} = false
|
||||
{{ $x := printf "%vmv%v" .TempVar .Rand }}{{ $y := printf "%vmdn%v" .TempVar .Rand }}{{ decLineVar $x $y -}}
|
||||
if {{var "mdn"}} {
|
||||
if z.DecBasicHandle().DeleteOnNilMapValue { delete({{var "v"}}, {{var "mk"}}) } else { {{var "v"}}[{{var "mk"}}] = {{decElemZero}} }
|
||||
} else if {{if decElemKindPtr}} {{var "ms"}} && {{end}} {{var "v"}} != nil {
|
||||
{{var "v"}}[{{var "mk"}}] = {{var "mv"}}
|
||||
}
|
||||
}
|
||||
} // else len==0: TODO: Should we clear map entries?
|
||||
z.DecReadMapEnd()
|
||||
}
|
||||
`
|
||||
|
||||
const genDecListTmpl = `
|
||||
{{var "v"}} := {{if not isArray}}*{{end}}{{ .Varname }}
|
||||
{{var "h"}}, {{var "l"}} := z.DecSliceHelperStart() {{/* // helper, containerLenS */}}
|
||||
{{if not isArray -}}
|
||||
var {{var "c"}} bool {{/* // changed */}}
|
||||
_ = {{var "c"}}
|
||||
if {{var "h"}}.IsNil {
|
||||
if {{var "v"}} != nil {
|
||||
{{var "v"}} = nil
|
||||
{{var "c"}} = true
|
||||
}
|
||||
} else {{end -}}
|
||||
if {{var "l"}} == 0 {
|
||||
{{if isSlice -}}
|
||||
if {{var "v"}} == nil {
|
||||
{{var "v"}} = []{{ .Typ }}{}
|
||||
{{var "c"}} = true
|
||||
} else if len({{var "v"}}) != 0 {
|
||||
{{var "v"}} = {{var "v"}}[:0]
|
||||
{{var "c"}} = true
|
||||
} {{else if isChan }}if {{var "v"}} == nil {
|
||||
{{var "v"}} = make({{ .CTyp }}, 0)
|
||||
{{var "c"}} = true
|
||||
}
|
||||
{{end -}}
|
||||
} else {
|
||||
{{var "hl"}} := {{var "l"}} > 0
|
||||
var {{var "rl"}} int
|
||||
_ = {{var "rl"}}
|
||||
{{if isSlice }} if {{var "hl"}} {
|
||||
if {{var "l"}} > cap({{var "v"}}) {
|
||||
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
|
||||
if {{var "rl"}} <= cap({{var "v"}}) {
|
||||
{{var "v"}} = {{var "v"}}[:{{var "rl"}}]
|
||||
} else {
|
||||
{{var "v"}} = make([]{{ .Typ }}, {{var "rl"}})
|
||||
}
|
||||
{{var "c"}} = true
|
||||
} else if {{var "l"}} != len({{var "v"}}) {
|
||||
{{var "v"}} = {{var "v"}}[:{{var "l"}}]
|
||||
{{var "c"}} = true
|
||||
}
|
||||
}
|
||||
{{end -}}
|
||||
var {{var "j"}} int
|
||||
{{/* // var {{var "dn"}} bool */ -}}
|
||||
for {{var "j"}} = 0; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || z.DecCheckBreak()); {{var "j"}}++ { // bounds-check-elimination
|
||||
{{if not isArray}} if {{var "j"}} == 0 && {{var "v"}} == nil {
|
||||
if {{var "hl"}} {
|
||||
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
|
||||
} else {
|
||||
{{var "rl"}} = {{if isSlice}}8{{else if isChan}}64{{end}}
|
||||
}
|
||||
{{var "v"}} = make({{if isSlice}}[]{{ .Typ }}{{else if isChan}}{{.CTyp}}{{end}}, {{var "rl"}})
|
||||
{{var "c"}} = true
|
||||
}
|
||||
{{end -}}
|
||||
{{var "h"}}.ElemContainerState({{var "j"}})
|
||||
{{/* {{var "dn"}} = r.TryDecodeAsNil() */}}{{/* commented out, as decLineVar handles this already each time */ -}}
|
||||
{{if isChan}}{{ $x := printf "%[1]vvcx%[2]v" .TempVar .Rand }}var {{$x}} {{ .Typ }}
|
||||
{{ decLineVar $x -}}
|
||||
{{var "v"}} <- {{ $x }}
|
||||
{{else}}{{/* // if indefinite, etc, then expand the slice if necessary */ -}}
|
||||
var {{var "db"}} bool
|
||||
if {{var "j"}} >= len({{var "v"}}) {
|
||||
{{if isSlice }} {{var "v"}} = append({{var "v"}}, {{ zero }})
|
||||
{{var "c"}} = true
|
||||
{{else}} z.DecArrayCannotExpand(len(v), {{var "j"}}+1); {{var "db"}} = true
|
||||
{{end -}}
|
||||
}
|
||||
if {{var "db"}} {
|
||||
z.DecSwallow()
|
||||
} else {
|
||||
{{ $x := printf "%[1]vv%[2]v[%[1]vj%[2]v]" .TempVar .Rand }}{{ decLineVar $x -}}
|
||||
}
|
||||
{{end -}}
|
||||
}
|
||||
{{if isSlice}} if {{var "j"}} < len({{var "v"}}) {
|
||||
{{var "v"}} = {{var "v"}}[:{{var "j"}}]
|
||||
{{var "c"}} = true
|
||||
} else if {{var "j"}} == 0 && {{var "v"}} == nil {
|
||||
{{var "v"}} = make([]{{ .Typ }}, 0)
|
||||
{{var "c"}} = true
|
||||
}
|
||||
{{end -}}
|
||||
}
|
||||
{{var "h"}}.End()
|
||||
{{if not isArray }}if {{var "c"}} {
|
||||
*{{ .Varname }} = {{var "v"}}
|
||||
}
|
||||
{{end -}}
|
||||
`
|
||||
|
||||
const genEncChanTmpl = `
|
||||
{{.Label}}:
|
||||
switch timeout{{.Sfx}} := z.EncBasicHandle().ChanRecvTimeout; {
|
||||
case timeout{{.Sfx}} == 0: // only consume available
|
||||
for {
|
||||
select {
|
||||
case b{{.Sfx}} := <-{{.Chan}}:
|
||||
{{ .Slice }} = append({{.Slice}}, b{{.Sfx}})
|
||||
default:
|
||||
break {{.Label}}
|
||||
}
|
||||
}
|
||||
case timeout{{.Sfx}} > 0: // consume until timeout
|
||||
tt{{.Sfx}} := time.NewTimer(timeout{{.Sfx}})
|
||||
for {
|
||||
select {
|
||||
case b{{.Sfx}} := <-{{.Chan}}:
|
||||
{{.Slice}} = append({{.Slice}}, b{{.Sfx}})
|
||||
case <-tt{{.Sfx}}.C:
|
||||
// close(tt.C)
|
||||
break {{.Label}}
|
||||
}
|
||||
}
|
||||
default: // consume until close
|
||||
for b{{.Sfx}} := range {{.Chan}} {
|
||||
{{.Slice}} = append({{.Slice}}, b{{.Sfx}})
|
||||
}
|
||||
}
|
||||
`
|
||||
2339
vendor/github.com/ugorji/go/codec/gen.go
generated
vendored
File diff suppressed because it is too large
5
vendor/github.com/ugorji/go/codec/go.mod
generated
vendored
@@ -1,5 +0,0 @@
module github.com/ugorji/go/codec

require (
	github.com/ugorji/go v1.1.7
)
14
vendor/github.com/ugorji/go/codec/goversion_arrayof_gte_go15.go
generated
vendored
@@ -1,14 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build go1.5

package codec

import "reflect"

const reflectArrayOfSupported = true

func reflectArrayOf(count int, elem reflect.Type) reflect.Type {
	return reflect.ArrayOf(count, elem)
}
14
vendor/github.com/ugorji/go/codec/goversion_arrayof_lt_go15.go
generated
vendored
@@ -1,14 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build !go1.5

package codec

import "reflect"

const reflectArrayOfSupported = false

func reflectArrayOf(count int, elem reflect.Type) reflect.Type {
	panic("codec: reflect.ArrayOf unsupported in this go version")
}
12
vendor/github.com/ugorji/go/codec/goversion_fmt_time_gte_go15.go
generated
vendored
@@ -1,12 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build go1.5

package codec

import "time"

func fmtTime(t time.Time, b []byte) []byte {
	return t.AppendFormat(b, time.RFC3339Nano)
}
15
vendor/github.com/ugorji/go/codec/goversion_fmt_time_lt_go15.go
generated
vendored
@@ -1,15 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build !go1.5

package codec

import "time"

func fmtTime(t time.Time, b []byte) []byte {
	s := t.Format(time.RFC3339Nano)
	b = b[:len(s)]
	copy(b, s)
	return b
}
15
vendor/github.com/ugorji/go/codec/goversion_makemap_gte_go19.go
generated
vendored
@@ -1,15 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build go1.9

package codec

import "reflect"

func makeMapReflect(t reflect.Type, size int) reflect.Value {
	// if size < 0 {
	// 	return reflect.MakeMapWithSize(t, 4)
	// }
	return reflect.MakeMapWithSize(t, size)
}
12
vendor/github.com/ugorji/go/codec/goversion_makemap_lt_go19.go
generated
vendored
@@ -1,12 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build !go1.9

package codec

import "reflect"

func makeMapReflect(t reflect.Type, size int) reflect.Value {
	return reflect.MakeMap(t)
}
44
vendor/github.com/ugorji/go/codec/goversion_maprange_gte_go112.go
generated
vendored
@@ -1,44 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build go1.12
// +build safe appengine

package codec

import "reflect"

type mapIter struct {
	t      *reflect.MapIter
	m      reflect.Value
	values bool
}

func (t *mapIter) ValidKV() (r bool) {
	return true
}

func (t *mapIter) Next() (r bool) {
	return t.t.Next()
}

func (t *mapIter) Key() reflect.Value {
	return t.t.Key()
}

func (t *mapIter) Value() (r reflect.Value) {
	if t.values {
		return t.t.Value()
	}
	return
}

func (t *mapIter) Done() {}

func mapRange(t *mapIter, m, k, v reflect.Value, values bool) {
	*t = mapIter{
		m:      m,
		t:      m.MapRange(),
		values: values,
	}
}
48
vendor/github.com/ugorji/go/codec/goversion_maprange_lt_go112.go
generated
vendored
@@ -1,48 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build go1.7
// +build !go1.12
// +build safe appengine

package codec

import "reflect"

type mapIter struct {
	m      reflect.Value
	keys   []reflect.Value
	j      int
	values bool
}

func (t *mapIter) ValidKV() (r bool) {
	return true
}

func (t *mapIter) Next() (r bool) {
	t.j++
	return t.j < len(t.keys)
}

func (t *mapIter) Key() reflect.Value {
	return t.keys[t.j]
}

func (t *mapIter) Value() (r reflect.Value) {
	if t.values {
		return t.m.MapIndex(t.keys[t.j])
	}
	return
}

func (t *mapIter) Done() {}

func mapRange(t *mapIter, m, k, v reflect.Value, values bool) {
	*t = mapIter{
		m:      m,
		keys:   m.MapKeys(),
		values: values,
		j:      -1,
	}
}
8
vendor/github.com/ugorji/go/codec/goversion_unexportedembeddedptr_gte_go110.go
generated
vendored
@@ -1,8 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build go1.10

package codec

const allowSetUnexportedEmbeddedPtr = false
8
vendor/github.com/ugorji/go/codec/goversion_unexportedembeddedptr_lt_go110.go
generated
vendored
@@ -1,8 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build !go1.10

package codec

const allowSetUnexportedEmbeddedPtr = true
17
vendor/github.com/ugorji/go/codec/goversion_unsupported_lt_go14.go
generated
vendored
@@ -1,17 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build !go1.4

package codec

// This codec package will only work for go1.4 and above.
// This is for the following reasons:
//   - go 1.4 was released in 2014
//   - go runtime is written fully in go
//   - interface only holds pointers
//   - reflect.Value is stabilized as 3 words

func init() {
	panic("codec: go 1.3 and below are not supported")
}
10
vendor/github.com/ugorji/go/codec/goversion_vendor_eq_go15.go
generated
vendored
@@ -1,10 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build go1.5,!go1.6

package codec

import "os"

var genCheckVendor = os.Getenv("GO15VENDOREXPERIMENT") == "1"
10
vendor/github.com/ugorji/go/codec/goversion_vendor_eq_go16.go
generated
vendored
@@ -1,10 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build go1.6,!go1.7

package codec

import "os"

var genCheckVendor = os.Getenv("GO15VENDOREXPERIMENT") != "0"
8
vendor/github.com/ugorji/go/codec/goversion_vendor_gte_go17.go
generated
vendored
@@ -1,8 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build go1.7

package codec

const genCheckVendor = true
8
vendor/github.com/ugorji/go/codec/goversion_vendor_lt_go15.go
generated
vendored
@@ -1,8 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build !go1.5

package codec

var genCheckVendor = false
2815
vendor/github.com/ugorji/go/codec/helper.go
generated
vendored
File diff suppressed because it is too large
0
vendor/github.com/ugorji/go/codec/helper.s
generated
vendored
119
vendor/github.com/ugorji/go/codec/helper_internal.go
generated
vendored
@@ -1,119 +0,0 @@
|
||||
// Copyright (c) 2012-2015 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// All non-std package dependencies live in this file,
|
||||
// so porting to different environment is easy (just update functions).
|
||||
|
||||
func pruneSignExt(v []byte, pos bool) (n int) {
|
||||
if len(v) < 2 {
|
||||
} else if pos && v[0] == 0 {
|
||||
for ; v[n] == 0 && n+1 < len(v) && (v[n+1]&(1<<7) == 0); n++ {
|
||||
}
|
||||
} else if !pos && v[0] == 0xff {
|
||||
for ; v[n] == 0xff && n+1 < len(v) && (v[n+1]&(1<<7) != 0); n++ {
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// validate that this function is correct ...
|
||||
// culled from OGRE (Object-Oriented Graphics Rendering Engine)
|
||||
// function: halfToFloatI (http://stderr.org/doc/ogre-doc/api/OgreBitwise_8h-source.html)
|
||||
func halfFloatToFloatBits(yy uint16) (d uint32) {
|
||||
y := uint32(yy)
|
||||
s := (y >> 15) & 0x01
|
||||
e := (y >> 10) & 0x1f
|
||||
m := y & 0x03ff
|
||||
|
||||
if e == 0 {
|
||||
if m == 0 { // plu or minus 0
|
||||
return s << 31
|
||||
}
|
||||
// Denormalized number -- renormalize it
|
||||
for (m & 0x00000400) == 0 {
|
||||
m <<= 1
|
||||
e -= 1
|
||||
}
|
||||
e += 1
|
||||
const zz uint32 = 0x0400
|
||||
m &= ^zz
|
||||
} else if e == 31 {
|
||||
if m == 0 { // Inf
|
||||
return (s << 31) | 0x7f800000
|
||||
}
|
||||
return (s << 31) | 0x7f800000 | (m << 13) // NaN
|
||||
}
|
||||
e = e + (127 - 15)
|
||||
m = m << 13
|
||||
return (s << 31) | (e << 23) | m
|
||||
}
|
||||
|
||||
// GrowCap will return a new capacity for a slice, given the following:
|
||||
// - oldCap: current capacity
|
||||
// - unit: in-memory size of an element
|
||||
// - num: number of elements to add
|
||||
func growCap(oldCap, unit, num int) (newCap int) {
|
||||
// appendslice logic (if cap < 1024, *2, else *1.25):
|
||||
// leads to many copy calls, especially when copying bytes.
|
||||
// bytes.Buffer model (2*cap + n): much better for bytes.
|
||||
// smarter way is to take the byte-size of the appended element(type) into account
|
||||
|
||||
// maintain 2 thresholds:
|
||||
// t1: if cap <= t1, newcap = 2x
|
||||
// t2: if cap <= t2, newcap = 1.5x
|
||||
// else newcap = 1.25x
|
||||
//
|
||||
// t1, t2 >= 1024 always.
|
||||
// This means that, if unit size >= 16, then always do 2x or 1.25x (ie t1, t2, t3 are all same)
|
||||
//
|
||||
// With this, appending for bytes increase by:
|
||||
// 100% up to 4K
|
||||
// 75% up to 16K
|
||||
// 25% beyond that
|
||||
|
||||
// unit can be 0 e.g. for struct{}{}; handle that appropriately
|
||||
if unit <= 0 {
|
||||
if uint64(^uint(0)) == ^uint64(0) { // 64-bit
|
||||
var maxInt64 uint64 = 1<<63 - 1 // prevent failure with overflow int on 32-bit (386)
|
||||
return int(maxInt64) // math.MaxInt64
|
||||
}
|
||||
return 1<<31 - 1 // math.MaxInt32
|
||||
}
|
||||
|
||||
// handle if num < 0, cap=0, etc.
|
||||
|
||||
var t1, t2 int // thresholds
|
||||
if unit <= 4 {
|
||||
t1, t2 = 4*1024, 16*1024
|
||||
} else if unit <= 16 {
|
||||
t1, t2 = unit*1*1024, unit*4*1024
|
||||
} else {
|
||||
t1, t2 = 1024, 1024
|
||||
}
|
||||
|
||||
if oldCap <= 0 {
|
||||
newCap = 2
|
||||
} else if oldCap <= t1 { // [0,t1]
|
||||
newCap = oldCap * 8 / 4
|
||||
} else if oldCap <= t2 { // (t1,t2]
|
||||
newCap = oldCap * 6 / 4
|
||||
} else { // (t2,infinity]
|
||||
newCap = oldCap * 5 / 4
|
||||
}
|
||||
|
||||
if num > 0 && newCap < num+oldCap {
|
||||
newCap = num + oldCap
|
||||
}
|
||||
|
||||
// ensure newCap takes multiples of a cache line (size is a multiple of 64)
|
||||
t1 = newCap * unit
|
||||
t2 = t1 % 64
|
||||
if t2 != 0 {
|
||||
t1 += 64 - t2
|
||||
newCap = t1 / unit
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
409
vendor/github.com/ugorji/go/codec/helper_not_unsafe.go
generated
vendored
@@ -1,409 +0,0 @@
|
||||
// +build !go1.7 safe appengine
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
)
|
||||
|
||||
const safeMode = true
|
||||
|
||||
// stringView returns a view of the []byte as a string.
|
||||
// In unsafe mode, it doesn't incur allocation and copying caused by conversion.
|
||||
// In regular safe mode, it is an allocation and copy.
|
||||
//
|
||||
// Usage: Always maintain a reference to v while result of this call is in use,
|
||||
// and call keepAlive4BytesView(v) at point where done with view.
|
||||
func stringView(v []byte) string {
|
||||
return string(v)
|
||||
}
|
||||
|
||||
// bytesView returns a view of the string as a []byte.
|
||||
// In unsafe mode, it doesn't incur allocation and copying caused by conversion.
|
||||
// In regular safe mode, it is an allocation and copy.
|
||||
//
|
||||
// Usage: Always maintain a reference to v while result of this call is in use,
|
||||
// and call keepAlive4BytesView(v) at point where done with view.
|
||||
func bytesView(v string) []byte {
|
||||
return []byte(v)
|
||||
}
|
||||
|
||||
// isNil says whether the value v is nil.
|
||||
// This applies to references like map/ptr/unsafepointer/chan/func,
|
||||
// and non-reference values like interface/slice.
|
||||
func isNil(v interface{}) (rv reflect.Value, isnil bool) {
|
||||
rv = rv4i(v)
|
||||
if isnilBitset.isset(byte(rv.Kind())) {
|
||||
isnil = rv.IsNil()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func rv4i(i interface{}) reflect.Value {
|
||||
return reflect.ValueOf(i)
|
||||
}
|
||||
|
||||
func rv2i(rv reflect.Value) interface{} {
|
||||
return rv.Interface()
|
||||
}
|
||||
|
||||
func rvIsNil(rv reflect.Value) bool {
|
||||
return rv.IsNil()
|
||||
}
|
||||
|
||||
func rvSetSliceLen(rv reflect.Value, length int) {
|
||||
rv.SetLen(length)
|
||||
}
|
||||
|
||||
func rvZeroAddrK(t reflect.Type, k reflect.Kind) reflect.Value {
|
||||
return reflect.New(t).Elem()
|
||||
}
|
||||
|
||||
func rvConvert(v reflect.Value, t reflect.Type) (rv reflect.Value) {
|
||||
return v.Convert(t)
|
||||
}
|
||||
|
||||
func rt2id(rt reflect.Type) uintptr {
|
||||
return rv4i(rt).Pointer()
|
||||
}
|
||||
|
||||
func i2rtid(i interface{}) uintptr {
|
||||
return rv4i(reflect.TypeOf(i)).Pointer()
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
|
||||
func isEmptyValue(v reflect.Value, tinfos *TypeInfos, deref, checkStruct bool) bool {
|
||||
switch v.Kind() {
|
||||
case reflect.Invalid:
|
||||
return true
|
||||
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
|
||||
return v.Len() == 0
|
||||
case reflect.Bool:
|
||||
return !v.Bool()
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return v.Int() == 0
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return v.Uint() == 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v.Float() == 0
|
||||
case reflect.Interface, reflect.Ptr:
|
||||
if deref {
|
||||
if v.IsNil() {
|
||||
return true
|
||||
}
|
||||
return isEmptyValue(v.Elem(), tinfos, deref, checkStruct)
|
||||
}
|
||||
return v.IsNil()
|
||||
case reflect.Struct:
|
||||
return isEmptyStruct(v, tinfos, deref, checkStruct)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
type atomicClsErr struct {
|
||||
v atomic.Value
|
||||
}
|
||||
|
||||
func (x *atomicClsErr) load() (e clsErr) {
|
||||
if i := x.v.Load(); i != nil {
|
||||
e = i.(clsErr)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (x *atomicClsErr) store(p clsErr) {
|
||||
x.v.Store(p)
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
type atomicTypeInfoSlice struct { // expected to be 2 words
|
||||
v atomic.Value
|
||||
}
|
||||
|
||||
func (x *atomicTypeInfoSlice) load() (e []rtid2ti) {
|
||||
if i := x.v.Load(); i != nil {
|
||||
e = i.([]rtid2ti)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (x *atomicTypeInfoSlice) store(p []rtid2ti) {
|
||||
x.v.Store(p)
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
type atomicRtidFnSlice struct { // expected to be 2 words
|
||||
v atomic.Value
|
||||
}
|
||||
|
||||
func (x *atomicRtidFnSlice) load() (e []codecRtidFn) {
|
||||
if i := x.v.Load(); i != nil {
|
||||
e = i.([]codecRtidFn)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (x *atomicRtidFnSlice) store(p []codecRtidFn) {
|
||||
x.v.Store(p)
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
func (n *fauxUnion) ru() reflect.Value {
|
||||
return rv4i(&n.u).Elem()
|
||||
}
|
||||
func (n *fauxUnion) ri() reflect.Value {
|
||||
return rv4i(&n.i).Elem()
|
||||
}
|
||||
func (n *fauxUnion) rf() reflect.Value {
|
||||
return rv4i(&n.f).Elem()
|
||||
}
|
||||
func (n *fauxUnion) rl() reflect.Value {
|
||||
return rv4i(&n.l).Elem()
|
||||
}
|
||||
func (n *fauxUnion) rs() reflect.Value {
|
||||
return rv4i(&n.s).Elem()
|
||||
}
|
||||
func (n *fauxUnion) rt() reflect.Value {
|
||||
return rv4i(&n.t).Elem()
|
||||
}
|
||||
func (n *fauxUnion) rb() reflect.Value {
|
||||
return rv4i(&n.b).Elem()
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
func rvSetBytes(rv reflect.Value, v []byte) {
|
||||
rv.SetBytes(v)
|
||||
}
|
||||
|
||||
func rvSetString(rv reflect.Value, v string) {
|
||||
rv.SetString(v)
|
||||
}
|
||||
|
||||
func rvSetBool(rv reflect.Value, v bool) {
|
||||
rv.SetBool(v)
|
||||
}
|
||||
|
||||
func rvSetTime(rv reflect.Value, v time.Time) {
|
||||
rv.Set(rv4i(v))
|
||||
}
|
||||
|
||||
func rvSetFloat32(rv reflect.Value, v float32) {
|
||||
rv.SetFloat(float64(v))
|
||||
}
|
||||
|
||||
func rvSetFloat64(rv reflect.Value, v float64) {
|
||||
rv.SetFloat(v)
|
||||
}
|
||||
|
||||
func rvSetInt(rv reflect.Value, v int) {
|
||||
rv.SetInt(int64(v))
|
||||
}
|
||||
|
||||
func rvSetInt8(rv reflect.Value, v int8) {
|
||||
rv.SetInt(int64(v))
|
||||
}
|
||||
|
||||
func rvSetInt16(rv reflect.Value, v int16) {
|
||||
rv.SetInt(int64(v))
|
||||
}
|
||||
|
||||
func rvSetInt32(rv reflect.Value, v int32) {
|
||||
rv.SetInt(int64(v))
|
||||
}
|
||||
|
||||
func rvSetInt64(rv reflect.Value, v int64) {
|
||||
rv.SetInt(v)
|
||||
}
|
||||
|
||||
func rvSetUint(rv reflect.Value, v uint) {
|
||||
rv.SetUint(uint64(v))
|
||||
}
|
||||
|
||||
func rvSetUintptr(rv reflect.Value, v uintptr) {
|
||||
rv.SetUint(uint64(v))
|
||||
}
|
||||
|
||||
func rvSetUint8(rv reflect.Value, v uint8) {
|
||||
rv.SetUint(uint64(v))
|
||||
}
|
||||
|
||||
func rvSetUint16(rv reflect.Value, v uint16) {
|
||||
rv.SetUint(uint64(v))
|
||||
}
|
||||
|
||||
func rvSetUint32(rv reflect.Value, v uint32) {
|
||||
rv.SetUint(uint64(v))
|
||||
}
|
||||
|
||||
func rvSetUint64(rv reflect.Value, v uint64) {
|
||||
rv.SetUint(v)
|
||||
}
|
||||
|
||||
// ----------------
|
||||
|
||||
// rvSetDirect is rv.Set for all kinds except reflect.Interface
|
||||
func rvSetDirect(rv reflect.Value, v reflect.Value) {
|
||||
rv.Set(v)
|
||||
}
|
||||
|
||||
// rvSlice returns a slice of the slice of lenth
|
||||
func rvSlice(rv reflect.Value, length int) reflect.Value {
|
||||
return rv.Slice(0, length)
|
||||
}
|
||||
|
||||
// ----------------
|
||||
|
||||
func rvSliceIndex(rv reflect.Value, i int, ti *typeInfo) reflect.Value {
|
||||
return rv.Index(i)
|
||||
}
|
||||
|
||||
func rvGetSliceLen(rv reflect.Value) int {
|
||||
return rv.Len()
|
||||
}
|
||||
|
||||
func rvGetSliceCap(rv reflect.Value) int {
|
||||
return rv.Cap()
|
||||
}
|
||||
|
||||
func rvGetArrayBytesRO(rv reflect.Value, scratch []byte) (bs []byte) {
|
||||
l := rv.Len()
|
||||
if rv.CanAddr() {
|
||||
return rvGetBytes(rv.Slice(0, l))
|
||||
}
|
||||
|
||||
if l <= cap(scratch) {
|
||||
bs = scratch[:l]
|
||||
} else {
|
||||
bs = make([]byte, l)
|
||||
}
|
||||
reflect.Copy(rv4i(bs), rv)
|
||||
return
|
||||
}
|
||||
|
||||
func rvGetArray4Slice(rv reflect.Value) (v reflect.Value) {
|
||||
v = rvZeroAddrK(reflectArrayOf(rvGetSliceLen(rv), rv.Type().Elem()), reflect.Array)
|
||||
reflect.Copy(v, rv)
|
||||
return
|
||||
}
|
||||
|
||||
func rvGetSlice4Array(rv reflect.Value, tslice reflect.Type) (v reflect.Value) {
|
||||
return rv.Slice(0, rv.Len())
|
||||
}
|
||||
|
||||
func rvCopySlice(dest, src reflect.Value) {
|
||||
reflect.Copy(dest, src)
|
||||
}
|
||||
|
||||
// ------------
|
||||
|
||||
func rvGetBool(rv reflect.Value) bool {
|
||||
return rv.Bool()
|
||||
}
|
||||
|
||||
func rvGetBytes(rv reflect.Value) []byte {
|
||||
return rv.Bytes()
|
||||
}
|
||||
|
||||
func rvGetTime(rv reflect.Value) time.Time {
|
||||
return rv2i(rv).(time.Time)
|
||||
}
|
||||
|
||||
func rvGetString(rv reflect.Value) string {
|
||||
return rv.String()
|
||||
}
|
||||
|
||||
func rvGetFloat64(rv reflect.Value) float64 {
|
||||
return rv.Float()
|
||||
}
|
||||
|
||||
func rvGetFloat32(rv reflect.Value) float32 {
|
||||
return float32(rv.Float())
|
||||
}
|
||||
|
||||
func rvGetInt(rv reflect.Value) int {
|
||||
return int(rv.Int())
|
||||
}
|
||||
|
||||
func rvGetInt8(rv reflect.Value) int8 {
|
||||
return int8(rv.Int())
|
||||
}
|
||||
|
||||
func rvGetInt16(rv reflect.Value) int16 {
|
||||
return int16(rv.Int())
|
||||
}
|
||||
|
||||
func rvGetInt32(rv reflect.Value) int32 {
|
||||
return int32(rv.Int())
|
||||
}
|
||||
|
||||
func rvGetInt64(rv reflect.Value) int64 {
|
||||
return rv.Int()
|
||||
}
|
||||
|
||||
func rvGetUint(rv reflect.Value) uint {
|
||||
return uint(rv.Uint())
|
||||
}
|
||||
|
||||
func rvGetUint8(rv reflect.Value) uint8 {
|
||||
return uint8(rv.Uint())
|
||||
}
|
||||
|
||||
func rvGetUint16(rv reflect.Value) uint16 {
|
||||
return uint16(rv.Uint())
|
||||
}
|
||||
|
||||
func rvGetUint32(rv reflect.Value) uint32 {
|
||||
return uint32(rv.Uint())
|
||||
}
|
||||
|
||||
func rvGetUint64(rv reflect.Value) uint64 {
|
||||
return rv.Uint()
|
||||
}
|
||||
|
||||
func rvGetUintptr(rv reflect.Value) uintptr {
|
||||
return uintptr(rv.Uint())
|
||||
}
|
||||
|
||||
// ------------ map range and map indexing ----------
|
||||
|
||||
func mapGet(m, k, v reflect.Value) (vv reflect.Value) {
|
||||
return m.MapIndex(k)
|
||||
}
|
||||
|
||||
func mapSet(m, k, v reflect.Value) {
|
||||
m.SetMapIndex(k, v)
|
||||
}
|
||||
|
||||
func mapDelete(m, k reflect.Value) {
|
||||
m.SetMapIndex(k, reflect.Value{})
|
||||
}
|
||||
|
||||
// return an addressable reflect value that can be used in mapRange and mapGet operations.
|
||||
//
|
||||
// all calls to mapGet or mapRange will call here to get an addressable reflect.Value.
|
||||
func mapAddressableRV(t reflect.Type, k reflect.Kind) (r reflect.Value) {
|
||||
return // reflect.New(t).Elem()
|
||||
}
|
||||
|
||||
// ---------- ENCODER optimized ---------------
|
||||
|
||||
func (e *Encoder) jsondriver() *jsonEncDriver {
|
||||
return e.e.(*jsonEncDriver)
|
||||
}
|
||||
|
||||
// ---------- DECODER optimized ---------------
|
||||
|
||||
func (d *Decoder) checkBreak() bool {
|
||||
return d.d.CheckBreak()
|
||||
}
|
||||
|
||||
func (d *Decoder) jsondriver() *jsonDecDriver {
|
||||
return d.d.(*jsonDecDriver)
|
||||
}
|
||||
56
vendor/github.com/ugorji/go/codec/helper_test.go
generated
vendored
@@ -1,56 +0,0 @@
|
||||
// Copyright (c) 2012-2015 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// All non-std package dependencies related to testing live in this file,
|
||||
// so porting to different environment is easy (just update functions).
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// --- these functions are used by both benchmarks and tests
|
||||
|
||||
var errDeepEqualNotMatch = errors.New("Not Match")
|
||||
|
||||
func deepEqual(v1, v2 interface{}) (err error) {
|
||||
if !reflect.DeepEqual(v1, v2) {
|
||||
err = errDeepEqualNotMatch
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func approxDataSize(rv reflect.Value) (sum int) {
|
||||
switch rk := rv.Kind(); rk {
|
||||
case reflect.Invalid:
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
sum += int(rv.Type().Size())
|
||||
sum += approxDataSize(rv.Elem())
|
||||
case reflect.Slice:
|
||||
sum += int(rv.Type().Size())
|
||||
for j := 0; j < rv.Len(); j++ {
|
||||
sum += approxDataSize(rv.Index(j))
|
||||
}
|
||||
case reflect.String:
|
||||
sum += int(rv.Type().Size())
|
||||
sum += rv.Len()
|
||||
case reflect.Map:
|
||||
sum += int(rv.Type().Size())
|
||||
for _, mk := range rv.MapKeys() {
|
||||
sum += approxDataSize(mk)
|
||||
sum += approxDataSize(rv.MapIndex(mk))
|
||||
}
|
||||
case reflect.Struct:
|
||||
//struct size already includes the full data size.
|
||||
//sum += int(rv.Type().Size())
|
||||
for j := 0; j < rv.NumField(); j++ {
|
||||
sum += approxDataSize(rv.Field(j))
|
||||
}
|
||||
default:
|
||||
//pure value types
|
||||
sum += int(rv.Type().Size())
|
||||
}
|
||||
return
|
||||
}
|
||||
867
vendor/github.com/ugorji/go/codec/helper_unsafe.go
generated
vendored
@@ -1,867 +0,0 @@
|
||||
// +build !safe
|
||||
// +build !appengine
|
||||
// +build go1.7
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// This file has unsafe variants of some helper methods.
|
||||
// NOTE: See helper_not_unsafe.go for the usage information.
|
||||
|
||||
// For reflect.Value code, we decided to do the following:
|
||||
// - if we know the kind, we can elide conditional checks for
|
||||
// - SetXXX (Int, Uint, String, Bool, etc)
|
||||
// - SetLen
|
||||
//
|
||||
// We can also optimize
|
||||
// - IsNil
|
||||
|
||||
const safeMode = false
|
||||
|
||||
// keep in sync with GO_ROOT/src/reflect/value.go
|
||||
const (
|
||||
unsafeFlagIndir = 1 << 7
|
||||
unsafeFlagAddr = 1 << 8
|
||||
unsafeFlagKindMask = (1 << 5) - 1 // 5 bits for 27 kinds (up to 31)
|
||||
// unsafeTypeKindDirectIface = 1 << 5
|
||||
)
|
||||
|
||||
type unsafeString struct {
|
||||
Data unsafe.Pointer
|
||||
Len int
|
||||
}
|
||||
|
||||
type unsafeSlice struct {
|
||||
Data unsafe.Pointer
|
||||
Len int
|
||||
Cap int
|
||||
}
|
||||
|
||||
type unsafeIntf struct {
|
||||
typ unsafe.Pointer
|
||||
word unsafe.Pointer
|
||||
}
|
||||
|
||||
type unsafeReflectValue struct {
|
||||
typ unsafe.Pointer
|
||||
ptr unsafe.Pointer
|
||||
flag uintptr
|
||||
}
|
||||
|
||||
func stringView(v []byte) string {
|
||||
if len(v) == 0 {
|
||||
return ""
|
||||
}
|
||||
bx := (*unsafeSlice)(unsafe.Pointer(&v))
|
||||
return *(*string)(unsafe.Pointer(&unsafeString{bx.Data, bx.Len}))
|
||||
}
|
||||
|
||||
func bytesView(v string) []byte {
|
||||
if len(v) == 0 {
|
||||
return zeroByteSlice
|
||||
}
|
||||
sx := (*unsafeString)(unsafe.Pointer(&v))
|
||||
return *(*[]byte)(unsafe.Pointer(&unsafeSlice{sx.Data, sx.Len, sx.Len}))
|
||||
}
|
||||
|
||||
// // isNilRef says whether the interface is a nil reference or not.
|
||||
// //
|
||||
// // A reference here is a pointer-sized reference i.e. map, ptr, chan, func, unsafepointer.
|
||||
// // It is optional to extend this to also check if slices or interfaces are nil also.
|
||||
// //
|
||||
// // NOTE: There is no global way of checking if an interface is nil.
|
||||
// // For true references (map, ptr, func, chan), you can just look
|
||||
// // at the word of the interface.
|
||||
// // However, for slices, you have to dereference
|
||||
// // the word, and get a pointer to the 3-word interface value.
|
||||
// func isNilRef(v interface{}) (rv reflect.Value, isnil bool) {
|
||||
// isnil = ((*unsafeIntf)(unsafe.Pointer(&v))).word == nil
|
||||
// return
|
||||
// }
|
||||
|
||||
func isNil(v interface{}) (rv reflect.Value, isnil bool) {
|
||||
var ui = (*unsafeIntf)(unsafe.Pointer(&v))
|
||||
if ui.word == nil {
|
||||
isnil = true
|
||||
return
|
||||
}
|
||||
rv = rv4i(v) // reflect.value is cheap and inline'able
|
||||
tk := rv.Kind()
|
||||
isnil = (tk == reflect.Interface || tk == reflect.Slice) && *(*unsafe.Pointer)(ui.word) == nil
|
||||
return
|
||||
}
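To see why there is no single check for a nil interface, here is a tiny standalone example (not from this package): an interface holding a typed nil pointer is itself non-nil, which is exactly the distinction the word/dereference checks above are making.

package main

import "fmt"

func main() {
	var p *int            // nil pointer
	var i interface{} = p // interface now carries a type word plus a nil data word
	fmt.Println(p == nil) // true
	fmt.Println(i == nil) // false: the interface itself is not nil
}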
|
||||
|
||||
func rv2ptr(urv *unsafeReflectValue) (ptr unsafe.Pointer) {
|
||||
// true references (map, func, chan, ptr - NOT slice) may be double-referenced? as flagIndir
|
||||
if refBitset.isset(byte(urv.flag&unsafeFlagKindMask)) && urv.flag&unsafeFlagIndir != 0 {
|
||||
ptr = *(*unsafe.Pointer)(urv.ptr)
|
||||
} else {
|
||||
ptr = urv.ptr
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func rv4i(i interface{}) (rv reflect.Value) {
|
||||
// Unfortunately, we cannot get the "kind" of the interface directly here.
|
||||
// We need the 'rtype', whose structure changes in different go versions.
|
||||
// Finally, it's not clear that there is benefit to reimplementing it,
|
||||
// as the "escapes(i)" is not clearly expensive since we want i to exist on the heap.
|
||||
|
||||
return reflect.ValueOf(i)
|
||||
}
|
||||
|
||||
func rv2i(rv reflect.Value) interface{} {
|
||||
// We tap into implementation details from
|
||||
// the go stdlib source reflect/value.go, and trim the implementation.
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*interface{})(unsafe.Pointer(&unsafeIntf{typ: urv.typ, word: rv2ptr(urv)}))
|
||||
}
|
||||
|
||||
func rvIsNil(rv reflect.Value) bool {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
if urv.flag&unsafeFlagIndir != 0 {
|
||||
return *(*unsafe.Pointer)(urv.ptr) == nil
|
||||
}
|
||||
return urv.ptr == nil
|
||||
}
|
||||
|
||||
func rvSetSliceLen(rv reflect.Value, length int) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
(*unsafeString)(urv.ptr).Len = length
|
||||
}
|
||||
|
||||
func rvZeroAddrK(t reflect.Type, k reflect.Kind) (rv reflect.Value) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
urv.flag = uintptr(k) | unsafeFlagIndir | unsafeFlagAddr
|
||||
urv.typ = ((*unsafeIntf)(unsafe.Pointer(&t))).word
|
||||
urv.ptr = unsafe_New(urv.typ)
|
||||
return
|
||||
}
|
||||
|
||||
func rvConvert(v reflect.Value, t reflect.Type) (rv reflect.Value) {
|
||||
uv := (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*urv = *uv
|
||||
urv.typ = ((*unsafeIntf)(unsafe.Pointer(&t))).word
|
||||
return
|
||||
}
|
||||
|
||||
func rt2id(rt reflect.Type) uintptr {
|
||||
return uintptr(((*unsafeIntf)(unsafe.Pointer(&rt))).word)
|
||||
}
|
||||
|
||||
func i2rtid(i interface{}) uintptr {
|
||||
return uintptr(((*unsafeIntf)(unsafe.Pointer(&i))).typ)
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
|
||||
func isEmptyValue(v reflect.Value, tinfos *TypeInfos, deref, checkStruct bool) bool {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
if urv.flag == 0 {
|
||||
return true
|
||||
}
|
||||
switch v.Kind() {
|
||||
case reflect.Invalid:
|
||||
return true
|
||||
case reflect.String:
|
||||
return (*unsafeString)(urv.ptr).Len == 0
|
||||
case reflect.Slice:
|
||||
return (*unsafeSlice)(urv.ptr).Len == 0
|
||||
case reflect.Bool:
|
||||
return !*(*bool)(urv.ptr)
|
||||
case reflect.Int:
|
||||
return *(*int)(urv.ptr) == 0
|
||||
case reflect.Int8:
|
||||
return *(*int8)(urv.ptr) == 0
|
||||
case reflect.Int16:
|
||||
return *(*int16)(urv.ptr) == 0
|
||||
case reflect.Int32:
|
||||
return *(*int32)(urv.ptr) == 0
|
||||
case reflect.Int64:
|
||||
return *(*int64)(urv.ptr) == 0
|
||||
case reflect.Uint:
|
||||
return *(*uint)(urv.ptr) == 0
|
||||
case reflect.Uint8:
|
||||
return *(*uint8)(urv.ptr) == 0
|
||||
case reflect.Uint16:
|
||||
return *(*uint16)(urv.ptr) == 0
|
||||
case reflect.Uint32:
|
||||
return *(*uint32)(urv.ptr) == 0
|
||||
case reflect.Uint64:
|
||||
return *(*uint64)(urv.ptr) == 0
|
||||
case reflect.Uintptr:
|
||||
return *(*uintptr)(urv.ptr) == 0
|
||||
case reflect.Float32:
|
||||
return *(*float32)(urv.ptr) == 0
|
||||
case reflect.Float64:
|
||||
return *(*float64)(urv.ptr) == 0
|
||||
case reflect.Interface:
|
||||
isnil := urv.ptr == nil || *(*unsafe.Pointer)(urv.ptr) == nil
|
||||
if deref {
|
||||
if isnil {
|
||||
return true
|
||||
}
|
||||
return isEmptyValue(v.Elem(), tinfos, deref, checkStruct)
|
||||
}
|
||||
return isnil
|
||||
case reflect.Ptr:
|
||||
// isnil := urv.ptr == nil // (not sufficient, as a pointer value encodes the type)
|
||||
isnil := urv.ptr == nil || *(*unsafe.Pointer)(urv.ptr) == nil
|
||||
if deref {
|
||||
if isnil {
|
||||
return true
|
||||
}
|
||||
return isEmptyValue(v.Elem(), tinfos, deref, checkStruct)
|
||||
}
|
||||
return isnil
|
||||
case reflect.Struct:
|
||||
return isEmptyStruct(v, tinfos, deref, checkStruct)
|
||||
case reflect.Map, reflect.Array, reflect.Chan:
|
||||
return v.Len() == 0
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
|
||||
// atomicXXX is expected to be 2 words (for symmetry with atomic.Value)
|
||||
//
|
||||
// Note that we do not atomically load/store length and data pointer separately,
|
||||
// as this could lead to some races. Instead, we atomically load/store cappedSlice.
|
||||
//
|
||||
// Note: with atomic.(Load|Store)Pointer, we MUST work with an unsafe.Pointer directly.
|
||||
|
||||
// ----------------------
|
||||
type atomicTypeInfoSlice struct {
|
||||
v unsafe.Pointer // *[]rtid2ti
|
||||
_ uint64 // padding (atomicXXX expected to be 2 words)
|
||||
}
|
||||
|
||||
func (x *atomicTypeInfoSlice) load() (s []rtid2ti) {
|
||||
x2 := atomic.LoadPointer(&x.v)
|
||||
if x2 != nil {
|
||||
s = *(*[]rtid2ti)(x2)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (x *atomicTypeInfoSlice) store(p []rtid2ti) {
|
||||
atomic.StorePointer(&x.v, unsafe.Pointer(&p))
|
||||
}
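The same publish/consume pattern in isolation (all names below are invented for the sketch): the whole slice header is swapped through a single pointer, so readers never observe a mismatched length and data pointer.

package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

// atomicStrings stores a *[]string behind an unsafe.Pointer so that the
// three-word slice header is published and read as one atomic unit.
type atomicStrings struct {
	v unsafe.Pointer // *[]string
}

func (a *atomicStrings) store(s []string) {
	atomic.StorePointer(&a.v, unsafe.Pointer(&s))
}

func (a *atomicStrings) load() (s []string) {
	if p := atomic.LoadPointer(&a.v); p != nil {
		s = *(*[]string)(p)
	}
	return
}

func main() {
	var a atomicStrings
	a.store([]string{"x", "y"})
	fmt.Println(a.load()) // [x y]
}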
|
||||
|
||||
// --------------------------
|
||||
type atomicRtidFnSlice struct {
|
||||
v unsafe.Pointer // *[]codecRtidFn
|
||||
_ uint64 // padding (atomicXXX expected to be 2 words) (make 1 word so JsonHandle fits)
|
||||
}
|
||||
|
||||
func (x *atomicRtidFnSlice) load() (s []codecRtidFn) {
|
||||
x2 := atomic.LoadPointer(&x.v)
|
||||
if x2 != nil {
|
||||
s = *(*[]codecRtidFn)(x2)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (x *atomicRtidFnSlice) store(p []codecRtidFn) {
|
||||
atomic.StorePointer(&x.v, unsafe.Pointer(&p))
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
type atomicClsErr struct {
|
||||
v unsafe.Pointer // *clsErr
|
||||
_ uint64 // padding (atomicXXX expected to be 2 words)
|
||||
}
|
||||
|
||||
func (x *atomicClsErr) load() (e clsErr) {
|
||||
x2 := (*clsErr)(atomic.LoadPointer(&x.v))
|
||||
if x2 != nil {
|
||||
e = *x2
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (x *atomicClsErr) store(p clsErr) {
|
||||
atomic.StorePointer(&x.v, unsafe.Pointer(&p))
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
|
||||
// to create a reflect.Value for each member field of fauxUnion,
|
||||
// we first create a global fauxUnion, and create reflect.Value
|
||||
// for them all.
|
||||
// This way, we have the flags and type in the reflect.Value.
|
||||
// Then, when a reflect.Value is called, we just copy it,
|
||||
// update the ptr to the fauxUnion's, and return it.
|
||||
|
||||
type unsafeDecNakedWrapper struct {
|
||||
fauxUnion
|
||||
ru, ri, rf, rl, rs, rb, rt reflect.Value // mapping to the primitives above
|
||||
}
|
||||
|
||||
func (n *unsafeDecNakedWrapper) init() {
|
||||
n.ru = rv4i(&n.u).Elem()
|
||||
n.ri = rv4i(&n.i).Elem()
|
||||
n.rf = rv4i(&n.f).Elem()
|
||||
n.rl = rv4i(&n.l).Elem()
|
||||
n.rs = rv4i(&n.s).Elem()
|
||||
n.rt = rv4i(&n.t).Elem()
|
||||
n.rb = rv4i(&n.b).Elem()
|
||||
// n.rr[] = rv4i(&n.)
|
||||
}
|
||||
|
||||
var defUnsafeDecNakedWrapper unsafeDecNakedWrapper
|
||||
|
||||
func init() {
|
||||
defUnsafeDecNakedWrapper.init()
|
||||
}
|
||||
|
||||
func (n *fauxUnion) ru() (v reflect.Value) {
|
||||
v = defUnsafeDecNakedWrapper.ru
|
||||
((*unsafeReflectValue)(unsafe.Pointer(&v))).ptr = unsafe.Pointer(&n.u)
|
||||
return
|
||||
}
|
||||
func (n *fauxUnion) ri() (v reflect.Value) {
|
||||
v = defUnsafeDecNakedWrapper.ri
|
||||
((*unsafeReflectValue)(unsafe.Pointer(&v))).ptr = unsafe.Pointer(&n.i)
|
||||
return
|
||||
}
|
||||
func (n *fauxUnion) rf() (v reflect.Value) {
|
||||
v = defUnsafeDecNakedWrapper.rf
|
||||
((*unsafeReflectValue)(unsafe.Pointer(&v))).ptr = unsafe.Pointer(&n.f)
|
||||
return
|
||||
}
|
||||
func (n *fauxUnion) rl() (v reflect.Value) {
|
||||
v = defUnsafeDecNakedWrapper.rl
|
||||
((*unsafeReflectValue)(unsafe.Pointer(&v))).ptr = unsafe.Pointer(&n.l)
|
||||
return
|
||||
}
|
||||
func (n *fauxUnion) rs() (v reflect.Value) {
|
||||
v = defUnsafeDecNakedWrapper.rs
|
||||
((*unsafeReflectValue)(unsafe.Pointer(&v))).ptr = unsafe.Pointer(&n.s)
|
||||
return
|
||||
}
|
||||
func (n *fauxUnion) rt() (v reflect.Value) {
|
||||
v = defUnsafeDecNakedWrapper.rt
|
||||
((*unsafeReflectValue)(unsafe.Pointer(&v))).ptr = unsafe.Pointer(&n.t)
|
||||
return
|
||||
}
|
||||
func (n *fauxUnion) rb() (v reflect.Value) {
|
||||
v = defUnsafeDecNakedWrapper.rb
|
||||
((*unsafeReflectValue)(unsafe.Pointer(&v))).ptr = unsafe.Pointer(&n.b)
|
||||
return
|
||||
}
|
||||
|
||||
// --------------------------
|
||||
func rvSetBytes(rv reflect.Value, v []byte) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*[]byte)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetString(rv reflect.Value, v string) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*string)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetBool(rv reflect.Value, v bool) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*bool)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetTime(rv reflect.Value, v time.Time) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*time.Time)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetFloat32(rv reflect.Value, v float32) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*float32)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetFloat64(rv reflect.Value, v float64) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*float64)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetInt(rv reflect.Value, v int) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*int)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetInt8(rv reflect.Value, v int8) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*int8)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetInt16(rv reflect.Value, v int16) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*int16)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetInt32(rv reflect.Value, v int32) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*int32)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetInt64(rv reflect.Value, v int64) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*int64)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetUint(rv reflect.Value, v uint) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*uint)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetUintptr(rv reflect.Value, v uintptr) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*uintptr)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetUint8(rv reflect.Value, v uint8) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*uint8)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetUint16(rv reflect.Value, v uint16) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*uint16)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetUint32(rv reflect.Value, v uint32) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*uint32)(urv.ptr) = v
|
||||
}
|
||||
|
||||
func rvSetUint64(rv reflect.Value, v uint64) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
*(*uint64)(urv.ptr) = v
|
||||
}
|
||||
|
||||
// ----------------
|
||||
|
||||
// rvSetDirect is rv.Set for all kinds except reflect.Interface
|
||||
func rvSetDirect(rv reflect.Value, v reflect.Value) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
uv := (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
if uv.flag&unsafeFlagIndir == 0 {
|
||||
*(*unsafe.Pointer)(urv.ptr) = uv.ptr
|
||||
} else {
|
||||
typedmemmove(urv.typ, urv.ptr, uv.ptr)
|
||||
}
|
||||
}
|
||||
|
||||
// rvSlice returns a slice of the given slice, with the specified length
|
||||
func rvSlice(rv reflect.Value, length int) (v reflect.Value) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
uv := (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
*uv = *urv
|
||||
var x []unsafe.Pointer
|
||||
uv.ptr = unsafe.Pointer(&x)
|
||||
*(*unsafeSlice)(uv.ptr) = *(*unsafeSlice)(urv.ptr)
|
||||
(*unsafeSlice)(uv.ptr).Len = length
|
||||
return
|
||||
}
|
||||
|
||||
// ------------
|
||||
|
||||
func rvSliceIndex(rv reflect.Value, i int, ti *typeInfo) (v reflect.Value) {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
uv := (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
uv.ptr = unsafe.Pointer(uintptr(((*unsafeSlice)(urv.ptr)).Data) + (ti.elemsize * uintptr(i)))
|
||||
uv.typ = ((*unsafeIntf)(unsafe.Pointer(&ti.elem))).word
|
||||
uv.flag = uintptr(ti.elemkind) | unsafeFlagIndir | unsafeFlagAddr
|
||||
return
|
||||
}
|
||||
|
||||
func rvGetSliceLen(rv reflect.Value) int {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return (*unsafeSlice)(urv.ptr).Len
|
||||
}
|
||||
|
||||
func rvGetSliceCap(rv reflect.Value) int {
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return (*unsafeSlice)(urv.ptr).Cap
|
||||
}
|
||||
|
||||
func rvGetArrayBytesRO(rv reflect.Value, scratch []byte) (bs []byte) {
|
||||
l := rv.Len()
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
bx := (*unsafeSlice)(unsafe.Pointer(&bs))
|
||||
bx.Data = urv.ptr
|
||||
bx.Len, bx.Cap = l, l
|
||||
return
|
||||
}
|
||||
|
||||
func rvGetArray4Slice(rv reflect.Value) (v reflect.Value) {
|
||||
// It is possible that this slice is based off an array with a larger
|
||||
// len than we want (where array len == slice cap).
|
||||
// However, it is ok to create an array type that is a subset of the full one,
|
||||
// e.g. full slice is based off a *[16]byte, but we can create a *[4]byte
|
||||
// off of it. That is ok.
|
||||
//
|
||||
// Consequently, we use rvGetSliceLen, not rvGetSliceCap.
|
||||
|
||||
t := reflectArrayOf(rvGetSliceLen(rv), rv.Type().Elem())
|
||||
// v = rvZeroAddrK(t, reflect.Array)
|
||||
|
||||
uv := (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
uv.flag = uintptr(reflect.Array) | unsafeFlagIndir | unsafeFlagAddr
|
||||
uv.typ = ((*unsafeIntf)(unsafe.Pointer(&t))).word
|
||||
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
uv.ptr = *(*unsafe.Pointer)(urv.ptr) // slice rv has a ptr to the slice.
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func rvGetSlice4Array(rv reflect.Value, tslice reflect.Type) (v reflect.Value) {
|
||||
uv := (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
|
||||
var x []unsafe.Pointer
|
||||
|
||||
uv.ptr = unsafe.Pointer(&x)
|
||||
uv.typ = ((*unsafeIntf)(unsafe.Pointer(&tslice))).word
|
||||
uv.flag = unsafeFlagIndir | uintptr(reflect.Slice)
|
||||
|
||||
s := (*unsafeSlice)(uv.ptr)
|
||||
s.Data = ((*unsafeReflectValue)(unsafe.Pointer(&rv))).ptr
|
||||
s.Len = rv.Len()
|
||||
s.Cap = s.Len
|
||||
return
|
||||
}
|
||||
|
||||
func rvCopySlice(dest, src reflect.Value) {
|
||||
t := dest.Type().Elem()
|
||||
urv := (*unsafeReflectValue)(unsafe.Pointer(&dest))
|
||||
destPtr := urv.ptr
|
||||
urv = (*unsafeReflectValue)(unsafe.Pointer(&src))
|
||||
typedslicecopy((*unsafeIntf)(unsafe.Pointer(&t)).word,
|
||||
*(*unsafeSlice)(destPtr), *(*unsafeSlice)(urv.ptr))
|
||||
}
|
||||
|
||||
// ------------
|
||||
|
||||
func rvGetBool(rv reflect.Value) bool {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*bool)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetBytes(rv reflect.Value) []byte {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*[]byte)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetTime(rv reflect.Value) time.Time {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*time.Time)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetString(rv reflect.Value) string {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*string)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetFloat64(rv reflect.Value) float64 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*float64)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetFloat32(rv reflect.Value) float32 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*float32)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetInt(rv reflect.Value) int {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*int)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetInt8(rv reflect.Value) int8 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*int8)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetInt16(rv reflect.Value) int16 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*int16)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetInt32(rv reflect.Value) int32 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*int32)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetInt64(rv reflect.Value) int64 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*int64)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetUint(rv reflect.Value) uint {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*uint)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetUint8(rv reflect.Value) uint8 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*uint8)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetUint16(rv reflect.Value) uint16 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*uint16)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetUint32(rv reflect.Value) uint32 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*uint32)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetUint64(rv reflect.Value) uint64 {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*uint64)(v.ptr)
|
||||
}
|
||||
|
||||
func rvGetUintptr(rv reflect.Value) uintptr {
|
||||
v := (*unsafeReflectValue)(unsafe.Pointer(&rv))
|
||||
return *(*uintptr)(v.ptr)
|
||||
}
|
||||
|
||||
// ------------ map range and map indexing ----------
|
||||
|
||||
// regular calls to map via reflection: MapKeys, MapIndex, MapRange/MapIter etc
|
||||
// will always allocate for each map key or value.
|
||||
//
|
||||
// It is more performant to provide a value that the map entry is set into,
|
||||
// and that elides the allocation.
|
||||
|
||||
// unsafeMapHashIter
|
||||
//
|
||||
// go 1.4+ has runtime/hashmap.go or runtime/map.go which has a
|
||||
// hIter struct with the first 2 values being key and value
|
||||
// of the current iteration.
|
||||
//
|
||||
// This *hIter is passed to mapiterinit, mapiternext, mapiterkey, mapiterelem.
|
||||
// We bypass the reflect wrapper functions and just use the *hIter directly.
|
||||
//
|
||||
// Though *hIter has many fields, we only care about the first 2.
|
||||
type unsafeMapHashIter struct {
|
||||
key, value unsafe.Pointer
|
||||
// other fields are ignored
|
||||
}
|
||||
|
||||
type mapIter struct {
|
||||
unsafeMapIter
|
||||
}
|
||||
|
||||
type unsafeMapIter struct {
|
||||
it *unsafeMapHashIter
|
||||
// k, v reflect.Value
|
||||
mtyp, ktyp, vtyp unsafe.Pointer
|
||||
mptr, kptr, vptr unsafe.Pointer
|
||||
kisref, visref bool
|
||||
mapvalues bool
|
||||
done bool
|
||||
started bool
|
||||
// _ [2]uint64 // padding (cache-aligned)
|
||||
}
|
||||
|
||||
func (t *unsafeMapIter) ValidKV() (r bool) {
|
||||
return false
|
||||
}
|
||||
|
||||
func (t *unsafeMapIter) Next() (r bool) {
|
||||
if t == nil || t.done {
|
||||
return
|
||||
}
|
||||
if t.started {
|
||||
mapiternext((unsafe.Pointer)(t.it))
|
||||
} else {
|
||||
t.started = true
|
||||
}
|
||||
|
||||
t.done = t.it.key == nil
|
||||
if t.done {
|
||||
return
|
||||
}
|
||||
unsafeMapSet(t.kptr, t.ktyp, t.it.key, t.kisref)
|
||||
if t.mapvalues {
|
||||
unsafeMapSet(t.vptr, t.vtyp, t.it.value, t.visref)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (t *unsafeMapIter) Key() (r reflect.Value) {
|
||||
return
|
||||
}
|
||||
|
||||
func (t *unsafeMapIter) Value() (r reflect.Value) {
|
||||
return
|
||||
}
|
||||
|
||||
func (t *unsafeMapIter) Done() {
|
||||
}
|
||||
|
||||
func unsafeMapSet(p, ptyp, p2 unsafe.Pointer, isref bool) {
|
||||
if isref {
|
||||
*(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(p2) // p2
|
||||
} else {
|
||||
typedmemmove(ptyp, p, p2) // *(*unsafe.Pointer)(p2)) // p2)
|
||||
}
|
||||
}
|
||||
|
||||
func unsafeMapKVPtr(urv *unsafeReflectValue) unsafe.Pointer {
|
||||
if urv.flag&unsafeFlagIndir == 0 {
|
||||
return unsafe.Pointer(&urv.ptr)
|
||||
}
|
||||
return urv.ptr
|
||||
}
|
||||
|
||||
func mapRange(t *mapIter, m, k, v reflect.Value, mapvalues bool) {
|
||||
if rvIsNil(m) {
|
||||
t.done = true
|
||||
return
|
||||
}
|
||||
t.done = false
|
||||
t.started = false
|
||||
t.mapvalues = mapvalues
|
||||
|
||||
var urv *unsafeReflectValue
|
||||
|
||||
urv = (*unsafeReflectValue)(unsafe.Pointer(&m))
|
||||
t.mtyp = urv.typ
|
||||
t.mptr = rv2ptr(urv)
|
||||
|
||||
t.it = (*unsafeMapHashIter)(mapiterinit(t.mtyp, t.mptr))
|
||||
|
||||
urv = (*unsafeReflectValue)(unsafe.Pointer(&k))
|
||||
t.ktyp = urv.typ
|
||||
t.kptr = urv.ptr
|
||||
t.kisref = refBitset.isset(byte(k.Kind()))
|
||||
|
||||
if mapvalues {
|
||||
urv = (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
t.vtyp = urv.typ
|
||||
t.vptr = urv.ptr
|
||||
t.visref = refBitset.isset(byte(v.Kind()))
|
||||
} else {
|
||||
t.vtyp = nil
|
||||
t.vptr = nil
|
||||
}
|
||||
}
|
||||
|
||||
func mapGet(m, k, v reflect.Value) (vv reflect.Value) {
|
||||
var urv = (*unsafeReflectValue)(unsafe.Pointer(&k))
|
||||
var kptr = unsafeMapKVPtr(urv)
|
||||
|
||||
urv = (*unsafeReflectValue)(unsafe.Pointer(&m))
|
||||
|
||||
vvptr := mapaccess(urv.typ, rv2ptr(urv), kptr)
|
||||
if vvptr == nil {
|
||||
return
|
||||
}
|
||||
// vvptr = *(*unsafe.Pointer)(vvptr)
|
||||
|
||||
urv = (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
|
||||
unsafeMapSet(urv.ptr, urv.typ, vvptr, refBitset.isset(byte(v.Kind())))
|
||||
return v
|
||||
}
|
||||
|
||||
func mapSet(m, k, v reflect.Value) {
|
||||
var urv = (*unsafeReflectValue)(unsafe.Pointer(&k))
|
||||
var kptr = unsafeMapKVPtr(urv)
|
||||
urv = (*unsafeReflectValue)(unsafe.Pointer(&v))
|
||||
var vptr = unsafeMapKVPtr(urv)
|
||||
urv = (*unsafeReflectValue)(unsafe.Pointer(&m))
|
||||
mapassign(urv.typ, rv2ptr(urv), kptr, vptr)
|
||||
}
|
||||
|
||||
// func mapDelete(m, k reflect.Value) {
|
||||
// var urv = (*unsafeReflectValue)(unsafe.Pointer(&k))
|
||||
// var kptr = unsafeMapKVPtr(urv)
|
||||
// urv = (*unsafeReflectValue)(unsafe.Pointer(&m))
|
||||
// mapdelete(urv.typ, rv2ptr(urv), kptr)
|
||||
// }
|
||||
|
||||
// return an addressable reflect value that can be used in mapRange and mapGet operations.
|
||||
//
|
||||
// all calls to mapGet or mapRange will call here to get an addressable reflect.Value.
|
||||
func mapAddressableRV(t reflect.Type, k reflect.Kind) (r reflect.Value) {
|
||||
// return reflect.New(t).Elem()
|
||||
return rvZeroAddrK(t, k)
|
||||
}
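For contrast, the allocation the helpers above are avoiding can be seen with the public reflect API alone. This standalone sketch (not part of the package) iterates a map the ordinary way, where each key/value is materialized as a fresh reflect.Value instead of being copied into one reusable addressable value.

package main

import (
	"fmt"
	"reflect"
)

func main() {
	m := map[string]int{"a": 1, "b": 2}
	rv := reflect.ValueOf(m)

	// Public API: MapRange hands back new reflect.Values each iteration.
	it := rv.MapRange()
	for it.Next() {
		fmt.Println(it.Key().Interface(), "=>", it.Value().Interface())
	}

	// The unsafe helpers instead prepare one addressable value up front
	// (conceptually reflect.New(T).Elem(), as mapAddressableRV does) and
	// copy each entry into it, reusing the same storage every iteration.
	k := reflect.New(rv.Type().Key()).Elem()
	fmt.Println(k.CanAddr()) // true: suitable as a reusable destination
}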
|
||||
|
||||
//go:linkname mapiterinit reflect.mapiterinit
|
||||
//go:noescape
|
||||
func mapiterinit(typ unsafe.Pointer, it unsafe.Pointer) (key unsafe.Pointer)
|
||||
|
||||
//go:linkname mapiternext reflect.mapiternext
|
||||
//go:noescape
|
||||
func mapiternext(it unsafe.Pointer) (key unsafe.Pointer)
|
||||
|
||||
//go:linkname mapaccess reflect.mapaccess
|
||||
//go:noescape
|
||||
func mapaccess(typ unsafe.Pointer, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)
|
||||
|
||||
//go:linkname mapassign reflect.mapassign
|
||||
//go:noescape
|
||||
func mapassign(typ unsafe.Pointer, m unsafe.Pointer, key, val unsafe.Pointer)
|
||||
|
||||
//go:linkname mapdelete reflect.mapdelete
|
||||
//go:noescape
|
||||
func mapdelete(typ unsafe.Pointer, m unsafe.Pointer, key unsafe.Pointer)
|
||||
|
||||
//go:linkname typedmemmove reflect.typedmemmove
|
||||
//go:noescape
|
||||
func typedmemmove(typ unsafe.Pointer, dst, src unsafe.Pointer)
|
||||
|
||||
//go:linkname unsafe_New reflect.unsafe_New
|
||||
//go:noescape
|
||||
func unsafe_New(typ unsafe.Pointer) unsafe.Pointer
|
||||
|
||||
//go:linkname typedslicecopy reflect.typedslicecopy
|
||||
//go:noescape
|
||||
func typedslicecopy(elemType unsafe.Pointer, dst, src unsafeSlice) int
|
||||
|
||||
// ---------- ENCODER optimized ---------------
|
||||
|
||||
func (e *Encoder) jsondriver() *jsonEncDriver {
|
||||
return (*jsonEncDriver)((*unsafeIntf)(unsafe.Pointer(&e.e)).word)
|
||||
}
|
||||
|
||||
// ---------- DECODER optimized ---------------
|
||||
|
||||
func (d *Decoder) checkBreak() bool {
|
||||
// jsonDecDriver.CheckBreak() CANNOT be inlined.
|
||||
// Consequently, there's no benefit in incurring the cost of this
|
||||
// wrapping function checkBreak.
|
||||
//
|
||||
// It is faster to just call the interface method directly.
|
||||
|
||||
// if d.js {
|
||||
// return d.jsondriver().CheckBreak()
|
||||
// }
|
||||
// if d.cbor {
|
||||
// return d.cbordriver().CheckBreak()
|
||||
// }
|
||||
return d.d.CheckBreak()
|
||||
}
|
||||
|
||||
func (d *Decoder) jsondriver() *jsonDecDriver {
|
||||
return (*jsonDecDriver)((*unsafeIntf)(unsafe.Pointer(&d.d)).word)
|
||||
}
|
||||
|
||||
// func (d *Decoder) cbordriver() *cborDecDriver {
|
||||
// return (*cborDecDriver)((*unsafeIntf)(unsafe.Pointer(&d.d)).word)
|
||||
// }
|
||||
1530
vendor/github.com/ugorji/go/codec/json.go
generated
vendored
File diff suppressed because it is too large
162
vendor/github.com/ugorji/go/codec/mammoth-test.go.tmpl
generated
vendored
@@ -1,162 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// Code generated from mammoth-test.go.tmpl - DO NOT EDIT.
|
||||
|
||||
package codec
|
||||
|
||||
import "testing"
|
||||
import "fmt"
|
||||
|
||||
// TestMammoth has all the different paths optimized in fast-path
|
||||
// It has all the primitives, slices and maps.
|
||||
//
|
||||
// For each of those types, it has a pointer and a non-pointer field.
|
||||
|
||||
func init() { _ = fmt.Printf } // so we can include fmt as needed
|
||||
|
||||
type TestMammoth struct {
|
||||
|
||||
{{range .Values }}{{if .Primitive -}}
|
||||
{{ .MethodNamePfx "F" true }} {{ .Primitive }}
|
||||
{{ .MethodNamePfx "Fptr" true }} *{{ .Primitive }}
|
||||
{{end}}{{end}}
|
||||
|
||||
{{range .Values }}{{if not .Primitive }}{{if not .MapKey -}}
|
||||
{{ .MethodNamePfx "F" false }} []{{ .Elem }}
|
||||
{{ .MethodNamePfx "Fptr" false }} *[]{{ .Elem }}
|
||||
{{end}}{{end}}{{end}}
|
||||
|
||||
{{range .Values }}{{if not .Primitive }}{{if .MapKey -}}
|
||||
{{ .MethodNamePfx "F" false }} map[{{ .MapKey }}]{{ .Elem }}
|
||||
{{ .MethodNamePfx "Fptr" false }} *map[{{ .MapKey }}]{{ .Elem }}
|
||||
{{end}}{{end}}{{end}}
|
||||
|
||||
}
|
||||
|
||||
{{range .Values }}{{if not .Primitive }}{{if not .MapKey -}}
|
||||
type {{ .MethodNamePfx "typMbs" false }} []{{ .Elem }}
|
||||
func (_ {{ .MethodNamePfx "typMbs" false }}) MapBySlice() { }
|
||||
{{end}}{{end}}{{end}}
|
||||
|
||||
{{range .Values }}{{if not .Primitive }}{{if .MapKey -}}
|
||||
type {{ .MethodNamePfx "typMap" false }} map[{{ .MapKey }}]{{ .Elem }}
|
||||
{{end}}{{end}}{{end}}
|
||||
|
||||
func doTestMammothSlices(t *testing.T, h Handle) {
|
||||
{{range $i, $e := .Values }}{{if not .Primitive }}{{if not .MapKey -}}
|
||||
var v{{$i}}va [8]{{ .Elem }}
|
||||
for _, v := range [][]{{ .Elem }}{ nil, {}, { {{ nonzerocmd .Elem }}, {{ zerocmd .Elem }}, {{ zerocmd .Elem }}, {{ nonzerocmd .Elem }} } } {
|
||||
{{/*
|
||||
// fmt.Printf(">>>> running mammoth slice v{{$i}}: %v\n", v)
|
||||
// - encode value to some []byte
|
||||
// - decode into a length-wise-equal []byte
|
||||
// - check if equal to initial slice
|
||||
// - encode ptr to the value
|
||||
// - check if encode bytes are same
|
||||
// - decode into ptrs to: nil, then 1-elem slice, equal-length, then large len slice
|
||||
// - decode into non-addressable slice of equal length, then larger len
|
||||
// - for each decode, compare elem-by-elem to the original slice
|
||||
// -
|
||||
// - rinse and repeat for a MapBySlice version
|
||||
// -
|
||||
*/ -}}
|
||||
var v{{$i}}v1, v{{$i}}v2 []{{ .Elem }}
|
||||
var bs{{$i}} []byte
|
||||
v{{$i}}v1 = v
|
||||
bs{{$i}} = testMarshalErr(v{{$i}}v1, h, t, "enc-slice-v{{$i}}")
|
||||
if v != nil {
|
||||
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make([]{{ .Elem }}, len(v)) }
|
||||
testUnmarshalErr(v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}")
|
||||
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make([]{{ .Elem }}, len(v)) }
|
||||
testUnmarshalErr(rv4i(v{{$i}}v2), bs{{$i}}, h, t, "dec-slice-v{{$i}}-noaddr") // non-addressable value
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-noaddr")
|
||||
}
|
||||
// ...
|
||||
bs{{$i}} = testMarshalErr(&v{{$i}}v1, h, t, "enc-slice-v{{$i}}-p")
|
||||
v{{$i}}v2 = nil
|
||||
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}-p")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-p")
|
||||
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||
v{{$i}}v2 = v{{$i}}va[:1:1]
|
||||
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-1")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-p-1")
|
||||
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||
v{{$i}}v2 = v{{$i}}va[:len(v{{$i}}v1):len(v{{$i}}v1)]
|
||||
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-len")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-p-len")
|
||||
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||
v{{$i}}v2 = v{{$i}}va[:]
|
||||
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-cap")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-p-cap")
|
||||
if len(v{{$i}}v1) > 1 {
|
||||
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||
testUnmarshalErr((&v{{$i}}va)[:len(v{{$i}}v1)], bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-len-noaddr")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}va[:len(v{{$i}}v1)], t, "equal-slice-v{{$i}}-p-len-noaddr")
|
||||
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||
testUnmarshalErr((&v{{$i}}va)[:], bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-cap-noaddr")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}va[:len(v{{$i}}v1)], t, "equal-slice-v{{$i}}-p-cap-noaddr")
|
||||
}
|
||||
// ...
|
||||
var v{{$i}}v3, v{{$i}}v4 {{ .MethodNamePfx "typMbs" false }}
|
||||
v{{$i}}v2 = nil
|
||||
if v != nil { v{{$i}}v2 = make([]{{ .Elem }}, len(v)) }
|
||||
v{{$i}}v3 = {{ .MethodNamePfx "typMbs" false }}(v{{$i}}v1)
|
||||
v{{$i}}v4 = {{ .MethodNamePfx "typMbs" false }}(v{{$i}}v2)
|
||||
if v != nil {
|
||||
bs{{$i}} = testMarshalErr(v{{$i}}v3, h, t, "enc-slice-v{{$i}}-custom")
|
||||
testUnmarshalErr(v{{$i}}v4, bs{{$i}}, h, t, "dec-slice-v{{$i}}-custom")
|
||||
testDeepEqualErr(v{{$i}}v3, v{{$i}}v4, t, "equal-slice-v{{$i}}-custom")
|
||||
}
|
||||
bs{{$i}} = testMarshalErr(&v{{$i}}v3, h, t, "enc-slice-v{{$i}}-custom-p")
|
||||
v{{$i}}v2 = nil
|
||||
v{{$i}}v4 = {{ .MethodNamePfx "typMbs" false }}(v{{$i}}v2)
|
||||
testUnmarshalErr(&v{{$i}}v4, bs{{$i}}, h, t, "dec-slice-v{{$i}}-custom-p")
|
||||
testDeepEqualErr(v{{$i}}v3, v{{$i}}v4, t, "equal-slice-v{{$i}}-custom-p")
|
||||
}
|
||||
{{end}}{{end}}{{end}}
|
||||
}
|
||||
|
||||
func doTestMammothMaps(t *testing.T, h Handle) {
|
||||
{{range $i, $e := .Values }}{{if not .Primitive }}{{if .MapKey -}}
|
||||
for _, v := range []map[{{ .MapKey }}]{{ .Elem }}{ nil, {}, { {{ nonzerocmd .MapKey }}:{{ zerocmd .Elem }} {{if ne "bool" .MapKey}}, {{ nonzerocmd .MapKey }}:{{ nonzerocmd .Elem }} {{end}} } } {
|
||||
// fmt.Printf(">>>> running mammoth map v{{$i}}: %v\n", v)
|
||||
var v{{$i}}v1, v{{$i}}v2 map[{{ .MapKey }}]{{ .Elem }}
|
||||
var bs{{$i}} []byte
|
||||
v{{$i}}v1 = v
|
||||
bs{{$i}} = testMarshalErr(v{{$i}}v1, h, t, "enc-map-v{{$i}}")
|
||||
if v != nil {
|
||||
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make(map[{{ .MapKey }}]{{ .Elem }}, len(v)) } // reset map
|
||||
testUnmarshalErr(v{{$i}}v2, bs{{$i}}, h, t, "dec-map-v{{$i}}")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-map-v{{$i}}")
|
||||
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make(map[{{ .MapKey }}]{{ .Elem }}, len(v)) } // reset map
|
||||
testUnmarshalErr(rv4i(v{{$i}}v2), bs{{$i}}, h, t, "dec-map-v{{$i}}-noaddr") // decode into non-addressable map value
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-map-v{{$i}}-noaddr")
|
||||
}
|
||||
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make(map[{{ .MapKey }}]{{ .Elem }}, len(v)) } // reset map
|
||||
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-map-v{{$i}}-p-len")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-map-v{{$i}}-p-len")
|
||||
bs{{$i}} = testMarshalErr(&v{{$i}}v1, h, t, "enc-map-v{{$i}}-p")
|
||||
v{{$i}}v2 = nil
|
||||
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-map-v{{$i}}-p-nil")
|
||||
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-map-v{{$i}}-p-nil")
|
||||
// ...
|
||||
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make(map[{{ .MapKey }}]{{ .Elem }}, len(v)) } // reset map
|
||||
var v{{$i}}v3, v{{$i}}v4 {{ .MethodNamePfx "typMap" false }}
|
||||
v{{$i}}v3 = {{ .MethodNamePfx "typMap" false }}(v{{$i}}v1)
|
||||
v{{$i}}v4 = {{ .MethodNamePfx "typMap" false }}(v{{$i}}v2)
|
||||
if v != nil {
|
||||
bs{{$i}} = testMarshalErr(v{{$i}}v3, h, t, "enc-map-v{{$i}}-custom")
|
||||
testUnmarshalErr(v{{$i}}v4, bs{{$i}}, h, t, "dec-map-v{{$i}}-p-len")
|
||||
testDeepEqualErr(v{{$i}}v3, v{{$i}}v4, t, "equal-map-v{{$i}}-p-len")
|
||||
}
|
||||
}
|
||||
{{end}}{{end}}{{end}}
|
||||
|
||||
}
|
||||
|
||||
func doTestMammothMapsAndSlices(t *testing.T, h Handle) {
|
||||
doTestMammothSlices(t, h)
|
||||
doTestMammothMaps(t, h)
|
||||
}
|
||||
94
vendor/github.com/ugorji/go/codec/mammoth2-test.go.tmpl
generated
vendored
@@ -1,94 +0,0 @@
|
||||
// +build !notfastpath
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// Code generated from mammoth2-test.go.tmpl - DO NOT EDIT.
|
||||
|
||||
package codec
|
||||
|
||||
// Increase code coverage by covering all the codecgen paths, in fast-path and gen-helper.go.
|
||||
//
|
||||
// Add:
|
||||
// - test file for creating a mammoth generated file as _mammoth_generated.go
|
||||
// - generate a second mammoth file in a different file: mammoth2_generated_test.go
|
||||
// - mammoth-test.go.tmpl will do this
|
||||
// - run codecgen on it, into mammoth2_codecgen_generated_test.go (no build tags)
|
||||
// - as part of TestMammoth, run it also
|
||||
// - this will cover all the codecgen, gen-helper, etc in one full run
|
||||
// - check in mammoth* files into github also
|
||||
// - then
|
||||
//
|
||||
// Now, add some types:
|
||||
// - some that implement BinaryMarshal, TextMarshal, JSONMarshal, and one that implements none of them
|
||||
// - create a wrapper type that includes TestMammoth2, with it in slices, and maps, and the custom types
|
||||
// - this wrapper object is what we encode/decode (so that the codecgen methods are called)
|
||||
|
||||
|
||||
// import "encoding/binary"
|
||||
import "fmt"
|
||||
|
||||
type TestMammoth2 struct {
|
||||
|
||||
{{range .Values }}{{if .Primitive }}{{/*
|
||||
*/}}{{ .MethodNamePfx "F" true }} {{ .Primitive }}
|
||||
{{ .MethodNamePfx "Fptr" true }} *{{ .Primitive }}
|
||||
{{end}}{{end}}
|
||||
|
||||
{{range .Values }}{{if not .Primitive }}{{if not .MapKey }}{{/*
|
||||
*/}}{{ .MethodNamePfx "F" false }} []{{ .Elem }}
|
||||
{{ .MethodNamePfx "Fptr" false }} *[]{{ .Elem }}
|
||||
{{end}}{{end}}{{end}}
|
||||
|
||||
{{range .Values }}{{if not .Primitive }}{{if .MapKey }}{{/*
|
||||
*/}}{{ .MethodNamePfx "F" false }} map[{{ .MapKey }}]{{ .Elem }}
|
||||
{{ .MethodNamePfx "Fptr" false }} *map[{{ .MapKey }}]{{ .Elem }}
|
||||
{{end}}{{end}}{{end}}
|
||||
|
||||
}
|
||||
|
||||
// -----------
|
||||
|
||||
type testMammoth2Binary uint64
|
||||
func (x testMammoth2Binary) MarshalBinary() (data []byte, err error) {
|
||||
data = make([]byte, 8)
|
||||
bigen.PutUint64(data, uint64(x))
|
||||
return
|
||||
}
|
||||
func (x *testMammoth2Binary) UnmarshalBinary(data []byte) (err error) {
|
||||
*x = testMammoth2Binary(bigen.Uint64(data))
|
||||
return
|
||||
}
|
||||
|
||||
type testMammoth2Text uint64
|
||||
func (x testMammoth2Text) MarshalText() (data []byte, err error) {
|
||||
data = []byte(fmt.Sprintf("%b", uint64(x)))
|
||||
return
|
||||
}
|
||||
func (x *testMammoth2Text) UnmarshalText(data []byte) (err error) {
|
||||
_, err = fmt.Sscanf(string(data), "%b", (*uint64)(x))
|
||||
return
|
||||
}
|
||||
|
||||
type testMammoth2Json uint64
|
||||
func (x testMammoth2Json) MarshalJSON() (data []byte, err error) {
|
||||
data = []byte(fmt.Sprintf("%v", uint64(x)))
|
||||
return
|
||||
}
|
||||
func (x *testMammoth2Json) UnmarshalJSON(data []byte) (err error) {
|
||||
_, err = fmt.Sscanf(string(data), "%v", (*uint64)(x))
|
||||
return
|
||||
}
|
||||
|
||||
type testMammoth2Basic [4]uint64
|
||||
|
||||
type TestMammoth2Wrapper struct {
|
||||
V TestMammoth2
|
||||
T testMammoth2Text
|
||||
B testMammoth2Binary
|
||||
J testMammoth2Json
|
||||
C testMammoth2Basic
|
||||
M map[testMammoth2Basic]TestMammoth2
|
||||
L []TestMammoth2
|
||||
A [4]int64
|
||||
}
|
||||
9739
vendor/github.com/ugorji/go/codec/mammoth2_codecgen_generated_test.go
generated
vendored
File diff suppressed because it is too large
278
vendor/github.com/ugorji/go/codec/mammoth2_generated_test.go
generated
vendored
@@ -1,278 +0,0 @@
|
||||
// +build !notfastpath
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// Code generated from mammoth2-test.go.tmpl - DO NOT EDIT.
|
||||
|
||||
package codec
|
||||
|
||||
// Increase code coverage by covering all the codecgen paths, in fast-path and gen-helper.go.
|
||||
//
|
||||
// Add:
|
||||
// - test file for creating a mammoth generated file as _mammoth_generated.go
|
||||
// - generate a second mammoth file in a different file: mammoth2_generated_test.go
|
||||
// - mammoth-test.go.tmpl will do this
|
||||
// - run codecgen on it, into mammoth2_codecgen_generated_test.go (no build tags)
|
||||
// - as part of TestMammoth, run it also
|
||||
// - this will cover all the codecgen, gen-helper, etc in one full run
|
||||
// - check in mammoth* files into github also
|
||||
// - then
|
||||
//
|
||||
// Now, add some types:
|
||||
// - some that implement BinaryMarshal, TextMarshal, JSONMarshal, and one that implements none of them
|
||||
// - create a wrapper type that includes TestMammoth2, with it in slices, and maps, and the custom types
|
||||
// - this wrapper object is what we encode/decode (so that the codecgen methods are called)
|
||||
|
||||
// import "encoding/binary"
|
||||
import "fmt"
|
||||
|
||||
type TestMammoth2 struct {
|
||||
FIntf interface{}
|
||||
FptrIntf *interface{}
|
||||
FString string
|
||||
FptrString *string
|
||||
FBytes []byte
|
||||
FptrBytes *[]byte
|
||||
FFloat32 float32
|
||||
FptrFloat32 *float32
|
||||
FFloat64 float64
|
||||
FptrFloat64 *float64
|
||||
FUint uint
|
||||
FptrUint *uint
|
||||
FUint8 uint8
|
||||
FptrUint8 *uint8
|
||||
FUint16 uint16
|
||||
FptrUint16 *uint16
|
||||
FUint32 uint32
|
||||
FptrUint32 *uint32
|
||||
FUint64 uint64
|
||||
FptrUint64 *uint64
|
||||
FUintptr uintptr
|
||||
FptrUintptr *uintptr
|
||||
FInt int
|
||||
FptrInt *int
|
||||
FInt8 int8
|
||||
FptrInt8 *int8
|
||||
FInt16 int16
|
||||
FptrInt16 *int16
|
||||
FInt32 int32
|
||||
FptrInt32 *int32
|
||||
FInt64 int64
|
||||
FptrInt64 *int64
|
||||
FBool bool
|
||||
FptrBool *bool
|
||||
|
||||
FSliceIntf []interface{}
|
||||
FptrSliceIntf *[]interface{}
|
||||
FSliceString []string
|
||||
FptrSliceString *[]string
|
||||
FSliceBytes [][]byte
|
||||
FptrSliceBytes *[][]byte
|
||||
FSliceFloat32 []float32
|
||||
FptrSliceFloat32 *[]float32
|
||||
FSliceFloat64 []float64
|
||||
FptrSliceFloat64 *[]float64
|
||||
FSliceUint []uint
|
||||
FptrSliceUint *[]uint
|
||||
FSliceUint16 []uint16
|
||||
FptrSliceUint16 *[]uint16
|
||||
FSliceUint32 []uint32
|
||||
FptrSliceUint32 *[]uint32
|
||||
FSliceUint64 []uint64
|
||||
FptrSliceUint64 *[]uint64
|
||||
FSliceInt []int
|
||||
FptrSliceInt *[]int
|
||||
FSliceInt8 []int8
|
||||
FptrSliceInt8 *[]int8
|
||||
FSliceInt16 []int16
|
||||
FptrSliceInt16 *[]int16
|
||||
FSliceInt32 []int32
|
||||
FptrSliceInt32 *[]int32
|
||||
FSliceInt64 []int64
|
||||
FptrSliceInt64 *[]int64
|
||||
FSliceBool []bool
|
||||
FptrSliceBool *[]bool
|
||||
|
||||
FMapStringIntf map[string]interface{}
|
||||
FptrMapStringIntf *map[string]interface{}
|
||||
FMapStringString map[string]string
|
||||
FptrMapStringString *map[string]string
|
||||
FMapStringBytes map[string][]byte
|
||||
FptrMapStringBytes *map[string][]byte
|
||||
FMapStringUint map[string]uint
|
||||
FptrMapStringUint *map[string]uint
|
||||
FMapStringUint8 map[string]uint8
|
||||
FptrMapStringUint8 *map[string]uint8
|
||||
FMapStringUint64 map[string]uint64
|
||||
FptrMapStringUint64 *map[string]uint64
|
||||
FMapStringInt map[string]int
|
||||
FptrMapStringInt *map[string]int
|
||||
FMapStringInt64 map[string]int64
|
||||
FptrMapStringInt64 *map[string]int64
|
||||
FMapStringFloat32 map[string]float32
|
||||
FptrMapStringFloat32 *map[string]float32
|
||||
FMapStringFloat64 map[string]float64
|
||||
FptrMapStringFloat64 *map[string]float64
|
||||
FMapStringBool map[string]bool
|
||||
FptrMapStringBool *map[string]bool
|
||||
FMapUintIntf map[uint]interface{}
|
||||
FptrMapUintIntf *map[uint]interface{}
|
||||
FMapUintString map[uint]string
|
||||
FptrMapUintString *map[uint]string
|
||||
FMapUintBytes map[uint][]byte
|
||||
FptrMapUintBytes *map[uint][]byte
|
||||
FMapUintUint map[uint]uint
|
||||
FptrMapUintUint *map[uint]uint
|
||||
FMapUintUint8 map[uint]uint8
|
||||
FptrMapUintUint8 *map[uint]uint8
|
||||
FMapUintUint64 map[uint]uint64
|
||||
FptrMapUintUint64 *map[uint]uint64
|
||||
FMapUintInt map[uint]int
|
||||
FptrMapUintInt *map[uint]int
|
||||
FMapUintInt64 map[uint]int64
|
||||
FptrMapUintInt64 *map[uint]int64
|
||||
FMapUintFloat32 map[uint]float32
|
||||
FptrMapUintFloat32 *map[uint]float32
|
||||
FMapUintFloat64 map[uint]float64
|
||||
FptrMapUintFloat64 *map[uint]float64
|
||||
FMapUintBool map[uint]bool
|
||||
FptrMapUintBool *map[uint]bool
|
||||
FMapUint8Intf map[uint8]interface{}
|
||||
FptrMapUint8Intf *map[uint8]interface{}
|
||||
FMapUint8String map[uint8]string
|
||||
FptrMapUint8String *map[uint8]string
|
||||
FMapUint8Bytes map[uint8][]byte
|
||||
FptrMapUint8Bytes *map[uint8][]byte
|
||||
FMapUint8Uint map[uint8]uint
|
||||
FptrMapUint8Uint *map[uint8]uint
|
||||
FMapUint8Uint8 map[uint8]uint8
|
||||
FptrMapUint8Uint8 *map[uint8]uint8
|
||||
FMapUint8Uint64 map[uint8]uint64
|
||||
FptrMapUint8Uint64 *map[uint8]uint64
|
||||
FMapUint8Int map[uint8]int
|
||||
FptrMapUint8Int *map[uint8]int
|
||||
FMapUint8Int64 map[uint8]int64
|
||||
FptrMapUint8Int64 *map[uint8]int64
|
||||
FMapUint8Float32 map[uint8]float32
|
||||
FptrMapUint8Float32 *map[uint8]float32
|
||||
FMapUint8Float64 map[uint8]float64
|
||||
FptrMapUint8Float64 *map[uint8]float64
|
||||
FMapUint8Bool map[uint8]bool
|
||||
FptrMapUint8Bool *map[uint8]bool
|
||||
FMapUint64Intf map[uint64]interface{}
|
||||
FptrMapUint64Intf *map[uint64]interface{}
|
||||
FMapUint64String map[uint64]string
|
||||
FptrMapUint64String *map[uint64]string
|
||||
FMapUint64Bytes map[uint64][]byte
|
||||
FptrMapUint64Bytes *map[uint64][]byte
|
||||
FMapUint64Uint map[uint64]uint
|
||||
FptrMapUint64Uint *map[uint64]uint
|
||||
FMapUint64Uint8 map[uint64]uint8
|
||||
FptrMapUint64Uint8 *map[uint64]uint8
|
||||
FMapUint64Uint64 map[uint64]uint64
|
||||
FptrMapUint64Uint64 *map[uint64]uint64
|
||||
FMapUint64Int map[uint64]int
|
||||
FptrMapUint64Int *map[uint64]int
|
||||
FMapUint64Int64 map[uint64]int64
|
||||
FptrMapUint64Int64 *map[uint64]int64
|
||||
FMapUint64Float32 map[uint64]float32
|
||||
FptrMapUint64Float32 *map[uint64]float32
|
||||
FMapUint64Float64 map[uint64]float64
|
||||
FptrMapUint64Float64 *map[uint64]float64
|
||||
FMapUint64Bool map[uint64]bool
|
||||
FptrMapUint64Bool *map[uint64]bool
|
||||
FMapIntIntf map[int]interface{}
|
||||
FptrMapIntIntf *map[int]interface{}
|
||||
FMapIntString map[int]string
|
||||
FptrMapIntString *map[int]string
|
||||
FMapIntBytes map[int][]byte
|
||||
FptrMapIntBytes *map[int][]byte
|
||||
FMapIntUint map[int]uint
|
||||
FptrMapIntUint *map[int]uint
|
||||
FMapIntUint8 map[int]uint8
|
||||
FptrMapIntUint8 *map[int]uint8
|
||||
FMapIntUint64 map[int]uint64
|
||||
FptrMapIntUint64 *map[int]uint64
|
||||
FMapIntInt map[int]int
|
||||
FptrMapIntInt *map[int]int
|
||||
FMapIntInt64 map[int]int64
|
||||
FptrMapIntInt64 *map[int]int64
|
||||
FMapIntFloat32 map[int]float32
|
||||
FptrMapIntFloat32 *map[int]float32
|
||||
FMapIntFloat64 map[int]float64
|
||||
FptrMapIntFloat64 *map[int]float64
|
||||
FMapIntBool map[int]bool
|
||||
FptrMapIntBool *map[int]bool
|
||||
FMapInt64Intf map[int64]interface{}
|
||||
FptrMapInt64Intf *map[int64]interface{}
|
||||
FMapInt64String map[int64]string
|
||||
FptrMapInt64String *map[int64]string
|
||||
FMapInt64Bytes map[int64][]byte
|
||||
FptrMapInt64Bytes *map[int64][]byte
|
||||
FMapInt64Uint map[int64]uint
|
||||
FptrMapInt64Uint *map[int64]uint
|
||||
FMapInt64Uint8 map[int64]uint8
|
||||
FptrMapInt64Uint8 *map[int64]uint8
|
||||
FMapInt64Uint64 map[int64]uint64
|
||||
FptrMapInt64Uint64 *map[int64]uint64
|
||||
FMapInt64Int map[int64]int
|
||||
FptrMapInt64Int *map[int64]int
|
||||
FMapInt64Int64 map[int64]int64
|
||||
FptrMapInt64Int64 *map[int64]int64
|
||||
FMapInt64Float32 map[int64]float32
|
||||
FptrMapInt64Float32 *map[int64]float32
|
||||
FMapInt64Float64 map[int64]float64
|
||||
FptrMapInt64Float64 *map[int64]float64
|
||||
FMapInt64Bool map[int64]bool
|
||||
FptrMapInt64Bool *map[int64]bool
|
||||
}
|
||||
|
||||
// -----------
|
||||
|
||||
type testMammoth2Binary uint64
|
||||
|
||||
func (x testMammoth2Binary) MarshalBinary() (data []byte, err error) {
|
||||
data = make([]byte, 8)
|
||||
bigen.PutUint64(data, uint64(x))
|
||||
return
|
||||
}
|
||||
func (x *testMammoth2Binary) UnmarshalBinary(data []byte) (err error) {
|
||||
*x = testMammoth2Binary(bigen.Uint64(data))
|
||||
return
|
||||
}
|
||||
|
||||
type testMammoth2Text uint64
|
||||
|
||||
func (x testMammoth2Text) MarshalText() (data []byte, err error) {
|
||||
data = []byte(fmt.Sprintf("%b", uint64(x)))
|
||||
return
|
||||
}
|
||||
func (x *testMammoth2Text) UnmarshalText(data []byte) (err error) {
|
||||
_, err = fmt.Sscanf(string(data), "%b", (*uint64)(x))
|
||||
return
|
||||
}
|
||||
|
||||
type testMammoth2Json uint64
|
||||
|
||||
func (x testMammoth2Json) MarshalJSON() (data []byte, err error) {
|
||||
data = []byte(fmt.Sprintf("%v", uint64(x)))
|
||||
return
|
||||
}
|
||||
func (x *testMammoth2Json) UnmarshalJSON(data []byte) (err error) {
|
||||
_, err = fmt.Sscanf(string(data), "%v", (*uint64)(x))
|
||||
return
|
||||
}
|
||||
|
||||
type testMammoth2Basic [4]uint64
|
||||
|
||||
type TestMammoth2Wrapper struct {
|
||||
V TestMammoth2
|
||||
T testMammoth2Text
|
||||
B testMammoth2Binary
|
||||
J testMammoth2Json
|
||||
C testMammoth2Basic
|
||||
M map[testMammoth2Basic]TestMammoth2
|
||||
L []TestMammoth2
|
||||
A [4]int64
|
||||
}
|
||||
4515
vendor/github.com/ugorji/go/codec/mammoth_generated_test.go
generated
vendored
File diff suppressed because it is too large
1158
vendor/github.com/ugorji/go/codec/msgpack.go
generated
vendored
File diff suppressed because it is too large
136
vendor/github.com/ugorji/go/codec/prebuild.go
generated
vendored
@@ -1,136 +0,0 @@
|
||||
// +build prebuild
|
||||
|
||||
package main
|
||||
|
||||
// prebuild.go generates sort implementations for
|
||||
// various slice types and combination slice+reflect.Value types.
|
||||
//
|
||||
// The combination slice+reflect.Value types are used
|
||||
// during canonical encode, and the others are used during fast-path
|
||||
// encoding of map keys.
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"go/format"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
// genInternalSortableTypes returns the types
|
||||
// that are used for fast-path canonical's encoding of maps.
|
||||
//
|
||||
// For now, we only support the highest sizes for
|
||||
// int64, uint64, float64, bool, string, bytes.
|
||||
func genInternalSortableTypes() []string {
|
||||
return []string{
|
||||
"string",
|
||||
// "float32",
|
||||
"float64",
|
||||
// "uint",
|
||||
// "uint8",
|
||||
// "uint16",
|
||||
// "uint32",
|
||||
"uint64",
|
||||
"uintptr",
|
||||
// "int",
|
||||
// "int8",
|
||||
// "int16",
|
||||
// "int32",
|
||||
"int64",
|
||||
"bool",
|
||||
"time",
|
||||
"bytes",
|
||||
}
|
||||
}
|
||||
|
||||
// genInternalSortablePlusTypes returns the types
|
||||
// that are used for reflection-based canonical's encoding of maps.
|
||||
//
|
||||
// For now, we only support the highest sizes for
|
||||
// int64, uint64, float64, bool, string, bytes.
|
||||
func genInternalSortablePlusTypes() []string {
|
||||
return []string{
|
||||
"string",
|
||||
"float64",
|
||||
"uint64",
|
||||
"uintptr",
|
||||
"int64",
|
||||
"bool",
|
||||
"time",
|
||||
"bytes",
|
||||
}
|
||||
}
|
||||
|
||||
func genTypeForShortName(s string) string {
|
||||
switch s {
|
||||
case "time":
|
||||
return "time.Time"
|
||||
case "bytes":
|
||||
return "[]byte"
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func genArgs(args ...interface{}) map[string]interface{} {
|
||||
m := make(map[string]interface{}, len(args)/2)
|
||||
for i := 0; i < len(args); {
|
||||
m[args[i].(string)] = args[i+1]
|
||||
i += 2
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
func genEndsWith(s0 string, sn ...string) bool {
|
||||
for _, s := range sn {
|
||||
if strings.HasSuffix(s0, s) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func chkerr(err error) {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func run(fnameIn, fnameOut string) {
|
||||
var err error
|
||||
|
||||
funcs := make(template.FuncMap)
|
||||
funcs["sortables"] = genInternalSortableTypes
|
||||
funcs["sortablesplus"] = genInternalSortablePlusTypes
|
||||
funcs["tshort"] = genTypeForShortName
|
||||
funcs["endswith"] = genEndsWith
|
||||
funcs["args"] = genArgs
|
||||
|
||||
t := template.New("").Funcs(funcs)
|
||||
fin, err := os.Open(fnameIn)
|
||||
chkerr(err)
|
||||
defer fin.Close()
|
||||
fout, err := os.Create(fnameOut)
|
||||
chkerr(err)
|
||||
defer fout.Close()
|
||||
tmplstr, err := ioutil.ReadAll(fin)
|
||||
chkerr(err)
|
||||
t, err = t.Parse(string(tmplstr))
|
||||
chkerr(err)
|
||||
var out bytes.Buffer
|
||||
err = t.Execute(&out, 0)
|
||||
chkerr(err)
|
||||
bout, err := format.Source(out.Bytes())
|
||||
if err != nil {
|
||||
fout.Write(out.Bytes()) // write out if error, so we can still see.
|
||||
}
|
||||
chkerr(err)
|
||||
// write out if error, as much as possible, so we can still see.
|
||||
_, err = fout.Write(bout)
|
||||
chkerr(err)
|
||||
}
|
||||
|
||||
func main() {
|
||||
run("sort-slice.go.tmpl", "sort-slice.generated.go")
|
||||
}
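As an aside, a minimal self-contained version of this template-then-format flow (every name and type below is made up for illustration, not taken from the tool): render Go source from a text/template, then pass it through go/format before writing it out.

package main

import (
	"bytes"
	"fmt"
	"go/format"
	"text/template"
)

// A tiny version of the prebuild pattern: feed a list of type names to a
// template, render Go source, and gofmt it before emitting.
const src = `package sorts
{{range .}}
type {{.}}Slice []{{.}}
func (p {{.}}Slice) Len() int           { return len(p) }
func (p {{.}}Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p {{.}}Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
{{end}}`

func main() {
	t := template.Must(template.New("sorts").Parse(src))
	var out bytes.Buffer
	if err := t.Execute(&out, []string{"int64", "uint64", "string"}); err != nil {
		panic(err)
	}
	formatted, err := format.Source(out.Bytes())
	if err != nil {
		panic(err) // on error, the raw output could still be written for inspection
	}
	fmt.Println(string(formatted))
}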
|
||||
30
vendor/github.com/ugorji/go/codec/py_test.go
generated
vendored
@@ -1,30 +0,0 @@
|
||||
// +build x
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// These tests are used to verify msgpack and cbor implementations against their python libraries.
|
||||
// If you have the libraries installed, you can re-enable the tests by running: go test -tags=x .
|
||||
// Look at test.py for how to setup your environment.
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMsgpackPythonGenStreams(t *testing.T) {
|
||||
doTestPythonGenStreams(t, testMsgpackH)
|
||||
}
|
||||
|
||||
func TestCborPythonGenStreams(t *testing.T) {
|
||||
doTestPythonGenStreams(t, testCborH)
|
||||
}
|
||||
|
||||
func TestMsgpackRpcSpecGoClientToPythonSvc(t *testing.T) {
|
||||
doTestMsgpackRpcSpecGoClientToPythonSvc(t)
|
||||
}
|
||||
|
||||
func TestMsgpackRpcSpecPythonClientToGoSvc(t *testing.T) {
|
||||
doTestMsgpackRpcSpecPythonClientToGoSvc(t)
|
||||
}
|
||||
1244
vendor/github.com/ugorji/go/codec/reader.go
generated
vendored
File diff suppressed because it is too large
38
vendor/github.com/ugorji/go/codec/register_ext.go
generated
vendored
@@ -1,38 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import "reflect"
|
||||
|
||||
// This file exists, so that the files for specific formats do not all import reflect.
|
||||
// This just helps us ensure that reflect package is isolated to a few files.
|
||||
|
||||
// SetInterfaceExt sets an extension
|
||||
func (h *JsonHandle) SetInterfaceExt(rt reflect.Type, tag uint64, ext InterfaceExt) (err error) {
|
||||
return h.SetExt(rt, tag, makeExt(ext))
|
||||
}
|
||||
|
||||
// SetInterfaceExt sets an extension
|
||||
func (h *CborHandle) SetInterfaceExt(rt reflect.Type, tag uint64, ext InterfaceExt) (err error) {
|
||||
return h.SetExt(rt, tag, makeExt(ext))
|
||||
}
|
||||
|
||||
// SetBytesExt sets an extension
|
||||
func (h *MsgpackHandle) SetBytesExt(rt reflect.Type, tag uint64, ext BytesExt) (err error) {
|
||||
return h.SetExt(rt, tag, makeExt(ext))
|
||||
}
|
||||
|
||||
// SetBytesExt sets an extension
|
||||
func (h *SimpleHandle) SetBytesExt(rt reflect.Type, tag uint64, ext BytesExt) (err error) {
|
||||
return h.SetExt(rt, tag, makeExt(ext))
|
||||
}
|
||||
|
||||
// SetBytesExt sets an extension
|
||||
func (h *BincHandle) SetBytesExt(rt reflect.Type, tag uint64, ext BytesExt) (err error) {
|
||||
return h.SetExt(rt, tag, makeExt(ext))
|
||||
}
|
||||
|
||||
// func (h *XMLHandle) SetInterfaceExt(rt reflect.Type, tag uint64, ext InterfaceExt) (err error) {
|
||||
// return h.SetExt(rt, tag, &interfaceExtWrapper{InterfaceExt: ext})
|
||||
// }
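A hedged usage sketch for these setters (the InterfaceExt method set, the chosen tag, and the concrete type assertions here are assumptions for illustration; consult the package docs for the exact contract): register a custom representation for a type on a handle before encoding with it.

package main

import (
	"fmt"
	"reflect"
	"time"

	"github.com/ugorji/go/codec"
)

// durationExt encodes time.Duration as its nanosecond count.
// ConvertExt/UpdateExt is the assumed InterfaceExt contract.
type durationExt struct{}

func (durationExt) ConvertExt(v interface{}) interface{} {
	switch d := v.(type) {
	case time.Duration:
		return int64(d)
	case *time.Duration:
		return int64(*d)
	}
	return nil
}

func (durationExt) UpdateExt(dst interface{}, src interface{}) {
	// src is assumed to arrive as the int64 written by ConvertExt.
	*dst.(*time.Duration) = time.Duration(src.(int64))
}

func main() {
	var h codec.CborHandle
	// Tag 64 is an arbitrary choice for the example.
	if err := h.SetInterfaceExt(reflect.TypeOf(time.Duration(0)), 64, durationExt{}); err != nil {
		panic(err)
	}
	var buf []byte
	if err := codec.NewEncoderBytes(&buf, &h).Encode(2 * time.Second); err != nil {
		panic(err)
	}
	fmt.Printf("% x\n", buf)
}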
|
||||
217
vendor/github.com/ugorji/go/codec/rpc.go
generated
vendored
@@ -1,217 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"io"
|
||||
"net/rpc"
|
||||
)
|
||||
|
||||
var errRpcJsonNeedsTermWhitespace = errors.New("rpc requires JsonHandle with TermWhitespace=true")
|
||||
|
||||
// Rpc provides a rpc Server or Client Codec for rpc communication.
|
||||
type Rpc interface {
|
||||
ServerCodec(conn io.ReadWriteCloser, h Handle) rpc.ServerCodec
|
||||
ClientCodec(conn io.ReadWriteCloser, h Handle) rpc.ClientCodec
|
||||
}
|
||||
|
||||
// RPCOptions holds options specific to rpc functionality
|
||||
type RPCOptions struct {
|
||||
// RPCNoBuffer configures whether we attempt to buffer reads and writes during RPC calls.
|
||||
//
|
||||
// Set RPCNoBuffer=true to turn buffering off.
|
||||
// Buffering can still be done if buffered connections are passed in, or
|
||||
// buffering is configured on the handle.
|
||||
RPCNoBuffer bool
|
||||
}
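For orientation, a hedged sketch of wiring these codecs into net/rpc (GoRpc is assumed to be the package's Go-RPC connector; the address and method name are placeholders):

package main

import (
	"log"
	"net"
	"net/rpc"

	"github.com/ugorji/go/codec"
)

func main() {
	conn, err := net.Dial("tcp", "127.0.0.1:5555") // placeholder address
	if err != nil {
		log.Fatal(err)
	}
	var h codec.MsgpackHandle
	// GoRpc.ClientCodec returns an rpc.ClientCodec backed by the rpcCodec below.
	client := rpc.NewClientWithCodec(codec.GoRpc.ClientCodec(conn, &h))
	defer client.Close()

	var reply string
	// "Service.Method" is a placeholder for a method registered on the server.
	if err := client.Call("Service.Method", "ping", &reply); err != nil {
		log.Fatal(err)
	}
	log.Println(reply)
}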
|
||||
|
||||
// rpcCodec defines the struct members and common methods.
|
||||
type rpcCodec struct {
|
||||
c io.Closer
|
||||
r io.Reader
|
||||
w io.Writer
|
||||
f ioFlusher
|
||||
|
||||
dec *Decoder
|
||||
enc *Encoder
|
||||
// bw *bufio.Writer
|
||||
// br *bufio.Reader
|
||||
h Handle
|
||||
|
||||
cls atomicClsErr
|
||||
}
|
||||
|
||||
func newRPCCodec(conn io.ReadWriteCloser, h Handle) rpcCodec {
|
||||
// return newRPCCodec2(bufio.NewReader(conn), bufio.NewWriter(conn), conn, h)
|
||||
return newRPCCodec2(conn, conn, conn, h)
|
||||
}
|
||||
|
||||
func newRPCCodec2(r io.Reader, w io.Writer, c io.Closer, h Handle) rpcCodec {
|
||||
// defensive: ensure that jsonH has TermWhitespace turned on.
|
||||
if jsonH, ok := h.(*JsonHandle); ok && !jsonH.TermWhitespace {
|
||||
panic(errRpcJsonNeedsTermWhitespace)
|
||||
}
|
||||
// always ensure that we use a flusher, and always flush what was written to the connection.
|
||||
// we lose nothing by using a buffered writer internally.
|
||||
f, ok := w.(ioFlusher)
|
||||
bh := basicHandle(h)
|
||||
if !bh.RPCNoBuffer {
|
||||
if bh.WriterBufferSize <= 0 {
|
||||
if !ok {
|
||||
bw := bufio.NewWriter(w)
|
||||
f, w = bw, bw
|
||||
}
|
||||
}
|
||||
if bh.ReaderBufferSize <= 0 {
|
||||
if _, ok = w.(ioPeeker); !ok {
|
||||
if _, ok = w.(ioBuffered); !ok {
|
||||
r = bufio.NewReader(r)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return rpcCodec{
|
||||
c: c,
|
||||
w: w,
|
||||
r: r,
|
||||
f: f,
|
||||
h: h,
|
||||
enc: NewEncoder(w, h),
|
||||
dec: NewDecoder(r, h),
|
||||
}
|
||||
}
|
||||
|
||||
func (c *rpcCodec) write(obj1, obj2 interface{}, writeObj2 bool) (err error) {
|
||||
if c.c != nil {
|
||||
cls := c.cls.load()
|
||||
if cls.closed {
|
||||
return cls.errClosed
|
||||
}
|
||||
}
|
||||
err = c.enc.Encode(obj1)
|
||||
if err == nil {
|
||||
if writeObj2 {
|
||||
err = c.enc.Encode(obj2)
|
||||
}
|
||||
}
|
||||
if c.f != nil {
|
||||
if err == nil {
|
||||
err = c.f.Flush()
|
||||
} else {
|
||||
_ = c.f.Flush() // swallow flush error, so we maintain prior error on write
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (c *rpcCodec) read(obj interface{}) (err error) {
|
||||
if c.c != nil {
|
||||
cls := c.cls.load()
|
||||
if cls.closed {
|
||||
return cls.errClosed
|
||||
}
|
||||
}
|
||||
// If nil is passed in, we should read and discard
|
||||
if obj == nil {
|
||||
// var obj2 interface{}
|
||||
// return c.dec.Decode(&obj2)
|
||||
defer panicToErr(c.dec, &err)
|
||||
c.dec.swallow()
|
||||
return
|
||||
}
|
||||
return c.dec.Decode(obj)
|
||||
}
|
||||
|
||||
func (c *rpcCodec) Close() error {
|
||||
if c.c == nil {
|
||||
return nil
|
||||
}
|
||||
cls := c.cls.load()
|
||||
if cls.closed {
|
||||
return cls.errClosed
|
||||
}
|
||||
cls.errClosed = c.c.Close()
|
||||
cls.closed = true
|
||||
c.cls.store(cls)
|
||||
return cls.errClosed
|
||||
}
|
||||
|
||||
func (c *rpcCodec) ReadResponseBody(body interface{}) error {
|
||||
return c.read(body)
|
||||
}
|
||||
|
||||
// -------------------------------------
|
||||
|
||||
type goRpcCodec struct {
|
||||
rpcCodec
|
||||
}
|
||||
|
||||
func (c *goRpcCodec) WriteRequest(r *rpc.Request, body interface{}) error {
|
||||
return c.write(r, body, true)
|
||||
}
|
||||
|
||||
func (c *goRpcCodec) WriteResponse(r *rpc.Response, body interface{}) error {
|
||||
return c.write(r, body, true)
|
||||
}
|
||||
|
||||
func (c *goRpcCodec) ReadResponseHeader(r *rpc.Response) error {
|
||||
return c.read(r)
|
||||
}
|
||||
|
||||
func (c *goRpcCodec) ReadRequestHeader(r *rpc.Request) error {
|
||||
return c.read(r)
|
||||
}
|
||||
|
||||
func (c *goRpcCodec) ReadRequestBody(body interface{}) error {
|
||||
return c.read(body)
|
||||
}
|
||||
|
||||
// -------------------------------------
|
||||
|
||||
// goRpc is the implementation of Rpc that uses the communication protocol
|
||||
// as defined in net/rpc package.
|
||||
type goRpc struct{}
|
||||
|
||||
// GoRpc implements Rpc using the communication protocol defined in net/rpc package.
|
||||
//
|
||||
// Note: network connection (from net.Dial, of type io.ReadWriteCloser) is not buffered.
|
||||
//
|
||||
// For performance, you should configure WriterBufferSize and ReaderBufferSize on the handle.
|
||||
// This ensures we use an adequate buffer during reading and writing.
|
||||
// If not configured, we will internally initialize and use a buffer during reads and writes.
|
||||
// This can be turned off via the RPCNoBuffer option on the Handle.
|
||||
// var handle codec.JsonHandle
|
||||
// handle.RPCNoBuffer = true // turns off attempt by rpc module to initialize a buffer
|
||||
//
|
||||
// Example 1: one way of configuring buffering explicitly:
|
||||
// var handle codec.JsonHandle // codec handle
|
||||
// handle.ReaderBufferSize = 1024
|
||||
// handle.WriterBufferSize = 1024
|
||||
// var conn io.ReadWriteCloser // connection got from a socket
|
||||
// var serverCodec = GoRpc.ServerCodec(conn, handle)
|
||||
// var clientCodec = GoRpc.ClientCodec(conn, handle)
|
||||
//
|
||||
// Example 2: you can also explicitly create a buffered connection yourself,
|
||||
// and not worry about configuring the buffer sizes in the Handle.
|
||||
// var handle codec.Handle // codec handle
|
||||
// var conn io.ReadWriteCloser // connection got from a socket
|
||||
// var bufconn = struct { // bufconn here is a buffered io.ReadWriteCloser
|
||||
// io.Closer
|
||||
// *bufio.Reader
|
||||
// *bufio.Writer
|
||||
// }{conn, bufio.NewReader(conn), bufio.NewWriter(conn)}
|
||||
// var serverCodec = GoRpc.ServerCodec(bufconn, handle)
|
||||
// var clientCodec = GoRpc.ClientCodec(bufconn, handle)
|
||||
//
|
||||
var GoRpc goRpc
|
||||
|
||||
func (x goRpc) ServerCodec(conn io.ReadWriteCloser, h Handle) rpc.ServerCodec {
|
||||
return &goRpcCodec{newRPCCodec(conn, h)}
|
||||
}
|
||||
|
||||
func (x goRpc) ClientCodec(conn io.ReadWriteCloser, h Handle) rpc.ClientCodec {
|
||||
return &goRpcCodec{newRPCCodec(conn, h)}
|
||||
}
|
||||
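To show how GoRpc, the handle buffer options and net/rpc fit together, here is a hedged sketch following the buffering guidance in the GoRpc doc comment above; the Arith service and the error-dropping are invented for brevity.

package main

import (
	"fmt"
	"net"
	"net/rpc"

	"github.com/ugorji/go/codec"
)

// Arith is a hypothetical net/rpc service used only for illustration.
type Arith struct{}

func (Arith) Mul(args *[2]int, reply *int) error { *reply = args[0] * args[1]; return nil }

func main() {
	var h codec.MsgpackHandle
	// per the GoRpc doc comment: the raw net.Conn is unbuffered, so size the
	// handle's buffers (or wrap the conn in bufio yourself)
	h.ReaderBufferSize = 1024
	h.WriterBufferSize = 1024

	_ = rpc.Register(Arith{})
	ln, _ := net.Listen("tcp", "127.0.0.1:0")
	go func() {
		conn, _ := ln.Accept()
		rpc.ServeCodec(codec.GoRpc.ServerCodec(conn, &h))
	}()

	conn, _ := net.Dial("tcp", ln.Addr().String())
	client := rpc.NewClientWithCodec(codec.GoRpc.ClientCodec(conn, &h))
	var product int
	_ = client.Call("Arith.Mul", &[2]int{6, 7}, &product)
	fmt.Println(product) // expected: 42
}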
349
vendor/github.com/ugorji/go/codec/shared_test.go
generated
vendored
@@ -1,349 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// This file sets up the variables used, including testInitFns.
|
||||
// Each file should add initialization that should be performed
|
||||
// after flags are parsed.
|
||||
//
|
||||
// init is a multi-step process:
|
||||
// - setup vars (handled by init functions in each file)
|
||||
// - parse flags
|
||||
// - setup derived vars (handled by pre-init registered functions - registered in init function)
|
||||
// - post init (handled by post-init registered functions - registered in init function)
|
||||
// This way, no one has to carefully manage or control the initialization
|
||||
// using file names, etc.
|
||||
//
|
||||
// Tests which require external dependencies need the -tag=x parameter.
|
||||
// They should be run as:
|
||||
// go test -tags=x -run=. <other parameters ...>
|
||||
// Benchmarks should also take this parameter, to include the sereal, xdr, etc.
|
||||
// To run against codecgen, etc, make sure you pass extra parameters.
|
||||
// Example usage:
|
||||
// go test "-tags=x codecgen" -bench=. <other parameters ...>
|
||||
//
|
||||
// To fully test everything:
|
||||
// go test -tags=x -benchtime=100ms -tv -bg -bi -brw -bu -v -run=. -bench=.
|
||||
|
||||
// Handling flags
|
||||
// codec_test.go will define a set of global flags for testing, including:
|
||||
// - Use Reset
|
||||
// - Use IO reader/writer (vs direct bytes)
|
||||
// - Set Canonical
|
||||
// - Set InternStrings
|
||||
// - Use Symbols
|
||||
//
|
||||
// This way, we can test them all by running same set of tests with a different
|
||||
// set of flags.
|
||||
//
|
||||
// Following this, all the benchmarks will utilize flags set by codec_test.go
|
||||
// and will not redefine these "global" flags.
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// __DO_NOT_REMOVE__NEEDED_FOR_REPLACING__IMPORT_PATH__FOR_CODEC_BENCH__
|
||||
|
||||
type testHED struct {
|
||||
H Handle
|
||||
E *Encoder
|
||||
D *Decoder
|
||||
}
|
||||
|
||||
type ioReaderWrapper struct {
|
||||
r io.Reader
|
||||
}
|
||||
|
||||
func (x ioReaderWrapper) Read(p []byte) (n int, err error) {
|
||||
return x.r.Read(p)
|
||||
}
|
||||
|
||||
type ioWriterWrapper struct {
|
||||
w io.Writer
|
||||
}
|
||||
|
||||
func (x ioWriterWrapper) Write(p []byte) (n int, err error) {
|
||||
return x.w.Write(p)
|
||||
}
|
||||
|
||||
var (
|
||||
// testNoopH = NoopHandle(8)
|
||||
testMsgpackH = &MsgpackHandle{}
|
||||
testBincH = &BincHandle{}
|
||||
testSimpleH = &SimpleHandle{}
|
||||
testCborH = &CborHandle{}
|
||||
testJsonH = &JsonHandle{}
|
||||
|
||||
testHandles []Handle
|
||||
testPreInitFns []func()
|
||||
testPostInitFns []func()
|
||||
|
||||
testOnce sync.Once
|
||||
|
||||
testHEDs []testHED
|
||||
)
|
||||
|
||||
// flag variables used by tests (and bench)
|
||||
var (
|
||||
testVerbose bool
|
||||
|
||||
// depth of 0 maps to a ~400-byte json-encoded string, 1 maps to ~1400 bytes, etc
|
||||
// For depth>1, we likely trigger stack growth for encoders, making benchmarking unreliable.
|
||||
testDepth int
|
||||
|
||||
testMaxInitLen int
|
||||
|
||||
testInitDebug bool
|
||||
testUseReset bool
|
||||
testSkipIntf bool
|
||||
testUseMust bool
|
||||
|
||||
testUseIoEncDec int
|
||||
testUseIoWrapper bool
|
||||
|
||||
testNumRepeatString int
|
||||
|
||||
testRpcBufsize int
|
||||
testMapStringKeyOnly bool
|
||||
)
|
||||
|
||||
// variables that are not flags, but which can configure the handles
|
||||
var (
|
||||
testEncodeOptions EncodeOptions
|
||||
testDecodeOptions DecodeOptions
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetOutput(ioutil.Discard) // don't allow things to log to standard out/err
|
||||
testHEDs = make([]testHED, 0, 32)
|
||||
testHandles = append(testHandles,
|
||||
// testNoopH,
|
||||
testMsgpackH, testBincH, testSimpleH, testCborH, testJsonH)
|
||||
// JSON should do HTMLCharsAsIs by default
|
||||
testJsonH.HTMLCharsAsIs = true
|
||||
// set ExplicitRelease on each handle
|
||||
testMsgpackH.ExplicitRelease = true
|
||||
testBincH.ExplicitRelease = true
|
||||
testSimpleH.ExplicitRelease = true
|
||||
testCborH.ExplicitRelease = true
|
||||
testJsonH.ExplicitRelease = true
|
||||
|
||||
testInitFlags()
|
||||
benchInitFlags()
|
||||
}
|
||||
|
||||
func testInitFlags() {
|
||||
// delete(testDecOpts.ExtFuncs, timeTyp)
|
||||
flag.BoolVar(&testVerbose, "tv", false, "Test Extra Verbose Logging if -v is set")
|
||||
flag.BoolVar(&testInitDebug, "tg", false, "Test Init Debug")
|
||||
flag.IntVar(&testUseIoEncDec, "ti", -1, "Use IO Reader/Writer for Marshal/Unmarshal ie >= 0")
|
||||
flag.BoolVar(&testUseIoWrapper, "tiw", false, "Wrap the IO Reader/Writer with a base pass-through reader/writer")
|
||||
|
||||
flag.BoolVar(&testSkipIntf, "tf", false, "Skip Interfaces")
|
||||
flag.BoolVar(&testUseReset, "tr", false, "Use Reset")
|
||||
flag.IntVar(&testNumRepeatString, "trs", 8, "Create string variables by repeating a string N times")
|
||||
flag.BoolVar(&testUseMust, "tm", true, "Use Must(En|De)code")
|
||||
|
||||
flag.IntVar(&testMaxInitLen, "tx", 0, "Max Init Len")
|
||||
|
||||
flag.IntVar(&testDepth, "tsd", 0, "Test Struc Depth")
|
||||
flag.BoolVar(&testMapStringKeyOnly, "tsk", false, "use maps with string keys only")
|
||||
}
|
||||
|
||||
func benchInitFlags() {
|
||||
// flags reproduced here for compatibility (duplicate some in testInitFlags)
|
||||
flag.BoolVar(&testMapStringKeyOnly, "bs", false, "use maps with string keys only")
|
||||
flag.IntVar(&testDepth, "bd", 1, "Bench Depth")
|
||||
}
|
||||
|
||||
func testHEDGet(h Handle) *testHED {
|
||||
for i := range testHEDs {
|
||||
v := &testHEDs[i]
|
||||
if v.H == h {
|
||||
return v
|
||||
}
|
||||
}
|
||||
testHEDs = append(testHEDs, testHED{h, NewEncoder(nil, h), NewDecoder(nil, h)})
|
||||
return &testHEDs[len(testHEDs)-1]
|
||||
}
|
||||
|
||||
func testReinit() {
|
||||
testOnce = sync.Once{}
|
||||
testHEDs = nil
|
||||
}
|
||||
|
||||
func testInitAll() {
|
||||
// only parse it once.
|
||||
if !flag.Parsed() {
|
||||
flag.Parse()
|
||||
}
|
||||
for _, f := range testPreInitFns {
|
||||
f()
|
||||
}
|
||||
for _, f := range testPostInitFns {
|
||||
f()
|
||||
}
|
||||
}
|
||||
|
||||
func sTestCodecEncode(ts interface{}, bsIn []byte, fn func([]byte) *bytes.Buffer,
|
||||
h Handle, bh *BasicHandle) (bs []byte, err error) {
|
||||
// bs = make([]byte, 0, approxSize)
|
||||
var e *Encoder
|
||||
var buf *bytes.Buffer
|
||||
if testUseReset {
|
||||
e = testHEDGet(h).E
|
||||
} else {
|
||||
e = NewEncoder(nil, h)
|
||||
}
|
||||
var oldWriteBufferSize int
|
||||
if testUseIoEncDec >= 0 {
|
||||
buf = fn(bsIn)
|
||||
// set the encode options for using a buffer
|
||||
oldWriteBufferSize = bh.WriterBufferSize
|
||||
bh.WriterBufferSize = testUseIoEncDec
|
||||
if testUseIoWrapper {
|
||||
e.Reset(ioWriterWrapper{buf})
|
||||
} else {
|
||||
e.Reset(buf)
|
||||
}
|
||||
} else {
|
||||
bs = bsIn
|
||||
e.ResetBytes(&bs)
|
||||
}
|
||||
if testUseMust {
|
||||
e.MustEncode(ts)
|
||||
} else {
|
||||
err = e.Encode(ts)
|
||||
}
|
||||
if testUseIoEncDec >= 0 {
|
||||
bs = buf.Bytes()
|
||||
bh.WriterBufferSize = oldWriteBufferSize
|
||||
}
|
||||
if !testUseReset {
|
||||
e.Release()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func sTestCodecDecode(bs []byte, ts interface{}, h Handle, bh *BasicHandle) (err error) {
|
||||
var d *Decoder
|
||||
// var buf *bytes.Reader
|
||||
if testUseReset {
|
||||
d = testHEDGet(h).D
|
||||
} else {
|
||||
d = NewDecoder(nil, h)
|
||||
}
|
||||
var oldReadBufferSize int
|
||||
if testUseIoEncDec >= 0 {
|
||||
buf := bytes.NewReader(bs)
|
||||
oldReadBufferSize = bh.ReaderBufferSize
|
||||
bh.ReaderBufferSize = testUseIoEncDec
|
||||
if testUseIoWrapper {
|
||||
d.Reset(ioReaderWrapper{buf})
|
||||
} else {
|
||||
d.Reset(buf)
|
||||
}
|
||||
} else {
|
||||
d.ResetBytes(bs)
|
||||
}
|
||||
if testUseMust {
|
||||
d.MustDecode(ts)
|
||||
} else {
|
||||
err = d.Decode(ts)
|
||||
}
|
||||
if testUseIoEncDec >= 0 {
|
||||
bh.ReaderBufferSize = oldReadBufferSize
|
||||
}
|
||||
if !testUseReset {
|
||||
d.Release()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// // --- functions below are used by both benchmarks and tests
|
||||
|
||||
// // log message only when testVerbose = true (ie go test ... -- -tv).
|
||||
// //
|
||||
// // These are for informational messages that do not necessarily
|
||||
// // help with diagnosing a failure, or which are too large.
|
||||
// func logTv(x interface{}, format string, args ...interface{}) {
|
||||
// if !testVerbose {
|
||||
// return
|
||||
// }
|
||||
// if t, ok := x.(testing.TB); ok { // only available from go 1.9
|
||||
// t.Helper()
|
||||
// }
|
||||
// logT(x, format, args...)
|
||||
// }
|
||||
|
||||
// // logT logs messages when running as go test -v
|
||||
// //
|
||||
// // Use it for diagnostic messages that help diagnose a failure,
|
||||
// // and when the output is not too long, ie shorter than about 100 characters.
|
||||
// //
|
||||
// // In general, any logT followed by failT should call this.
|
||||
// func logT(x interface{}, format string, args ...interface{}) {
|
||||
// if x == nil {
|
||||
// if len(format) == 0 || format[len(format)-1] != '\n' {
|
||||
// format = format + "\n"
|
||||
// }
|
||||
// fmt.Printf(format, args...)
|
||||
// return
|
||||
// }
|
||||
// if t, ok := x.(testing.TB); ok { // only available from go 1.9
|
||||
// t.Helper()
|
||||
// t.Logf(format, args...)
|
||||
// }
|
||||
// }
|
||||
|
||||
// func failTv(x testing.TB, args ...interface{}) {
|
||||
// x.Helper()
|
||||
// if testVerbose {
|
||||
// failTMsg(x, args...)
|
||||
// }
|
||||
// x.FailNow()
|
||||
// }
|
||||
|
||||
// func failT(x testing.TB, args ...interface{}) {
|
||||
// x.Helper()
|
||||
// failTMsg(x, args...)
|
||||
// x.FailNow()
|
||||
// }
|
||||
|
||||
// func failTMsg(x testing.TB, args ...interface{}) {
|
||||
// x.Helper()
|
||||
// if len(args) > 0 {
|
||||
// if format, ok := args[0].(string); ok {
|
||||
// logT(x, format, args[1:]...)
|
||||
// } else if len(args) == 1 {
|
||||
// logT(x, "%v", args[0])
|
||||
// } else {
|
||||
// logT(x, "%v", args)
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// --- functions below are used only by benchmarks alone
|
||||
|
||||
func fnBenchmarkByteBuf(bsIn []byte) (buf *bytes.Buffer) {
|
||||
// var buf bytes.Buffer
|
||||
// buf.Grow(approxSize)
|
||||
buf = bytes.NewBuffer(bsIn)
|
||||
buf.Truncate(0)
|
||||
return
|
||||
}
|
||||
|
||||
// func benchFnCodecEncode(ts interface{}, bsIn []byte, h Handle) (bs []byte, err error) {
|
||||
// return testCodecEncode(ts, bsIn, fnBenchmarkByteBuf, h)
|
||||
// }
|
||||
|
||||
// func benchFnCodecDecode(bs []byte, ts interface{}, h Handle) (err error) {
|
||||
// return testCodecDecode(bs, ts, h)
|
||||
// }
|
||||
646
vendor/github.com/ugorji/go/codec/simple.go
generated
vendored
@@ -1,646 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"math"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
_ uint8 = iota
|
||||
simpleVdNil = 1
|
||||
simpleVdFalse = 2
|
||||
simpleVdTrue = 3
|
||||
simpleVdFloat32 = 4
|
||||
simpleVdFloat64 = 5
|
||||
|
||||
// each lasts for 4 (ie n, n+1, n+2, n+3)
|
||||
simpleVdPosInt = 8
|
||||
simpleVdNegInt = 12
|
||||
|
||||
simpleVdTime = 24
|
||||
|
||||
// containers: each spans 8 slots (ie n, n+1, n+2, ... n+7); only n..n+4 are used
|
||||
simpleVdString = 216
|
||||
simpleVdByteArray = 224
|
||||
simpleVdArray = 232
|
||||
simpleVdMap = 240
|
||||
simpleVdExt = 248
|
||||
)
|
||||
|
||||
type simpleEncDriver struct {
|
||||
noBuiltInTypes
|
||||
encDriverNoopContainerWriter
|
||||
h *SimpleHandle
|
||||
b [8]byte
|
||||
_ [6]uint64 // padding (cache-aligned)
|
||||
e Encoder
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) encoder() *Encoder {
|
||||
return &e.e
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeNil() {
|
||||
e.e.encWr.writen1(simpleVdNil)
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeBool(b bool) {
|
||||
if e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && !b {
|
||||
e.EncodeNil()
|
||||
return
|
||||
}
|
||||
if b {
|
||||
e.e.encWr.writen1(simpleVdTrue)
|
||||
} else {
|
||||
e.e.encWr.writen1(simpleVdFalse)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeFloat32(f float32) {
|
||||
if e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && f == 0.0 {
|
||||
e.EncodeNil()
|
||||
return
|
||||
}
|
||||
e.e.encWr.writen1(simpleVdFloat32)
|
||||
bigenHelper{e.b[:4], e.e.w()}.writeUint32(math.Float32bits(f))
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeFloat64(f float64) {
|
||||
if e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && f == 0.0 {
|
||||
e.EncodeNil()
|
||||
return
|
||||
}
|
||||
e.e.encWr.writen1(simpleVdFloat64)
|
||||
bigenHelper{e.b[:8], e.e.w()}.writeUint64(math.Float64bits(f))
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeInt(v int64) {
|
||||
if v < 0 {
|
||||
e.encUint(uint64(-v), simpleVdNegInt)
|
||||
} else {
|
||||
e.encUint(uint64(v), simpleVdPosInt)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeUint(v uint64) {
|
||||
e.encUint(v, simpleVdPosInt)
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) encUint(v uint64, bd uint8) {
|
||||
if e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && v == 0 {
|
||||
e.EncodeNil()
|
||||
return
|
||||
}
|
||||
if v <= math.MaxUint8 {
|
||||
e.e.encWr.writen2(bd, uint8(v))
|
||||
} else if v <= math.MaxUint16 {
|
||||
e.e.encWr.writen1(bd + 1)
|
||||
bigenHelper{e.b[:2], e.e.w()}.writeUint16(uint16(v))
|
||||
} else if v <= math.MaxUint32 {
|
||||
e.e.encWr.writen1(bd + 2)
|
||||
bigenHelper{e.b[:4], e.e.w()}.writeUint32(uint32(v))
|
||||
} else { // if v <= math.MaxUint64 {
|
||||
e.e.encWr.writen1(bd + 3)
|
||||
bigenHelper{e.b[:8], e.e.w()}.writeUint64(v)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) encLen(bd byte, length int) {
|
||||
if length == 0 {
|
||||
e.e.encWr.writen1(bd)
|
||||
} else if length <= math.MaxUint8 {
|
||||
e.e.encWr.writen1(bd + 1)
|
||||
e.e.encWr.writen1(uint8(length))
|
||||
} else if length <= math.MaxUint16 {
|
||||
e.e.encWr.writen1(bd + 2)
|
||||
bigenHelper{e.b[:2], e.e.w()}.writeUint16(uint16(length))
|
||||
} else if int64(length) <= math.MaxUint32 {
|
||||
e.e.encWr.writen1(bd + 3)
|
||||
bigenHelper{e.b[:4], e.e.w()}.writeUint32(uint32(length))
|
||||
} else {
|
||||
e.e.encWr.writen1(bd + 4)
|
||||
bigenHelper{e.b[:8], e.e.w()}.writeUint64(uint64(length))
|
||||
}
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeExt(v interface{}, xtag uint64, ext Ext) {
|
||||
var bs []byte
|
||||
if ext == SelfExt {
|
||||
bs = e.e.blist.get(1024)[:0]
|
||||
e.e.sideEncode(v, &bs)
|
||||
} else {
|
||||
bs = ext.WriteExt(v)
|
||||
}
|
||||
if bs == nil {
|
||||
e.EncodeNil()
|
||||
return
|
||||
}
|
||||
e.encodeExtPreamble(uint8(xtag), len(bs))
|
||||
e.e.encWr.writeb(bs)
|
||||
if ext == SelfExt {
|
||||
e.e.blist.put(bs)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeRawExt(re *RawExt) {
|
||||
e.encodeExtPreamble(uint8(re.Tag), len(re.Data))
|
||||
e.e.encWr.writeb(re.Data)
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) encodeExtPreamble(xtag byte, length int) {
|
||||
e.encLen(simpleVdExt, length)
|
||||
e.e.encWr.writen1(xtag)
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) WriteArrayStart(length int) {
|
||||
e.encLen(simpleVdArray, length)
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) WriteMapStart(length int) {
|
||||
e.encLen(simpleVdMap, length)
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeString(v string) {
|
||||
if e.h.EncZeroValuesAsNil && e.e.c != containerMapKey && v == "" {
|
||||
e.EncodeNil()
|
||||
return
|
||||
}
|
||||
if e.h.StringToRaw {
|
||||
e.encLen(simpleVdByteArray, len(v))
|
||||
} else {
|
||||
e.encLen(simpleVdString, len(v))
|
||||
}
|
||||
e.e.encWr.writestr(v)
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeStringBytesRaw(v []byte) {
|
||||
// if e.h.EncZeroValuesAsNil && e.c != containerMapKey && v == nil {
|
||||
if v == nil {
|
||||
e.EncodeNil()
|
||||
return
|
||||
}
|
||||
e.encLen(simpleVdByteArray, len(v))
|
||||
e.e.encWr.writeb(v)
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) EncodeTime(t time.Time) {
|
||||
// if e.h.EncZeroValuesAsNil && e.c != containerMapKey && t.IsZero() {
|
||||
if t.IsZero() {
|
||||
e.EncodeNil()
|
||||
return
|
||||
}
|
||||
v, err := t.MarshalBinary()
|
||||
if err != nil {
|
||||
e.e.errorv(err)
|
||||
return
|
||||
}
|
||||
// time.Time MarshalBinary takes about 14 bytes.
|
||||
e.e.encWr.writen2(simpleVdTime, uint8(len(v)))
|
||||
e.e.encWr.writeb(v)
|
||||
}
|
||||
|
||||
//------------------------------------
|
||||
|
||||
type simpleDecDriver struct {
|
||||
h *SimpleHandle
|
||||
bdRead bool
|
||||
bd byte
|
||||
fnil bool
|
||||
noBuiltInTypes
|
||||
decDriverNoopContainerReader
|
||||
_ [6]uint64 // padding
|
||||
d Decoder
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) decoder() *Decoder {
|
||||
return &d.d
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) readNextBd() {
|
||||
d.bd = d.d.decRd.readn1()
|
||||
d.bdRead = true
|
||||
}
|
||||
|
||||
// func (d *simpleDecDriver) uncacheRead() {
|
||||
// if d.bdRead {
|
||||
// d.d.decRd.unreadn1()
|
||||
// d.bdRead = false
|
||||
// }
|
||||
// }
|
||||
|
||||
func (d *simpleDecDriver) advanceNil() (null bool) {
|
||||
d.fnil = false
|
||||
if !d.bdRead {
|
||||
d.readNextBd()
|
||||
}
|
||||
if d.bd == simpleVdNil {
|
||||
d.bdRead = false
|
||||
d.fnil = true
|
||||
null = true
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// func (d *simpleDecDriver) Nil() bool {
|
||||
// return d.fnil
|
||||
// }
|
||||
|
||||
func (d *simpleDecDriver) ContainerType() (vt valueType) {
|
||||
if !d.bdRead {
|
||||
d.readNextBd()
|
||||
}
|
||||
d.fnil = false
|
||||
switch d.bd {
|
||||
case simpleVdNil:
|
||||
d.bdRead = false
|
||||
d.fnil = true
|
||||
return valueTypeNil
|
||||
case simpleVdByteArray, simpleVdByteArray + 1,
|
||||
simpleVdByteArray + 2, simpleVdByteArray + 3, simpleVdByteArray + 4:
|
||||
return valueTypeBytes
|
||||
case simpleVdString, simpleVdString + 1,
|
||||
simpleVdString + 2, simpleVdString + 3, simpleVdString + 4:
|
||||
return valueTypeString
|
||||
case simpleVdArray, simpleVdArray + 1,
|
||||
simpleVdArray + 2, simpleVdArray + 3, simpleVdArray + 4:
|
||||
return valueTypeArray
|
||||
case simpleVdMap, simpleVdMap + 1,
|
||||
simpleVdMap + 2, simpleVdMap + 3, simpleVdMap + 4:
|
||||
return valueTypeMap
|
||||
}
|
||||
return valueTypeUnset
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) TryNil() bool {
|
||||
return d.advanceNil()
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) decCheckInteger() (ui uint64, neg bool) {
|
||||
switch d.bd {
|
||||
case simpleVdPosInt:
|
||||
ui = uint64(d.d.decRd.readn1())
|
||||
case simpleVdPosInt + 1:
|
||||
ui = uint64(bigen.Uint16(d.d.decRd.readx(2)))
|
||||
case simpleVdPosInt + 2:
|
||||
ui = uint64(bigen.Uint32(d.d.decRd.readx(4)))
|
||||
case simpleVdPosInt + 3:
|
||||
ui = uint64(bigen.Uint64(d.d.decRd.readx(8)))
|
||||
case simpleVdNegInt:
|
||||
ui = uint64(d.d.decRd.readn1())
|
||||
neg = true
|
||||
case simpleVdNegInt + 1:
|
||||
ui = uint64(bigen.Uint16(d.d.decRd.readx(2)))
|
||||
neg = true
|
||||
case simpleVdNegInt + 2:
|
||||
ui = uint64(bigen.Uint32(d.d.decRd.readx(4)))
|
||||
neg = true
|
||||
case simpleVdNegInt + 3:
|
||||
ui = uint64(bigen.Uint64(d.d.decRd.readx(8)))
|
||||
neg = true
|
||||
default:
|
||||
d.d.errorf("integer only valid from pos/neg integer1..8. Invalid descriptor: %v", d.bd)
|
||||
return
|
||||
}
|
||||
// DO NOT do this check below, because callers may only want the unsigned value:
|
||||
//
|
||||
// if ui > math.MaxInt64 {
|
||||
// d.d.errorf("decIntAny: Integer out of range for signed int64: %v", ui)
|
||||
// return
|
||||
// }
|
||||
return
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) DecodeInt64() (i int64) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
ui, neg := d.decCheckInteger()
|
||||
i = chkOvf.SignedIntV(ui)
|
||||
if neg {
|
||||
i = -i
|
||||
}
|
||||
d.bdRead = false
|
||||
return
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) DecodeUint64() (ui uint64) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
ui, neg := d.decCheckInteger()
|
||||
if neg {
|
||||
d.d.errorf("assigning negative signed value to unsigned type")
|
||||
return
|
||||
}
|
||||
d.bdRead = false
|
||||
return
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) DecodeFloat64() (f float64) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
if d.bd == simpleVdFloat32 {
|
||||
f = float64(math.Float32frombits(bigen.Uint32(d.d.decRd.readx(4))))
|
||||
} else if d.bd == simpleVdFloat64 {
|
||||
f = math.Float64frombits(bigen.Uint64(d.d.decRd.readx(8)))
|
||||
} else {
|
||||
if d.bd >= simpleVdPosInt && d.bd <= simpleVdNegInt+3 {
|
||||
f = float64(d.DecodeInt64())
|
||||
} else {
|
||||
d.d.errorf("float only valid from float32/64: Invalid descriptor: %v", d.bd)
|
||||
return
|
||||
}
|
||||
}
|
||||
d.bdRead = false
|
||||
return
|
||||
}
|
||||
|
||||
// bool can be decoded from bool only (single byte).
|
||||
func (d *simpleDecDriver) DecodeBool() (b bool) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
if d.bd == simpleVdFalse {
|
||||
} else if d.bd == simpleVdTrue {
|
||||
b = true
|
||||
} else {
|
||||
d.d.errorf("cannot decode bool - %s: %x", msgBadDesc, d.bd)
|
||||
return
|
||||
}
|
||||
d.bdRead = false
|
||||
return
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) ReadMapStart() (length int) {
|
||||
if d.advanceNil() {
|
||||
return decContainerLenNil
|
||||
}
|
||||
d.bdRead = false
|
||||
return d.decLen()
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) ReadArrayStart() (length int) {
|
||||
if d.advanceNil() {
|
||||
return decContainerLenNil
|
||||
}
|
||||
d.bdRead = false
|
||||
return d.decLen()
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) decLen() int {
|
||||
switch d.bd % 8 {
|
||||
case 0:
|
||||
return 0
|
||||
case 1:
|
||||
return int(d.d.decRd.readn1())
|
||||
case 2:
|
||||
return int(bigen.Uint16(d.d.decRd.readx(2)))
|
||||
case 3:
|
||||
ui := uint64(bigen.Uint32(d.d.decRd.readx(4)))
|
||||
if chkOvf.Uint(ui, intBitsize) {
|
||||
d.d.errorf("overflow integer: %v", ui)
|
||||
return 0
|
||||
}
|
||||
return int(ui)
|
||||
case 4:
|
||||
ui := bigen.Uint64(d.d.decRd.readx(8))
|
||||
if chkOvf.Uint(ui, intBitsize) {
|
||||
d.d.errorf("overflow integer: %v", ui)
|
||||
return 0
|
||||
}
|
||||
return int(ui)
|
||||
}
|
||||
d.d.errorf("cannot read length: bd%%8 must be in range 0..4. Got: %d", d.bd%8)
|
||||
return -1
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) DecodeStringAsBytes() (s []byte) {
|
||||
return d.DecodeBytes(d.d.b[:], true)
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) DecodeBytes(bs []byte, zerocopy bool) (bsOut []byte) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
// check if an "array" of uint8's (see ContainerType for how to infer if an array)
|
||||
if d.bd >= simpleVdArray && d.bd <= simpleVdMap+4 {
|
||||
if len(bs) == 0 && zerocopy {
|
||||
bs = d.d.b[:]
|
||||
}
|
||||
// bsOut, _ = fastpathTV.DecSliceUint8V(bs, true, d.d)
|
||||
slen := d.ReadArrayStart()
|
||||
bs = usableByteSlice(bs, slen)
|
||||
for i := 0; i < len(bs); i++ {
|
||||
bs[i] = uint8(chkOvf.UintV(d.DecodeUint64(), 8))
|
||||
}
|
||||
return bs
|
||||
}
|
||||
|
||||
clen := d.decLen()
|
||||
d.bdRead = false
|
||||
if d.d.bytes && (zerocopy || d.h.ZeroCopy) {
|
||||
return d.d.decRd.rb.readx(uint(clen))
|
||||
}
|
||||
if zerocopy && len(bs) == 0 {
|
||||
bs = d.d.b[:]
|
||||
}
|
||||
return decByteSlice(d.d.r(), clen, d.d.h.MaxInitLen, bs)
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) DecodeTime() (t time.Time) {
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
if d.bd != simpleVdTime {
|
||||
d.d.errorf("invalid descriptor for time.Time - expect 0x%x, received 0x%x", simpleVdTime, d.bd)
|
||||
return
|
||||
}
|
||||
d.bdRead = false
|
||||
clen := int(d.d.decRd.readn1())
|
||||
b := d.d.decRd.readx(uint(clen))
|
||||
if err := (&t).UnmarshalBinary(b); err != nil {
|
||||
d.d.errorv(err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) DecodeExt(rv interface{}, xtag uint64, ext Ext) {
|
||||
if xtag > 0xff {
|
||||
d.d.errorf("ext: tag must be <= 0xff; got: %v", xtag)
|
||||
return
|
||||
}
|
||||
if d.advanceNil() {
|
||||
return
|
||||
}
|
||||
realxtag1, xbs := d.decodeExtV(ext != nil, uint8(xtag))
|
||||
realxtag := uint64(realxtag1)
|
||||
if ext == nil {
|
||||
re := rv.(*RawExt)
|
||||
re.Tag = realxtag
|
||||
re.Data = detachZeroCopyBytes(d.d.bytes, re.Data, xbs)
|
||||
} else if ext == SelfExt {
|
||||
d.d.sideDecode(rv, xbs)
|
||||
} else {
|
||||
ext.ReadExt(rv, xbs)
|
||||
}
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) decodeExtV(verifyTag bool, tag byte) (xtag byte, xbs []byte) {
|
||||
switch d.bd {
|
||||
case simpleVdExt, simpleVdExt + 1, simpleVdExt + 2, simpleVdExt + 3, simpleVdExt + 4:
|
||||
l := d.decLen()
|
||||
xtag = d.d.decRd.readn1()
|
||||
if verifyTag && xtag != tag {
|
||||
d.d.errorf("wrong extension tag. Got %b. Expecting: %v", xtag, tag)
|
||||
return
|
||||
}
|
||||
if d.d.bytes {
|
||||
xbs = d.d.decRd.rb.readx(uint(l))
|
||||
} else {
|
||||
xbs = decByteSlice(d.d.r(), l, d.d.h.MaxInitLen, d.d.b[:])
|
||||
}
|
||||
case simpleVdByteArray, simpleVdByteArray + 1,
|
||||
simpleVdByteArray + 2, simpleVdByteArray + 3, simpleVdByteArray + 4:
|
||||
xbs = d.DecodeBytes(nil, true)
|
||||
default:
|
||||
d.d.errorf("ext - %s - expecting extensions/bytearray, got: 0x%x", msgBadDesc, d.bd)
|
||||
return
|
||||
}
|
||||
d.bdRead = false
|
||||
return
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) DecodeNaked() {
|
||||
if !d.bdRead {
|
||||
d.readNextBd()
|
||||
}
|
||||
|
||||
d.fnil = false
|
||||
n := d.d.naked()
|
||||
var decodeFurther bool
|
||||
|
||||
switch d.bd {
|
||||
case simpleVdNil:
|
||||
n.v = valueTypeNil
|
||||
d.fnil = true
|
||||
case simpleVdFalse:
|
||||
n.v = valueTypeBool
|
||||
n.b = false
|
||||
case simpleVdTrue:
|
||||
n.v = valueTypeBool
|
||||
n.b = true
|
||||
case simpleVdPosInt, simpleVdPosInt + 1, simpleVdPosInt + 2, simpleVdPosInt + 3:
|
||||
if d.h.SignedInteger {
|
||||
n.v = valueTypeInt
|
||||
n.i = d.DecodeInt64()
|
||||
} else {
|
||||
n.v = valueTypeUint
|
||||
n.u = d.DecodeUint64()
|
||||
}
|
||||
case simpleVdNegInt, simpleVdNegInt + 1, simpleVdNegInt + 2, simpleVdNegInt + 3:
|
||||
n.v = valueTypeInt
|
||||
n.i = d.DecodeInt64()
|
||||
case simpleVdFloat32:
|
||||
n.v = valueTypeFloat
|
||||
n.f = d.DecodeFloat64()
|
||||
case simpleVdFloat64:
|
||||
n.v = valueTypeFloat
|
||||
n.f = d.DecodeFloat64()
|
||||
case simpleVdTime:
|
||||
n.v = valueTypeTime
|
||||
n.t = d.DecodeTime()
|
||||
case simpleVdString, simpleVdString + 1,
|
||||
simpleVdString + 2, simpleVdString + 3, simpleVdString + 4:
|
||||
n.v = valueTypeString
|
||||
n.s = string(d.DecodeStringAsBytes())
|
||||
case simpleVdByteArray, simpleVdByteArray + 1,
|
||||
simpleVdByteArray + 2, simpleVdByteArray + 3, simpleVdByteArray + 4:
|
||||
fauxUnionReadRawBytes(d, &d.d, n, d.h.RawToString)
|
||||
case simpleVdExt, simpleVdExt + 1, simpleVdExt + 2, simpleVdExt + 3, simpleVdExt + 4:
|
||||
n.v = valueTypeExt
|
||||
l := d.decLen()
|
||||
n.u = uint64(d.d.decRd.readn1())
|
||||
if d.d.bytes {
|
||||
n.l = d.d.decRd.rb.readx(uint(l))
|
||||
} else {
|
||||
n.l = decByteSlice(d.d.r(), l, d.d.h.MaxInitLen, d.d.b[:])
|
||||
}
|
||||
case simpleVdArray, simpleVdArray + 1, simpleVdArray + 2,
|
||||
simpleVdArray + 3, simpleVdArray + 4:
|
||||
n.v = valueTypeArray
|
||||
decodeFurther = true
|
||||
case simpleVdMap, simpleVdMap + 1, simpleVdMap + 2, simpleVdMap + 3, simpleVdMap + 4:
|
||||
n.v = valueTypeMap
|
||||
decodeFurther = true
|
||||
default:
|
||||
d.d.errorf("cannot infer value - %s 0x%x", msgBadDesc, d.bd)
|
||||
}
|
||||
|
||||
if !decodeFurther {
|
||||
d.bdRead = false
|
||||
}
|
||||
}
|
||||
|
||||
//------------------------------------
|
||||
|
||||
// SimpleHandle is a Handle for a very simple encoding format.
|
||||
//
|
||||
// simple is a simplistic codec similar to binc, but not as compact.
|
||||
// - Encoding of a value is always preceded by the descriptor byte (bd)
|
||||
// - True, false, nil are encoded fully in 1 byte (the descriptor)
|
||||
// - Integers (intXXX, uintXXX) are encoded in 1, 2, 4 or 8 bytes (plus a descriptor byte).
|
||||
// There are positive (uintXXX and intXXX >= 0) and negative (intXXX < 0) integers.
|
||||
// - Floats are encoded in 4 or 8 bytes (plus a descriptor byte)
|
||||
// - Length of containers (strings, bytes, array, map, extensions)
|
||||
// are encoded in 0, 1, 2, 4 or 8 bytes.
|
||||
// Zero-length containers have no length encoded.
|
||||
// For others, the number of bytes is pow(2, (bd%8)-1), ie 1, 2, 4 or 8 bytes
|
||||
// - maps are encoded as [bd] [length] [[key][value]]...
|
||||
// - arrays are encoded as [bd] [length] [value]...
|
||||
// - extensions are encoded as [bd] [length] [tag] [byte]...
|
||||
// - strings/bytearrays are encoded as [bd] [length] [byte]...
|
||||
// - time.Time are encoded as [bd] [length] [byte]...
|
||||
//
|
||||
// The full spec will be published soon.
|
||||
type SimpleHandle struct {
|
||||
binaryEncodingType
|
||||
BasicHandle
|
||||
// EncZeroValuesAsNil says to encode zero values for numbers, bool, string, etc as nil
|
||||
EncZeroValuesAsNil bool
|
||||
|
||||
_ [7]uint64 // padding (cache-aligned)
|
||||
}
|
||||
|
||||
// Name returns the name of the handle: simple
|
||||
func (h *SimpleHandle) Name() string { return "simple" }
|
||||
|
||||
func (h *SimpleHandle) newEncDriver() encDriver {
|
||||
var e = &simpleEncDriver{h: h}
|
||||
e.e.e = e
|
||||
e.e.init(h)
|
||||
e.reset()
|
||||
return e
|
||||
}
|
||||
|
||||
func (h *SimpleHandle) newDecDriver() decDriver {
|
||||
d := &simpleDecDriver{h: h}
|
||||
d.d.d = d
|
||||
d.d.init(h)
|
||||
d.reset()
|
||||
return d
|
||||
}
|
||||
|
||||
func (e *simpleEncDriver) reset() {
|
||||
}
|
||||
|
||||
func (d *simpleDecDriver) reset() {
|
||||
d.bd, d.bdRead = 0, false
|
||||
d.fnil = false
|
||||
}
|
||||
|
||||
var _ decDriver = (*simpleDecDriver)(nil)
|
||||
var _ encDriver = (*simpleEncDriver)(nil)
|
||||
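Reading the descriptor constants and the encLen/encUint logic above, here is a minimal sketch of what a few values should look like on the wire under SimpleHandle; the byte values are derived from that code and are illustrative, not a spec.

package main

import (
	"fmt"

	"github.com/ugorji/go/codec"
)

func main() {
	var h codec.SimpleHandle
	enc := func(v interface{}) []byte {
		var b []byte
		codec.NewEncoderBytes(&b, &h).MustEncode(v)
		return b
	}

	// true is a single descriptor byte (simpleVdTrue == 3)
	fmt.Printf("% x\n", enc(true)) // expected: 03

	// small positive ints: descriptor simpleVdPosInt (8) plus one byte of value
	fmt.Printf("% x\n", enc(uint64(5))) // expected: 08 05

	// short strings: descriptor simpleVdString+1 (0xd9), a 1-byte length, then the bytes
	fmt.Printf("% x\n", enc("ab")) // expected: d9 02 61 62
}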
182
vendor/github.com/ugorji/go/codec/sort-slice.generated.go
generated
vendored
@@ -1,182 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// Code generated from sort-slice.go.tmpl - DO NOT EDIT.
|
||||
|
||||
package codec
|
||||
|
||||
import "time"
|
||||
import "reflect"
|
||||
import "bytes"
|
||||
|
||||
type stringSlice []string
|
||||
|
||||
func (p stringSlice) Len() int { return len(p) }
|
||||
func (p stringSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p stringSlice) Less(i, j int) bool {
|
||||
return p[uint(i)] < p[uint(j)]
|
||||
}
|
||||
|
||||
type float64Slice []float64
|
||||
|
||||
func (p float64Slice) Len() int { return len(p) }
|
||||
func (p float64Slice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p float64Slice) Less(i, j int) bool {
|
||||
return p[uint(i)] < p[uint(j)] || isNaN64(p[uint(i)]) && !isNaN64(p[uint(j)])
|
||||
}
|
||||
|
||||
type uint64Slice []uint64
|
||||
|
||||
func (p uint64Slice) Len() int { return len(p) }
|
||||
func (p uint64Slice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p uint64Slice) Less(i, j int) bool {
|
||||
return p[uint(i)] < p[uint(j)]
|
||||
}
|
||||
|
||||
type uintptrSlice []uintptr
|
||||
|
||||
func (p uintptrSlice) Len() int { return len(p) }
|
||||
func (p uintptrSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p uintptrSlice) Less(i, j int) bool {
|
||||
return p[uint(i)] < p[uint(j)]
|
||||
}
|
||||
|
||||
type int64Slice []int64
|
||||
|
||||
func (p int64Slice) Len() int { return len(p) }
|
||||
func (p int64Slice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p int64Slice) Less(i, j int) bool {
|
||||
return p[uint(i)] < p[uint(j)]
|
||||
}
|
||||
|
||||
type boolSlice []bool
|
||||
|
||||
func (p boolSlice) Len() int { return len(p) }
|
||||
func (p boolSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p boolSlice) Less(i, j int) bool {
|
||||
return !p[uint(i)] && p[uint(j)]
|
||||
}
|
||||
|
||||
type timeSlice []time.Time
|
||||
|
||||
func (p timeSlice) Len() int { return len(p) }
|
||||
func (p timeSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p timeSlice) Less(i, j int) bool {
|
||||
return p[uint(i)].Before(p[uint(j)])
|
||||
}
|
||||
|
||||
type bytesSlice [][]byte
|
||||
|
||||
func (p bytesSlice) Len() int { return len(p) }
|
||||
func (p bytesSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p bytesSlice) Less(i, j int) bool {
|
||||
return bytes.Compare(p[uint(i)], p[uint(j)]) == -1
|
||||
}
|
||||
|
||||
type stringRv struct {
|
||||
v string
|
||||
r reflect.Value
|
||||
}
|
||||
type stringRvSlice []stringRv
|
||||
|
||||
func (p stringRvSlice) Len() int { return len(p) }
|
||||
func (p stringRvSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p stringRvSlice) Less(i, j int) bool {
|
||||
return p[uint(i)].v < p[uint(j)].v
|
||||
}
|
||||
|
||||
type float64Rv struct {
|
||||
v float64
|
||||
r reflect.Value
|
||||
}
|
||||
type float64RvSlice []float64Rv
|
||||
|
||||
func (p float64RvSlice) Len() int { return len(p) }
|
||||
func (p float64RvSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p float64RvSlice) Less(i, j int) bool {
|
||||
return p[uint(i)].v < p[uint(j)].v || isNaN64(p[uint(i)].v) && !isNaN64(p[uint(j)].v)
|
||||
}
|
||||
|
||||
type uint64Rv struct {
|
||||
v uint64
|
||||
r reflect.Value
|
||||
}
|
||||
type uint64RvSlice []uint64Rv
|
||||
|
||||
func (p uint64RvSlice) Len() int { return len(p) }
|
||||
func (p uint64RvSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p uint64RvSlice) Less(i, j int) bool {
|
||||
return p[uint(i)].v < p[uint(j)].v
|
||||
}
|
||||
|
||||
type uintptrRv struct {
|
||||
v uintptr
|
||||
r reflect.Value
|
||||
}
|
||||
type uintptrRvSlice []uintptrRv
|
||||
|
||||
func (p uintptrRvSlice) Len() int { return len(p) }
|
||||
func (p uintptrRvSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p uintptrRvSlice) Less(i, j int) bool {
|
||||
return p[uint(i)].v < p[uint(j)].v
|
||||
}
|
||||
|
||||
type int64Rv struct {
|
||||
v int64
|
||||
r reflect.Value
|
||||
}
|
||||
type int64RvSlice []int64Rv
|
||||
|
||||
func (p int64RvSlice) Len() int { return len(p) }
|
||||
func (p int64RvSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p int64RvSlice) Less(i, j int) bool {
|
||||
return p[uint(i)].v < p[uint(j)].v
|
||||
}
|
||||
|
||||
type boolRv struct {
|
||||
v bool
|
||||
r reflect.Value
|
||||
}
|
||||
type boolRvSlice []boolRv
|
||||
|
||||
func (p boolRvSlice) Len() int { return len(p) }
|
||||
func (p boolRvSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p boolRvSlice) Less(i, j int) bool {
|
||||
return !p[uint(i)].v && p[uint(j)].v
|
||||
}
|
||||
|
||||
type timeRv struct {
|
||||
v time.Time
|
||||
r reflect.Value
|
||||
}
|
||||
type timeRvSlice []timeRv
|
||||
|
||||
func (p timeRvSlice) Len() int { return len(p) }
|
||||
func (p timeRvSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p timeRvSlice) Less(i, j int) bool {
|
||||
return p[uint(i)].v.Before(p[uint(j)].v)
|
||||
}
|
||||
|
||||
type bytesRv struct {
|
||||
v []byte
|
||||
r reflect.Value
|
||||
}
|
||||
type bytesRvSlice []bytesRv
|
||||
|
||||
func (p bytesRvSlice) Len() int { return len(p) }
|
||||
func (p bytesRvSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p bytesRvSlice) Less(i, j int) bool {
|
||||
return bytes.Compare(p[uint(i)].v, p[uint(j)].v) == -1
|
||||
}
|
||||
|
||||
type bytesIntf struct {
|
||||
v []byte
|
||||
i interface{}
|
||||
}
|
||||
type bytesIntfSlice []bytesIntf
|
||||
|
||||
func (p bytesIntfSlice) Len() int { return len(p) }
|
||||
func (p bytesIntfSlice) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p bytesIntfSlice) Less(i, j int) bool {
|
||||
return bytes.Compare(p[uint(i)].v, p[uint(j)].v) == -1
|
||||
}
|
||||
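For context, each of the generated types above just implements sort.Interface, presumably so the package can order map keys (and the reflect.Value/interface{} pairs) deterministically, e.g. for canonical encoding. A standalone copy of the pattern, for illustration only:

package main

import (
	"fmt"
	"sort"
)

// local copy of the generated pattern (the real stringSlice is unexported inside codec)
type stringSlice []string

func (p stringSlice) Len() int           { return len(p) }
func (p stringSlice) Swap(i, j int)      { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
func (p stringSlice) Less(i, j int) bool { return p[uint(i)] < p[uint(j)] }

func main() {
	keys := stringSlice{"b", "c", "a"}
	sort.Sort(keys)
	fmt.Println(keys) // [a b c]
}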
66
vendor/github.com/ugorji/go/codec/sort-slice.go.tmpl
generated
vendored
@@ -1,66 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// Code generated from sort-slice.go.tmpl - DO NOT EDIT.
|
||||
|
||||
{{/*
|
||||
xxxSlice
|
||||
xxxIntf
|
||||
xxxIntfSlice
|
||||
xxxRv
|
||||
xxxRvSlice
|
||||
|
||||
I'm now going to create them for
|
||||
- sortables
|
||||
- sortablesplus
|
||||
|
||||
With the parameters passed in sortables or sortablesplus,
|
||||
'time', 'bytes' are special, and correspond to time.Time and []byte respectively.
|
||||
*/}}
|
||||
|
||||
package codec
|
||||
|
||||
import "time"
|
||||
import "reflect"
|
||||
import "bytes"
|
||||
|
||||
{{/* func init() { _ = time.Unix } */}}
|
||||
|
||||
{{define "T"}}
|
||||
func (p {{ .Type }}) Len() int { return len(p) }
|
||||
func (p {{ .Type }}) Swap(i, j int) { p[uint(i)], p[uint(j)] = p[uint(j)], p[uint(i)] }
|
||||
func (p {{ .Type }}) Less(i, j int) bool {
|
||||
{{ if eq .Kind "bool" }} return !p[uint(i)]{{.V}} && p[uint(j)]{{.V}}
|
||||
{{ else if eq .Kind "float32" }} return p[uint(i)]{{.V}} < p[uint(j)]{{.V}} || isNaN32(p[uint(i)]{{.V}}) && !isNaN32(p[uint(j)]{{.V}})
|
||||
{{ else if eq .Kind "float64" }} return p[uint(i)]{{.V}} < p[uint(j)]{{.V}} || isNaN64(p[uint(i)]{{.V}}) && !isNaN64(p[uint(j)]{{.V}})
|
||||
{{ else if eq .Kind "time" }} return p[uint(i)]{{.V}}.Before(p[uint(j)]{{.V}})
|
||||
{{ else if eq .Kind "bytes" }} return bytes.Compare(p[uint(i)]{{.V}}, p[uint(j)]{{.V}}) == -1
|
||||
{{ else }} return p[uint(i)]{{.V}} < p[uint(j)]{{.V}}
|
||||
{{ end -}}
|
||||
}
|
||||
{{end}}
|
||||
|
||||
{{range $i, $v := sortables }}{{ $t := tshort $v }}
|
||||
type {{ $v }}Slice []{{ $t }}
|
||||
{{template "T" args "Kind" $v "Type" (print $v "Slice") "V" ""}}
|
||||
{{end}}
|
||||
|
||||
{{range $i, $v := sortablesplus }}{{ $t := tshort $v }}
|
||||
|
||||
type {{ $v }}Rv struct {
|
||||
v {{ $t }}
|
||||
r reflect.Value
|
||||
}
|
||||
type {{ $v }}RvSlice []{{ $v }}Rv
|
||||
{{template "T" args "Kind" $v "Type" (print $v "RvSlice") "V" ".v"}}
|
||||
|
||||
{{if eq $v "bytes" -}}
|
||||
type {{ $v }}Intf struct {
|
||||
v {{ $t }}
|
||||
i interface{}
|
||||
}
|
||||
type {{ $v }}IntfSlice []{{ $v }}Intf
|
||||
{{template "T" args "Kind" $v "Type" (print $v "IntfSlice") "V" ".v"}}
|
||||
{{end}}
|
||||
|
||||
{{end}}
|
||||
639
vendor/github.com/ugorji/go/codec/test-cbor-goldens.json
generated
vendored
@@ -1,639 +0,0 @@
|
||||
[
|
||||
{
|
||||
"cbor": "AA==",
|
||||
"hex": "00",
|
||||
"roundtrip": true,
|
||||
"decoded": 0
|
||||
},
|
||||
{
|
||||
"cbor": "AQ==",
|
||||
"hex": "01",
|
||||
"roundtrip": true,
|
||||
"decoded": 1
|
||||
},
|
||||
{
|
||||
"cbor": "Cg==",
|
||||
"hex": "0a",
|
||||
"roundtrip": true,
|
||||
"decoded": 10
|
||||
},
|
||||
{
|
||||
"cbor": "Fw==",
|
||||
"hex": "17",
|
||||
"roundtrip": true,
|
||||
"decoded": 23
|
||||
},
|
||||
{
|
||||
"cbor": "GBg=",
|
||||
"hex": "1818",
|
||||
"roundtrip": true,
|
||||
"decoded": 24
|
||||
},
|
||||
{
|
||||
"cbor": "GBk=",
|
||||
"hex": "1819",
|
||||
"roundtrip": true,
|
||||
"decoded": 25
|
||||
},
|
||||
{
|
||||
"cbor": "GGQ=",
|
||||
"hex": "1864",
|
||||
"roundtrip": true,
|
||||
"decoded": 100
|
||||
},
|
||||
{
|
||||
"cbor": "GQPo",
|
||||
"hex": "1903e8",
|
||||
"roundtrip": true,
|
||||
"decoded": 1000
|
||||
},
|
||||
{
|
||||
"cbor": "GgAPQkA=",
|
||||
"hex": "1a000f4240",
|
||||
"roundtrip": true,
|
||||
"decoded": 1000000
|
||||
},
|
||||
{
|
||||
"cbor": "GwAAAOjUpRAA",
|
||||
"hex": "1b000000e8d4a51000",
|
||||
"roundtrip": true,
|
||||
"decoded": 1000000000000
|
||||
},
|
||||
{
|
||||
"cbor": "G///////////",
|
||||
"hex": "1bffffffffffffffff",
|
||||
"roundtrip": true,
|
||||
"decoded": 18446744073709551615
|
||||
},
|
||||
{
|
||||
"cbor": "wkkBAAAAAAAAAAA=",
|
||||
"hex": "c249010000000000000000",
|
||||
"roundtrip": true,
|
||||
"decoded": 18446744073709551616
|
||||
},
|
||||
{
|
||||
"cbor": "O///////////",
|
||||
"hex": "3bffffffffffffffff",
|
||||
"roundtrip": true,
|
||||
"decoded": -18446744073709551616,
|
||||
"skip": true
|
||||
},
|
||||
{
|
||||
"cbor": "w0kBAAAAAAAAAAA=",
|
||||
"hex": "c349010000000000000000",
|
||||
"roundtrip": true,
|
||||
"decoded": -18446744073709551617
|
||||
},
|
||||
{
|
||||
"cbor": "IA==",
|
||||
"hex": "20",
|
||||
"roundtrip": true,
|
||||
"decoded": -1
|
||||
},
|
||||
{
|
||||
"cbor": "KQ==",
|
||||
"hex": "29",
|
||||
"roundtrip": true,
|
||||
"decoded": -10
|
||||
},
|
||||
{
|
||||
"cbor": "OGM=",
|
||||
"hex": "3863",
|
||||
"roundtrip": true,
|
||||
"decoded": -100
|
||||
},
|
||||
{
|
||||
"cbor": "OQPn",
|
||||
"hex": "3903e7",
|
||||
"roundtrip": true,
|
||||
"decoded": -1000
|
||||
},
|
||||
{
|
||||
"cbor": "+QAA",
|
||||
"hex": "f90000",
|
||||
"roundtrip": true,
|
||||
"decoded": 0.0
|
||||
},
|
||||
{
|
||||
"cbor": "+YAA",
|
||||
"hex": "f98000",
|
||||
"roundtrip": true,
|
||||
"decoded": -0.0
|
||||
},
|
||||
{
|
||||
"cbor": "+TwA",
|
||||
"hex": "f93c00",
|
||||
"roundtrip": true,
|
||||
"decoded": 1.0
|
||||
},
|
||||
{
|
||||
"cbor": "+z/xmZmZmZma",
|
||||
"hex": "fb3ff199999999999a",
|
||||
"roundtrip": true,
|
||||
"decoded": 1.1
|
||||
},
|
||||
{
|
||||
"cbor": "+T4A",
|
||||
"hex": "f93e00",
|
||||
"roundtrip": true,
|
||||
"decoded": 1.5
|
||||
},
|
||||
{
|
||||
"cbor": "+Xv/",
|
||||
"hex": "f97bff",
|
||||
"roundtrip": true,
|
||||
"decoded": 65504.0
|
||||
},
|
||||
{
|
||||
"cbor": "+kfDUAA=",
|
||||
"hex": "fa47c35000",
|
||||
"roundtrip": true,
|
||||
"decoded": 100000.0
|
||||
},
|
||||
{
|
||||
"cbor": "+n9///8=",
|
||||
"hex": "fa7f7fffff",
|
||||
"roundtrip": true,
|
||||
"decoded": 3.4028234663852886e+38
|
||||
},
|
||||
{
|
||||
"cbor": "+3435DyIAHWc",
|
||||
"hex": "fb7e37e43c8800759c",
|
||||
"roundtrip": true,
|
||||
"decoded": 1.0e+300
|
||||
},
|
||||
{
|
||||
"cbor": "+QAB",
|
||||
"hex": "f90001",
|
||||
"roundtrip": true,
|
||||
"decoded": 5.960464477539063e-08
|
||||
},
|
||||
{
|
||||
"cbor": "+QQA",
|
||||
"hex": "f90400",
|
||||
"roundtrip": true,
|
||||
"decoded": 6.103515625e-05
|
||||
},
|
||||
{
|
||||
"cbor": "+cQA",
|
||||
"hex": "f9c400",
|
||||
"roundtrip": true,
|
||||
"decoded": -4.0
|
||||
},
|
||||
{
|
||||
"cbor": "+8AQZmZmZmZm",
|
||||
"hex": "fbc010666666666666",
|
||||
"roundtrip": true,
|
||||
"decoded": -4.1
|
||||
},
|
||||
{
|
||||
"cbor": "+XwA",
|
||||
"hex": "f97c00",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "Infinity"
|
||||
},
|
||||
{
|
||||
"cbor": "+X4A",
|
||||
"hex": "f97e00",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "NaN"
|
||||
},
|
||||
{
|
||||
"cbor": "+fwA",
|
||||
"hex": "f9fc00",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "-Infinity"
|
||||
},
|
||||
{
|
||||
"cbor": "+n+AAAA=",
|
||||
"hex": "fa7f800000",
|
||||
"roundtrip": false,
|
||||
"diagnostic": "Infinity"
|
||||
},
|
||||
{
|
||||
"cbor": "+n/AAAA=",
|
||||
"hex": "fa7fc00000",
|
||||
"roundtrip": false,
|
||||
"diagnostic": "NaN"
|
||||
},
|
||||
{
|
||||
"cbor": "+v+AAAA=",
|
||||
"hex": "faff800000",
|
||||
"roundtrip": false,
|
||||
"diagnostic": "-Infinity"
|
||||
},
|
||||
{
|
||||
"cbor": "+3/wAAAAAAAA",
|
||||
"hex": "fb7ff0000000000000",
|
||||
"roundtrip": false,
|
||||
"diagnostic": "Infinity"
|
||||
},
|
||||
{
|
||||
"cbor": "+3/4AAAAAAAA",
|
||||
"hex": "fb7ff8000000000000",
|
||||
"roundtrip": false,
|
||||
"diagnostic": "NaN"
|
||||
},
|
||||
{
|
||||
"cbor": "+//wAAAAAAAA",
|
||||
"hex": "fbfff0000000000000",
|
||||
"roundtrip": false,
|
||||
"diagnostic": "-Infinity"
|
||||
},
|
||||
{
|
||||
"cbor": "9A==",
|
||||
"hex": "f4",
|
||||
"roundtrip": true,
|
||||
"decoded": false
|
||||
},
|
||||
{
|
||||
"cbor": "9Q==",
|
||||
"hex": "f5",
|
||||
"roundtrip": true,
|
||||
"decoded": true
|
||||
},
|
||||
{
|
||||
"cbor": "9g==",
|
||||
"hex": "f6",
|
||||
"roundtrip": true,
|
||||
"decoded": null
|
||||
},
|
||||
{
|
||||
"cbor": "9w==",
|
||||
"hex": "f7",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "undefined"
|
||||
},
|
||||
{
|
||||
"cbor": "8A==",
|
||||
"hex": "f0",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "simple(16)"
|
||||
},
|
||||
{
|
||||
"cbor": "+Bg=",
|
||||
"hex": "f818",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "simple(24)"
|
||||
},
|
||||
{
|
||||
"cbor": "+P8=",
|
||||
"hex": "f8ff",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "simple(255)"
|
||||
},
|
||||
{
|
||||
"cbor": "wHQyMDEzLTAzLTIxVDIwOjA0OjAwWg==",
|
||||
"hex": "c074323031332d30332d32315432303a30343a30305a",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "0(\"2013-03-21T20:04:00Z\")"
|
||||
},
|
||||
{
|
||||
"cbor": "wRpRS2ew",
|
||||
"hex": "c11a514b67b0",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "1(1363896240)"
|
||||
},
|
||||
{
|
||||
"cbor": "wftB1FLZ7CAAAA==",
|
||||
"hex": "c1fb41d452d9ec200000",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "1(1363896240.5)"
|
||||
},
|
||||
{
|
||||
"cbor": "10QBAgME",
|
||||
"hex": "d74401020304",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "23(h'01020304')"
|
||||
},
|
||||
{
|
||||
"cbor": "2BhFZElFVEY=",
|
||||
"hex": "d818456449455446",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "24(h'6449455446')"
|
||||
},
|
||||
{
|
||||
"cbor": "2CB2aHR0cDovL3d3dy5leGFtcGxlLmNvbQ==",
|
||||
"hex": "d82076687474703a2f2f7777772e6578616d706c652e636f6d",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "32(\"http://www.example.com\")"
|
||||
},
|
||||
{
|
||||
"cbor": "QA==",
|
||||
"hex": "40",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "h''"
|
||||
},
|
||||
{
|
||||
"cbor": "RAECAwQ=",
|
||||
"hex": "4401020304",
|
||||
"roundtrip": true,
|
||||
"diagnostic": "h'01020304'"
|
||||
},
|
||||
{
|
||||
"cbor": "YA==",
|
||||
"hex": "60",
|
||||
"roundtrip": true,
|
||||
"decoded": ""
|
||||
},
|
||||
{
|
||||
"cbor": "YWE=",
|
||||
"hex": "6161",
|
||||
"roundtrip": true,
|
||||
"decoded": "a"
|
||||
},
|
||||
{
|
||||
"cbor": "ZElFVEY=",
|
||||
"hex": "6449455446",
|
||||
"roundtrip": true,
|
||||
"decoded": "IETF"
|
||||
},
|
||||
{
|
||||
"cbor": "YiJc",
|
||||
"hex": "62225c",
|
||||
"roundtrip": true,
|
||||
"decoded": "\"\\"
|
||||
},
|
||||
{
|
||||
"cbor": "YsO8",
|
||||
"hex": "62c3bc",
|
||||
"roundtrip": true,
|
||||
"decoded": "ü"
|
||||
},
|
||||
{
|
||||
"cbor": "Y+awtA==",
|
||||
"hex": "63e6b0b4",
|
||||
"roundtrip": true,
|
||||
"decoded": "水"
|
||||
},
|
||||
{
|
||||
"cbor": "ZPCQhZE=",
|
||||
"hex": "64f0908591",
|
||||
"roundtrip": true,
|
||||
"decoded": "𐅑"
|
||||
},
|
||||
{
|
||||
"cbor": "gA==",
|
||||
"hex": "80",
|
||||
"roundtrip": true,
|
||||
"decoded": [
|
||||
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "gwECAw==",
|
||||
"hex": "83010203",
|
||||
"roundtrip": true,
|
||||
"decoded": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "gwGCAgOCBAU=",
|
||||
"hex": "8301820203820405",
|
||||
"roundtrip": true,
|
||||
"decoded": [
|
||||
1,
|
||||
[
|
||||
2,
|
||||
3
|
||||
],
|
||||
[
|
||||
4,
|
||||
5
|
||||
]
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "mBkBAgMEBQYHCAkKCwwNDg8QERITFBUWFxgYGBk=",
|
||||
"hex": "98190102030405060708090a0b0c0d0e0f101112131415161718181819",
|
||||
"roundtrip": true,
|
||||
"decoded": [
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24,
|
||||
25
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "oA==",
|
||||
"hex": "a0",
|
||||
"roundtrip": true,
|
||||
"decoded": {
|
||||
}
|
||||
},
|
||||
{
|
||||
"cbor": "ogECAwQ=",
|
||||
"hex": "a201020304",
|
||||
"roundtrip": true,
|
||||
"skip": true,
|
||||
"diagnostic": "{1: 2, 3: 4}"
|
||||
},
|
||||
{
|
||||
"cbor": "omFhAWFiggID",
|
||||
"hex": "a26161016162820203",
|
||||
"roundtrip": true,
|
||||
"decoded": {
|
||||
"a": 1,
|
||||
"b": [
|
||||
2,
|
||||
3
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"cbor": "gmFhoWFiYWM=",
|
||||
"hex": "826161a161626163",
|
||||
"roundtrip": true,
|
||||
"decoded": [
|
||||
"a",
|
||||
{
|
||||
"b": "c"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "pWFhYUFhYmFCYWNhQ2FkYURhZWFF",
|
||||
"hex": "a56161614161626142616361436164614461656145",
|
||||
"roundtrip": true,
|
||||
"decoded": {
|
||||
"a": "A",
|
||||
"b": "B",
|
||||
"c": "C",
|
||||
"d": "D",
|
||||
"e": "E"
|
||||
}
|
||||
},
|
||||
{
|
||||
"cbor": "X0IBAkMDBAX/",
|
||||
"hex": "5f42010243030405ff",
|
||||
"roundtrip": false,
|
||||
"skip": true,
|
||||
"diagnostic": "(_ h'0102', h'030405')"
|
||||
},
|
||||
{
|
||||
"cbor": "f2VzdHJlYWRtaW5n/w==",
|
||||
"hex": "7f657374726561646d696e67ff",
|
||||
"roundtrip": false,
|
||||
"decoded": "streaming"
|
||||
},
|
||||
{
|
||||
"cbor": "n/8=",
|
||||
"hex": "9fff",
|
||||
"roundtrip": false,
|
||||
"decoded": [
|
||||
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "nwGCAgOfBAX//w==",
|
||||
"hex": "9f018202039f0405ffff",
|
||||
"roundtrip": false,
|
||||
"decoded": [
|
||||
1,
|
||||
[
|
||||
2,
|
||||
3
|
||||
],
|
||||
[
|
||||
4,
|
||||
5
|
||||
]
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "nwGCAgOCBAX/",
|
||||
"hex": "9f01820203820405ff",
|
||||
"roundtrip": false,
|
||||
"decoded": [
|
||||
1,
|
||||
[
|
||||
2,
|
||||
3
|
||||
],
|
||||
[
|
||||
4,
|
||||
5
|
||||
]
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "gwGCAgOfBAX/",
|
||||
"hex": "83018202039f0405ff",
|
||||
"roundtrip": false,
|
||||
"decoded": [
|
||||
1,
|
||||
[
|
||||
2,
|
||||
3
|
||||
],
|
||||
[
|
||||
4,
|
||||
5
|
||||
]
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "gwGfAgP/ggQF",
|
||||
"hex": "83019f0203ff820405",
|
||||
"roundtrip": false,
|
||||
"decoded": [
|
||||
1,
|
||||
[
|
||||
2,
|
||||
3
|
||||
],
|
||||
[
|
||||
4,
|
||||
5
|
||||
]
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "nwECAwQFBgcICQoLDA0ODxAREhMUFRYXGBgYGf8=",
|
||||
"hex": "9f0102030405060708090a0b0c0d0e0f101112131415161718181819ff",
|
||||
"roundtrip": false,
|
||||
"decoded": [
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24,
|
||||
25
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "v2FhAWFinwID//8=",
|
||||
"hex": "bf61610161629f0203ffff",
|
||||
"roundtrip": false,
|
||||
"decoded": {
|
||||
"a": 1,
|
||||
"b": [
|
||||
2,
|
||||
3
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"cbor": "gmFhv2FiYWP/",
|
||||
"hex": "826161bf61626163ff",
|
||||
"roundtrip": false,
|
||||
"decoded": [
|
||||
"a",
|
||||
{
|
||||
"b": "c"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"cbor": "v2NGdW71Y0FtdCH/",
|
||||
"hex": "bf6346756ef563416d7421ff",
|
||||
"roundtrip": false,
|
||||
"decoded": {
|
||||
"Fun": true,
|
||||
"Amt": -2
|
||||
}
|
||||
}
|
||||
]
|
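The golden vectors above pair a base64 CBOR encoding ("cbor"), its hex form ("hex"), a roundtrip flag, and the expected decoded value. As a rough sketch of how such a vector could be checked against this package's CborHandle (the program shape, file layout and variable names below are assumptions, not part of the vendored code):

    package main

    import (
        "encoding/hex"
        "fmt"

        "github.com/ugorji/go/codec"
    )

    func main() {
        // hex form of the "IETF" vector above
        raw, err := hex.DecodeString("6449455446")
        if err != nil {
            panic(err)
        }
        var out interface{}
        if err := codec.NewDecoderBytes(raw, new(codec.CborHandle)).Decode(&out); err != nil {
            panic(err)
        }
        fmt.Println(out) // the vector expects the decoded value "IETF"
    }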
||||
124
vendor/github.com/ugorji/go/codec/test.py
generated
vendored
124
vendor/github.com/ugorji/go/codec/test.py
generated
vendored
@@ -1,124 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This will create golden files in a directory passed to it.
|
||||
# A Test calls this internally to create the golden files
|
||||
# So it can process them (so we don't have to check in the files).
|
||||
|
||||
# Ensure msgpack-python and cbor are installed first, using:
|
||||
# sudo apt-get install python-dev
|
||||
# sudo apt-get install python-pip
|
||||
# pip install --user msgpack-python msgpack-rpc-python cbor
|
||||
|
||||
# Ensure all "string" keys are utf strings (else encoded as bytes)
|
||||
|
||||
from __future__ import print_function
|
||||
import cbor, msgpack, msgpackrpc, sys, os, threading
|
||||
|
||||
def get_test_data_list():
|
||||
# get list with all primitive types, and a combo type
|
||||
l0 = [
|
||||
-8,
|
||||
-1616,
|
||||
-32323232,
|
||||
-6464646464646464,
|
||||
192,
|
||||
1616,
|
||||
32323232,
|
||||
6464646464646464,
|
||||
192,
|
||||
-3232.0,
|
||||
-6464646464.0,
|
||||
3232.0,
|
||||
6464.0,
|
||||
6464646464.0,
|
||||
False,
|
||||
True,
|
||||
u"null",
|
||||
None,
|
||||
u"some&day>some<day",
|
||||
1328176922000002000,
|
||||
u"",
|
||||
-2206187877999998000,
|
||||
u"bytestring",
|
||||
270,
|
||||
u"none",
|
||||
-2013855847999995777,
|
||||
#-6795364578871345152,
|
||||
]
|
||||
l1 = [
|
||||
{ "true": True,
|
||||
"false": False },
|
||||
{ "true": u"True",
|
||||
"false": False,
|
||||
"uint16(1616)": 1616 },
|
||||
{ "list": [1616, 32323232, True, -3232.0, {"TRUE":True, "FALSE":False}, [True, False] ],
|
||||
"int32":32323232, "bool": True,
|
||||
"LONG STRING": u"123456789012345678901234567890123456789012345678901234567890",
|
||||
"SHORT STRING": u"1234567890" },
|
||||
{ True: "true", 138: False, "false": 200 }
|
||||
]
|
||||
|
||||
l = []
|
||||
l.extend(l0)
|
||||
l.append(l0)
|
||||
l.append(1)
|
||||
l.extend(l1)
|
||||
return l
|
||||
|
||||
def build_test_data(destdir):
|
||||
l = get_test_data_list()
|
||||
for i in range(len(l)):
|
||||
# packer = msgpack.Packer()
|
||||
serialized = msgpack.dumps(l[i])
|
||||
with open(os.path.join(destdir, str(i) + '.msgpack.golden'), 'wb') as f:
|
||||
f.write(serialized)
|
||||
serialized = cbor.dumps(l[i])
|
||||
with open(os.path.join(destdir, str(i) + '.cbor.golden'), 'wb') as f:
|
||||
f.write(serialized)
|
||||
|
||||
def doRpcServer(port, stopTimeSec):
|
||||
class EchoHandler(object):
|
||||
def Echo123(self, msg1, msg2, msg3):
|
||||
return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
|
||||
def EchoStruct(self, msg):
|
||||
return ("%s" % msg)
|
||||
|
||||
addr = msgpackrpc.Address('127.0.0.1', port)
|
||||
server = msgpackrpc.Server(EchoHandler())
|
||||
server.listen(addr)
|
||||
# run thread to stop it after stopTimeSec seconds if > 0
|
||||
if stopTimeSec > 0:
|
||||
def myStopRpcServer():
|
||||
server.stop()
|
||||
t = threading.Timer(stopTimeSec, myStopRpcServer)
|
||||
t.start()
|
||||
server.start()
|
||||
|
||||
def doRpcClientToPythonSvc(port):
|
||||
address = msgpackrpc.Address('127.0.0.1', port)
|
||||
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
|
||||
print(client.call("Echo123", "A1", "B2", "C3"))
|
||||
print(client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"}))
|
||||
|
||||
def doRpcClientToGoSvc(port):
|
||||
# print(">>>> port: ", port, " <<<<<")
|
||||
address = msgpackrpc.Address('127.0.0.1', port)
|
||||
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
|
||||
print(client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"]))
|
||||
print(client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"}))
|
||||
|
||||
def doMain(args):
|
||||
if len(args) == 2 and args[0] == "testdata":
|
||||
build_test_data(args[1])
|
||||
elif len(args) == 3 and args[0] == "rpc-server":
|
||||
doRpcServer(int(args[1]), int(args[2]))
|
||||
elif len(args) == 2 and args[0] == "rpc-client-python-service":
|
||||
doRpcClientToPythonSvc(int(args[1]))
|
||||
elif len(args) == 2 and args[0] == "rpc-client-go-service":
|
||||
doRpcClientToGoSvc(int(args[1]))
|
||||
else:
|
||||
print("Usage: test.py " +
|
||||
"[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")
|
||||
|
||||
if __name__ == "__main__":
|
||||
doMain(sys.argv[1:])
|
||||
14057
vendor/github.com/ugorji/go/codec/values_codecgen_generated_test.go
generated
vendored
14057
vendor/github.com/ugorji/go/codec/values_codecgen_generated_test.go
generated
vendored
File diff suppressed because it is too large
Load Diff
346
vendor/github.com/ugorji/go/codec/values_flex_test.go
generated
vendored
346
vendor/github.com/ugorji/go/codec/values_flex_test.go
generated
vendored
@@ -1,346 +0,0 @@
|
||||
// comment this out // // + build testing
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
const teststrucflexChanCap = 64
|
||||
|
||||
// This file contains values used by tests alone.
|
||||
// This is where we may try out different things,
|
||||
// that other engines may not support or may barf upon
|
||||
// e.g. custom extensions for wrapped types, maps with non-string keys, etc.
|
||||
|
||||
// some funky types to test codecgen
|
||||
|
||||
type codecgenA struct {
|
||||
ZZ []byte
|
||||
}
|
||||
type codecgenB struct {
|
||||
AA codecgenA
|
||||
}
|
||||
type codecgenC struct {
|
||||
_struct struct{} `codec:",omitempty"`
|
||||
BB codecgenB
|
||||
}
|
||||
|
||||
type TestCodecgenG struct {
|
||||
TestCodecgenG int
|
||||
}
|
||||
type codecgenH struct {
|
||||
TestCodecgenG
|
||||
}
|
||||
type codecgenI struct {
|
||||
codecgenH
|
||||
}
|
||||
|
||||
type codecgenK struct {
|
||||
X int
|
||||
Y string
|
||||
}
|
||||
type codecgenL struct {
|
||||
X int
|
||||
Y uint32
|
||||
}
|
||||
type codecgenM struct {
|
||||
codecgenK
|
||||
codecgenL
|
||||
}
|
||||
|
||||
// some types to test struct keytype
|
||||
|
||||
type testStrucKeyTypeT0 struct {
|
||||
_struct struct{}
|
||||
F int
|
||||
}
|
||||
type testStrucKeyTypeT1 struct {
|
||||
_struct struct{} `codec:",string"`
|
||||
F int `codec:"FFFF"`
|
||||
}
|
||||
type testStrucKeyTypeT2 struct {
|
||||
_struct struct{} `codec:",int"`
|
||||
F int `codec:"-1"`
|
||||
}
|
||||
type testStrucKeyTypeT3 struct {
|
||||
_struct struct{} `codec:",uint"`
|
||||
F int `codec:"1"`
|
||||
}
|
||||
type testStrucKeyTypeT4 struct {
|
||||
_struct struct{} `codec:",float"`
|
||||
F int `codec:"2.5"`
|
||||
}
|
||||
|
||||
// Some unused types just stored here
|
||||
|
||||
type Bbool bool
|
||||
type Aarray [1]string
|
||||
type Sstring string
|
||||
type Sstructsmall struct {
|
||||
A int
|
||||
}
|
||||
|
||||
type Sstructbig struct {
|
||||
A int
|
||||
B bool
|
||||
c string
|
||||
// Sval Sstruct
|
||||
Ssmallptr *Sstructsmall
|
||||
Ssmall *Sstructsmall
|
||||
Sptr *Sstructbig
|
||||
}
|
||||
|
||||
type SstructbigMapBySlice struct {
|
||||
_struct struct{} `codec:",toarray"`
|
||||
A int
|
||||
B bool
|
||||
c string
|
||||
// Sval Sstruct
|
||||
Ssmallptr *Sstructsmall
|
||||
Ssmall *Sstructsmall
|
||||
Sptr *Sstructbig
|
||||
}
|
||||
|
||||
// small struct for testing that codecgen works for unexported types
|
||||
type tLowerFirstLetter struct {
|
||||
I int
|
||||
u uint64
|
||||
S string
|
||||
b []byte
|
||||
}
|
||||
|
||||
// Some used types
|
||||
type wrapInt64 int64
|
||||
type wrapUint8 uint8
|
||||
type wrapBytes []uint8
|
||||
|
||||
type AnonInTestStrucIntf struct {
|
||||
Islice []interface{}
|
||||
Ms map[string]interface{}
|
||||
Nintf interface{} //don't set this, so we can test for nil
|
||||
T time.Time
|
||||
Tptr *time.Time
|
||||
}
|
||||
|
||||
type missingFielderT1 struct {
|
||||
S string
|
||||
B bool
|
||||
f float64
|
||||
i int64
|
||||
}
|
||||
|
||||
func (t *missingFielderT1) CodecMissingField(field []byte, value interface{}) bool {
|
||||
switch string(field) {
|
||||
case "F":
|
||||
t.f = value.(float64)
|
||||
case "I":
|
||||
t.i = value.(int64)
|
||||
default:
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (t *missingFielderT1) CodecMissingFields() map[string]interface{} {
|
||||
return map[string]interface{}{"F": t.f, "I": t.i}
|
||||
}
|
||||
|
||||
type missingFielderT2 struct {
|
||||
S string
|
||||
B bool
|
||||
F float64
|
||||
I int64
|
||||
}
|
||||
|
||||
type testSelfExtHelper struct {
|
||||
S string
|
||||
I int64
|
||||
B bool
|
||||
}
|
||||
|
||||
type TestSelfExtImpl struct {
|
||||
testSelfExtHelper
|
||||
}
|
||||
|
||||
type TestSelfExtImpl2 struct {
|
||||
M string
|
||||
O bool
|
||||
}
|
||||
|
||||
type TestTwoNakedInterfaces struct {
|
||||
A interface{}
|
||||
B interface{}
|
||||
}
|
||||
|
||||
var testWRepeated512 wrapBytes
|
||||
var testStrucTime = time.Date(2012, 2, 2, 2, 2, 2, 2000, time.UTC).UTC()
|
||||
|
||||
func init() {
|
||||
var testARepeated512 [512]byte
|
||||
for i := range testARepeated512 {
|
||||
testARepeated512[i] = 'A'
|
||||
}
|
||||
testWRepeated512 = wrapBytes(testARepeated512[:])
|
||||
}
|
||||
|
||||
type TestStrucFlex struct {
|
||||
_struct struct{} `codec:",omitempty"` //set omitempty for every field
|
||||
TestStrucCommon
|
||||
|
||||
Chstr chan string
|
||||
|
||||
Mis map[int]string
|
||||
Mbu64 map[bool]struct{}
|
||||
Miwu64s map[int]wrapUint64Slice
|
||||
Mfwss map[float64]wrapStringSlice
|
||||
Mf32wss map[float32]wrapStringSlice
|
||||
Mui2wss map[uint64]wrapStringSlice
|
||||
|
||||
// DecodeNaked bombs because stringUint64T is decoded as a map,
|
||||
// and a map cannot be the key type of a map.
|
||||
// Ensure this is set to nil if decoding into a nil interface{}.
|
||||
Msu2wss map[stringUint64T]wrapStringSlice
|
||||
|
||||
Ci64 wrapInt64
|
||||
Swrapbytes []wrapBytes
|
||||
Swrapuint8 []wrapUint8
|
||||
|
||||
ArrStrUi64T [4]stringUint64T
|
||||
|
||||
Ui64array [4]uint64
|
||||
Ui64slicearray []*[4]uint64
|
||||
|
||||
SintfAarray []interface{}
|
||||
|
||||
// Ensure this is set to nil if decoding into a nil interface{}.
|
||||
MstrUi64TSelf map[stringUint64T]*stringUint64T
|
||||
|
||||
// make this a ptr, so that it could be set or not.
|
||||
// for comparison (e.g. with msgp), give it a struct tag (so it is not inlined),
|
||||
// make this one omitempty (so it is excluded if nil).
|
||||
*AnonInTestStrucIntf `json:",omitempty"`
|
||||
|
||||
//M map[interface{}]interface{} `json:"-",bson:"-"`
|
||||
Mtsptr map[string]*TestStrucFlex
|
||||
Mts map[string]TestStrucFlex
|
||||
Its []*TestStrucFlex
|
||||
Nteststruc *TestStrucFlex
|
||||
}
|
||||
|
||||
func emptyTestStrucFlex() *TestStrucFlex {
|
||||
var ts TestStrucFlex
|
||||
// we initialize and start draining the chan, so that we can decode into it without it blocking due to no consumer
|
||||
ts.Chstr = make(chan string, teststrucflexChanCap)
|
||||
go func() {
|
||||
for range ts.Chstr {
|
||||
}
|
||||
}() // drain it
|
||||
return &ts
|
||||
}
|
||||
|
||||
func newTestStrucFlex(depth, n int, bench, useInterface, useStringKeyOnly bool) (ts *TestStrucFlex) {
|
||||
ts = &TestStrucFlex{
|
||||
Chstr: make(chan string, teststrucflexChanCap),
|
||||
|
||||
Miwu64s: map[int]wrapUint64Slice{
|
||||
5: []wrapUint64{1, 2, 3, 4, 5},
|
||||
3: []wrapUint64{1, 2, 3},
|
||||
},
|
||||
|
||||
Mf32wss: map[float32]wrapStringSlice{
|
||||
5.0: []wrapString{"1.0", "2.0", "3.0", "4.0", "5.0"},
|
||||
3.0: []wrapString{"1.0", "2.0", "3.0"},
|
||||
},
|
||||
|
||||
Mui2wss: map[uint64]wrapStringSlice{
|
||||
5: []wrapString{"1.0", "2.0", "3.0", "4.0", "5.0"},
|
||||
3: []wrapString{"1.0", "2.0", "3.0"},
|
||||
},
|
||||
|
||||
Mfwss: map[float64]wrapStringSlice{
|
||||
5.0: []wrapString{"1.0", "2.0", "3.0", "4.0", "5.0"},
|
||||
3.0: []wrapString{"1.0", "2.0", "3.0"},
|
||||
},
|
||||
|
||||
// DecodeNaked bombs here, because the stringUint64T is decoded as a map,
|
||||
// and a map cannot be the key type of a map.
|
||||
// Ensure this is set to nil if decoding into a nil interface{}.
|
||||
Msu2wss: map[stringUint64T]wrapStringSlice{
|
||||
stringUint64T{"5", 5}: []wrapString{"1", "2", "3", "4", "5"},
|
||||
stringUint64T{"3", 3}: []wrapString{"1", "2", "3"},
|
||||
},
|
||||
|
||||
Mis: map[int]string{
|
||||
1: "one",
|
||||
22: "twenty two",
|
||||
-44: "minus forty four",
|
||||
},
|
||||
Mbu64: map[bool]struct{}{false: {}, true: {}},
|
||||
|
||||
Ci64: -22,
|
||||
Swrapbytes: []wrapBytes{ // lengths of 1, 2, 4, 8, 16, 32, 64, 128, 256,
|
||||
testWRepeated512[:1],
|
||||
testWRepeated512[:2],
|
||||
testWRepeated512[:4],
|
||||
testWRepeated512[:8],
|
||||
testWRepeated512[:16],
|
||||
testWRepeated512[:32],
|
||||
testWRepeated512[:64],
|
||||
testWRepeated512[:128],
|
||||
testWRepeated512[:256],
|
||||
testWRepeated512[:512],
|
||||
},
|
||||
Swrapuint8: []wrapUint8{
|
||||
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J',
|
||||
},
|
||||
Ui64array: [4]uint64{4, 16, 64, 256},
|
||||
ArrStrUi64T: [4]stringUint64T{{"4", 4}, {"3", 3}, {"2", 2}, {"1", 1}},
|
||||
SintfAarray: []interface{}{Aarray{"s"}},
|
||||
MstrUi64TSelf: make(map[stringUint64T]*stringUint64T, numStrUi64T),
|
||||
}
|
||||
|
||||
for i := uint64(0); i < numStrUi64T; i++ {
|
||||
ss := stringUint64T{S: strings.Repeat(strconv.FormatUint(i, 10), 4), U: i}
|
||||
// Ensure this is set to nil if decoding into a nil interface{}.
|
||||
ts.MstrUi64TSelf[ss] = &ss
|
||||
}
|
||||
|
||||
numChanSend := cap(ts.Chstr) / 4 // 8
|
||||
for i := 0; i < numChanSend; i++ {
|
||||
ts.Chstr <- strings.Repeat("A", i+1)
|
||||
}
|
||||
|
||||
ts.Ui64slicearray = []*[4]uint64{&ts.Ui64array, &ts.Ui64array}
|
||||
|
||||
if useInterface {
|
||||
ts.AnonInTestStrucIntf = &AnonInTestStrucIntf{
|
||||
Islice: []interface{}{strRpt(n, "true"), true, strRpt(n, "no"), false, uint64(288), float64(0.4)},
|
||||
Ms: map[string]interface{}{
|
||||
strRpt(n, "true"): strRpt(n, "true"),
|
||||
strRpt(n, "int64(9)"): false,
|
||||
},
|
||||
T: testStrucTime,
|
||||
}
|
||||
}
|
||||
|
||||
populateTestStrucCommon(&ts.TestStrucCommon, n, bench, useInterface, useStringKeyOnly)
|
||||
if depth > 0 {
|
||||
depth--
|
||||
if ts.Mtsptr == nil {
|
||||
ts.Mtsptr = make(map[string]*TestStrucFlex)
|
||||
}
|
||||
if ts.Mts == nil {
|
||||
ts.Mts = make(map[string]TestStrucFlex)
|
||||
}
|
||||
ts.Mtsptr["0"] = newTestStrucFlex(depth, n, bench, useInterface, useStringKeyOnly)
|
||||
ts.Mts["0"] = *(ts.Mtsptr["0"])
|
||||
ts.Its = append(ts.Its, ts.Mtsptr["0"])
|
||||
}
|
||||
return
|
||||
}
|
||||
418
vendor/github.com/ugorji/go/codec/values_test.go
generated
vendored
418
vendor/github.com/ugorji/go/codec/values_test.go
generated
vendored
@@ -1,418 +0,0 @@
|
||||
// comment this out // + build testing
|
||||
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
// This file contains values used by tests and benchmarks.
|
||||
// The benchmarks will test performance against other libraries
|
||||
// (encoding/json, json-iterator, bson, gob, etc).
|
||||
// Consequently, we only use values that will parse well in all engines,
|
||||
// and only leverage features that work across multiple libraries for a truer comparison.
|
||||
// For example,
|
||||
// - JSON/BSON do not like maps with keys that are not strings,
|
||||
// so we only use maps with string keys here.
|
||||
// - _struct options are not honored by other libraries,
|
||||
// so we don't use them in this file.
|
||||
|
||||
import (
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// func init() {
|
||||
// rt := reflect.TypeOf((*TestStruc)(nil)).Elem()
|
||||
// defTypeInfos.get(rt2id(rt), rt)
|
||||
// }
|
||||
|
||||
const numStrUi64T = 32 // use 8, prefer 32, test with 1024
|
||||
|
||||
type wrapSliceUint64 []uint64
|
||||
type wrapSliceString []string
|
||||
type wrapUint64 uint64
|
||||
type wrapString string
|
||||
type wrapUint64Slice []wrapUint64
|
||||
type wrapStringSlice []wrapString
|
||||
|
||||
// some other types
|
||||
|
||||
type stringUint64T struct {
|
||||
S string
|
||||
U uint64
|
||||
}
|
||||
|
||||
type AnonInTestStruc struct {
|
||||
AS string
|
||||
AI64 int64
|
||||
AI16 int16
|
||||
AUi64 uint64
|
||||
ASslice []string
|
||||
AI64slice []int64
|
||||
AUi64slice []uint64
|
||||
AF64slice []float64
|
||||
AF32slice []float32
|
||||
|
||||
// AMI32U32 map[int32]uint32
|
||||
// AMU32F64 map[uint32]float64 // json/bson do not like it
|
||||
AMSU16 map[string]uint16
|
||||
|
||||
// use these to test 0-len or nil slices/maps/arrays
|
||||
AI64arr0 [0]int64
|
||||
AI64slice0 []int64
|
||||
AUi64sliceN []uint64
|
||||
AMSU16N map[string]uint16
|
||||
AMSU16E map[string]uint16
|
||||
}
|
||||
|
||||
// testSimpleFields is a sub-set of TestStrucCommon
|
||||
type testSimpleFields struct {
|
||||
S string
|
||||
|
||||
I64 int64
|
||||
I8 int8
|
||||
|
||||
Ui64 uint64
|
||||
Ui8 uint8
|
||||
|
||||
F64 float64
|
||||
F32 float32
|
||||
|
||||
B bool
|
||||
|
||||
Sslice []string
|
||||
I16slice []int16
|
||||
Ui64slice []uint64
|
||||
Ui8slice []uint8
|
||||
Bslice []bool
|
||||
|
||||
Iptrslice []*int64
|
||||
|
||||
WrapSliceInt64 wrapSliceUint64
|
||||
WrapSliceString wrapSliceString
|
||||
|
||||
Msi64 map[string]int64
|
||||
}
|
||||
|
||||
type TestStrucCommon struct {
|
||||
S string
|
||||
|
||||
I64 int64
|
||||
I32 int32
|
||||
I16 int16
|
||||
I8 int8
|
||||
|
||||
I64n int64
|
||||
I32n int32
|
||||
I16n int16
|
||||
I8n int8
|
||||
|
||||
Ui64 uint64
|
||||
Ui32 uint32
|
||||
Ui16 uint16
|
||||
Ui8 uint8
|
||||
|
||||
F64 float64
|
||||
F32 float32
|
||||
|
||||
B bool
|
||||
By uint8 // byte: msgp doesn't like byte
|
||||
|
||||
Sslice []string
|
||||
I64slice []int64
|
||||
I16slice []int16
|
||||
Ui64slice []uint64
|
||||
Ui8slice []uint8
|
||||
Bslice []bool
|
||||
Byslice []byte
|
||||
|
||||
BytesSlice [][]byte
|
||||
|
||||
Iptrslice []*int64
|
||||
|
||||
WrapSliceInt64 wrapSliceUint64
|
||||
WrapSliceString wrapSliceString
|
||||
|
||||
Msi64 map[string]int64
|
||||
|
||||
Msbytes map[string][]byte
|
||||
|
||||
Simplef testSimpleFields
|
||||
|
||||
SstrUi64T []stringUint64T
|
||||
MstrUi64T map[string]*stringUint64T
|
||||
|
||||
AnonInTestStruc
|
||||
|
||||
NotAnon AnonInTestStruc
|
||||
|
||||
// R Raw // Testing Raw must be explicitly turned on, so use standalone test
|
||||
// Rext RawExt // Testing RawExt is tricky, so use standalone test
|
||||
|
||||
Nmap map[string]bool //don't set this, so we can test for nil
|
||||
Nslice []byte //don't set this, so we can test for nil
|
||||
Nint64 *int64 //don't set this, so we can test for nil
|
||||
}
|
||||
|
||||
type TestStruc struct {
|
||||
// _struct struct{} `json:",omitempty"` //set omitempty for every field
|
||||
|
||||
TestStrucCommon
|
||||
|
||||
Mtsptr map[string]*TestStruc
|
||||
Mts map[string]TestStruc
|
||||
Its []*TestStruc
|
||||
Nteststruc *TestStruc
|
||||
}
|
||||
|
||||
func populateTestStrucCommon(ts *TestStrucCommon, n int, bench, useInterface, useStringKeyOnly bool) {
|
||||
var i64a, i64b, i64c, i64d int64 = 64, 6464, 646464, 64646464
|
||||
|
||||
// if bench, do not use uint64 values > math.MaxInt64, as bson, etc cannot decode them
|
||||
|
||||
var a = AnonInTestStruc{
|
||||
// There's more leeway in altering this.
|
||||
AS: strRpt(n, "A-String"),
|
||||
AI64: -64646464,
|
||||
AI16: 1616,
|
||||
AUi64: 64646464,
|
||||
// (U+1D11E)G-clef character may be represented in json as "\uD834\uDD1E".
|
||||
// single reverse solidus character may be represented in json as "\u005C".
|
||||
// include these in ASslice below.
|
||||
ASslice: []string{
|
||||
strRpt(n, "Aone"),
|
||||
strRpt(n, "Atwo"),
|
||||
strRpt(n, "Athree"),
|
||||
strRpt(n, "Afour.reverse_solidus.\u005c"),
|
||||
strRpt(n, "Afive.Gclef.\U0001d11E\"ugorji\"done.")},
|
||||
AI64slice: []int64{
|
||||
0, 1, -1, -22, 333, -4444, 55555, -666666,
|
||||
// msgpack ones
|
||||
-48, -32, -24, -8, 32, 127, 192, 255,
|
||||
// standard ones
|
||||
0, -1, 1,
|
||||
math.MaxInt8, math.MaxInt8 + 4, math.MaxInt8 - 4,
|
||||
math.MaxInt16, math.MaxInt16 + 4, math.MaxInt16 - 4,
|
||||
math.MaxInt32, math.MaxInt32 + 4, math.MaxInt32 - 4,
|
||||
math.MaxInt64, math.MaxInt64 - 4,
|
||||
math.MinInt8, math.MinInt8 + 4, math.MinInt8 - 4,
|
||||
math.MinInt16, math.MinInt16 + 4, math.MinInt16 - 4,
|
||||
math.MinInt32, math.MinInt32 + 4, math.MinInt32 - 4,
|
||||
math.MinInt64, math.MinInt64 + 4,
|
||||
},
|
||||
AUi64slice: []uint64{
|
||||
0, 1, 22, 333, 4444, 55555, 666666,
|
||||
// standard ones
|
||||
math.MaxUint8, math.MaxUint8 + 4, math.MaxUint8 - 4,
|
||||
math.MaxUint16, math.MaxUint16 + 4, math.MaxUint16 - 4,
|
||||
math.MaxUint32, math.MaxUint32 + 4, math.MaxUint32 - 4,
|
||||
},
|
||||
AMSU16: map[string]uint16{strRpt(n, "1"): 1, strRpt(n, "22"): 2, strRpt(n, "333"): 3, strRpt(n, "4444"): 4},
|
||||
|
||||
// Note: +/- inf, NaN, and other non-representable numbers should not be explicitly tested here
|
||||
|
||||
AF64slice: []float64{
|
||||
11.11e-11, -11.11e+11,
|
||||
2.222E+12, -2.222E-12,
|
||||
-555.55E-5, 555.55E+5,
|
||||
666.66E-6, -666.66E+6,
|
||||
7777.7777E-7, -7777.7777E-7,
|
||||
-8888.8888E+8, 8888.8888E+8,
|
||||
-99999.9999E+9, 99999.9999E+9,
|
||||
// these below are hairy enough to need strconv.ParseFloat
|
||||
33.33E-33, -33.33E+33,
|
||||
44.44e+44, -44.44e-44,
|
||||
// standard ones
|
||||
0, -1, 1,
|
||||
// math.Inf(1), math.Inf(-1),
|
||||
math.Pi, math.Phi, math.E,
|
||||
math.MaxFloat64, math.SmallestNonzeroFloat64,
|
||||
},
|
||||
AF32slice: []float32{
|
||||
11.11e-1, -11.11e+1,
|
||||
2.222E+2, -2.222E-2,
|
||||
-55.55E-5, 55.55E+5,
|
||||
66.66E-6, -66.66E+6,
|
||||
777.777E-7, -777.777E-7,
|
||||
-8.88E+8, 8.88E-8,
|
||||
-99999.9999E+9, 99999.9999E+9,
|
||||
// these below are hairy enough to need strconv.ParseFloat
|
||||
33.33E-33, -33.33E+33,
|
||||
// standard ones
|
||||
0, -1, 1,
|
||||
// math.Float32frombits(0x7FF00000), math.Float32frombits(0xFFF00000), //+inf and -inf
|
||||
math.MaxFloat32, math.SmallestNonzeroFloat32,
|
||||
},
|
||||
|
||||
AI64slice0: []int64{},
|
||||
AUi64sliceN: nil,
|
||||
AMSU16N: nil,
|
||||
AMSU16E: map[string]uint16{},
|
||||
}
|
||||
|
||||
if !bench {
|
||||
a.AUi64slice = append(a.AUi64slice, math.MaxUint64, math.MaxUint64-4)
|
||||
}
|
||||
*ts = TestStrucCommon{
|
||||
S: strRpt(n, `some really really cool names that are nigerian and american like "ugorji melody nwoke" - get it? `),
|
||||
|
||||
// set the numbers close to the limits
|
||||
I8: math.MaxInt8 * 2 / 3, // 8,
|
||||
I8n: math.MinInt8 * 2 / 3, // 8,
|
||||
I16: math.MaxInt16 * 2 / 3, // 16,
|
||||
I16n: math.MinInt16 * 2 / 3, // 16,
|
||||
I32: math.MaxInt32 * 2 / 3, // 32,
|
||||
I32n: math.MinInt32 * 2 / 3, // 32,
|
||||
I64: math.MaxInt64 * 2 / 3, // 64,
|
||||
I64n: math.MinInt64 * 2 / 3, // 64,
|
||||
|
||||
Ui64: math.MaxUint64 * 2 / 3, // 64
|
||||
Ui32: math.MaxUint32 * 2 / 3, // 32
|
||||
Ui16: math.MaxUint16 * 2 / 3, // 16
|
||||
Ui8: math.MaxUint8 * 2 / 3, // 8
|
||||
|
||||
F32: 3.402823e+38, // max representable float32 without losing precision
|
||||
F64: 3.40281991833838838338e+53,
|
||||
|
||||
B: true,
|
||||
By: 5,
|
||||
|
||||
Sslice: []string{strRpt(n, "one"), strRpt(n, "two"), strRpt(n, "three")},
|
||||
I64slice: []int64{1111, 2222, 3333},
|
||||
I16slice: []int16{44, 55, 66},
|
||||
Ui64slice: []uint64{12121212, 34343434, 56565656},
|
||||
Ui8slice: []uint8{210, 211, 212},
|
||||
Bslice: []bool{true, false, true, false},
|
||||
Byslice: []byte{13, 14, 15},
|
||||
BytesSlice: [][]byte{
|
||||
[]byte(strRpt(n, "one")),
|
||||
[]byte(strRpt(n, "two")),
|
||||
[]byte(strRpt(n, "\"three\"")),
|
||||
},
|
||||
Msi64: map[string]int64{
|
||||
strRpt(n, "one"): 1,
|
||||
strRpt(n, "two"): 2,
|
||||
strRpt(n, "\"three\""): 3,
|
||||
},
|
||||
Msbytes: map[string][]byte{
|
||||
strRpt(n, "one"): []byte(strRpt(n, "one")),
|
||||
strRpt(n, "two"): []byte(strRpt(n, "two")),
|
||||
strRpt(n, "\"three\""): []byte(strRpt(n, "\"three\"")),
|
||||
},
|
||||
WrapSliceInt64: []uint64{4, 16, 64, 256},
|
||||
WrapSliceString: []string{strRpt(n, "4"), strRpt(n, "16"), strRpt(n, "64"), strRpt(n, "256")},
|
||||
|
||||
// R: Raw([]byte("goodbye")),
|
||||
// Rext: RawExt{ 120, []byte("hello"), }, // TODO: don't set this - it's hard to test
|
||||
|
||||
// make Simplef same as top-level
|
||||
// TODO: should this have slightly different values???
|
||||
Simplef: testSimpleFields{
|
||||
S: strRpt(n, `some really really cool names that are nigerian and american like "ugorji melody nwoke" - get it? `),
|
||||
|
||||
// set the numbers close to the limits
|
||||
I8: math.MaxInt8 * 2 / 3, // 8,
|
||||
I64: math.MaxInt64 * 2 / 3, // 64,
|
||||
|
||||
Ui64: math.MaxUint64 * 2 / 3, // 64
|
||||
Ui8: math.MaxUint8 * 2 / 3, // 8
|
||||
|
||||
F32: 3.402823e+38, // max representable float32 without losing precision
|
||||
F64: 3.40281991833838838338e+53,
|
||||
|
||||
B: true,
|
||||
|
||||
Sslice: []string{strRpt(n, "one"), strRpt(n, "two"), strRpt(n, "three")},
|
||||
I16slice: []int16{44, 55, 66},
|
||||
Ui64slice: []uint64{12121212, 34343434, 56565656},
|
||||
Ui8slice: []uint8{210, 211, 212},
|
||||
Bslice: []bool{true, false, true, false},
|
||||
|
||||
Msi64: map[string]int64{
|
||||
strRpt(n, "one"): 1,
|
||||
strRpt(n, "two"): 2,
|
||||
strRpt(n, "\"three\""): 3,
|
||||
},
|
||||
|
||||
WrapSliceInt64: []uint64{4, 16, 64, 256},
|
||||
WrapSliceString: []string{strRpt(n, "4"), strRpt(n, "16"), strRpt(n, "64"), strRpt(n, "256")},
|
||||
},
|
||||
|
||||
SstrUi64T: make([]stringUint64T, numStrUi64T), // {{"1", 1}, {"2", 2}, {"3", 3}, {"4", 4}},
|
||||
MstrUi64T: make(map[string]*stringUint64T, numStrUi64T),
|
||||
AnonInTestStruc: a,
|
||||
NotAnon: a,
|
||||
}
|
||||
|
||||
for i := uint64(0); i < numStrUi64T; i++ {
|
||||
ss := strings.Repeat(strconv.FormatUint(i, 10), int(i)) // 4)
|
||||
ts.SstrUi64T[i] = stringUint64T{S: ss, U: i}
|
||||
ts.MstrUi64T[ss] = &ts.SstrUi64T[i]
|
||||
}
|
||||
|
||||
if bench {
|
||||
ts.Ui64 = math.MaxInt64 * 2 / 3
|
||||
ts.Simplef.Ui64 = ts.Ui64
|
||||
}
|
||||
|
||||
//For benchmarks, some things will not work.
|
||||
if !bench {
|
||||
//json and bson require string keys in maps
|
||||
//ts.M = map[interface{}]interface{}{
|
||||
// true: "true",
|
||||
// int8(9): false,
|
||||
//}
|
||||
//gob cannot encode nil in element in array (encodeArray: nil element)
|
||||
ts.Iptrslice = []*int64{nil, &i64a, nil, &i64b, nil, &i64c, nil, &i64d, nil}
|
||||
// ts.Iptrslice = nil
|
||||
}
|
||||
if !useStringKeyOnly {
|
||||
var _ byte = 0 // so this empty branch doesn't flag a warning
|
||||
// ts.AnonInTestStruc.AMU32F64 = map[uint32]float64{1: 1, 2: 2, 3: 3} // Json/Bson barf
|
||||
}
|
||||
}
|
||||
|
||||
func newTestStruc(depth, n int, bench, useInterface, useStringKeyOnly bool) (ts *TestStruc) {
|
||||
ts = &TestStruc{}
|
||||
populateTestStrucCommon(&ts.TestStrucCommon, n, bench, useInterface, useStringKeyOnly)
|
||||
if depth > 0 {
|
||||
depth--
|
||||
if ts.Mtsptr == nil {
|
||||
ts.Mtsptr = make(map[string]*TestStruc)
|
||||
}
|
||||
if ts.Mts == nil {
|
||||
ts.Mts = make(map[string]TestStruc)
|
||||
}
|
||||
ts.Mtsptr[strRpt(n, "0")] = newTestStruc(depth, n, bench, useInterface, useStringKeyOnly)
|
||||
ts.Mts[strRpt(n, "0")] = *(ts.Mtsptr[strRpt(n, "0")])
|
||||
ts.Its = append(ts.Its, ts.Mtsptr[strRpt(n, "0")])
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
var testStrRptMap = make(map[int]map[string]string)
|
||||
|
||||
func strRpt(n int, s string) string {
|
||||
if false {
|
||||
// fmt.Printf(">>>> calling strings.Repeat on n: %d, key: %s\n", n, s)
|
||||
return strings.Repeat(s, n)
|
||||
}
|
||||
m1, ok := testStrRptMap[n]
|
||||
if !ok {
|
||||
// fmt.Printf(">>>> making new map for n: %v\n", n)
|
||||
m1 = make(map[string]string)
|
||||
testStrRptMap[n] = m1
|
||||
}
|
||||
v1, ok := m1[s]
|
||||
if !ok {
|
||||
// fmt.Printf(">>>> creating new entry for key: %s\n", s)
|
||||
v1 = strings.Repeat(s, n)
|
||||
m1[s] = v1
|
||||
}
|
||||
return v1
|
||||
}
|
||||
|
||||
// func wstrRpt(n int, s string) wrapBytes {
|
||||
// return wrapBytes(bytes.Repeat([]byte(s), n))
|
||||
// }
|
||||
278
vendor/github.com/ugorji/go/codec/writer.go
generated
vendored
278
vendor/github.com/ugorji/go/codec/writer.go
generated
vendored
@@ -1,278 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
package codec
|
||||
|
||||
import "io"
|
||||
|
||||
// encWriter abstracts writing to a byte array or to an io.Writer.
|
||||
type encWriter interface {
|
||||
writeb([]byte)
|
||||
writestr(string)
|
||||
writeqstr(string) // write string wrapped in quotes ie "..."
|
||||
writen1(byte)
|
||||
writen2(byte, byte)
|
||||
// writen will write up to 7 bytes at a time.
|
||||
writen(b [rwNLen]byte, num uint8)
|
||||
end()
|
||||
}
|
||||
|
||||
// ---------------------------------------------
|
||||
|
||||
// bufioEncWriter
|
||||
type bufioEncWriter struct {
|
||||
w io.Writer
|
||||
|
||||
buf []byte
|
||||
|
||||
n int
|
||||
|
||||
b [16]byte // scratch buffer and padding (cache-aligned)
|
||||
}
|
||||
|
||||
func (z *bufioEncWriter) reset(w io.Writer, bufsize int, blist *bytesFreelist) {
|
||||
z.w = w
|
||||
z.n = 0
|
||||
if bufsize <= 0 {
|
||||
bufsize = defEncByteBufSize
|
||||
}
|
||||
// bufsize must be >= 8, to accommodate writen methods (where n <= 8)
|
||||
if bufsize <= 8 {
|
||||
bufsize = 8
|
||||
}
|
||||
if cap(z.buf) < bufsize {
|
||||
if len(z.buf) > 0 && &z.buf[0] != &z.b[0] {
|
||||
blist.put(z.buf)
|
||||
}
|
||||
if len(z.b) > bufsize {
|
||||
z.buf = z.b[:]
|
||||
} else {
|
||||
z.buf = blist.get(bufsize)
|
||||
}
|
||||
}
|
||||
z.buf = z.buf[:cap(z.buf)]
|
||||
}
|
||||
|
||||
//go:noinline - flush only called intermittently
|
||||
func (z *bufioEncWriter) flushErr() (err error) {
|
||||
n, err := z.w.Write(z.buf[:z.n])
|
||||
z.n -= n
|
||||
if z.n > 0 && err == nil {
|
||||
err = io.ErrShortWrite
|
||||
}
|
||||
if n > 0 && z.n > 0 {
|
||||
copy(z.buf, z.buf[n:z.n+n])
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (z *bufioEncWriter) flush() {
|
||||
if err := z.flushErr(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (z *bufioEncWriter) writeb(s []byte) {
|
||||
LOOP:
|
||||
a := len(z.buf) - z.n
|
||||
if len(s) > a {
|
||||
z.n += copy(z.buf[z.n:], s[:a])
|
||||
s = s[a:]
|
||||
z.flush()
|
||||
goto LOOP
|
||||
}
|
||||
z.n += copy(z.buf[z.n:], s)
|
||||
}
|
||||
|
||||
func (z *bufioEncWriter) writestr(s string) {
|
||||
// z.writeb(bytesView(s)) // inlined below
|
||||
LOOP:
|
||||
a := len(z.buf) - z.n
|
||||
if len(s) > a {
|
||||
z.n += copy(z.buf[z.n:], s[:a])
|
||||
s = s[a:]
|
||||
z.flush()
|
||||
goto LOOP
|
||||
}
|
||||
z.n += copy(z.buf[z.n:], s)
|
||||
}
|
||||
|
||||
func (z *bufioEncWriter) writeqstr(s string) {
|
||||
// z.writen1('"')
|
||||
// z.writestr(s)
|
||||
// z.writen1('"')
|
||||
|
||||
if z.n+len(s)+2 > len(z.buf) {
|
||||
z.flush()
|
||||
}
|
||||
z.buf[z.n] = '"'
|
||||
z.n++
|
||||
LOOP:
|
||||
a := len(z.buf) - z.n
|
||||
if len(s)+1 > a {
|
||||
z.n += copy(z.buf[z.n:], s[:a])
|
||||
s = s[a:]
|
||||
z.flush()
|
||||
goto LOOP
|
||||
}
|
||||
z.n += copy(z.buf[z.n:], s)
|
||||
z.buf[z.n] = '"'
|
||||
z.n++
|
||||
}
|
||||
|
||||
func (z *bufioEncWriter) writen1(b1 byte) {
|
||||
if 1 > len(z.buf)-z.n {
|
||||
z.flush()
|
||||
}
|
||||
z.buf[z.n] = b1
|
||||
z.n++
|
||||
}
|
||||
|
||||
func (z *bufioEncWriter) writen2(b1, b2 byte) {
|
||||
if 2 > len(z.buf)-z.n {
|
||||
z.flush()
|
||||
}
|
||||
z.buf[z.n+1] = b2
|
||||
z.buf[z.n] = b1
|
||||
z.n += 2
|
||||
}
|
||||
|
||||
func (z *bufioEncWriter) writen(b [rwNLen]byte, num uint8) {
|
||||
if int(num) > len(z.buf)-z.n {
|
||||
z.flush()
|
||||
}
|
||||
copy(z.buf[z.n:], b[:num])
|
||||
z.n += int(num)
|
||||
}
|
||||
|
||||
func (z *bufioEncWriter) endErr() (err error) {
|
||||
if z.n > 0 {
|
||||
err = z.flushErr()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// ---------------------------------------------
|
||||
|
||||
// bytesEncAppender implements encWriter and can write to a byte slice.
|
||||
type bytesEncAppender struct {
|
||||
b []byte
|
||||
out *[]byte
|
||||
}
|
||||
|
||||
func (z *bytesEncAppender) writeb(s []byte) {
|
||||
z.b = append(z.b, s...)
|
||||
}
|
||||
func (z *bytesEncAppender) writestr(s string) {
|
||||
z.b = append(z.b, s...)
|
||||
}
|
||||
func (z *bytesEncAppender) writeqstr(s string) {
|
||||
z.b = append(append(append(z.b, '"'), s...), '"')
|
||||
|
||||
// z.b = append(z.b, '"')
|
||||
// z.b = append(z.b, s...)
|
||||
// z.b = append(z.b, '"')
|
||||
}
|
||||
func (z *bytesEncAppender) writen1(b1 byte) {
|
||||
z.b = append(z.b, b1)
|
||||
}
|
||||
func (z *bytesEncAppender) writen2(b1, b2 byte) {
|
||||
z.b = append(z.b, b1, b2) // cost: 81
|
||||
}
|
||||
func (z *bytesEncAppender) writen(s [rwNLen]byte, num uint8) {
|
||||
// if num <= rwNLen {
|
||||
if int(num) <= len(s) {
|
||||
z.b = append(z.b, s[:num]...)
|
||||
}
|
||||
}
|
||||
func (z *bytesEncAppender) endErr() error {
|
||||
*(z.out) = z.b
|
||||
return nil
|
||||
}
|
||||
func (z *bytesEncAppender) reset(in []byte, out *[]byte) {
|
||||
z.b = in[:0]
|
||||
z.out = out
|
||||
}
|
||||
|
||||
// --------------------------------------------------
|
||||
|
||||
type encWr struct {
|
||||
bytes bool // encoding to []byte
|
||||
js bool // is json encoder?
|
||||
be bool // is binary encoder?
|
||||
|
||||
c containerState
|
||||
|
||||
calls uint16
|
||||
|
||||
wb bytesEncAppender
|
||||
wf *bufioEncWriter
|
||||
}
|
||||
|
||||
func (z *encWr) writeb(s []byte) {
|
||||
if z.bytes {
|
||||
z.wb.writeb(s)
|
||||
} else {
|
||||
z.wf.writeb(s)
|
||||
}
|
||||
}
|
||||
func (z *encWr) writeqstr(s string) {
|
||||
if z.bytes {
|
||||
// unfortunately, calling the function prevents inlining it here.
|
||||
// explicitly writing it here allows it to be inlined.
|
||||
// NOTE: Keep in sync with function implementation.
|
||||
//
|
||||
// z.wb.writeqstr(s)
|
||||
|
||||
z.wb.b = append(append(append(z.wb.b, '"'), s...), '"')
|
||||
} else {
|
||||
z.wf.writeqstr(s)
|
||||
}
|
||||
}
|
||||
func (z *encWr) writestr(s string) {
|
||||
if z.bytes {
|
||||
z.wb.writestr(s)
|
||||
} else {
|
||||
z.wf.writestr(s)
|
||||
}
|
||||
}
|
||||
func (z *encWr) writen1(b1 byte) {
|
||||
if z.bytes {
|
||||
z.wb.writen1(b1)
|
||||
} else {
|
||||
z.wf.writen1(b1)
|
||||
}
|
||||
}
|
||||
func (z *encWr) writen2(b1, b2 byte) {
|
||||
if z.bytes {
|
||||
// unfortunately, calling the function prevents inlining it here.
|
||||
// explicitly writing it here allows it to be inlined.
|
||||
// NOTE: Keep in sync with function implementation.
|
||||
//
|
||||
// z.wb.writen2(b1, b2)
|
||||
z.wb.b = append(z.wb.b, b1, b2)
|
||||
} else {
|
||||
z.wf.writen2(b1, b2)
|
||||
}
|
||||
}
|
||||
func (z *encWr) writen(b [rwNLen]byte, num uint8) {
|
||||
if z.bytes {
|
||||
z.wb.writen(b, num)
|
||||
} else {
|
||||
z.wf.writen(b, num)
|
||||
}
|
||||
}
|
||||
func (z *encWr) endErr() error {
|
||||
if z.bytes {
|
||||
return z.wb.endErr()
|
||||
}
|
||||
return z.wf.endErr()
|
||||
}
|
||||
|
||||
func (z *encWr) end() {
|
||||
if err := z.endErr(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
var _ encWriter = (*encWr)(nil)
|
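The encWr type above forwards every write to either bytesEncAppender (encoding directly into a caller-supplied []byte) or bufioEncWriter (buffered writes to an io.Writer), depending on its bytes flag. On the public API side this corresponds roughly to the two Encoder constructors; a minimal sketch, with the handle choice and names picked only for illustration:

    package main

    import (
        "bytes"
        "fmt"

        "github.com/ugorji/go/codec"
    )

    func main() {
        var h codec.JsonHandle // any Handle works; JSON chosen only for readable output

        // bytes mode (encWr.bytes == true): output is appended directly to the slice.
        var out []byte
        if err := codec.NewEncoderBytes(&out, &h).Encode(map[string]int{"a": 1}); err != nil {
            panic(err)
        }
        fmt.Println(string(out))

        // io.Writer mode (encWr.bytes == false): writes are buffered by bufioEncWriter.
        var buf bytes.Buffer
        if err := codec.NewEncoder(&buf, &h).Encode([]int{1, 2, 3}); err != nil {
            panic(err)
        }
        fmt.Println(buf.String())
    }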
||||
502
vendor/github.com/ugorji/go/codec/xml.go
generated
vendored
502
vendor/github.com/ugorji/go/codec/xml.go
generated
vendored
@@ -1,502 +0,0 @@
|
||||
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||
|
||||
// +build ignore
|
||||
|
||||
package codec
|
||||
|
||||
/*
|
||||
|
||||
A strict Non-validating namespace-aware XML 1.0 parser and (en|de)coder.
|
||||
|
||||
We are attempting this due to perceived issues with encoding/xml:
|
||||
- Complicated. It tried to do too much, and is not as simple to use as json.
|
||||
- Due to over-engineering, reflection is over-used AND performance suffers:
|
||||
java is 6X faster: http://fabsk.eu/blog/category/informatique/dev/golang/
|
||||
even PYTHON performs better: http://outgoing.typepad.com/outgoing/2014/07/exploring-golang.html
|
||||
|
||||
codec framework will offer the following benefits
|
||||
- VASTLY improved performance (when using reflection-mode or codecgen)
|
||||
- simplicity and consistency: with the rest of the supported formats
|
||||
- all other benefits of codec framework (streaming, codegeneration, etc)
|
||||
|
||||
codec is not a drop-in replacement for encoding/xml.
|
||||
It is a replacement, based on the simplicity and performance of codec.
|
||||
Look at it like JAXB for Go.
|
||||
|
||||
Challenges:
|
||||
- Need to output XML preamble, with all namespaces at the right location in the output.
|
||||
- Each "end" block is dynamic, so we need to maintain a context-aware stack
|
||||
- How to decide when to use an attribute VS an element
|
||||
- How to handle chardata, attr, comment EXPLICITLY.
|
||||
- Should it output fragments?
|
||||
e.g. encoding a bool should just output true OR false, which is not well-formed XML.
|
||||
|
||||
Extend the struct tag. See representative example:
|
||||
type X struct {
|
||||
ID uint8 `codec:"http://ugorji.net/x-namespace xid id,omitempty,toarray,attr,cdata"`
|
||||
// format: [namespace-uri ][namespace-prefix ]local-name, ...
|
||||
}
|
||||
|
||||
Based on this, we encode
|
||||
- fields as elements, BUT
|
||||
encode as attributes if struct tag contains ",attr" and is a scalar (bool, number or string)
|
||||
- text as entity-escaped text, BUT encode as CDATA if struct tag contains ",cdata".
|
||||
|
||||
To handle namespaces:
|
||||
- XMLHandle is denoted as being namespace-aware.
|
||||
Consequently, we WILL use the ns:name pair to encode and decode if defined, else use the plain name.
|
||||
- *Encoder and *Decoder know whether the Handle "prefers" namespaces.
|
||||
- add *Encoder.getEncName(*structFieldInfo).
|
||||
No one calls *structFieldInfo.indexForEncName directly anymore
|
||||
- OR better yet: indexForEncName is namespace-aware, and helper.go is all namespace-aware
|
||||
indexForEncName takes a parameter of the form namespace:local-name OR local-name
|
||||
- add *Decoder.getStructFieldInfo(encName string) // encName here is either like abc, or h1:nsabc
|
||||
by being a method on *Decoder, or maybe a method on the Handle itself.
|
||||
No one accesses .encName anymore
|
||||
- let encode.go and decode.go use these (for consistency)
|
||||
- only problem exists for gen.go, where we create a big switch on encName.
|
||||
Now, we also have to add a switch on strings.endsWith(kName, encNsName)
|
||||
- gen.go will need to have many more methods, and then double-on the 2 switch loops like:
|
||||
switch k {
|
||||
case "abc" : x.abc()
|
||||
case "def" : x.def()
|
||||
default {
|
||||
switch {
|
||||
case !nsAware: panic(...)
|
||||
case strings.endsWith(":abc"): x.abc()
|
||||
case strings.endsWith(":def"): x.def()
|
||||
default: panic(...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
The structure below accommodates this:
|
||||
|
||||
type typeInfo struct {
|
||||
sfi []*structFieldInfo // sorted by encName
|
||||
sfins // sorted by namespace
|
||||
sfia // sorted, to have those with attributes at the top. Needed to write XML appropriately.
|
||||
sfip // unsorted
|
||||
}
|
||||
type structFieldInfo struct {
|
||||
encName
|
||||
nsEncName
|
||||
ns string
|
||||
attr bool
|
||||
cdata bool
|
||||
}
|
||||
|
||||
indexForEncName is now an internal helper function that takes a sorted array
|
||||
(one of ti.sfins or ti.sfi). It is only used by *Encoder.getStructFieldInfo(...)
|
||||
|
||||
There will be a separate parser from the builder.
|
||||
The parser will have a next() xmlToken method. It has lookahead support,
|
||||
so you can pop multiple tokens, make a determination, and push them back in the order popped.
|
||||
This will be needed to determine whether we are "nakedly" decoding a container or not.
|
||||
The stack will be implemented using a slice and push/pop happens at the [0] element.
|
||||
|
||||
xmlToken has fields:
|
||||
- type uint8: 0 | ElementStart | ElementEnd | AttrKey | AttrVal | Text
|
||||
- value string
|
||||
- ns string
|
||||
|
||||
SEE: http://www.xml.com/pub/a/98/10/guide0.html?page=3#ENTDECL
|
||||
|
||||
The following are skipped when parsing:
|
||||
- External Entities (from external file)
|
||||
- Notation Declaration e.g. <!NOTATION GIF87A SYSTEM "GIF">
|
||||
- Entity Declarations & References
|
||||
- XML Declaration (assume UTF-8)
|
||||
- XML Directive i.e. <! ... >
|
||||
- Other Declarations: Notation, etc.
|
||||
- Comment
|
||||
- Processing Instruction
|
||||
- schema / DTD for validation:
|
||||
We are not a VALIDATING parser. Validation is done elsewhere.
|
||||
However, some parts of the DTD internal subset are used (SEE BELOW).
|
||||
For Attribute List Declarations e.g.
|
||||
<!ATTLIST foo:oldjoke name ID #REQUIRED label CDATA #IMPLIED status ( funny | notfunny ) 'funny' >
|
||||
We considered using the ATTLIST to get "default" value, but not to validate the contents. (VETOED)
|
||||
|
||||
The following XML features are supported
|
||||
- Namespace
|
||||
- Element
|
||||
- Attribute
|
||||
- cdata
|
||||
- Unicode escape
|
||||
|
||||
The following DTD (when as an internal sub-set) features are supported:
|
||||
- Internal Entities e.g.
|
||||
<!ELEMENT burns "ugorji is cool" > AND entities for the set: [<>&"']
|
||||
- Parameter entities e.g.
|
||||
<!ENTITY % personcontent "ugorji is cool"> <!ELEMENT burns (%personcontent;)*>
|
||||
|
||||
At decode time, a structure containing the following is kept
|
||||
- namespace mapping
|
||||
- default attribute values
|
||||
- all internal entities (<>&"' and others written in the document)
|
||||
|
||||
When decode starts, it parses XML namespace declarations and creates a map in the
|
||||
xmlDecDriver. While parsing, that map continuously gets updated.
|
||||
The only problem happens when a namespace declaration appears on the very node that uses it.
|
||||
e.g. <hn:name xmlns:hn="http://www.ugorji.net" >
|
||||
To handle this, each Element must be fully parsed at once,
|
||||
even if it amounts to multiple tokens which are returned one at a time on request.
|
||||
|
||||
xmlns is a special attribute name.
|
||||
- It is used to define namespaces, including the default
|
||||
- It is never returned as an AttrKey or AttrVal.
|
||||
*We may decide later to allow the user to use it, e.g. to parse the xmlns mappings into a field.*
|
||||
|
||||
Number, bool, null, mapKey, etc can all be decoded from any xmlToken.
|
||||
This accommodates map[int]string for example.
|
||||
|
||||
It should be possible to create a schema from the types,
|
||||
or vice versa (generate types from schema with appropriate tags).
|
||||
This is however out-of-scope from this parsing project.
|
||||
|
||||
We should write all namespace information at the first point that it is referenced in the tree,
|
||||
and use the mapping for all child nodes and attributes. This means that state is maintained
|
||||
at a point in the tree. This also means that calls to Decode or MustDecode will reset some state.
|
||||
|
||||
When decoding, it is important to keep track of entity references and default attribute values.
|
||||
It seems these can only be stored in the DTD components. We should honor them when decoding.
|
||||
|
||||
Configuration for XMLHandle will look like this:
|
||||
|
||||
XMLHandle
|
||||
DefaultNS string
|
||||
// Encoding:
|
||||
NS map[string]string // ns URI to key, used for encoding
|
||||
// Decoding: in case ENTITY declared in external schema or dtd, store info needed here
|
||||
Entities map[string]string // map of entity rep to character
|
||||
|
||||
|
||||
During encode, if a namespace mapping is not defined for a namespace found on a struct,
|
||||
then we create a mapping for it using nsN (where N is 1..1000000, and doesn't conflict
|
||||
with any other namespace mapping).
|
||||
|
||||
Note that different fields in a struct can have different namespaces.
|
||||
However, all fields will default to the namespace on the _struct field (if defined).
|
||||
|
||||
An XML document is a name, a map of attributes and a list of children.
|
||||
Consequently, we cannot "DecodeNaked" into a map[string]interface{} (for example).
|
||||
We have to "DecodeNaked" into something that resembles XML data.
|
||||
|
||||
To support DecodeNaked (decode into nil interface{}), we have to define some "supporting" types:
|
||||
type Name struct { // Preferred. Fewer allocations due to conversions.
|
||||
Local string
|
||||
Space string
|
||||
}
|
||||
type Element struct {
|
||||
Name Name
|
||||
Attrs map[Name]string
|
||||
Children []interface{} // each child is either *Element or string
|
||||
}
|
||||
Only two "supporting" types are exposed for XML: Name and Element.
|
||||
|
||||
// ------------------
|
||||
|
||||
We considered 'type Name string' where Name is like "Space Local" (space-separated).
|
||||
We decided against it, because each creation of a name would lead to
|
||||
double allocation (first convert []byte to string, then concatenate them into a string).
|
||||
The benefit is that it is faster to read Attrs from a map. But given that Element is a value
|
||||
object, we want to eschew methods and have public exposed variables.
|
||||
|
||||
We also considered the following, where xml types were not value objects, and we used
|
||||
intelligent accessor methods to extract information and for performance.
|
||||
*** WE DECIDED AGAINST THIS. ***
|
||||
type Attr struct {
|
||||
Name Name
|
||||
Value string
|
||||
}
|
||||
// Element is a ValueObject: There are no accessor methods.
|
||||
// Make element self-contained.
|
||||
type Element struct {
|
||||
Name Name
|
||||
attrsMap map[string]string // where key is "Space Local"
|
||||
attrs []Attr
|
||||
childrenT []string
|
||||
childrenE []Element
|
||||
childrenI []int // each child is a index into T or E.
|
||||
}
|
||||
func (x *Element) child(i) interface{} // returns string or *Element
|
||||
|
||||
// ------------------
|
||||
|
||||
Per XML spec and our default handling, white space is always treated as
|
||||
insignificant between elements, except in a text node. The xml:space='preserve'
|
||||
attribute is ignored.
|
||||
|
||||
**Note: there is no xml: namespace. The xml: attributes were defined before namespaces.**
|
||||
**So treat them as just "directives" that should be interpreted to mean something**.
|
||||
|
||||
On encoding, we support indenting aka prettifying markup in the same way we support it for json.
|
||||
|
||||
A document or element can only be encoded/decoded from/to a struct. In this mode:
|
||||
- struct name maps to element name (or tag-info from _struct field)
|
||||
- fields are mapped to child elements or attributes
|
||||
|
||||
A map is either encoded as attributes on current element, or as a set of child elements.
|
||||
Maps are encoded as attributes iff their keys and values are primitives (number, bool, string).
|
||||
|
||||
A list is encoded as a set of child elements.
|
||||
|
||||
Primitives (number, bool, string) are encoded as an element, attribute or text
|
||||
depending on the context.
|
||||
|
||||
Extensions must encode themselves as a text string.
|
||||
|
||||
Encoding is tough, specifically when encoding mappings, because we need to encode
|
||||
as either attribute or element. To do this, we need to default to encoding as attributes,
|
||||
and then let Encoder inform the Handle when to start encoding as nodes.
|
||||
i.e. Encoder does something like:
|
||||
|
||||
h.EncodeMapStart()
|
||||
h.Encode(), h.Encode(), ...
|
||||
h.EncodeMapNotAttrSignal() // this is not a bool, because it's a signal
|
||||
h.Encode(), h.Encode(), ...
|
||||
h.EncodeEnd()
|
||||
|
||||
Only XMLHandle understands this, and will set itself to start encoding as elements.
|
||||
|
||||
This support extends to maps. For example, if a struct field is a map, and it has
|
||||
the struct tag signifying it should be attr, then all its fields are encoded as attributes.
|
||||
e.g.
|
||||
|
||||
type X struct {
|
||||
M map[string]int `codec:"m,attr"` // encode keys as attributes named
|
||||
}
|
||||
|
||||
Question:
|
||||
- if encoding a map, what if map keys have spaces in them???
|
||||
Then they cannot be attributes or child elements. Error.
|
||||
|
||||
Options to consider adding later:
|
||||
- For attribute values, normalize by trimming beginning and ending white space,
|
||||
and converting every white space sequence to a single space.
|
||||
- ATTLIST restrictions are enforced.
|
||||
e.g. default value of xml:space, skipping xml:XYZ style attributes, etc.
|
||||
- Consider supporting NON-STRICT mode (e.g. to handle HTML parsing).
|
||||
Some elements e.g. br, hr, etc need not close and should be auto-closed
|
||||
... (see http://www.w3.org/TR/html4/loose.dtd)
|
||||
An expansive set of entities are pre-defined.
|
||||
- Have easy way to create a HTML parser:
|
||||
add a HTML() method to XMLHandle, that will set Strict=false, specify AutoClose,
|
||||
and add HTML Entities to the list.
|
||||
- Support validating element/attribute XMLName before writing it.
|
||||
Keep this behind a flag, which is set to false by default (for performance).
|
||||
type XMLHandle struct {
|
||||
CheckName bool
|
||||
}
|
||||
|
||||
Misc:
|
||||
|
||||
ROADMAP (1 weeks):
|
||||
- build encoder (1 day)
|
||||
- build decoder (based off xmlParser) (1 day)
|
||||
- implement xmlParser (2 days).
|
||||
Look at encoding/xml for inspiration.
|
||||
- integrate and TEST (1 days)
|
||||
- write article and post it (1 day)
|
||||
|
||||
// ---------- MORE NOTES FROM 2017-11-30 ------------
|
||||
|
||||
when parsing
|
||||
- parse the attributes first
|
||||
- then parse the nodes
|
||||
|
||||
basically:
|
||||
- if encoding a field: we use the field name for the wrapper
|
||||
- if encoding a non-field, then just use the element type name
|
||||
|
||||
map[string]string ==> <map><key>abc</key><value>val</value></map>... or
|
||||
<map key="abc">val</map>... OR
|
||||
<key1>val1</key1><key2>val2</key2>... <- PREFERRED
|
||||
[]string ==> <string>v1</string><string>v2</string>...
|
||||
string v1 ==> <string>v1</string>
|
||||
bool true ==> <bool>true</bool>
|
||||
float 1.0 ==> <float>1.0</float>
|
||||
...
|
||||
|
||||
F1 map[string]string ==> <F1><key>abc</key><value>val</value></F1>... OR
|
||||
<F1 key="abc">val</F1>... OR
|
||||
<F1><abc>val</abc>...</F1> <- PREFERRED
|
||||
F2 []string ==> <F2>v1</F2><F2>v2</F2>...
|
||||
F3 bool ==> <F3>true</F3>
|
||||
...
|
||||
|
||||
- a scalar is encoded as:
|
||||
(value) of type T ==> <T><value/></T>
|
||||
(value) of field F ==> <F><value/></F>
|
||||
- A kv-pair is encoded as:
|
||||
(key,value) ==> <map><key><value/></key></map> OR <map key="value">
|
||||
(key,value) of field F ==> <F><key><value/></key></F> OR <F key="value">
|
||||
- A map or struct is just a list of kv-pairs
|
||||
- A list is encoded as sequences of same node e.g.
|
||||
<F1 key1="value11">
|
||||
<F1 key2="value12">
|
||||
<F2>value21</F2>
|
||||
<F2>value22</F2>
|
||||
- we may have to singularize the field name, when entering into xml,
|
||||
and pluralize it when encoding.
|
||||
- bi-directional encode->decode->encode is not a MUST.
|
||||
even encoding/xml cannot decode correctly what was encoded:
|
||||
|
||||
see https://play.golang.org/p/224V_nyhMS
|
||||
func main() {
|
||||
fmt.Println("Hello, playground")
|
||||
v := []interface{}{"hello", 1, true, nil, time.Now()}
|
||||
s, err := xml.Marshal(v)
|
||||
fmt.Printf("err: %v, \ns: %s\n", err, s)
|
||||
var v2 []interface{}
|
||||
err = xml.Unmarshal(s, &v2)
|
||||
fmt.Printf("err: %v, \nv2: %v\n", err, v2)
|
||||
type T struct {
|
||||
V []interface{}
|
||||
}
|
||||
v3 := T{V: v}
|
||||
s, err = xml.Marshal(v3)
|
||||
fmt.Printf("err: %v, \ns: %s\n", err, s)
|
||||
var v4 T
|
||||
err = xml.Unmarshal(s, &v4)
|
||||
fmt.Printf("err: %v, \nv4: %v\n", err, v4)
|
||||
}
|
||||
Output:
|
||||
err: <nil>,
|
||||
s: <string>hello</string><int>1</int><bool>true</bool><Time>2009-11-10T23:00:00Z</Time>
|
||||
err: <nil>,
|
||||
v2: [<nil>]
|
||||
err: <nil>,
|
||||
s: <T><V>hello</V><V>1</V><V>true</V><V>2009-11-10T23:00:00Z</V></T>
|
||||
err: <nil>,
|
||||
v4: {[<nil> <nil> <nil> <nil>]}
|
||||
-
|
||||
*/
|
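As a purely hypothetical illustration of the struct-tag format sketched in the notes above (xml.go sits behind a build-ignore tag; the namespace URI, prefix and field names below are invented): scalar fields tagged ",attr" would encode as attributes, other fields as child elements, and ",cdata" text as a CDATA section.

    package main

    type Person struct {
        _struct struct{} `codec:"http://example.net/ns pn person"` // assumed namespace-uri, prefix and element name
        ID      uint64   `codec:"id,attr"`                         // scalar tagged ,attr => encoded as an attribute
        Name    string   `codec:"name"`                            // encoded as a child element
        Bio     string   `codec:"bio,cdata"`                       // text emitted as a CDATA section
    }

    func main() { _ = Person{} }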
||||
|
||||
// ----------- PARSER -------------------
|
||||
|
||||
type xmlTokenType uint8
|
||||
|
||||
const (
|
||||
_ xmlTokenType = iota << 1
|
||||
xmlTokenElemStart
|
||||
xmlTokenElemEnd
|
||||
xmlTokenAttrKey
|
||||
xmlTokenAttrVal
|
||||
xmlTokenText
|
||||
)
|
||||
|
||||
type xmlToken struct {
|
||||
Type xmlTokenType
|
||||
Value string
|
||||
Namespace string // blank for AttrVal and Text
|
||||
}
|
||||
|
||||
type xmlParser struct {
|
||||
r decReader
|
||||
toks []xmlToken // list of tokens.
|
||||
ptr int // ptr into the toks slice
|
||||
done bool // nothing else to parse. r now returns EOF.
|
||||
}
|
||||
|
||||
func (x *xmlParser) next() (t *xmlToken) {
|
||||
// once x.done, or x.ptr == len(x.toks) == 0, then return nil (to signify finish)
|
||||
if !x.done && len(x.toks) == 0 {
|
||||
x.nextTag()
|
||||
}
|
||||
// parses one element at a time (into possibly many tokens)
|
||||
if x.ptr < len(x.toks) {
|
||||
t = &(x.toks[x.ptr])
|
||||
x.ptr++
|
||||
if x.ptr == len(x.toks) {
|
||||
x.ptr = 0
|
||||
x.toks = x.toks[:0]
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// nextTag parses the next element and fills up toks.
|
||||
// It sets the done flag once EOF is reached.
|
||||
func (x *xmlParser) nextTag() {
|
||||
// ...
|
||||
}
|
||||
|
||||
// ----------- ENCODER -------------------
|
||||
|
||||
type xmlEncDriver struct {
|
||||
e *Encoder
|
||||
w encWriter
|
||||
h *XMLHandle
|
||||
b [64]byte // scratch
|
||||
bs []byte // scratch
|
||||
// s jsonStack
|
||||
noBuiltInTypes
|
||||
}
|
||||
|
||||
// ----------- DECODER -------------------
|
||||
|
||||
type xmlDecDriver struct {
|
||||
d *Decoder
|
||||
h *XMLHandle
|
||||
r decReader // *bytesDecReader decReader
|
||||
ct valueType // container type. one of unset, array or map.
|
||||
bstr [8]byte // scratch used for string \UXXX parsing
|
||||
b [64]byte // scratch
|
||||
|
||||
// wsSkipped bool // whitespace skipped
|
||||
|
||||
// s jsonStack
|
||||
|
||||
noBuiltInTypes
|
||||
}
|
||||
|
||||
// DecodeNaked will decode into an XMLNode
|
||||
|
||||
// XMLName is a value object representing a namespace-aware NAME
|
||||
type XMLName struct {
|
||||
Local string
|
||||
Space string
|
||||
}
|
||||
|
||||
// XMLNode represents a "union" of the different types of XML Nodes.
|
||||
// Only one of the fields (Text or *Element) is set.
|
||||
type XMLNode struct {
|
||||
Element *Element
|
||||
Text string
|
||||
}
|
||||
|
||||
// XMLElement is a value object representing a fully-parsed XML element.
|
||||
type XMLElement struct {
|
||||
Name Name
|
||||
Attrs map[XMLName]string
|
||||
// Children is a list of child nodes, each being a *XMLElement or string
|
||||
Children []XMLNode
|
||||
}
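
As a small illustration of the union shape described above, here is how a parsed fragment such as `<greeting lang="en">hello<emph>world</emph></greeting>` might be represented with these node types. This is a sketch only: it assumes the corrected field types (`Name XMLName`, `Element *XMLElement`) and declares local copies of the types so it compiles on its own; nothing here is exported API of the codec package.

```go
package main

import "fmt"

// Local copies of the node types sketched above, for illustration only.
type XMLName struct{ Local, Space string }

type XMLNode struct {
	Element *XMLElement
	Text    string
}

type XMLElement struct {
	Name     XMLName
	Attrs    map[XMLName]string
	Children []XMLNode
}

func main() {
	// Tree for: <greeting lang="en">hello<emph>world</emph></greeting>
	root := XMLElement{
		Name:  XMLName{Local: "greeting"},
		Attrs: map[XMLName]string{{Local: "lang"}: "en"},
		Children: []XMLNode{
			{Text: "hello"},
			{Element: &XMLElement{
				Name:     XMLName{Local: "emph"},
				Children: []XMLNode{{Text: "world"}},
			}},
		},
	}
	fmt.Printf("%+v\n", root)
}
```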

// ----------- HANDLE -------------------

type XMLHandle struct {
	BasicHandle
	textEncodingType

	DefaultNS string
	NS        map[string]string // ns URI to key, for encoding
	Entities  map[string]string // entity representation to string, for encoding.
}

func (h *XMLHandle) newEncDriver(e *Encoder) encDriver {
	return &xmlEncDriver{e: e, w: e.w, h: h}
}

func (h *XMLHandle) newDecDriver(d *Decoder) decDriver {
	// d := xmlDecDriver{r: r.(*bytesDecReader), h: h}
	hd := xmlDecDriver{d: d, r: d.r, h: h}
	hd.n.bytes = d.b[:]
	return &hd
}

var _ decDriver = (*xmlDecDriver)(nil)
var _ encDriver = (*xmlEncDriver)(nil)
458
vendor/github.com/ugorji/go/codec/z_all_test.go
generated
vendored
@@ -1,458 +0,0 @@

// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build alltests
// +build go1.7

package codec

// Run this using:
//   go test -tags=alltests -run=Suite -coverprofile=cov.out
//   go tool cover -html=cov.out
//
// Because build tags are a build time parameter, we will have to test out the
// different tags separately.
// Tags: x codecgen safe appengine notfastpath
//
// These tags should be added to alltests, e.g.
//   go test '-tags=alltests x codecgen' -run=Suite -coverprofile=cov.out
//
// To run all tests before submitting code, run:
//   a=( "" "safe" "codecgen" "notfastpath" "codecgen notfastpath" "codecgen safe" "safe notfastpath" )
//   for i in "${a[@]}"; do echo ">>>> TAGS: $i"; go test "-tags=alltests $i" -run=Suite; done
//
// This only works on go1.7 and above. This is when subtests and suites were supported.

import "testing"

// func TestMain(m *testing.M) {
// 	println("calling TestMain")
// 	// set some parameters
// 	exitcode := m.Run()
// 	os.Exit(exitcode)
// }

func testGroupResetFlags() {
	testUseMust = false
	testUseIoEncDec = -1
	testUseReset = false
	testMaxInitLen = 0
	testUseIoWrapper = false
	testNumRepeatString = 8
	testDepth = 0
	testDecodeOptions = DecodeOptions{}
	testEncodeOptions = EncodeOptions{}
}

func testSuite(t *testing.T, f func(t *testing.T)) {
	// find . -name "*_test.go" | xargs grep -e 'flag.' | cut -d '&' -f 2 | cut -d ',' -f 1 | grep -e '^test'
	// Disregard the following: testInitDebug, testSkipIntf, testJsonIndent (Need a test for it)

	testReinit() // so flag.Parse() is called first, and never called again

	testGroupResetFlags()

	testReinit()
	t.Run("optionsFalse", f)

	testUseMust = true
	testUseIoEncDec = 0
	testUseReset = true

	// xdebugf("with StructToArray=true")
	testDecodeOptions.ZeroCopy = true
	testDecodeOptions.InternString = true
	testDecodeOptions.MapValueReset = true
	// testDecodeOptions.SignedInteger = true
	// testDecodeOptions.SliceElementReset = true
	// testDecodeOptions.InterfaceReset = true
	// testDecodeOptions.PreferArrayOverSlice = true
	// testDecodeOptions.DeleteOnNilMapValue = true
	// testDecodeOptions.RawToString = true

	testEncodeOptions.StructToArray = true
	testEncodeOptions.Canonical = true
	testEncodeOptions.CheckCircularRef = true
	testEncodeOptions.RecursiveEmptyCheck = true
	// testEncodeOptions.Raw = true
	// testEncodeOptions.StringToRaw = true

	testReinit()
	t.Run("optionsTrue", f)

	// xdebugf("setting StructToArray=false")
	testEncodeOptions.StructToArray = false
	testDepth = 6
	testReinit()
	t.Run("optionsTrue-deepstruct", f)
	testDepth = 0

	// testEncodeOptions.AsSymbols = AsSymbolAll
	testUseIoWrapper = true
	testReinit()
	t.Run("optionsTrue-ioWrapper", f)

	// testUseIoEncDec = -1

	// make buffer small enough so that we have to re-fill multiple times.
	testSkipRPCTests = true
	testUseIoEncDec = 128
	// testDecodeOptions.ReaderBufferSize = 128
	// testEncodeOptions.WriterBufferSize = 128
	testReinit()
	t.Run("optionsTrue-bufio", f)
	// testDecodeOptions.ReaderBufferSize = 0
	// testEncodeOptions.WriterBufferSize = 0
	testUseIoEncDec = -1
	testSkipRPCTests = false

	testNumRepeatString = 32
	testReinit()
	t.Run("optionsTrue-largestrings", f)

	// The following here MUST be tested individually, as they create
	// side effects i.e. the decoded value is different.
	// testDecodeOptions.MapValueReset = true // ok - no side effects
	// testDecodeOptions.InterfaceReset = true // error??? because we do deepEquals to verify
	// testDecodeOptions.ErrorIfNoField = true // error, as expected, as fields not there
	// testDecodeOptions.ErrorIfNoArrayExpand = true // no error, but no error case either
	// testDecodeOptions.PreferArrayOverSlice = true // error??? because slice != array.
	// .... however, update deepEqual to take this option
	// testReinit()
	// t.Run("optionsTrue-resetOptions", f)

	testGroupResetFlags()
}

/*
	find . -name "codec_test.go" | xargs grep -e '^func Test' | \
		cut -d '(' -f 1 | cut -d ' ' -f 2 | \
		while read f; do echo "t.Run(\"$f\", $f)"; done
*/

func testCodecGroup(t *testing.T) {
	// println("running testcodecsuite")
	// <setup code>

	testJsonGroup(t)
	testBincGroup(t)
	testCborGroup(t)
	testMsgpackGroup(t)
	testSimpleGroup(t)
	// testSimpleMammothGroup(t)
	// testRpcGroup(t)
	testNonHandlesGroup(t)

	// <tear-down code>
}

func testJsonGroup(t *testing.T) {
	t.Run("TestJsonCodecsTable", TestJsonCodecsTable)
	t.Run("TestJsonCodecsMisc", TestJsonCodecsMisc)
	t.Run("TestJsonCodecsEmbeddedPointer", TestJsonCodecsEmbeddedPointer)
	t.Run("TestJsonCodecChan", TestJsonCodecChan)
	t.Run("TestJsonStdEncIntf", TestJsonStdEncIntf)
	t.Run("TestJsonMammothA", TestJsonMammothA)
	t.Run("TestJsonRaw", TestJsonRaw)
	t.Run("TestJsonRpcGo", TestJsonRpcGo)
	t.Run("TestJsonLargeInteger", TestJsonLargeInteger)
	t.Run("TestJsonDecodeNonStringScalarInStringContext", TestJsonDecodeNonStringScalarInStringContext)
	t.Run("TestJsonEncodeIndent", TestJsonEncodeIndent)

	t.Run("TestJsonSwallowAndZero", TestJsonSwallowAndZero)
	t.Run("TestJsonRawExt", TestJsonRawExt)
	t.Run("TestJsonMapStructKey", TestJsonMapStructKey)
	t.Run("TestJsonDecodeNilMapValue", TestJsonDecodeNilMapValue)
	t.Run("TestJsonEmbeddedFieldPrecedence", TestJsonEmbeddedFieldPrecedence)
	t.Run("TestJsonLargeContainerLen", TestJsonLargeContainerLen)
	t.Run("TestJsonMammothMapsAndSlices", TestJsonMammothMapsAndSlices)
	t.Run("TestJsonTime", TestJsonTime)
	t.Run("TestJsonUintToInt", TestJsonUintToInt)
	t.Run("TestJsonDifferentMapOrSliceType", TestJsonDifferentMapOrSliceType)
	t.Run("TestJsonScalars", TestJsonScalars)
	t.Run("TestJsonOmitempty", TestJsonOmitempty)
	t.Run("TestJsonIntfMapping", TestJsonIntfMapping)
	t.Run("TestJsonMissingFields", TestJsonMissingFields)
	t.Run("TestJsonMaxDepth", TestJsonMaxDepth)
	t.Run("TestJsonSelfExt", TestJsonSelfExt)
	t.Run("TestJsonBytesEncodedAsArray", TestJsonBytesEncodedAsArray)
	t.Run("TestJsonStrucEncDec", TestJsonStrucEncDec)
	t.Run("TestJsonMapEncodeForCanonical", TestJsonMapEncodeForCanonical)
	t.Run("TestJsonRawToStringToRawEtc", TestJsonRawToStringToRawEtc)
	t.Run("TestJsonStructKeyType", TestJsonStructKeyType)
	t.Run("TestJsonPreferArrayOverSlice", TestJsonPreferArrayOverSlice)
	t.Run("TestJsonZeroCopyBytes", TestJsonZeroCopyBytes)

	t.Run("TestJsonInvalidUnicode", TestJsonInvalidUnicode)
}

func testBincGroup(t *testing.T) {
	t.Run("TestBincCodecsTable", TestBincCodecsTable)
	t.Run("TestBincCodecsMisc", TestBincCodecsMisc)
	t.Run("TestBincCodecsEmbeddedPointer", TestBincCodecsEmbeddedPointer)
	t.Run("TestBincStdEncIntf", TestBincStdEncIntf)
	t.Run("TestBincMammoth", TestBincMammoth)
	t.Run("TestBincRaw", TestBincRaw)
	t.Run("TestBincRpcGo", TestBincRpcGo)
	t.Run("TestBincUnderlyingType", TestBincUnderlyingType)

	t.Run("TestBincSwallowAndZero", TestBincSwallowAndZero)
	t.Run("TestBincRawExt", TestBincRawExt)
	t.Run("TestBincMapStructKey", TestBincMapStructKey)
	t.Run("TestBincDecodeNilMapValue", TestBincDecodeNilMapValue)
	t.Run("TestBincEmbeddedFieldPrecedence", TestBincEmbeddedFieldPrecedence)
	t.Run("TestBincLargeContainerLen", TestBincLargeContainerLen)
	t.Run("TestBincMammothMapsAndSlices", TestBincMammothMapsAndSlices)
	t.Run("TestBincTime", TestBincTime)
	t.Run("TestBincUintToInt", TestBincUintToInt)
	t.Run("TestBincDifferentMapOrSliceType", TestBincDifferentMapOrSliceType)
	t.Run("TestBincScalars", TestBincScalars)
	t.Run("TestBincOmitempty", TestBincOmitempty)
	t.Run("TestBincIntfMapping", TestBincIntfMapping)
	t.Run("TestBincMissingFields", TestBincMissingFields)
	t.Run("TestBincMaxDepth", TestBincMaxDepth)
	t.Run("TestBincSelfExt", TestBincSelfExt)
	t.Run("TestBincBytesEncodedAsArray", TestBincBytesEncodedAsArray)
	t.Run("TestBincStrucEncDec", TestBincStrucEncDec)
	t.Run("TestBincMapEncodeForCanonical", TestBincMapEncodeForCanonical)
	t.Run("TestBincRawToStringToRawEtc", TestBincRawToStringToRawEtc)
	t.Run("TestBincStructKeyType", TestBincStructKeyType)
	t.Run("TestBincPreferArrayOverSlice", TestBincPreferArrayOverSlice)
	t.Run("TestBincZeroCopyBytes", TestBincZeroCopyBytes)
}

func testCborGroup(t *testing.T) {
	t.Run("TestCborCodecsTable", TestCborCodecsTable)
	t.Run("TestCborCodecsMisc", TestCborCodecsMisc)
	t.Run("TestCborCodecsEmbeddedPointer", TestCborCodecsEmbeddedPointer)
	t.Run("TestCborCodecChan", TestCborCodecChan)
	t.Run("TestCborStdEncIntf", TestCborStdEncIntf)
	t.Run("TestCborMammoth", TestCborMammoth)
	t.Run("TestCborRaw", TestCborRaw)
	t.Run("TestCborRpcGo", TestCborRpcGo)

	t.Run("TestCborSwallowAndZero", TestCborSwallowAndZero)
	t.Run("TestCborRawExt", TestCborRawExt)
	t.Run("TestCborMapStructKey", TestCborMapStructKey)
	t.Run("TestCborDecodeNilMapValue", TestCborDecodeNilMapValue)
	t.Run("TestCborEmbeddedFieldPrecedence", TestCborEmbeddedFieldPrecedence)
	t.Run("TestCborLargeContainerLen", TestCborLargeContainerLen)
	t.Run("TestCborMammothMapsAndSlices", TestCborMammothMapsAndSlices)
	t.Run("TestCborTime", TestCborTime)
	t.Run("TestCborUintToInt", TestCborUintToInt)
	t.Run("TestCborDifferentMapOrSliceType", TestCborDifferentMapOrSliceType)
	t.Run("TestCborScalars", TestCborScalars)
	t.Run("TestCborOmitempty", TestCborOmitempty)
	t.Run("TestCborIntfMapping", TestCborIntfMapping)
	t.Run("TestCborMissingFields", TestCborMissingFields)
	t.Run("TestCborMaxDepth", TestCborMaxDepth)
	t.Run("TestCborSelfExt", TestCborSelfExt)
	t.Run("TestCborBytesEncodedAsArray", TestCborBytesEncodedAsArray)
	t.Run("TestCborStrucEncDec", TestCborStrucEncDec)
	t.Run("TestCborMapEncodeForCanonical", TestCborMapEncodeForCanonical)
	t.Run("TestCborRawToStringToRawEtc", TestCborRawToStringToRawEtc)
	t.Run("TestCborStructKeyType", TestCborStructKeyType)
	t.Run("TestCborPreferArrayOverSlice", TestCborPreferArrayOverSlice)
	t.Run("TestCborZeroCopyBytes", TestCborZeroCopyBytes)

	t.Run("TestCborHalfFloat", TestCborHalfFloat)
	t.Run("TestCborSkipTags", TestCborSkipTags)
}

func testMsgpackGroup(t *testing.T) {
	t.Run("TestMsgpackCodecsTable", TestMsgpackCodecsTable)
	t.Run("TestMsgpackCodecsMisc", TestMsgpackCodecsMisc)
	t.Run("TestMsgpackCodecsEmbeddedPointer", TestMsgpackCodecsEmbeddedPointer)
	t.Run("TestMsgpackStdEncIntf", TestMsgpackStdEncIntf)
	t.Run("TestMsgpackMammoth", TestMsgpackMammoth)
	t.Run("TestMsgpackRaw", TestMsgpackRaw)
	t.Run("TestMsgpackRpcGo", TestMsgpackRpcGo)
	t.Run("TestMsgpackRpcSpec", TestMsgpackRpcSpec)
	t.Run("TestMsgpackSwallowAndZero", TestMsgpackSwallowAndZero)
	t.Run("TestMsgpackRawExt", TestMsgpackRawExt)
	t.Run("TestMsgpackMapStructKey", TestMsgpackMapStructKey)
	t.Run("TestMsgpackDecodeNilMapValue", TestMsgpackDecodeNilMapValue)
	t.Run("TestMsgpackEmbeddedFieldPrecedence", TestMsgpackEmbeddedFieldPrecedence)
	t.Run("TestMsgpackLargeContainerLen", TestMsgpackLargeContainerLen)
	t.Run("TestMsgpackMammothMapsAndSlices", TestMsgpackMammothMapsAndSlices)
	t.Run("TestMsgpackTime", TestMsgpackTime)
	t.Run("TestMsgpackUintToInt", TestMsgpackUintToInt)
	t.Run("TestMsgpackDifferentMapOrSliceType", TestMsgpackDifferentMapOrSliceType)
	t.Run("TestMsgpackScalars", TestMsgpackScalars)
	t.Run("TestMsgpackOmitempty", TestMsgpackOmitempty)
	t.Run("TestMsgpackIntfMapping", TestMsgpackIntfMapping)
	t.Run("TestMsgpackMissingFields", TestMsgpackMissingFields)
	t.Run("TestMsgpackMaxDepth", TestMsgpackMaxDepth)
	t.Run("TestMsgpackSelfExt", TestMsgpackSelfExt)
	t.Run("TestMsgpackBytesEncodedAsArray", TestMsgpackBytesEncodedAsArray)
	t.Run("TestMsgpackStrucEncDec", TestMsgpackStrucEncDec)
	t.Run("TestMsgpackMapEncodeForCanonical", TestMsgpackMapEncodeForCanonical)
	t.Run("TestMsgpackRawToStringToRawEtc", TestMsgpackRawToStringToRawEtc)
	t.Run("TestMsgpackStructKeyType", TestMsgpackStructKeyType)
	t.Run("TestMsgpackPreferArrayOverSlice", TestMsgpackPreferArrayOverSlice)
	t.Run("TestMsgpackZeroCopyBytes", TestMsgpackZeroCopyBytes)

	t.Run("TestMsgpackDecodeMapAndExtSizeMismatch", TestMsgpackDecodeMapAndExtSizeMismatch)
}

func testSimpleGroup(t *testing.T) {
	t.Run("TestSimpleCodecsTable", TestSimpleCodecsTable)
	t.Run("TestSimpleCodecsMisc", TestSimpleCodecsMisc)
	t.Run("TestSimpleCodecsEmbeddedPointer", TestSimpleCodecsEmbeddedPointer)
	t.Run("TestSimpleStdEncIntf", TestSimpleStdEncIntf)
	t.Run("TestSimpleMammoth", TestSimpleMammoth)
	t.Run("TestSimpleRaw", TestSimpleRaw)
	t.Run("TestSimpleRpcGo", TestSimpleRpcGo)
	t.Run("TestSimpleSwallowAndZero", TestSimpleSwallowAndZero)
	t.Run("TestSimpleRawExt", TestSimpleRawExt)
	t.Run("TestSimpleMapStructKey", TestSimpleMapStructKey)
	t.Run("TestSimpleDecodeNilMapValue", TestSimpleDecodeNilMapValue)
	t.Run("TestSimpleEmbeddedFieldPrecedence", TestSimpleEmbeddedFieldPrecedence)
	t.Run("TestSimpleLargeContainerLen", TestSimpleLargeContainerLen)
	t.Run("TestSimpleMammothMapsAndSlices", TestSimpleMammothMapsAndSlices)
	t.Run("TestSimpleTime", TestSimpleTime)
	t.Run("TestSimpleUintToInt", TestSimpleUintToInt)
	t.Run("TestSimpleDifferentMapOrSliceType", TestSimpleDifferentMapOrSliceType)
	t.Run("TestSimpleScalars", TestSimpleScalars)
	t.Run("TestSimpleOmitempty", TestSimpleOmitempty)
	t.Run("TestSimpleIntfMapping", TestSimpleIntfMapping)
	t.Run("TestSimpleMissingFields", TestSimpleMissingFields)
	t.Run("TestSimpleMaxDepth", TestSimpleMaxDepth)
	t.Run("TestSimpleSelfExt", TestSimpleSelfExt)
	t.Run("TestSimpleBytesEncodedAsArray", TestSimpleBytesEncodedAsArray)
	t.Run("TestSimpleStrucEncDec", TestSimpleStrucEncDec)
	t.Run("TestSimpleMapEncodeForCanonical", TestSimpleMapEncodeForCanonical)
	t.Run("TestSimpleRawToStringToRawEtc", TestSimpleRawToStringToRawEtc)
	t.Run("TestSimpleStructKeyType", TestSimpleStructKeyType)
	t.Run("TestSimplePreferArrayOverSlice", TestSimplePreferArrayOverSlice)
	t.Run("TestSimpleZeroCopyBytes", TestSimpleZeroCopyBytes)
}

func testSimpleMammothGroup(t *testing.T) {
	t.Run("TestSimpleMammothMapsAndSlices", TestSimpleMammothMapsAndSlices)
}

func testRpcGroup(t *testing.T) {
	t.Run("TestBincRpcGo", TestBincRpcGo)
	t.Run("TestSimpleRpcGo", TestSimpleRpcGo)
	t.Run("TestMsgpackRpcGo", TestMsgpackRpcGo)
	t.Run("TestCborRpcGo", TestCborRpcGo)
	t.Run("TestJsonRpcGo", TestJsonRpcGo)
	t.Run("TestMsgpackRpcSpec", TestMsgpackRpcSpec)
}

func testNonHandlesGroup(t *testing.T) {
	// grep "func Test" codec_test.go | grep -v -E '(Cbor|Json|Simple|Msgpack|Binc)'
	t.Run("TestBufioDecReader", TestBufioDecReader)
	t.Run("TestAtomic", TestAtomic)
	t.Run("TestAllEncCircularRef", TestAllEncCircularRef)
	t.Run("TestAllAnonCycle", TestAllAnonCycle)
	t.Run("TestMultipleEncDec", TestMultipleEncDec)
	t.Run("TestAllErrWriter", TestAllErrWriter)
	t.Run("TestMapRangeIndex", TestMapRangeIndex)
}

func TestCodecSuite(t *testing.T) {
	testSuite(t, testCodecGroup)

	testGroupResetFlags()

	oldIndent, oldCharsAsis, oldPreferFloat, oldMapKeyAsString :=
		testJsonH.Indent, testJsonH.HTMLCharsAsIs, testJsonH.PreferFloat, testJsonH.MapKeyAsString

	testMaxInitLen = 10
	testJsonH.Indent = 8
	testJsonH.HTMLCharsAsIs = true
	testJsonH.MapKeyAsString = true
	// testJsonH.PreferFloat = true
	testReinit()
	t.Run("json-spaces-htmlcharsasis-initLen10", testJsonGroup)

	testMaxInitLen = 10
	testJsonH.Indent = -1
	testJsonH.HTMLCharsAsIs = false
	testJsonH.MapKeyAsString = true
	// testJsonH.PreferFloat = false
	testReinit()
	t.Run("json-tabs-initLen10", testJsonGroup)

	testJsonH.Indent, testJsonH.HTMLCharsAsIs, testJsonH.PreferFloat, testJsonH.MapKeyAsString =
		oldIndent, oldCharsAsis, oldPreferFloat, oldMapKeyAsString

	oldIndefLen := testCborH.IndefiniteLength
	testCborH.IndefiniteLength = true
	testReinit()
	t.Run("cbor-indefinitelength", testCborGroup)
	testCborH.IndefiniteLength = oldIndefLen

	oldTimeRFC3339 := testCborH.TimeRFC3339
	testCborH.TimeRFC3339 = !testCborH.TimeRFC3339
	testReinit()
	t.Run("cbor-rfc3339", testCborGroup)
	testCborH.TimeRFC3339 = oldTimeRFC3339

	oldSkipUnexpectedTags := testCborH.SkipUnexpectedTags
	testCborH.SkipUnexpectedTags = !testCborH.SkipUnexpectedTags
	testReinit()
	t.Run("cbor-skip-tags", testCborGroup)
	testCborH.SkipUnexpectedTags = oldSkipUnexpectedTags

	oldSymbols := testBincH.AsSymbols

	testBincH.AsSymbols = 2 // AsSymbolNone
	testReinit()
	t.Run("binc-no-symbols", testBincGroup)

	testBincH.AsSymbols = 1 // AsSymbolAll
	testReinit()
	t.Run("binc-all-symbols", testBincGroup)

	testBincH.AsSymbols = oldSymbols

	oldWriteExt := testMsgpackH.WriteExt
	oldNoFixedNum := testMsgpackH.NoFixedNum

	testMsgpackH.WriteExt = !testMsgpackH.WriteExt
	testReinit()
	t.Run("msgpack-inverse-writeext", testMsgpackGroup)

	testMsgpackH.WriteExt = oldWriteExt

	testMsgpackH.NoFixedNum = !testMsgpackH.NoFixedNum
	testReinit()
	t.Run("msgpack-fixednum", testMsgpackGroup)

	testMsgpackH.NoFixedNum = oldNoFixedNum

	oldEncZeroValuesAsNil := testSimpleH.EncZeroValuesAsNil
	testSimpleH.EncZeroValuesAsNil = !testSimpleH.EncZeroValuesAsNil
	testUseMust = true
	testReinit()
	t.Run("simple-enczeroasnil", testSimpleMammothGroup) // testSimpleGroup
	testSimpleH.EncZeroValuesAsNil = oldEncZeroValuesAsNil

	oldRpcBufsize := testRpcBufsize
	testRpcBufsize = 0
	t.Run("rpc-buf-0", testRpcGroup)
	testRpcBufsize = 0
	t.Run("rpc-buf-00", testRpcGroup)
	testRpcBufsize = 0
	t.Run("rpc-buf-000", testRpcGroup)
	testRpcBufsize = 16
	t.Run("rpc-buf-16", testRpcGroup)
	testRpcBufsize = 2048
	t.Run("rpc-buf-2048", testRpcGroup)
	testRpcBufsize = oldRpcBufsize

	testGroupResetFlags()
}

// func TestCodecSuite(t *testing.T) {
// 	testReinit() // so flag.Parse() is called first, and never called again
// 	testDecodeOptions, testEncodeOptions = DecodeOptions{}, EncodeOptions{}
// 	testGroupResetFlags()
// 	testReinit()
// 	t.Run("optionsFalse", func(t *testing.T) {
// 		t.Run("TestJsonMammothMapsAndSlices", TestJsonMammothMapsAndSlices)
// 	})
// }
3
vendor/github.com/ugorji/go/go.mod
generated
vendored
@@ -1,3 +0,0 @@

module github.com/ugorji/go

require github.com/ugorji/go/codec v1.1.7
47
vendor/github.com/ugorji/go/msgpack.org.md
generated
vendored
@@ -1,47 +0,0 @@

**MessagePack and [Binc](http://github.com/ugorji/binc) Codec for [Go](http://golang.org) Language.**

*A High Performance, Feature-Rich, Idiomatic encode/decode and rpc library*.

To install:

    go get github.com/ugorji/go/codec

Source: [http://github.com/ugorji/go]
Online documentation: [http://godoc.org/github.com/ugorji/go/codec]

Typical usage:

```go
// create and use decoder/encoder
var (
	v  interface{} // value to decode/encode into
	r  io.Reader
	w  io.Writer
	b  []byte
	mh codec.MsgpackHandle
)

dec = codec.NewDecoder(r, &mh)
dec = codec.NewDecoderBytes(b, &mh)
err = dec.Decode(&v)

enc = codec.NewEncoder(w, &mh)
enc = codec.NewEncoderBytes(&b, &mh)
err = enc.Encode(v)

// RPC Server
go func() {
	for {
		conn, err := listener.Accept()
		rpcCodec := codec.GoRpc.ServerCodec(conn, h)
		// OR rpcCodec := codec.MsgpackSpecRpc.ServerCodec(conn, h)
		rpc.ServeCodec(rpcCodec)
	}
}()

// RPC Communication (client side)
conn, err = net.Dial("tcp", "localhost:5555")
rpcCodec := codec.GoRpc.ClientCodec(conn, h)
// OR rpcCodec := codec.MsgpackSpecRpc.ClientCodec(conn, h)
client := rpc.NewClientWithCodec(rpcCodec)
```
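
For a quick sanity check of the calls shown above, a minimal, self-contained round trip (encode a map to msgpack bytes, then decode it back) could look like the following sketch; the sample map contents are illustrative only:

```go
package main

import (
	"fmt"

	"github.com/ugorji/go/codec"
)

func main() {
	var mh codec.MsgpackHandle

	in := map[string]interface{}{"name": "ugorji", "count": 3}

	// Encode into a byte slice.
	var b []byte
	if err := codec.NewEncoderBytes(&b, &mh).Encode(in); err != nil {
		panic(err)
	}

	// Decode back out of the byte slice.
	var out map[string]interface{}
	if err := codec.NewDecoderBytes(b, &mh).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("decoded: %v\n", out)
}
```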