vendor: github.com/coreos/etcd v3.3.25
full diff: https://github.com/coreos/etcd/compare/v3.3.12...v3.3.25

Signed-off-by: Sebastiaan van Stijn <github@gone.nl>

parent 6202322b60, commit 2bef937507
138 changed files with 13060 additions and 56337 deletions
@@ -66,13 +66,15 @@ github.com/moby/ipvs 4566ccea0e08d68e9614c3e7a64a
github.com/BurntSushi/toml 3012a1dbe2e4bd1391d42b32f0577cb7bbc7f005 # v0.3.1
github.com/samuel/go-zookeeper d0e0d8e11f318e000a8cc434616d69e329edc374
github.com/deckarep/golang-set ef32fa3046d9f249d399f98ebaf9be944430fd1d
github.com/coreos/etcd d57e8b8d97adfc4a6c224fe116714bf1a1f3beb9 # v3.3.12
github.com/coreos/etcd 2c834459e1aab78a5d5219c7dfe42335fc4b617a # v3.3.25
github.com/coreos/go-semver 8ab6407b697782a06568d4b7f1db25550ec2e4c6 # v0.2.0
github.com/ugorji/go b4c50a2b199d93b13dc15e78929cfb23bfdf21ab # v1.1.1
github.com/hashicorp/consul 9a9cc9341bb487651a0399e3fc5e1e8a42e62dd9 # v0.5.2
github.com/miekg/dns 6c0c4e6581f8e173cc562c8b3363ab984e4ae071 # v1.1.27
github.com/ishidawataru/sctp 6e2cb1366111dcf547c13531e3a263a067715847
go.etcd.io/bbolt 232d8fc87f50244f9c808f4745759e08a304c029 # v1.3.5
github.com/json-iterator/go a1ca0830781e007c66b225121d2cdb3a649421f6 # v1.1.10
github.com/modern-go/concurrent bacd9c7ef1dd9b15be4a9909b8ac7a4e313eec94 # 1.0.3
github.com/modern-go/reflect2 94122c33edd36123c84d5368cfb2b69df93a0ec8 # v1.0.1

# get graph and distribution packages
github.com/docker/distribution 0d3efadf0154c2b8a4e7b6621fff9809655cc580
72  vendor/github.com/coreos/etcd/client/json.go  (generated, vendored, new file)
@@ -0,0 +1,72 @@
// Copyright 2019 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package client

import (
    "github.com/json-iterator/go"
    "github.com/modern-go/reflect2"
    "strconv"
    "unsafe"
)

type customNumberExtension struct {
    jsoniter.DummyExtension
}

func (cne *customNumberExtension) CreateDecoder(typ reflect2.Type) jsoniter.ValDecoder {
    if typ.String() == "interface {}" {
        return customNumberDecoder{}
    }
    return nil
}

type customNumberDecoder struct {
}

func (customNumberDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
    switch iter.WhatIsNext() {
    case jsoniter.NumberValue:
        var number jsoniter.Number
        iter.ReadVal(&number)
        i64, err := strconv.ParseInt(string(number), 10, 64)
        if err == nil {
            *(*interface{})(ptr) = i64
            return
        }
        f64, err := strconv.ParseFloat(string(number), 64)
        if err == nil {
            *(*interface{})(ptr) = f64
            return
        }
        iter.ReportError("DecodeNumber", err.Error())
    default:
        *(*interface{})(ptr) = iter.Read()
    }
}

// caseSensitiveJsonIterator returns a jsoniterator API that's configured to be
// case-sensitive when unmarshalling, and otherwise compatible with
// the encoding/json standard library.
func caseSensitiveJsonIterator() jsoniter.API {
    config := jsoniter.Config{
        EscapeHTML:             true,
        SortMapKeys:            true,
        ValidateJsonRawMessage: true,
        CaseSensitive:          true,
    }.Froze()
    // Force jsoniter to decode number to interface{} via int64/float64, if possible.
    config.RegisterExtension(&customNumberExtension{})
    return config
}
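A minimal sketch (not from the diff) of what this new file buys the client package. The Config fields mirror caseSensitiveJsonIterator above; the input value is made up, and the unexported number extension can only be registered inside the client package itself, so this sketch omits it.

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    // Same Config fields as caseSensitiveJsonIterator above.
    api := jsoniter.Config{
        EscapeHTML:             true,
        SortMapKeys:            true,
        ValidateJsonRawMessage: true,
        CaseSensitive:          true,
    }.Froze()

    var v map[string]interface{}
    if err := api.Unmarshal([]byte(`{"ttl": 30}`), &v); err != nil {
        panic(err)
    }
    // Prints float64 here; with customNumberExtension registered, the etcd
    // client decodes whole numbers stored in interface{} as int64 instead.
    fmt.Printf("%T\n", v["ttl"])
}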
5218  vendor/github.com/coreos/etcd/client/keys.generated.go  (generated, vendored)
File diff suppressed because it is too large
7  vendor/github.com/coreos/etcd/client/keys.go  (generated, vendored)
@@ -14,8 +14,6 @@

package client

//go:generate codecgen -d 1819 -r "Node|Response|Nodes" -o keys.generated.go keys.go

import (
    "context"
    "encoding/json"
@@ -28,7 +26,6 @@ import (
    "time"

    "github.com/coreos/etcd/pkg/pathutil"
    "github.com/ugorji/go/codec"
)

const (
@@ -656,9 +653,11 @@ func unmarshalHTTPResponse(code int, header http.Header, body []byte) (res *Resp
    return res, err
}

var jsonIterator = caseSensitiveJsonIterator()

func unmarshalSuccessfulKeysResponse(header http.Header, body []byte) (*Response, error) {
    var res Response
    err := codec.NewDecoderBytes(body, new(codec.JsonHandle)).Decode(&res)
    err := jsonIterator.Unmarshal(body, &res)
    if err != nil {
        return nil, ErrInvalidJSON
    }
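A side effect of swapping github.com/ugorji/go/codec for the frozen jsoniter API above is that field matching becomes case-sensitive, which is exactly what the CaseSensitive flag in json.go provides. An illustrative sketch (not from the diff; struct and input are made up):

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

type node struct {
    Key string `json:"key"`
}

func main() {
    data := []byte(`{"KEY":"/foo"}`)

    var std, strict node
    // encoding/json-compatible behaviour: field matching ignores case.
    _ = jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal(data, &std)
    // Case-sensitive matching, as frozen by caseSensitiveJsonIterator.
    _ = jsoniter.Config{CaseSensitive: true}.Froze().Unmarshal(data, &strict)

    fmt.Printf("%q %q\n", std.Key, strict.Key) // "/foo" ""
}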
5  vendor/github.com/coreos/etcd/pkg/fileutil/dir_unix.go  (generated, vendored)
@@ -18,5 +18,10 @@ package fileutil

import "os"

const (
    // PrivateDirMode grants owner to make/remove files inside the directory.
    PrivateDirMode = 0700
)

// OpenDir opens a directory for syncing.
func OpenDir(path string) (*os.File, error) { return os.Open(path) }

5  vendor/github.com/coreos/etcd/pkg/fileutil/dir_windows.go  (generated, vendored)
@@ -21,6 +21,11 @@ import (
    "syscall"
)

const (
    // PrivateDirMode grants owner to make/remove files inside the directory.
    PrivateDirMode = 0777
)

// OpenDir opens a directory in windows with write access for syncing.
func OpenDir(path string) (*os.File, error) {
    fd, err := openDir(path)
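PrivateDirMode is the mode TouchDirAll (fileutil.go below) applies when it has to create the directory: 0700 on Unix, 0777 on Windows where POSIX permission bits carry little meaning. A rough standard-library equivalent, not from the diff, with an illustrative path:

package main

import (
    "fmt"
    "os"
)

func main() {
    const privateDirMode = 0700 // dir_unix.go value; dir_windows.go uses 0777

    // Roughly what TouchDirAll does for a path that does not exist yet.
    if err := os.MkdirAll("/tmp/etcd-data-example", privateDirMode); err != nil {
        panic(err)
    }
    info, err := os.Stat("/tmp/etcd-data-example")
    if err != nil {
        panic(err)
    }
    fmt.Println(info.Mode().Perm()) // -rwx------ on Unix
}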
43  vendor/github.com/coreos/etcd/pkg/fileutil/fileutil.go  (generated, vendored)
@@ -29,8 +29,6 @@ import (
const (
    // PrivateFileMode grants owner to read/write a file.
    PrivateFileMode = 0600
    // PrivateDirMode grants owner to make/remove files inside the directory.
    PrivateDirMode = 0700
)

var (
@@ -65,14 +63,22 @@ func ReadDir(dirpath string) ([]string, error) {
// TouchDirAll is similar to os.MkdirAll. It creates directories with 0700 permission if any directory
// does not exists. TouchDirAll also ensures the given directory is writable.
func TouchDirAll(dir string) error {
    // If path is already a directory, MkdirAll does nothing
    // and returns nil.
    err := os.MkdirAll(dir, PrivateDirMode)
    if err != nil {
        // if mkdirAll("a/text") and "text" is not
        // a directory, this will return syscall.ENOTDIR
        return err
    // If path is already a directory, MkdirAll does nothing and returns nil, so,
    // first check if dir exist with an expected permission mode.
    if Exist(dir) {
        err := CheckDirPermission(dir, PrivateDirMode)
        if err != nil {
            plog.Warningf("check file permission: %v", err)
        }
    } else {
        err := os.MkdirAll(dir, PrivateDirMode)
        if err != nil {
            // if mkdirAll("a/text") and "text" is not
            // a directory, this will return syscall.ENOTDIR
            return err
        }
    }

    return IsDirWriteable(dir)
}

@@ -120,3 +126,22 @@ func ZeroToEnd(f *os.File) error {
    _, err = f.Seek(off, io.SeekStart)
    return err
}

// CheckDirPermission checks permission on an existing dir.
// Returns error if dir is empty or exist with a different permission than specified.
func CheckDirPermission(dir string, perm os.FileMode) error {
    if !Exist(dir) {
        return fmt.Errorf("directory %q empty, cannot check permission.", dir)
    }
    //check the existing permission on the directory
    dirInfo, err := os.Stat(dir)
    if err != nil {
        return err
    }
    dirMode := dirInfo.Mode().Perm()
    if dirMode != perm {
        err = fmt.Errorf("directory %q exist, but the permission is %q. The recommended permission is %q to prevent possible unprivileged access to the data.", dir, dirInfo.Mode(), os.FileMode(PrivateDirMode))
        return err
    }
    return nil
}
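The new CheckDirPermission only produces a warning (via plog.Warningf in TouchDirAll) when an existing data directory is more permissive than 0700. A standard-library sketch of the same check, not from the diff, with a hypothetical path:

package main

import (
    "fmt"
    "os"
)

func main() {
    dir := "/var/lib/etcd" // hypothetical data directory
    info, err := os.Stat(dir)
    if err != nil {
        panic(err)
    }
    if perm := info.Mode().Perm(); perm != 0700 {
        // Mirrors the condition CheckDirPermission reports as an error.
        fmt.Printf("directory %q has permission %v, recommended is %v\n", dir, perm, os.FileMode(0700))
    }
}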
14  vendor/github.com/coreos/etcd/pkg/fileutil/purge.go  (generated, vendored)
@@ -23,13 +23,23 @@ import (
)

func PurgeFile(dirname string, suffix string, max uint, interval time.Duration, stop <-chan struct{}) <-chan error {
    return purgeFile(dirname, suffix, max, interval, stop, nil)
    return purgeFile(dirname, suffix, max, interval, stop, nil, nil)
}

func PurgeFileWithDoneNotify(dirname string, suffix string, max uint, interval time.Duration, stop <-chan struct{}) (<-chan struct{}, <-chan error) {
    doneC := make(chan struct{})
    errC := purgeFile(dirname, suffix, max, interval, stop, nil, doneC)
    return doneC, errC
}

// purgeFile is the internal implementation for PurgeFile which can post purged files to purgec if non-nil.
func purgeFile(dirname string, suffix string, max uint, interval time.Duration, stop <-chan struct{}, purgec chan<- string) <-chan error {
// if donec is non-nil, the function closes it to notify its exit.
func purgeFile(dirname string, suffix string, max uint, interval time.Duration, stop <-chan struct{}, purgec chan<- string, donec chan<- struct{}) <-chan error {
    errC := make(chan error, 1)
    go func() {
        if donec != nil {
            defer close(donec)
        }
        for {
            fnames, err := ReadDir(dirname)
            if err != nil {
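A sketch (not from the diff) of how a caller might use the new PurgeFileWithDoneNotify to wait for the purge goroutine on shutdown; the directory, suffix, retention count, and interval below are made up.

package main

import (
    "log"
    "time"

    "github.com/coreos/etcd/pkg/fileutil"
)

func main() {
    stop := make(chan struct{})
    donec, errc := fileutil.PurgeFileWithDoneNotify(
        "/var/lib/etcd/member/snap", "snap.db", 5, 30*time.Second, stop)

    // ... later, on shutdown:
    close(stop)
    select {
    case err := <-errc:
        log.Printf("purge error: %v", err)
    case <-donec:
        // purgeFile closes donec when its goroutine exits.
    }
}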
23  vendor/github.com/coreos/etcd/pkg/ioutil/pagewriter.go  (generated, vendored)
@@ -95,12 +95,23 @@ func (pw *PageWriter) Write(p []byte) (n int, err error) {
    return n, werr
}

// Flush flushes buffered data.
func (pw *PageWriter) Flush() error {
    if pw.bufferedBytes == 0 {
        return nil
    }
    _, err := pw.w.Write(pw.buf[:pw.bufferedBytes])
    pw.pageOffset = (pw.pageOffset + pw.bufferedBytes) % pw.pageBytes
    pw.bufferedBytes = 0
    _, err := pw.flush()
    return err
}

// FlushN flushes buffered data and returns the number of written bytes.
func (pw *PageWriter) FlushN() (int, error) {
    return pw.flush()
}

func (pw *PageWriter) flush() (int, error) {
    if pw.bufferedBytes == 0 {
        return 0, nil
    }
    n, err := pw.w.Write(pw.buf[:pw.bufferedBytes])
    pw.pageOffset = (pw.pageOffset + pw.bufferedBytes) % pw.pageBytes
    pw.bufferedBytes = 0
    return n, err
}
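FlushN exists so callers (the WAL encoder further down) can account for the bytes a flush actually wrote. A minimal sketch, not from the diff; NewPageWriter is assumed to be the package's existing constructor, which this diff does not show.

package main

import (
    "bytes"
    "fmt"

    "github.com/coreos/etcd/pkg/ioutil"
)

func main() {
    var buf bytes.Buffer
    pw := ioutil.NewPageWriter(&buf, 4096, 0) // 4 KiB pages, starting offset 0

    if _, err := pw.Write([]byte("hello")); err != nil {
        panic(err)
    }
    n, err := pw.FlushN() // flush, plus the number of bytes written
    fmt.Println(n, err, buf.Len())
}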
2  vendor/github.com/coreos/etcd/raft/logger.go  (generated, vendored)
@@ -114,7 +114,7 @@ func (l *DefaultLogger) Fatalf(format string, v ...interface{}) {
}

func (l *DefaultLogger) Panic(v ...interface{}) {
    l.Logger.Panic(v)
    l.Logger.Panic(v...)
}

func (l *DefaultLogger) Panicf(format string, v ...interface{}) {
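Why the one-character change above matters: passing a slice without the trailing ... wraps it in another slice, so the panic message gains an extra level of brackets. A self-contained illustration (not from the diff):

package main

import "fmt"

func report(v ...interface{}) { fmt.Println(v) }

func main() {
    args := []interface{}{"lost", "quorum"}
    report(args)    // prints [[lost quorum]] (the pre-fix behaviour)
    report(args...) // prints [lost quorum]   (the fixed behaviour)
}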
1  vendor/github.com/coreos/etcd/raft/raft.go  (generated, vendored)
@@ -663,6 +663,7 @@ func (r *raft) becomePreCandidate() {
    r.step = stepCandidate
    r.votes = make(map[uint64]bool)
    r.tick = r.tickElection
    r.lead = None
    r.state = StatePreCandidate
    r.logger.Infof("%x became pre-candidate at term %d", r.id, r.Term)
}
1073  vendor/github.com/coreos/etcd/raft/raftpb/raft.pb.go  (generated, vendored)
File diff suppressed because it is too large
161  vendor/github.com/coreos/etcd/snap/snappb/snap.pb.go  (generated, vendored)
|
@ -1,27 +1,16 @@
|
|||
// Code generated by protoc-gen-gogo. DO NOT EDIT.
|
||||
// source: snap.proto
|
||||
|
||||
/*
|
||||
Package snappb is a generated protocol buffer package.
|
||||
|
||||
It is generated from these files:
|
||||
snap.proto
|
||||
|
||||
It has these top-level messages:
|
||||
Snapshot
|
||||
*/
|
||||
package snappb
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
|
||||
fmt "fmt"
|
||||
io "io"
|
||||
math "math"
|
||||
math_bits "math/bits"
|
||||
|
||||
_ "github.com/gogo/protobuf/gogoproto"
|
||||
|
||||
io "io"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
|
@ -36,23 +25,68 @@ var _ = math.Inf
|
|||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
type Snapshot struct {
|
||||
Crc uint32 `protobuf:"varint,1,opt,name=crc" json:"crc"`
|
||||
Data []byte `protobuf:"bytes,2,opt,name=data" json:"data,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
Crc uint32 `protobuf:"varint,1,opt,name=crc" json:"crc"`
|
||||
Data []byte `protobuf:"bytes,2,opt,name=data" json:"data,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Snapshot) Reset() { *m = Snapshot{} }
|
||||
func (m *Snapshot) String() string { return proto.CompactTextString(m) }
|
||||
func (*Snapshot) ProtoMessage() {}
|
||||
func (*Snapshot) Descriptor() ([]byte, []int) { return fileDescriptorSnap, []int{0} }
|
||||
func (m *Snapshot) Reset() { *m = Snapshot{} }
|
||||
func (m *Snapshot) String() string { return proto.CompactTextString(m) }
|
||||
func (*Snapshot) ProtoMessage() {}
|
||||
func (*Snapshot) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_f2e3c045ebf84d00, []int{0}
|
||||
}
|
||||
func (m *Snapshot) XXX_Unmarshal(b []byte) error {
|
||||
return m.Unmarshal(b)
|
||||
}
|
||||
func (m *Snapshot) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
if deterministic {
|
||||
return xxx_messageInfo_Snapshot.Marshal(b, m, deterministic)
|
||||
} else {
|
||||
b = b[:cap(b)]
|
||||
n, err := m.MarshalToSizedBuffer(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return b[:n], nil
|
||||
}
|
||||
}
|
||||
func (m *Snapshot) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Snapshot.Merge(m, src)
|
||||
}
|
||||
func (m *Snapshot) XXX_Size() int {
|
||||
return m.Size()
|
||||
}
|
||||
func (m *Snapshot) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Snapshot.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Snapshot proto.InternalMessageInfo
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*Snapshot)(nil), "snappb.snapshot")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("snap.proto", fileDescriptor_f2e3c045ebf84d00) }
|
||||
|
||||
var fileDescriptor_f2e3c045ebf84d00 = []byte{
|
||||
// 126 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x2a, 0xce, 0x4b, 0x2c,
|
||||
0xd0, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0xb1, 0x0b, 0x92, 0xa4, 0x44, 0xd2, 0xf3,
|
||||
0xd3, 0xf3, 0xc1, 0x42, 0xfa, 0x20, 0x16, 0x44, 0x56, 0xc9, 0x8c, 0x8b, 0x03, 0x24, 0x5f, 0x9c,
|
||||
0x91, 0x5f, 0x22, 0x24, 0xc6, 0xc5, 0x9c, 0x5c, 0x94, 0x2c, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0xeb,
|
||||
0xc4, 0x72, 0xe2, 0x9e, 0x3c, 0x43, 0x10, 0x48, 0x40, 0x48, 0x88, 0x8b, 0x25, 0x25, 0xb1, 0x24,
|
||||
0x51, 0x82, 0x49, 0x81, 0x51, 0x83, 0x27, 0x08, 0xcc, 0x76, 0x12, 0x39, 0xf1, 0x50, 0x8e, 0xe1,
|
||||
0xc4, 0x23, 0x39, 0xc6, 0x0b, 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf1, 0x58, 0x8e,
|
||||
0x01, 0x10, 0x00, 0x00, 0xff, 0xff, 0xd8, 0x0f, 0x32, 0xb2, 0x78, 0x00, 0x00, 0x00,
|
||||
}
|
||||
|
||||
func (m *Snapshot) Marshal() (dAtA []byte, err error) {
|
||||
size := m.Size()
|
||||
dAtA = make([]byte, size)
|
||||
n, err := m.MarshalTo(dAtA)
|
||||
n, err := m.MarshalToSizedBuffer(dAtA[:size])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -60,35 +94,47 @@ func (m *Snapshot) Marshal() (dAtA []byte, err error) {
|
|||
}
|
||||
|
||||
func (m *Snapshot) MarshalTo(dAtA []byte) (int, error) {
|
||||
var i int
|
||||
size := m.Size()
|
||||
return m.MarshalToSizedBuffer(dAtA[:size])
|
||||
}
|
||||
|
||||
func (m *Snapshot) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i := len(dAtA)
|
||||
_ = i
|
||||
var l int
|
||||
_ = l
|
||||
dAtA[i] = 0x8
|
||||
i++
|
||||
i = encodeVarintSnap(dAtA, i, uint64(m.Crc))
|
||||
if m.Data != nil {
|
||||
dAtA[i] = 0x12
|
||||
i++
|
||||
i = encodeVarintSnap(dAtA, i, uint64(len(m.Data)))
|
||||
i += copy(dAtA[i:], m.Data)
|
||||
}
|
||||
if m.XXX_unrecognized != nil {
|
||||
i += copy(dAtA[i:], m.XXX_unrecognized)
|
||||
i -= len(m.XXX_unrecognized)
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
return i, nil
|
||||
if m.Data != nil {
|
||||
i -= len(m.Data)
|
||||
copy(dAtA[i:], m.Data)
|
||||
i = encodeVarintSnap(dAtA, i, uint64(len(m.Data)))
|
||||
i--
|
||||
dAtA[i] = 0x12
|
||||
}
|
||||
i = encodeVarintSnap(dAtA, i, uint64(m.Crc))
|
||||
i--
|
||||
dAtA[i] = 0x8
|
||||
return len(dAtA) - i, nil
|
||||
}
|
||||
|
||||
func encodeVarintSnap(dAtA []byte, offset int, v uint64) int {
|
||||
offset -= sovSnap(v)
|
||||
base := offset
|
||||
for v >= 1<<7 {
|
||||
dAtA[offset] = uint8(v&0x7f | 0x80)
|
||||
v >>= 7
|
||||
offset++
|
||||
}
|
||||
dAtA[offset] = uint8(v)
|
||||
return offset + 1
|
||||
return base
|
||||
}
|
||||
func (m *Snapshot) Size() (n int) {
|
||||
if m == nil {
|
||||
return 0
|
||||
}
|
||||
var l int
|
||||
_ = l
|
||||
n += 1 + sovSnap(uint64(m.Crc))
|
||||
|
@ -103,14 +149,7 @@ func (m *Snapshot) Size() (n int) {
|
|||
}
|
||||
|
||||
func sovSnap(x uint64) (n int) {
|
||||
for {
|
||||
n++
|
||||
x >>= 7
|
||||
if x == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
return n
|
||||
return (math_bits.Len64(x|1) + 6) / 7
|
||||
}
|
||||
func sozSnap(x uint64) (n int) {
|
||||
return sovSnap(uint64((x << 1) ^ uint64((int64(x) >> 63))))
|
||||
|
@ -130,7 +169,7 @@ func (m *Snapshot) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
wire |= (uint64(b) & 0x7F) << shift
|
||||
wire |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -158,7 +197,7 @@ func (m *Snapshot) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.Crc |= (uint32(b) & 0x7F) << shift
|
||||
m.Crc |= uint32(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -177,7 +216,7 @@ func (m *Snapshot) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
byteLen |= (int(b) & 0x7F) << shift
|
||||
byteLen |= int(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -186,6 +225,9 @@ func (m *Snapshot) Unmarshal(dAtA []byte) error {
|
|||
return ErrInvalidLengthSnap
|
||||
}
|
||||
postIndex := iNdEx + byteLen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthSnap
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
|
@ -203,6 +245,9 @@ func (m *Snapshot) Unmarshal(dAtA []byte) error {
|
|||
if skippy < 0 {
|
||||
return ErrInvalidLengthSnap
|
||||
}
|
||||
if (iNdEx + skippy) < 0 {
|
||||
return ErrInvalidLengthSnap
|
||||
}
|
||||
if (iNdEx + skippy) > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
|
@ -270,10 +315,13 @@ func skipSnap(dAtA []byte) (n int, err error) {
|
|||
break
|
||||
}
|
||||
}
|
||||
iNdEx += length
|
||||
if length < 0 {
|
||||
return 0, ErrInvalidLengthSnap
|
||||
}
|
||||
iNdEx += length
|
||||
if iNdEx < 0 {
|
||||
return 0, ErrInvalidLengthSnap
|
||||
}
|
||||
return iNdEx, nil
|
||||
case 3:
|
||||
for {
|
||||
|
@ -302,6 +350,9 @@ func skipSnap(dAtA []byte) (n int, err error) {
|
|||
return 0, err
|
||||
}
|
||||
iNdEx = start + next
|
||||
if iNdEx < 0 {
|
||||
return 0, ErrInvalidLengthSnap
|
||||
}
|
||||
}
|
||||
return iNdEx, nil
|
||||
case 4:
|
||||
|
@ -320,17 +371,3 @@ var (
|
|||
ErrInvalidLengthSnap = fmt.Errorf("proto: negative length found during unmarshaling")
|
||||
ErrIntOverflowSnap = fmt.Errorf("proto: integer overflow")
|
||||
)
|
||||
|
||||
func init() { proto.RegisterFile("snap.proto", fileDescriptorSnap) }
|
||||
|
||||
var fileDescriptorSnap = []byte{
|
||||
// 126 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x2a, 0xce, 0x4b, 0x2c,
|
||||
0xd0, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0xb1, 0x0b, 0x92, 0xa4, 0x44, 0xd2, 0xf3,
|
||||
0xd3, 0xf3, 0xc1, 0x42, 0xfa, 0x20, 0x16, 0x44, 0x56, 0xc9, 0x8c, 0x8b, 0x03, 0x24, 0x5f, 0x9c,
|
||||
0x91, 0x5f, 0x22, 0x24, 0xc6, 0xc5, 0x9c, 0x5c, 0x94, 0x2c, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0xeb,
|
||||
0xc4, 0x72, 0xe2, 0x9e, 0x3c, 0x43, 0x10, 0x48, 0x40, 0x48, 0x88, 0x8b, 0x25, 0x25, 0xb1, 0x24,
|
||||
0x51, 0x82, 0x49, 0x81, 0x51, 0x83, 0x27, 0x08, 0xcc, 0x76, 0x12, 0x39, 0xf1, 0x50, 0x8e, 0xe1,
|
||||
0xc4, 0x23, 0x39, 0xc6, 0x0b, 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf1, 0x58, 0x8e,
|
||||
0x01, 0x10, 0x00, 0x00, 0xff, 0xff, 0xd8, 0x0f, 0x32, 0xb2, 0x78, 0x00, 0x00, 0x00,
|
||||
}
|
||||
|
|
82  vendor/github.com/coreos/etcd/snap/snapshotter.go  (generated, vendored)
|
@ -23,6 +23,7 @@ import (
|
|||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
|
@ -31,7 +32,7 @@ import (
|
|||
"github.com/coreos/etcd/raft"
|
||||
"github.com/coreos/etcd/raft/raftpb"
|
||||
"github.com/coreos/etcd/snap/snappb"
|
||||
|
||||
"github.com/coreos/etcd/wal/walpb"
|
||||
"github.com/coreos/pkg/capnslog"
|
||||
)
|
||||
|
||||
|
@ -80,9 +81,8 @@ func (s *Snapshotter) save(snapshot *raftpb.Snapshot) error {
|
|||
d, err := snap.Marshal()
|
||||
if err != nil {
|
||||
return err
|
||||
} else {
|
||||
marshallingDurations.Observe(float64(time.Since(start)) / float64(time.Second))
|
||||
}
|
||||
marshallingDurations.Observe(float64(time.Since(start)) / float64(time.Second))
|
||||
|
||||
err = pioutil.WriteAndSyncFile(filepath.Join(s.dir, fname), d, 0666)
|
||||
if err == nil {
|
||||
|
@ -97,20 +97,35 @@ func (s *Snapshotter) save(snapshot *raftpb.Snapshot) error {
|
|||
}
|
||||
|
||||
func (s *Snapshotter) Load() (*raftpb.Snapshot, error) {
|
||||
return s.loadMatching(func(*raftpb.Snapshot) bool { return true })
|
||||
}
|
||||
|
||||
// LoadNewestAvailable loads the newest snapshot available that is in walSnaps.
|
||||
func (s *Snapshotter) LoadNewestAvailable(walSnaps []walpb.Snapshot) (*raftpb.Snapshot, error) {
|
||||
return s.loadMatching(func(snapshot *raftpb.Snapshot) bool {
|
||||
m := snapshot.Metadata
|
||||
for i := len(walSnaps) - 1; i >= 0; i-- {
|
||||
if m.Term == walSnaps[i].Term && m.Index == walSnaps[i].Index {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
// loadMatching returns the newest snapshot where matchFn returns true.
|
||||
func (s *Snapshotter) loadMatching(matchFn func(*raftpb.Snapshot) bool) (*raftpb.Snapshot, error) {
|
||||
names, err := s.snapNames()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var snap *raftpb.Snapshot
|
||||
for _, name := range names {
|
||||
if snap, err = loadSnap(s.dir, name); err == nil {
|
||||
break
|
||||
if snap, err = loadSnap(s.dir, name); err == nil && matchFn(snap) {
|
||||
return snap, nil
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return nil, ErrNoSnapshot
|
||||
}
|
||||
return snap, nil
|
||||
return nil, ErrNoSnapshot
|
||||
}
|
||||
|
||||
func loadSnap(dir, name string) (*raftpb.Snapshot, error) {
|
||||
|
@ -172,6 +187,10 @@ func (s *Snapshotter) snapNames() ([]string, error) {
|
|||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
names, err = s.cleanupSnapdir(names)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
snaps := checkSuffix(names)
|
||||
if len(snaps) == 0 {
|
||||
return nil, ErrNoSnapshot
|
||||
|
@ -202,3 +221,48 @@ func renameBroken(path string) {
|
|||
plog.Warningf("cannot rename broken snapshot file %v to %v: %v", path, brokenPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
// cleanupSnapdir removes any files that should not be in the snapshot directory:
|
||||
// - db.tmp prefixed files that can be orphaned by defragmentation
|
||||
func (s *Snapshotter) cleanupSnapdir(filenames []string) (names []string, err error) {
|
||||
for _, filename := range filenames {
|
||||
if strings.HasPrefix(filename, "db.tmp") {
|
||||
plog.Infof("found orphaned defragmentation file; deleting: %s", filename)
|
||||
if rmErr := os.Remove(filepath.Join(s.dir, filename)); rmErr != nil && !os.IsNotExist(rmErr) {
|
||||
return nil, fmt.Errorf("failed to remove orphaned defragmentation file %s: %v", filename, rmErr)
|
||||
}
|
||||
continue
|
||||
}
|
||||
names = append(names, filename)
|
||||
}
|
||||
return names, nil
|
||||
}
|
||||
|
||||
func (s *Snapshotter) ReleaseSnapDBs(snap raftpb.Snapshot) error {
|
||||
dir, err := os.Open(s.dir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer dir.Close()
|
||||
filenames, err := dir.Readdirnames(-1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, filename := range filenames {
|
||||
if strings.HasSuffix(filename, ".snap.db") {
|
||||
hexIndex := strings.TrimSuffix(filepath.Base(filename), ".snap.db")
|
||||
index, err := strconv.ParseUint(hexIndex, 16, 64)
|
||||
if err != nil {
|
||||
plog.Warningf("failed to parse index from filename: %s (%v)", filename, err)
|
||||
continue
|
||||
}
|
||||
if index < snap.Metadata.Index {
|
||||
plog.Infof("found orphaned .snap.db file; deleting %q", filename)
|
||||
if rmErr := os.Remove(filepath.Join(s.dir, filename)); rmErr != nil && !os.IsNotExist(rmErr) {
|
||||
plog.Warningf("failed to remove orphaned .snap.db file: %s (%v)", filename, rmErr)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
2  vendor/github.com/coreos/etcd/version/version.go  (generated, vendored)
@@ -26,7 +26,7 @@ import (
var (
    // MinClusterVersion is the min cluster version this etcd binary is compatible with.
    MinClusterVersion = "3.0.0"
    Version = "3.3.12"
    Version = "3.3.25"
    APIVersion = "unknown"

    // Git SHA Value will be set during build
8  vendor/github.com/coreos/etcd/wal/decoder.go  (generated, vendored)
@@ -59,6 +59,11 @@ func (d *decoder) decode(rec *walpb.Record) error {
    return d.decodeRecord(rec)
}

// raft max message size is set to 1 MB in etcd server
// assume projects set reasonable message size limit,
// thus entry size should never exceed 10 MB
const maxWALEntrySizeLimit = int64(10 * 1024 * 1024)

func (d *decoder) decodeRecord(rec *walpb.Record) error {
    if len(d.brs) == 0 {
        return io.EOF
@@ -79,6 +84,9 @@ func (d *decoder) decodeRecord(rec *walpb.Record) error {
    }

    recBytes, padBytes := decodeFrameSize(l)
    if recBytes >= maxWALEntrySizeLimit-padBytes {
        return ErrMaxWALEntrySizeLimitExceeded
    }

    data := make([]byte, recBytes+padBytes)
    if _, err = io.ReadFull(d.brs[0], data); err != nil {
12  vendor/github.com/coreos/etcd/wal/encoder.go  (generated, vendored)
@@ -92,7 +92,8 @@ func (e *encoder) encode(rec *walpb.Record) error {
    if padBytes != 0 {
        data = append(data, make([]byte, padBytes)...)
    }
    _, err = e.bw.Write(data)
    n, err = e.bw.Write(data)
    walWriteBytes.Add(float64(n))
    return err
}

@@ -108,13 +109,16 @@ func encodeFrameSize(dataBytes int) (lenField uint64, padBytes int) {

func (e *encoder) flush() error {
    e.mu.Lock()
    defer e.mu.Unlock()
    return e.bw.Flush()
    n, err := e.bw.FlushN()
    e.mu.Unlock()
    walWriteBytes.Add(float64(n))
    return err
}

func writeUint64(w io.Writer, n uint64, buf []byte) error {
    // http://golang.org/src/encoding/binary/binary.go
    binary.LittleEndian.PutUint64(buf, n)
    _, err := w.Write(buf)
    nv, err := w.Write(buf)
    walWriteBytes.Add(float64(nv))
    return err
}
7  vendor/github.com/coreos/etcd/wal/metrics.go  (generated, vendored)
@@ -24,8 +24,15 @@ var (
        Help: "The latency distributions of fsync called by wal.",
        Buckets: prometheus.ExponentialBuckets(0.001, 2, 14),
    })
    walWriteBytes = prometheus.NewGauge(prometheus.GaugeOpts{
        Namespace: "etcd",
        Subsystem: "disk",
        Name: "wal_write_bytes_total",
        Help: "Total number of bytes written in WAL.",
    })
)

func init() {
    prometheus.MustRegister(syncDurations)
    prometheus.MustRegister(walWriteBytes)
}
5  vendor/github.com/coreos/etcd/wal/repair.go  (generated, vendored)
@@ -18,6 +18,7 @@ import (
    "io"
    "os"
    "path/filepath"
    "time"

    "github.com/coreos/etcd/pkg/fileutil"
    "github.com/coreos/etcd/wal/walpb"
@@ -76,10 +77,14 @@ func Repair(dirpath string) bool {
            plog.Errorf("could not repair %v, failed to truncate file", f.Name())
            return false
        }

        start := time.Now()
        if err = fileutil.Fsync(f.File); err != nil {
            plog.Errorf("could not repair %v, failed to sync file", f.Name())
            return false
        }
        syncDurations.Observe(time.Since(start).Seconds())

        return true
    default:
        plog.Errorf("could not repair error (%v)", err)
268  vendor/github.com/coreos/etcd/wal/wal.go  (generated, vendored)
|
@ -55,12 +55,15 @@ var (
|
|||
|
||||
plog = capnslog.NewPackageLogger("github.com/coreos/etcd", "wal")
|
||||
|
||||
ErrMetadataConflict = errors.New("wal: conflicting metadata found")
|
||||
ErrFileNotFound = errors.New("wal: file not found")
|
||||
ErrCRCMismatch = errors.New("wal: crc mismatch")
|
||||
ErrSnapshotMismatch = errors.New("wal: snapshot mismatch")
|
||||
ErrSnapshotNotFound = errors.New("wal: snapshot not found")
|
||||
crcTable = crc32.MakeTable(crc32.Castagnoli)
|
||||
ErrMetadataConflict = errors.New("wal: conflicting metadata found")
|
||||
ErrFileNotFound = errors.New("wal: file not found")
|
||||
ErrCRCMismatch = errors.New("wal: crc mismatch")
|
||||
ErrSnapshotMismatch = errors.New("wal: snapshot mismatch")
|
||||
ErrSnapshotNotFound = errors.New("wal: snapshot not found")
|
||||
ErrSliceOutOfRange = errors.New("wal: slice bounds out of range")
|
||||
ErrMaxWALEntrySizeLimitExceeded = errors.New("wal: max entry size limit exceeded")
|
||||
ErrDecoderNotFound = errors.New("wal: decoder not found")
|
||||
crcTable = crc32.MakeTable(crc32.Castagnoli)
|
||||
)
|
||||
|
||||
// WAL is a logical representation of the stable storage.
|
||||
|
@ -90,7 +93,8 @@ type WAL struct {
|
|||
}
|
||||
|
||||
// Create creates a WAL ready for appending records. The given metadata is
|
||||
// recorded at the head of each WAL file, and can be retrieved with ReadAll.
|
||||
// recorded at the head of each WAL file, and can be retrieved with ReadAll
|
||||
// after the file is Open.
|
||||
func Create(dirpath string, metadata []byte) (*WAL, error) {
|
||||
if Exist(dirpath) {
|
||||
return nil, os.ErrExist
|
||||
|
@ -147,9 +151,13 @@ func Create(dirpath string, metadata []byte) (*WAL, error) {
|
|||
if perr != nil {
|
||||
return nil, perr
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
if perr = fileutil.Fsync(pdir); perr != nil {
|
||||
return nil, perr
|
||||
}
|
||||
syncDurations.Observe(time.Since(start).Seconds())
|
||||
|
||||
if perr = pdir.Close(); err != nil {
|
||||
return nil, perr
|
||||
}
|
||||
|
@ -223,44 +231,16 @@ func OpenForRead(dirpath string, snap walpb.Snapshot) (*WAL, error) {
|
|||
}
|
||||
|
||||
func openAtIndex(dirpath string, snap walpb.Snapshot, write bool) (*WAL, error) {
|
||||
names, err := readWalNames(dirpath)
|
||||
names, nameIndex, err := selectWALFiles(dirpath, snap)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
nameIndex, ok := searchIndex(names, snap.Index)
|
||||
if !ok || !isValidSeq(names[nameIndex:]) {
|
||||
return nil, ErrFileNotFound
|
||||
rs, ls, closer, err := openWALFiles(dirpath, names, nameIndex, write)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// open the wal files
|
||||
rcs := make([]io.ReadCloser, 0)
|
||||
rs := make([]io.Reader, 0)
|
||||
ls := make([]*fileutil.LockedFile, 0)
|
||||
for _, name := range names[nameIndex:] {
|
||||
p := filepath.Join(dirpath, name)
|
||||
if write {
|
||||
l, err := fileutil.TryLockFile(p, os.O_RDWR, fileutil.PrivateFileMode)
|
||||
if err != nil {
|
||||
closeAll(rcs...)
|
||||
return nil, err
|
||||
}
|
||||
ls = append(ls, l)
|
||||
rcs = append(rcs, l)
|
||||
} else {
|
||||
rf, err := os.OpenFile(p, os.O_RDONLY, fileutil.PrivateFileMode)
|
||||
if err != nil {
|
||||
closeAll(rcs...)
|
||||
return nil, err
|
||||
}
|
||||
ls = append(ls, nil)
|
||||
rcs = append(rcs, rf)
|
||||
}
|
||||
rs = append(rs, rcs[len(rcs)-1])
|
||||
}
|
||||
|
||||
closer := func() error { return closeAll(rcs...) }
|
||||
|
||||
// create a WAL ready for reading
|
||||
w := &WAL{
|
||||
dir: dirpath,
|
||||
|
@ -284,6 +264,52 @@ func openAtIndex(dirpath string, snap walpb.Snapshot, write bool) (*WAL, error)
|
|||
return w, nil
|
||||
}
|
||||
|
||||
func selectWALFiles(dirpath string, snap walpb.Snapshot) ([]string, int, error) {
|
||||
names, err := readWalNames(dirpath)
|
||||
if err != nil {
|
||||
return nil, -1, err
|
||||
}
|
||||
|
||||
nameIndex, ok := searchIndex(names, snap.Index)
|
||||
if !ok || !isValidSeq(names[nameIndex:]) {
|
||||
err = ErrFileNotFound
|
||||
return nil, -1, err
|
||||
}
|
||||
|
||||
return names, nameIndex, nil
|
||||
}
|
||||
|
||||
func openWALFiles(dirpath string, names []string, nameIndex int, write bool) ([]io.Reader, []*fileutil.LockedFile, func() error, error) {
|
||||
rcs := make([]io.ReadCloser, 0)
|
||||
rs := make([]io.Reader, 0)
|
||||
ls := make([]*fileutil.LockedFile, 0)
|
||||
for _, name := range names[nameIndex:] {
|
||||
p := filepath.Join(dirpath, name)
|
||||
if write {
|
||||
l, err := fileutil.TryLockFile(p, os.O_RDWR, fileutil.PrivateFileMode)
|
||||
if err != nil {
|
||||
closeAll(rcs...)
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
ls = append(ls, l)
|
||||
rcs = append(rcs, l)
|
||||
} else {
|
||||
rf, err := os.OpenFile(p, os.O_RDONLY, fileutil.PrivateFileMode)
|
||||
if err != nil {
|
||||
closeAll(rcs...)
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
ls = append(ls, nil)
|
||||
rcs = append(rcs, rf)
|
||||
}
|
||||
rs = append(rs, rcs[len(rcs)-1])
|
||||
}
|
||||
|
||||
closer := func() error { return closeAll(rcs...) }
|
||||
|
||||
return rs, ls, closer, nil
|
||||
}
|
||||
|
||||
// ReadAll reads out records of the current WAL.
|
||||
// If opened in write mode, it must read out all records until EOF. Or an error
|
||||
// will be returned.
|
||||
|
@ -299,6 +325,10 @@ func (w *WAL) ReadAll() (metadata []byte, state raftpb.HardState, ents []raftpb.
|
|||
defer w.mu.Unlock()
|
||||
|
||||
rec := &walpb.Record{}
|
||||
|
||||
if w.decoder == nil {
|
||||
return nil, state, nil, ErrDecoderNotFound
|
||||
}
|
||||
decoder := w.decoder
|
||||
|
||||
var match bool
|
||||
|
@ -306,8 +336,15 @@ func (w *WAL) ReadAll() (metadata []byte, state raftpb.HardState, ents []raftpb.
|
|||
switch rec.Type {
|
||||
case entryType:
|
||||
e := mustUnmarshalEntry(rec.Data)
|
||||
// 0 <= e.Index-w.start.Index - 1 < len(ents)
|
||||
if e.Index > w.start.Index {
|
||||
ents = append(ents[:e.Index-w.start.Index-1], e)
|
||||
// prevent "panic: runtime error: slice bounds out of range [:13038096702221461992] with capacity 0"
|
||||
up := e.Index - w.start.Index - 1
|
||||
if up > uint64(len(ents)) {
|
||||
// return error before append call causes runtime panic
|
||||
return nil, state, nil, ErrSliceOutOfRange
|
||||
}
|
||||
ents = append(ents[:up], e)
|
||||
}
|
||||
w.enti = e.Index
|
||||
case stateType:
|
||||
|
@ -398,6 +435,150 @@ func (w *WAL) ReadAll() (metadata []byte, state raftpb.HardState, ents []raftpb.
|
|||
return metadata, state, ents, err
|
||||
}
|
||||
|
||||
// ValidSnapshotEntries returns all the valid snapshot entries in the wal logs in the given directory.
|
||||
// Snapshot entries are valid if their index is less than or equal to the most recent committed hardstate.
|
||||
func ValidSnapshotEntries(walDir string) ([]walpb.Snapshot, error) {
|
||||
var snaps []walpb.Snapshot
|
||||
var state raftpb.HardState
|
||||
var err error
|
||||
|
||||
rec := &walpb.Record{}
|
||||
names, err := readWalNames(walDir)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// open wal files in read mode, so that there is no conflict
|
||||
// when the same WAL is opened elsewhere in write mode
|
||||
rs, _, closer, err := openWALFiles(walDir, names, 0, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
if closer != nil {
|
||||
closer()
|
||||
}
|
||||
}()
|
||||
|
||||
// create a new decoder from the readers on the WAL files
|
||||
decoder := newDecoder(rs...)
|
||||
|
||||
for err = decoder.decode(rec); err == nil; err = decoder.decode(rec) {
|
||||
switch rec.Type {
|
||||
case snapshotType:
|
||||
var loadedSnap walpb.Snapshot
|
||||
pbutil.MustUnmarshal(&loadedSnap, rec.Data)
|
||||
snaps = append(snaps, loadedSnap)
|
||||
case stateType:
|
||||
state = mustUnmarshalState(rec.Data)
|
||||
case crcType:
|
||||
crc := decoder.crc.Sum32()
|
||||
// current crc of decoder must match the crc of the record.
|
||||
// do no need to match 0 crc, since the decoder is a new one at this case.
|
||||
if crc != 0 && rec.Validate(crc) != nil {
|
||||
return nil, ErrCRCMismatch
|
||||
}
|
||||
decoder.updateCRC(rec.Crc)
|
||||
}
|
||||
}
|
||||
// We do not have to read out all the WAL entries
|
||||
// as the decoder is opened in read mode.
|
||||
if err != io.EOF && err != io.ErrUnexpectedEOF {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// filter out any snaps that are newer than the committed hardstate
|
||||
n := 0
|
||||
for _, s := range snaps {
|
||||
if s.Index <= state.Commit {
|
||||
snaps[n] = s
|
||||
n++
|
||||
}
|
||||
}
|
||||
snaps = snaps[:n:n]
|
||||
|
||||
return snaps, nil
|
||||
}
|
||||
|
||||
// Verify reads through the given WAL and verifies that it is not corrupted.
|
||||
// It creates a new decoder to read through the records of the given WAL.
|
||||
// It does not conflict with any open WAL, but it is recommended not to
|
||||
// call this function after opening the WAL for writing.
|
||||
// If it cannot read out the expected snap, it will return ErrSnapshotNotFound.
|
||||
// If the loaded snap doesn't match with the expected one, it will
|
||||
// return error ErrSnapshotMismatch.
|
||||
func Verify(walDir string, snap walpb.Snapshot) error {
|
||||
var metadata []byte
|
||||
var err error
|
||||
var match bool
|
||||
|
||||
rec := &walpb.Record{}
|
||||
|
||||
names, nameIndex, err := selectWALFiles(walDir, snap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// open wal files in read mode, so that there is no conflict
|
||||
// when the same WAL is opened elsewhere in write mode
|
||||
rs, _, closer, err := openWALFiles(walDir, names, nameIndex, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// create a new decoder from the readers on the WAL files
|
||||
decoder := newDecoder(rs...)
|
||||
|
||||
for err = decoder.decode(rec); err == nil; err = decoder.decode(rec) {
|
||||
switch rec.Type {
|
||||
case metadataType:
|
||||
if metadata != nil && !bytes.Equal(metadata, rec.Data) {
|
||||
return ErrMetadataConflict
|
||||
}
|
||||
metadata = rec.Data
|
||||
case crcType:
|
||||
crc := decoder.crc.Sum32()
|
||||
// Current crc of decoder must match the crc of the record.
|
||||
// We need not match 0 crc, since the decoder is a new one at this point.
|
||||
if crc != 0 && rec.Validate(crc) != nil {
|
||||
return ErrCRCMismatch
|
||||
}
|
||||
decoder.updateCRC(rec.Crc)
|
||||
case snapshotType:
|
||||
var loadedSnap walpb.Snapshot
|
||||
pbutil.MustUnmarshal(&loadedSnap, rec.Data)
|
||||
if loadedSnap.Index == snap.Index {
|
||||
if loadedSnap.Term != snap.Term {
|
||||
return ErrSnapshotMismatch
|
||||
}
|
||||
match = true
|
||||
}
|
||||
// We ignore all entry and state type records as these
|
||||
// are not necessary for validating the WAL contents
|
||||
case entryType:
|
||||
case stateType:
|
||||
default:
|
||||
return fmt.Errorf("unexpected block type %d", rec.Type)
|
||||
}
|
||||
}
|
||||
|
||||
if closer != nil {
|
||||
closer()
|
||||
}
|
||||
|
||||
// We do not have to read out all the WAL entries
|
||||
// as the decoder is opened in read mode.
|
||||
if err != io.EOF && err != io.ErrUnexpectedEOF {
|
||||
return err
|
||||
}
|
||||
|
||||
if !match {
|
||||
return ErrSnapshotNotFound
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// cut closes current file written and creates a new one ready to append.
|
||||
// cut first creates a temp wal file and writes necessary headers into it.
|
||||
// Then cut atomically rename temp wal file to a wal file.
|
||||
|
@ -451,9 +632,12 @@ func (w *WAL) cut() error {
|
|||
if err = os.Rename(newTail.Name(), fpath); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
if err = fileutil.Fsync(w.dirFile); err != nil {
|
||||
return err
|
||||
}
|
||||
syncDurations.Observe(time.Since(start).Seconds())
|
||||
|
||||
// reopen newTail with its new path so calls to Name() match the wal filename format
|
||||
newTail.Close()
|
||||
|
@ -495,6 +679,10 @@ func (w *WAL) sync() error {
|
|||
return err
|
||||
}
|
||||
|
||||
func (w *WAL) Sync() error {
|
||||
return w.sync()
|
||||
}
|
||||
|
||||
// ReleaseLockTo releases the locks, which has smaller index than the given index
|
||||
// except the largest one among them.
|
||||
// For example, if WAL is holding lock 1,2,3,4,5,6, ReleaseLockTo(4) will release
|
||||
|
|
262  vendor/github.com/coreos/etcd/wal/walpb/record.pb.go  (generated, vendored)
|
@ -1,28 +1,16 @@
|
|||
// Code generated by protoc-gen-gogo. DO NOT EDIT.
|
||||
// source: record.proto
|
||||
|
||||
/*
|
||||
Package walpb is a generated protocol buffer package.
|
||||
|
||||
It is generated from these files:
|
||||
record.proto
|
||||
|
||||
It has these top-level messages:
|
||||
Record
|
||||
Snapshot
|
||||
*/
|
||||
package walpb
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
|
||||
fmt "fmt"
|
||||
io "io"
|
||||
math "math"
|
||||
math_bits "math/bits"
|
||||
|
||||
_ "github.com/gogo/protobuf/gogoproto"
|
||||
|
||||
io "io"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
|
@ -37,36 +25,115 @@ var _ = math.Inf
|
|||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
type Record struct {
|
||||
Type int64 `protobuf:"varint,1,opt,name=type" json:"type"`
|
||||
Crc uint32 `protobuf:"varint,2,opt,name=crc" json:"crc"`
|
||||
Data []byte `protobuf:"bytes,3,opt,name=data" json:"data,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
Type int64 `protobuf:"varint,1,opt,name=type" json:"type"`
|
||||
Crc uint32 `protobuf:"varint,2,opt,name=crc" json:"crc"`
|
||||
Data []byte `protobuf:"bytes,3,opt,name=data" json:"data,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Record) Reset() { *m = Record{} }
|
||||
func (m *Record) String() string { return proto.CompactTextString(m) }
|
||||
func (*Record) ProtoMessage() {}
|
||||
func (*Record) Descriptor() ([]byte, []int) { return fileDescriptorRecord, []int{0} }
|
||||
func (m *Record) Reset() { *m = Record{} }
|
||||
func (m *Record) String() string { return proto.CompactTextString(m) }
|
||||
func (*Record) ProtoMessage() {}
|
||||
func (*Record) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_bf94fd919e302a1d, []int{0}
|
||||
}
|
||||
func (m *Record) XXX_Unmarshal(b []byte) error {
|
||||
return m.Unmarshal(b)
|
||||
}
|
||||
func (m *Record) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
if deterministic {
|
||||
return xxx_messageInfo_Record.Marshal(b, m, deterministic)
|
||||
} else {
|
||||
b = b[:cap(b)]
|
||||
n, err := m.MarshalToSizedBuffer(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return b[:n], nil
|
||||
}
|
||||
}
|
||||
func (m *Record) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Record.Merge(m, src)
|
||||
}
|
||||
func (m *Record) XXX_Size() int {
|
||||
return m.Size()
|
||||
}
|
||||
func (m *Record) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Record.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Record proto.InternalMessageInfo
|
||||
|
||||
type Snapshot struct {
|
||||
Index uint64 `protobuf:"varint,1,opt,name=index" json:"index"`
|
||||
Term uint64 `protobuf:"varint,2,opt,name=term" json:"term"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
Index uint64 `protobuf:"varint,1,opt,name=index" json:"index"`
|
||||
Term uint64 `protobuf:"varint,2,opt,name=term" json:"term"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Snapshot) Reset() { *m = Snapshot{} }
|
||||
func (m *Snapshot) String() string { return proto.CompactTextString(m) }
|
||||
func (*Snapshot) ProtoMessage() {}
|
||||
func (*Snapshot) Descriptor() ([]byte, []int) { return fileDescriptorRecord, []int{1} }
|
||||
func (m *Snapshot) Reset() { *m = Snapshot{} }
|
||||
func (m *Snapshot) String() string { return proto.CompactTextString(m) }
|
||||
func (*Snapshot) ProtoMessage() {}
|
||||
func (*Snapshot) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_bf94fd919e302a1d, []int{1}
|
||||
}
|
||||
func (m *Snapshot) XXX_Unmarshal(b []byte) error {
|
||||
return m.Unmarshal(b)
|
||||
}
|
||||
func (m *Snapshot) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
if deterministic {
|
||||
return xxx_messageInfo_Snapshot.Marshal(b, m, deterministic)
|
||||
} else {
|
||||
b = b[:cap(b)]
|
||||
n, err := m.MarshalToSizedBuffer(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return b[:n], nil
|
||||
}
|
||||
}
|
||||
func (m *Snapshot) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Snapshot.Merge(m, src)
|
||||
}
|
||||
func (m *Snapshot) XXX_Size() int {
|
||||
return m.Size()
|
||||
}
|
||||
func (m *Snapshot) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Snapshot.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Snapshot proto.InternalMessageInfo
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*Record)(nil), "walpb.Record")
|
||||
proto.RegisterType((*Snapshot)(nil), "walpb.Snapshot")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("record.proto", fileDescriptor_bf94fd919e302a1d) }
|
||||
|
||||
var fileDescriptor_bf94fd919e302a1d = []byte{
|
||||
// 186 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x29, 0x4a, 0x4d, 0xce,
|
||||
0x2f, 0x4a, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x2d, 0x4f, 0xcc, 0x29, 0x48, 0x92,
|
||||
0x12, 0x49, 0xcf, 0x4f, 0xcf, 0x07, 0x8b, 0xe8, 0x83, 0x58, 0x10, 0x49, 0x25, 0x3f, 0x2e, 0xb6,
|
||||
0x20, 0xb0, 0x62, 0x21, 0x09, 0x2e, 0x96, 0x92, 0xca, 0x82, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d,
|
||||
0x66, 0x27, 0x96, 0x13, 0xf7, 0xe4, 0x19, 0x82, 0xc0, 0x22, 0x42, 0x62, 0x5c, 0xcc, 0xc9, 0x45,
|
||||
0xc9, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xbc, 0x50, 0x09, 0x90, 0x80, 0x90, 0x10, 0x17, 0x4b, 0x4a,
|
||||
0x62, 0x49, 0xa2, 0x04, 0xb3, 0x02, 0xa3, 0x06, 0x4f, 0x10, 0x98, 0xad, 0xe4, 0xc0, 0xc5, 0x11,
|
||||
0x9c, 0x97, 0x58, 0x50, 0x9c, 0x91, 0x5f, 0x22, 0x24, 0xc5, 0xc5, 0x9a, 0x99, 0x97, 0x92, 0x5a,
|
||||
0x01, 0x36, 0x92, 0x05, 0xaa, 0x13, 0x22, 0x04, 0xb6, 0x2d, 0xb5, 0x28, 0x17, 0x6c, 0x28, 0x0b,
|
||||
0xdc, 0xb6, 0xd4, 0xa2, 0x5c, 0x27, 0x91, 0x13, 0x0f, 0xe5, 0x18, 0x4e, 0x3c, 0x92, 0x63, 0xbc,
|
||||
0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x19, 0x8f, 0xe5, 0x18, 0x00, 0x01, 0x00, 0x00,
|
||||
0xff, 0xff, 0x7f, 0x5e, 0x5c, 0x46, 0xd3, 0x00, 0x00, 0x00,
|
||||
}
|
||||
|
||||
func (m *Record) Marshal() (dAtA []byte, err error) {
|
||||
size := m.Size()
|
||||
dAtA = make([]byte, size)
|
||||
n, err := m.MarshalTo(dAtA)
|
||||
n, err := m.MarshalToSizedBuffer(dAtA[:size])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -74,32 +141,39 @@ func (m *Record) Marshal() (dAtA []byte, err error) {
|
|||
}
|
||||
|
||||
func (m *Record) MarshalTo(dAtA []byte) (int, error) {
|
||||
var i int
|
||||
size := m.Size()
|
||||
return m.MarshalToSizedBuffer(dAtA[:size])
|
||||
}
|
||||
|
||||
func (m *Record) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i := len(dAtA)
|
||||
_ = i
|
||||
var l int
|
||||
_ = l
|
||||
dAtA[i] = 0x8
|
||||
i++
|
||||
i = encodeVarintRecord(dAtA, i, uint64(m.Type))
|
||||
dAtA[i] = 0x10
|
||||
i++
|
||||
i = encodeVarintRecord(dAtA, i, uint64(m.Crc))
|
||||
if m.Data != nil {
|
||||
dAtA[i] = 0x1a
|
||||
i++
|
||||
i = encodeVarintRecord(dAtA, i, uint64(len(m.Data)))
|
||||
i += copy(dAtA[i:], m.Data)
|
||||
}
|
||||
if m.XXX_unrecognized != nil {
|
||||
i += copy(dAtA[i:], m.XXX_unrecognized)
|
||||
i -= len(m.XXX_unrecognized)
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
return i, nil
|
||||
if m.Data != nil {
|
||||
i -= len(m.Data)
|
||||
copy(dAtA[i:], m.Data)
|
||||
i = encodeVarintRecord(dAtA, i, uint64(len(m.Data)))
|
||||
i--
|
||||
dAtA[i] = 0x1a
|
||||
}
|
||||
i = encodeVarintRecord(dAtA, i, uint64(m.Crc))
|
||||
i--
|
||||
dAtA[i] = 0x10
|
||||
i = encodeVarintRecord(dAtA, i, uint64(m.Type))
|
||||
i--
|
||||
dAtA[i] = 0x8
|
||||
return len(dAtA) - i, nil
|
||||
}
|
||||
|
||||
func (m *Snapshot) Marshal() (dAtA []byte, err error) {
|
||||
size := m.Size()
|
||||
dAtA = make([]byte, size)
|
||||
n, err := m.MarshalTo(dAtA)
|
||||
n, err := m.MarshalToSizedBuffer(dAtA[:size])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -107,32 +181,43 @@ func (m *Snapshot) Marshal() (dAtA []byte, err error) {
|
|||
}
|
||||
|
||||
func (m *Snapshot) MarshalTo(dAtA []byte) (int, error) {
|
||||
var i int
|
||||
size := m.Size()
|
||||
return m.MarshalToSizedBuffer(dAtA[:size])
|
||||
}
|
||||
|
||||
func (m *Snapshot) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i := len(dAtA)
|
||||
_ = i
|
||||
var l int
|
||||
_ = l
|
||||
dAtA[i] = 0x8
|
||||
i++
|
||||
i = encodeVarintRecord(dAtA, i, uint64(m.Index))
|
||||
dAtA[i] = 0x10
|
||||
i++
|
||||
i = encodeVarintRecord(dAtA, i, uint64(m.Term))
|
||||
if m.XXX_unrecognized != nil {
|
||||
i += copy(dAtA[i:], m.XXX_unrecognized)
|
||||
i -= len(m.XXX_unrecognized)
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
return i, nil
|
||||
i = encodeVarintRecord(dAtA, i, uint64(m.Term))
|
||||
i--
|
||||
dAtA[i] = 0x10
|
||||
i = encodeVarintRecord(dAtA, i, uint64(m.Index))
|
||||
i--
|
||||
dAtA[i] = 0x8
|
||||
return len(dAtA) - i, nil
|
||||
}
|
||||
|
||||
func encodeVarintRecord(dAtA []byte, offset int, v uint64) int {
|
||||
offset -= sovRecord(v)
|
||||
base := offset
|
||||
for v >= 1<<7 {
|
||||
dAtA[offset] = uint8(v&0x7f | 0x80)
|
||||
v >>= 7
|
||||
offset++
|
||||
}
|
||||
dAtA[offset] = uint8(v)
|
||||
return offset + 1
|
||||
return base
|
||||
}
|
||||
func (m *Record) Size() (n int) {
|
||||
if m == nil {
|
||||
return 0
|
||||
}
|
||||
var l int
|
||||
_ = l
|
||||
n += 1 + sovRecord(uint64(m.Type))
|
||||
|
@ -148,6 +233,9 @@ func (m *Record) Size() (n int) {
|
|||
}
|
||||
|
||||
func (m *Snapshot) Size() (n int) {
|
||||
if m == nil {
|
||||
return 0
|
||||
}
|
||||
var l int
|
||||
_ = l
|
||||
n += 1 + sovRecord(uint64(m.Index))
|
||||
|
@ -159,14 +247,7 @@ func (m *Snapshot) Size() (n int) {
|
|||
}
|
||||
|
||||
func sovRecord(x uint64) (n int) {
|
||||
for {
|
||||
n++
|
||||
x >>= 7
|
||||
if x == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
return n
|
||||
return (math_bits.Len64(x|1) + 6) / 7
|
||||
}
|
||||
func sozRecord(x uint64) (n int) {
|
||||
return sovRecord(uint64((x << 1) ^ uint64((int64(x) >> 63))))
|
||||
|
@ -186,7 +267,7 @@ func (m *Record) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
wire |= (uint64(b) & 0x7F) << shift
|
||||
wire |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -214,7 +295,7 @@ func (m *Record) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.Type |= (int64(b) & 0x7F) << shift
|
||||
m.Type |= int64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -233,7 +314,7 @@ func (m *Record) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.Crc |= (uint32(b) & 0x7F) << shift
|
||||
m.Crc |= uint32(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -252,7 +333,7 @@ func (m *Record) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
byteLen |= (int(b) & 0x7F) << shift
|
||||
byteLen |= int(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -261,6 +342,9 @@ func (m *Record) Unmarshal(dAtA []byte) error {
|
|||
return ErrInvalidLengthRecord
|
||||
}
|
||||
postIndex := iNdEx + byteLen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthRecord
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
|
@ -278,6 +362,9 @@ func (m *Record) Unmarshal(dAtA []byte) error {
|
|||
if skippy < 0 {
|
||||
return ErrInvalidLengthRecord
|
||||
}
|
||||
if (iNdEx + skippy) < 0 {
|
||||
return ErrInvalidLengthRecord
|
||||
}
|
||||
if (iNdEx + skippy) > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
|
@ -306,7 +393,7 @@ func (m *Snapshot) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
wire |= (uint64(b) & 0x7F) << shift
|
||||
wire |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -334,7 +421,7 @@ func (m *Snapshot) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.Index |= (uint64(b) & 0x7F) << shift
|
||||
m.Index |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -353,7 +440,7 @@ func (m *Snapshot) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.Term |= (uint64(b) & 0x7F) << shift
|
||||
m.Term |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -367,6 +454,9 @@ func (m *Snapshot) Unmarshal(dAtA []byte) error {
|
|||
if skippy < 0 {
|
||||
return ErrInvalidLengthRecord
|
||||
}
|
||||
if (iNdEx + skippy) < 0 {
|
||||
return ErrInvalidLengthRecord
|
||||
}
|
||||
if (iNdEx + skippy) > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
|
@ -434,10 +524,13 @@ func skipRecord(dAtA []byte) (n int, err error) {
|
|||
break
|
||||
}
|
||||
}
|
||||
iNdEx += length
|
||||
if length < 0 {
|
||||
return 0, ErrInvalidLengthRecord
|
||||
}
|
||||
iNdEx += length
|
||||
if iNdEx < 0 {
|
||||
return 0, ErrInvalidLengthRecord
|
||||
}
|
||||
return iNdEx, nil
|
||||
case 3:
|
||||
for {
|
||||
|
@ -466,6 +559,9 @@ func skipRecord(dAtA []byte) (n int, err error) {
|
|||
return 0, err
|
||||
}
|
||||
iNdEx = start + next
|
||||
if iNdEx < 0 {
|
||||
return 0, ErrInvalidLengthRecord
|
||||
}
|
||||
}
|
||||
return iNdEx, nil
|
||||
case 4:
|
||||
|
@ -484,21 +580,3 @@ var (
|
|||
ErrInvalidLengthRecord = fmt.Errorf("proto: negative length found during unmarshaling")
|
||||
ErrIntOverflowRecord = fmt.Errorf("proto: integer overflow")
|
||||
)
|
||||
|
||||
func init() { proto.RegisterFile("record.proto", fileDescriptorRecord) }
|
||||
|
||||
var fileDescriptorRecord = []byte{
|
||||
// 186 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x29, 0x4a, 0x4d, 0xce,
|
||||
0x2f, 0x4a, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x2d, 0x4f, 0xcc, 0x29, 0x48, 0x92,
|
||||
0x12, 0x49, 0xcf, 0x4f, 0xcf, 0x07, 0x8b, 0xe8, 0x83, 0x58, 0x10, 0x49, 0x25, 0x3f, 0x2e, 0xb6,
|
||||
0x20, 0xb0, 0x62, 0x21, 0x09, 0x2e, 0x96, 0x92, 0xca, 0x82, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d,
|
||||
0x66, 0x27, 0x96, 0x13, 0xf7, 0xe4, 0x19, 0x82, 0xc0, 0x22, 0x42, 0x62, 0x5c, 0xcc, 0xc9, 0x45,
|
||||
0xc9, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xbc, 0x50, 0x09, 0x90, 0x80, 0x90, 0x10, 0x17, 0x4b, 0x4a,
|
||||
0x62, 0x49, 0xa2, 0x04, 0xb3, 0x02, 0xa3, 0x06, 0x4f, 0x10, 0x98, 0xad, 0xe4, 0xc0, 0xc5, 0x11,
|
||||
0x9c, 0x97, 0x58, 0x50, 0x9c, 0x91, 0x5f, 0x22, 0x24, 0xc5, 0xc5, 0x9a, 0x99, 0x97, 0x92, 0x5a,
|
||||
0x01, 0x36, 0x92, 0x05, 0xaa, 0x13, 0x22, 0x04, 0xb6, 0x2d, 0xb5, 0x28, 0x17, 0x6c, 0x28, 0x0b,
|
||||
0xdc, 0xb6, 0xd4, 0xa2, 0x5c, 0x27, 0x91, 0x13, 0x0f, 0xe5, 0x18, 0x4e, 0x3c, 0x92, 0x63, 0xbc,
|
||||
0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x19, 0x8f, 0xe5, 0x18, 0x00, 0x01, 0x00, 0x00,
|
||||
0xff, 0xff, 0x7f, 0x5e, 0x5c, 0x46, 0xd3, 0x00, 0x00, 0x00,
|
||||
}
|
||||
|
|
5
vendor/github.com/ugorji/go/LICENSE → vendor/github.com/json-iterator/go/LICENSE
generated
vendored
|
@ -1,7 +1,6 @@
|
|||
The MIT License (MIT)
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2012-2015 Ugorji Nwoke.
|
||||
All rights reserved.
|
||||
Copyright (c) 2016 json-iterator
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
87
vendor/github.com/json-iterator/go/README.md
generated
vendored
Normal file
|
@ -0,0 +1,87 @@
|
|||
[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge)
|
||||
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](https://pkg.go.dev/github.com/json-iterator/go)
|
||||
[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go)
|
||||
[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go)
|
||||
[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go)
|
||||
[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE)
|
||||
[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
|
||||
|
||||
A high-performance, 100% compatible drop-in replacement for "encoding/json"
|
||||
|
||||
You can also use thrift-like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)
|
||||
|
||||
# Benchmark
|
||||
|
||||
![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
|
||||
|
||||
Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
|
||||
|
||||
Raw Result (easyjson requires static code generation)
|
||||
|
||||
| | ns/op | allocation bytes | allocation times |
|
||||
| --------------- | ----------- | ---------------- | ---------------- |
|
||||
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
|
||||
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
|
||||
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
|
||||
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
|
||||
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
|
||||
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
|
||||
|
||||
Always benchmark with your own workload.
|
||||
The result depends heavily on the data input.
|
||||
|
||||
# Usage
|
||||
|
||||
100% compatibility with standard lib
|
||||
|
||||
Replace
|
||||
|
||||
```go
|
||||
import "encoding/json"
|
||||
json.Marshal(&data)
|
||||
```
|
||||
|
||||
with
|
||||
|
||||
```go
|
||||
import jsoniter "github.com/json-iterator/go"
|
||||
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
json.Marshal(&data)
|
||||
```
|
||||
|
||||
Replace
|
||||
|
||||
```go
|
||||
import "encoding/json"
|
||||
json.Unmarshal(input, &data)
|
||||
```
|
||||
|
||||
with
|
||||
|
||||
```go
|
||||
import jsoniter "github.com/json-iterator/go"
|
||||
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
json.Unmarshal(input, &data)
|
||||
```
|
||||
|
||||
[More documentation](http://jsoniter.com/migrate-from-go-std.html)
|
||||
|
||||
# How to get
|
||||
|
||||
```
|
||||
go get github.com/json-iterator/go
|
||||
```
|
||||
|
||||
# Contributions Welcome!
|
||||
|
||||
Contributors
|
||||
|
||||
- [thockin](https://github.com/thockin)
|
||||
- [mattn](https://github.com/mattn)
|
||||
- [cch123](https://github.com/cch123)
|
||||
- [Oleg Shaldybin](https://github.com/olegshaldybin)
|
||||
- [Jason Toffaletti](https://github.com/toffaletti)
|
||||
|
||||
Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
|
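Beyond the drop-in `Marshal`/`Unmarshal` calls shown above, the package also exposes `Get`, a lazy path accessor implemented by the vendored `adapter.go` and `any.go` below. A minimal sketch of how it can be used; the sample JSON and field names are made up for illustration:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Hypothetical payload, used only to illustrate path-based access.
	input := []byte(`{"users":[{"name":"alice","age":30}]}`)

	// Get walks the path lazily and returns an Any value; string elements
	// select object fields and ints select array indexes.
	name := jsoniter.Get(input, "users", 0, "name").ToString()
	age := jsoniter.Get(input, "users", 0, "age").ToInt()
	fmt.Println(name, age) // alice 30
}
```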
150
vendor/github.com/json-iterator/go/adapter.go
generated
vendored
Normal file
|
@ -0,0 +1,150 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
// RawMessage is provided so jsoniter can replace encoding/json as a drop-in
|
||||
type RawMessage []byte
|
||||
|
||||
// Unmarshal adapts to json/encoding Unmarshal API
|
||||
//
|
||||
// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
|
||||
// Refer to https://godoc.org/encoding/json#Unmarshal for more information
|
||||
func Unmarshal(data []byte, v interface{}) error {
|
||||
return ConfigDefault.Unmarshal(data, v)
|
||||
}
|
||||
|
||||
// UnmarshalFromString is a convenient method to read from string instead of []byte
|
||||
func UnmarshalFromString(str string, v interface{}) error {
|
||||
return ConfigDefault.UnmarshalFromString(str, v)
|
||||
}
|
||||
|
||||
// Get is a quick method to get a value from a deeply nested JSON structure
|
||||
func Get(data []byte, path ...interface{}) Any {
|
||||
return ConfigDefault.Get(data, path...)
|
||||
}
|
||||
|
||||
// Marshal adapts to json/encoding Marshal API
|
||||
//
|
||||
// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API
|
||||
// Refer to https://godoc.org/encoding/json#Marshal for more information
|
||||
func Marshal(v interface{}) ([]byte, error) {
|
||||
return ConfigDefault.Marshal(v)
|
||||
}
|
||||
|
||||
// MarshalIndent same as json.MarshalIndent. Prefix is not supported.
|
||||
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
|
||||
return ConfigDefault.MarshalIndent(v, prefix, indent)
|
||||
}
|
||||
|
||||
// MarshalToString convenient method to write as string instead of []byte
|
||||
func MarshalToString(v interface{}) (string, error) {
|
||||
return ConfigDefault.MarshalToString(v)
|
||||
}
|
||||
|
||||
// NewDecoder adapts to json/stream NewDecoder API.
|
||||
//
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// Instead of an encoding/json Decoder, a jsoniter Decoder is returned
|
||||
// Refer to https://godoc.org/encoding/json#NewDecoder for more information
|
||||
func NewDecoder(reader io.Reader) *Decoder {
|
||||
return ConfigDefault.NewDecoder(reader)
|
||||
}
|
||||
|
||||
// Decoder reads and decodes JSON values from an input stream.
|
||||
// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress)
|
||||
type Decoder struct {
|
||||
iter *Iterator
|
||||
}
|
||||
|
||||
// Decode decodes the next JSON value into the value pointed to by obj
|
||||
func (adapter *Decoder) Decode(obj interface{}) error {
|
||||
if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
|
||||
if !adapter.iter.loadMore() {
|
||||
return io.EOF
|
||||
}
|
||||
}
|
||||
adapter.iter.ReadVal(obj)
|
||||
err := adapter.iter.Error
|
||||
if err == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return adapter.iter.Error
|
||||
}
|
||||
|
||||
// More reports whether there is another element available to decode
|
||||
func (adapter *Decoder) More() bool {
|
||||
iter := adapter.iter
|
||||
if iter.Error != nil {
|
||||
return false
|
||||
}
|
||||
c := iter.nextToken()
|
||||
if c == 0 {
|
||||
return false
|
||||
}
|
||||
iter.unreadByte()
|
||||
return c != ']' && c != '}'
|
||||
}
|
||||
|
||||
// Buffered returns a reader over the data remaining in the buffer
|
||||
func (adapter *Decoder) Buffered() io.Reader {
|
||||
remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
|
||||
return bytes.NewReader(remaining)
|
||||
}
|
||||
|
||||
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
||||
// Number instead of as a float64.
|
||||
func (adapter *Decoder) UseNumber() {
|
||||
cfg := adapter.iter.cfg.configBeforeFrozen
|
||||
cfg.UseNumber = true
|
||||
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
|
||||
}
|
||||
|
||||
// DisallowUnknownFields causes the Decoder to return an error when the destination
|
||||
// is a struct and the input contains object keys which do not match any
|
||||
// non-ignored, exported fields in the destination.
|
||||
func (adapter *Decoder) DisallowUnknownFields() {
|
||||
cfg := adapter.iter.cfg.configBeforeFrozen
|
||||
cfg.DisallowUnknownFields = true
|
||||
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
|
||||
}
|
||||
|
||||
// NewEncoder same as json.NewEncoder
|
||||
func NewEncoder(writer io.Writer) *Encoder {
|
||||
return ConfigDefault.NewEncoder(writer)
|
||||
}
|
||||
|
||||
// Encoder same as json.Encoder
|
||||
type Encoder struct {
|
||||
stream *Stream
|
||||
}
|
||||
|
||||
// Encode encodes the given value as JSON to the underlying io.Writer
|
||||
func (adapter *Encoder) Encode(val interface{}) error {
|
||||
adapter.stream.WriteVal(val)
|
||||
adapter.stream.WriteRaw("\n")
|
||||
adapter.stream.Flush()
|
||||
return adapter.stream.Error
|
||||
}
|
||||
|
||||
// SetIndent sets the indentation. Prefix is not supported
|
||||
func (adapter *Encoder) SetIndent(prefix, indent string) {
|
||||
config := adapter.stream.cfg.configBeforeFrozen
|
||||
config.IndentionStep = len(indent)
|
||||
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
|
||||
}
|
||||
|
||||
// SetEscapeHTML controls HTML escaping (enabled by default); set to false to disable
|
||||
func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
|
||||
config := adapter.stream.cfg.configBeforeFrozen
|
||||
config.EscapeHTML = escapeHTML
|
||||
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
|
||||
}
|
||||
|
||||
// Valid reports whether data is a valid JSON encoding.
|
||||
func Valid(data []byte) bool {
|
||||
return ConfigDefault.Valid(data)
|
||||
}
|
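The adapter above mirrors `encoding/json`'s streaming `Decoder`/`Encoder` surface (`NewDecoder`, `Decode`, `NewEncoder`, `Encode`, `SetEscapeHTML`). A minimal sketch of driving it against the code shown; the `Event` type and sample input are hypothetical:

```go
package main

import (
	"fmt"
	"os"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// Event is a hypothetical record type used only to exercise the adapter API.
type Event struct {
	Name  string `json:"name"`
	Count int    `json:"count"`
}

func main() {
	// NewDecoder/Decode behave like encoding/json's streaming decoder.
	dec := jsoniter.NewDecoder(strings.NewReader(`{"name":"tick","count":3}`))
	var ev Event
	if err := dec.Decode(&ev); err != nil {
		panic(err)
	}
	fmt.Println("decoded:", ev.Name, ev.Count)

	// Encode writes the value followed by a newline, matching the adapter's
	// WriteVal + WriteRaw("\n") + Flush sequence shown above.
	enc := jsoniter.NewEncoder(os.Stdout)
	enc.SetEscapeHTML(false)
	if err := enc.Encode(ev); err != nil {
		panic(err)
	}
}
```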
325
vendor/github.com/json-iterator/go/any.go
generated
vendored
Normal file
|
@ -0,0 +1,325 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// Any generic object representation.
|
||||
// The lazy json implementation holds []byte and parses lazily.
|
||||
type Any interface {
|
||||
LastError() error
|
||||
ValueType() ValueType
|
||||
MustBeValid() Any
|
||||
ToBool() bool
|
||||
ToInt() int
|
||||
ToInt32() int32
|
||||
ToInt64() int64
|
||||
ToUint() uint
|
||||
ToUint32() uint32
|
||||
ToUint64() uint64
|
||||
ToFloat32() float32
|
||||
ToFloat64() float64
|
||||
ToString() string
|
||||
ToVal(val interface{})
|
||||
Get(path ...interface{}) Any
|
||||
Size() int
|
||||
Keys() []string
|
||||
GetInterface() interface{}
|
||||
WriteTo(stream *Stream)
|
||||
}
|
||||
|
||||
type baseAny struct{}
|
||||
|
||||
func (any *baseAny) Get(path ...interface{}) Any {
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
|
||||
}
|
||||
|
||||
func (any *baseAny) Size() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *baseAny) Keys() []string {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
func (any *baseAny) ToVal(obj interface{}) {
|
||||
panic("not implemented")
|
||||
}
|
||||
|
||||
// WrapInt32 turn int32 into Any interface
|
||||
func WrapInt32(val int32) Any {
|
||||
return &int32Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapInt64 turn int64 into Any interface
|
||||
func WrapInt64(val int64) Any {
|
||||
return &int64Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapUint32 turn uint32 into Any interface
|
||||
func WrapUint32(val uint32) Any {
|
||||
return &uint32Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapUint64 turn uint64 into Any interface
|
||||
func WrapUint64(val uint64) Any {
|
||||
return &uint64Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapFloat64 turn float64 into Any interface
|
||||
func WrapFloat64(val float64) Any {
|
||||
return &floatAny{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapString turn string into Any interface
|
||||
func WrapString(val string) Any {
|
||||
return &stringAny{baseAny{}, val}
|
||||
}
|
||||
|
||||
// Wrap turns a Go value into the Any interface
|
||||
func Wrap(val interface{}) Any {
|
||||
if val == nil {
|
||||
return &nilAny{}
|
||||
}
|
||||
asAny, isAny := val.(Any)
|
||||
if isAny {
|
||||
return asAny
|
||||
}
|
||||
typ := reflect2.TypeOf(val)
|
||||
switch typ.Kind() {
|
||||
case reflect.Slice:
|
||||
return wrapArray(val)
|
||||
case reflect.Struct:
|
||||
return wrapStruct(val)
|
||||
case reflect.Map:
|
||||
return wrapMap(val)
|
||||
case reflect.String:
|
||||
return WrapString(val.(string))
|
||||
case reflect.Int:
|
||||
if strconv.IntSize == 32 {
|
||||
return WrapInt32(int32(val.(int)))
|
||||
}
|
||||
return WrapInt64(int64(val.(int)))
|
||||
case reflect.Int8:
|
||||
return WrapInt32(int32(val.(int8)))
|
||||
case reflect.Int16:
|
||||
return WrapInt32(int32(val.(int16)))
|
||||
case reflect.Int32:
|
||||
return WrapInt32(val.(int32))
|
||||
case reflect.Int64:
|
||||
return WrapInt64(val.(int64))
|
||||
case reflect.Uint:
|
||||
if strconv.IntSize == 32 {
|
||||
return WrapUint32(uint32(val.(uint)))
|
||||
}
|
||||
return WrapUint64(uint64(val.(uint)))
|
||||
case reflect.Uintptr:
|
||||
if ptrSize == 32 {
|
||||
return WrapUint32(uint32(val.(uintptr)))
|
||||
}
|
||||
return WrapUint64(uint64(val.(uintptr)))
|
||||
case reflect.Uint8:
|
||||
return WrapUint32(uint32(val.(uint8)))
|
||||
case reflect.Uint16:
|
||||
return WrapUint32(uint32(val.(uint16)))
|
||||
case reflect.Uint32:
|
||||
return WrapUint32(uint32(val.(uint32)))
|
||||
case reflect.Uint64:
|
||||
return WrapUint64(val.(uint64))
|
||||
case reflect.Float32:
|
||||
return WrapFloat64(float64(val.(float32)))
|
||||
case reflect.Float64:
|
||||
return WrapFloat64(val.(float64))
|
||||
case reflect.Bool:
|
||||
if val.(bool) == true {
|
||||
return &trueAny{}
|
||||
}
|
||||
return &falseAny{}
|
||||
}
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)}
|
||||
}
|
||||
|
||||
// ReadAny reads the next JSON element as an Any object. It is a better json.RawMessage.
|
||||
func (iter *Iterator) ReadAny() Any {
|
||||
return iter.readAny()
|
||||
}
|
||||
|
||||
func (iter *Iterator) readAny() Any {
|
||||
c := iter.nextToken()
|
||||
switch c {
|
||||
case '"':
|
||||
iter.unreadByte()
|
||||
return &stringAny{baseAny{}, iter.ReadString()}
|
||||
case 'n':
|
||||
iter.skipThreeBytes('u', 'l', 'l') // null
|
||||
return &nilAny{}
|
||||
case 't':
|
||||
iter.skipThreeBytes('r', 'u', 'e') // true
|
||||
return &trueAny{}
|
||||
case 'f':
|
||||
iter.skipFourBytes('a', 'l', 's', 'e') // false
|
||||
return &falseAny{}
|
||||
case '{':
|
||||
return iter.readObjectAny()
|
||||
case '[':
|
||||
return iter.readArrayAny()
|
||||
case '-':
|
||||
return iter.readNumberAny(false)
|
||||
case 0:
|
||||
return &invalidAny{baseAny{}, errors.New("input is empty")}
|
||||
default:
|
||||
return iter.readNumberAny(true)
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) readNumberAny(positive bool) Any {
|
||||
iter.startCapture(iter.head - 1)
|
||||
iter.skipNumber()
|
||||
lazyBuf := iter.stopCapture()
|
||||
return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
|
||||
}
|
||||
|
||||
func (iter *Iterator) readObjectAny() Any {
|
||||
iter.startCapture(iter.head - 1)
|
||||
iter.skipObject()
|
||||
lazyBuf := iter.stopCapture()
|
||||
return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
|
||||
}
|
||||
|
||||
func (iter *Iterator) readArrayAny() Any {
|
||||
iter.startCapture(iter.head - 1)
|
||||
iter.skipArray()
|
||||
lazyBuf := iter.stopCapture()
|
||||
return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
|
||||
}
|
||||
|
||||
func locateObjectField(iter *Iterator, target string) []byte {
|
||||
var found []byte
|
||||
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
|
||||
if field == target {
|
||||
found = iter.SkipAndReturnBytes()
|
||||
return false
|
||||
}
|
||||
iter.Skip()
|
||||
return true
|
||||
})
|
||||
return found
|
||||
}
|
||||
|
||||
func locateArrayElement(iter *Iterator, target int) []byte {
|
||||
var found []byte
|
||||
n := 0
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
if n == target {
|
||||
found = iter.SkipAndReturnBytes()
|
||||
return false
|
||||
}
|
||||
iter.Skip()
|
||||
n++
|
||||
return true
|
||||
})
|
||||
return found
|
||||
}
|
||||
|
||||
func locatePath(iter *Iterator, path []interface{}) Any {
|
||||
for i, pathKeyObj := range path {
|
||||
switch pathKey := pathKeyObj.(type) {
|
||||
case string:
|
||||
valueBytes := locateObjectField(iter, pathKey)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path[i:])
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
case int:
|
||||
valueBytes := locateArrayElement(iter, pathKey)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path[i:])
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
case int32:
|
||||
if '*' == pathKey {
|
||||
return iter.readAny().Get(path[i:]...)
|
||||
}
|
||||
return newInvalidAny(path[i:])
|
||||
default:
|
||||
return newInvalidAny(path[i:])
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return &invalidAny{baseAny{}, iter.Error}
|
||||
}
|
||||
return iter.readAny()
|
||||
}
|
||||
|
||||
var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem()
|
||||
|
||||
func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
if typ == anyType {
|
||||
return &directAnyCodec{}
|
||||
}
|
||||
if typ.Implements(anyType) {
|
||||
return &anyCodec{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
if typ == anyType {
|
||||
return &directAnyCodec{}
|
||||
}
|
||||
if typ.Implements(anyType) {
|
||||
return &anyCodec{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type anyCodec struct {
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
panic("not implemented")
|
||||
}
|
||||
|
||||
func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := codec.valType.UnsafeIndirect(ptr)
|
||||
any := obj.(Any)
|
||||
any.WriteTo(stream)
|
||||
}
|
||||
|
||||
func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
obj := codec.valType.UnsafeIndirect(ptr)
|
||||
any := obj.(Any)
|
||||
return any.Size() == 0
|
||||
}
|
||||
|
||||
type directAnyCodec struct {
|
||||
}
|
||||
|
||||
func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
*(*Any)(ptr) = iter.readAny()
|
||||
}
|
||||
|
||||
func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
any := *(*Any)(ptr)
|
||||
if any == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
any.WriteTo(stream)
|
||||
}
|
||||
|
||||
func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
any := *(*Any)(ptr)
|
||||
return any.Size() == 0
|
||||
}
|
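`Wrap` and the `Any` interface defined above allow inspecting values without committing to a target type; `Wrap` picks a concrete wrapper by reflect kind. A minimal sketch, with illustrative values:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Wrap lifts an ordinary Go value into the Any interface.
	m := jsoniter.Wrap(map[string]int{"x": 1, "y": 2})
	fmt.Println(m.ValueType() == jsoniter.ObjectValue) // true
	fmt.Println(m.Size())                              // 2
	fmt.Println(m.Get("x").ToInt())                    // 1

	// WrapString keeps the raw text and converts lazily on demand.
	s := jsoniter.WrapString("42")
	fmt.Println(s.ToInt64()) // 42
}
```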
278
vendor/github.com/json-iterator/go/any_array.go
generated
vendored
Normal file
|
@ -0,0 +1,278 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type arrayLazyAny struct {
|
||||
baseAny
|
||||
cfg *frozenConfig
|
||||
buf []byte
|
||||
err error
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ValueType() ValueType {
|
||||
return ArrayValue
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToBool() bool {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.ReadArray()
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToInt() int {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToInt32() int32 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToInt64() int64 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToUint() uint {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToUint32() uint32 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToUint64() uint64 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToFloat32() float32 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToFloat64() float64 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToString() string {
|
||||
return *(*string)(unsafe.Pointer(&any.buf))
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToVal(val interface{}) {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadVal(val)
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case int:
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
valueBytes := locateArrayElement(iter, firstPath)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
return locatePath(iter, path[1:])
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
arr := make([]Any, 0)
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
found := iter.readAny().Get(path[1:]...)
|
||||
if found.ValueType() != InvalidValue {
|
||||
arr = append(arr, found)
|
||||
}
|
||||
return true
|
||||
})
|
||||
return wrapArray(arr)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) Size() int {
|
||||
size := 0
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
size++
|
||||
iter.Skip()
|
||||
return true
|
||||
})
|
||||
return size
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) WriteTo(stream *Stream) {
|
||||
stream.Write(any.buf)
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) GetInterface() interface{} {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.Read()
|
||||
}
|
||||
|
||||
type arrayAny struct {
|
||||
baseAny
|
||||
val reflect.Value
|
||||
}
|
||||
|
||||
func wrapArray(val interface{}) *arrayAny {
|
||||
return &arrayAny{baseAny{}, reflect.ValueOf(val)}
|
||||
}
|
||||
|
||||
func (any *arrayAny) ValueType() ValueType {
|
||||
return ArrayValue
|
||||
}
|
||||
|
||||
func (any *arrayAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *arrayAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToBool() bool {
|
||||
return any.val.Len() != 0
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToInt() int {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToInt32() int32 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToInt64() int64 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToUint() uint {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToUint32() uint32 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToUint64() uint64 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToFloat32() float32 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToFloat64() float64 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToString() string {
|
||||
str, _ := MarshalToString(any.val.Interface())
|
||||
return str
|
||||
}
|
||||
|
||||
func (any *arrayAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case int:
|
||||
if firstPath < 0 || firstPath >= any.val.Len() {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
return Wrap(any.val.Index(firstPath).Interface())
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
mappedAll := make([]Any, 0)
|
||||
for i := 0; i < any.val.Len(); i++ {
|
||||
mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
|
||||
if mapped.ValueType() != InvalidValue {
|
||||
mappedAll = append(mappedAll, mapped)
|
||||
}
|
||||
}
|
||||
return wrapArray(mappedAll)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *arrayAny) Size() int {
|
||||
return any.val.Len()
|
||||
}
|
||||
|
||||
func (any *arrayAny) WriteTo(stream *Stream) {
|
||||
stream.WriteVal(any.val)
|
||||
}
|
||||
|
||||
func (any *arrayAny) GetInterface() interface{} {
|
||||
return any.val.Interface()
|
||||
}
|
137
vendor/github.com/json-iterator/go/any_bool.go
generated
vendored
Normal file
|
@ -0,0 +1,137 @@
|
|||
package jsoniter
|
||||
|
||||
type trueAny struct {
|
||||
baseAny
|
||||
}
|
||||
|
||||
func (any *trueAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *trueAny) ToBool() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *trueAny) ToInt() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToInt32() int32 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToInt64() int64 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToUint() uint {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToUint32() uint32 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToUint64() uint64 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToFloat32() float32 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToFloat64() float64 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToString() string {
|
||||
return "true"
|
||||
}
|
||||
|
||||
func (any *trueAny) WriteTo(stream *Stream) {
|
||||
stream.WriteTrue()
|
||||
}
|
||||
|
||||
func (any *trueAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *trueAny) GetInterface() interface{} {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *trueAny) ValueType() ValueType {
|
||||
return BoolValue
|
||||
}
|
||||
|
||||
func (any *trueAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
type falseAny struct {
|
||||
baseAny
|
||||
}
|
||||
|
||||
func (any *falseAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *falseAny) ToBool() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *falseAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToString() string {
|
||||
return "false"
|
||||
}
|
||||
|
||||
func (any *falseAny) WriteTo(stream *Stream) {
|
||||
stream.WriteFalse()
|
||||
}
|
||||
|
||||
func (any *falseAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *falseAny) GetInterface() interface{} {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *falseAny) ValueType() ValueType {
|
||||
return BoolValue
|
||||
}
|
||||
|
||||
func (any *falseAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
83
vendor/github.com/json-iterator/go/any_float.go
generated
vendored
Normal file
|
@ -0,0 +1,83 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type floatAny struct {
|
||||
baseAny
|
||||
val float64
|
||||
}
|
||||
|
||||
func (any *floatAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *floatAny) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *floatAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *floatAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *floatAny) ToBool() bool {
|
||||
return any.ToFloat64() != 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToUint() uint {
|
||||
if any.val > 0 {
|
||||
return uint(any.val)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToUint32() uint32 {
|
||||
if any.val > 0 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToUint64() uint64 {
|
||||
if any.val > 0 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToFloat64() float64 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *floatAny) ToString() string {
|
||||
return strconv.FormatFloat(any.val, 'E', -1, 64)
|
||||
}
|
||||
|
||||
func (any *floatAny) WriteTo(stream *Stream) {
|
||||
stream.WriteFloat64(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
74
vendor/github.com/json-iterator/go/any_int32.go
generated
vendored
Normal file
|
@ -0,0 +1,74 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type int32Any struct {
|
||||
baseAny
|
||||
val int32
|
||||
}
|
||||
|
||||
func (any *int32Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int32Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *int32Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *int32Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *int32Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToInt32() int32 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *int32Any) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToUint32() uint32 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToUint64() uint64 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToString() string {
|
||||
return strconv.FormatInt(int64(any.val), 10)
|
||||
}
|
||||
|
||||
func (any *int32Any) WriteTo(stream *Stream) {
|
||||
stream.WriteInt32(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int32Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
74
vendor/github.com/json-iterator/go/any_int64.go
generated
vendored
Normal file
|
@ -0,0 +1,74 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type int64Any struct {
|
||||
baseAny
|
||||
val int64
|
||||
}
|
||||
|
||||
func (any *int64Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int64Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *int64Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *int64Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *int64Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToInt64() int64 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *int64Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToUint32() uint32 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToUint64() uint64 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToString() string {
|
||||
return strconv.FormatInt(any.val, 10)
|
||||
}
|
||||
|
||||
func (any *int64Any) WriteTo(stream *Stream) {
|
||||
stream.WriteInt64(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int64Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
82
vendor/github.com/json-iterator/go/any_invalid.go
generated
vendored
Normal file
|
@ -0,0 +1,82 @@
|
|||
package jsoniter
|
||||
|
||||
import "fmt"
|
||||
|
||||
type invalidAny struct {
|
||||
baseAny
|
||||
err error
|
||||
}
|
||||
|
||||
func newInvalidAny(path []interface{}) *invalidAny {
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
|
||||
}
|
||||
|
||||
func (any *invalidAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *invalidAny) ValueType() ValueType {
|
||||
return InvalidValue
|
||||
}
|
||||
|
||||
func (any *invalidAny) MustBeValid() Any {
|
||||
panic(any.err)
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToBool() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToString() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (any *invalidAny) WriteTo(stream *Stream) {
|
||||
}
|
||||
|
||||
func (any *invalidAny) Get(path ...interface{}) Any {
|
||||
if any.err == nil {
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)}
|
||||
}
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
|
||||
}
|
||||
|
||||
func (any *invalidAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *invalidAny) GetInterface() interface{} {
|
||||
return nil
|
||||
}
|
69
vendor/github.com/json-iterator/go/any_nil.go
generated
vendored
Normal file
|
@ -0,0 +1,69 @@
|
|||
package jsoniter
|
||||
|
||||
type nilAny struct {
|
||||
baseAny
|
||||
}
|
||||
|
||||
func (any *nilAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *nilAny) ValueType() ValueType {
|
||||
return NilValue
|
||||
}
|
||||
|
||||
func (any *nilAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *nilAny) ToBool() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *nilAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *nilAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *nilAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *nilAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *nilAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *nilAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *nilAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *nilAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *nilAny) ToString() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (any *nilAny) WriteTo(stream *Stream) {
|
||||
stream.WriteNil()
|
||||
}
|
||||
|
||||
func (any *nilAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *nilAny) GetInterface() interface{} {
|
||||
return nil
|
||||
}
|
123
vendor/github.com/json-iterator/go/any_number.go
generated
vendored
Normal file
|
@ -0,0 +1,123 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"io"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type numberLazyAny struct {
|
||||
baseAny
|
||||
cfg *frozenConfig
|
||||
buf []byte
|
||||
err error
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToBool() bool {
|
||||
return any.ToFloat64() != 0
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToInt() int {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadInt()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToInt32() int32 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadInt32()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToInt64() int64 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadInt64()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToUint() uint {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadUint()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToUint32() uint32 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadUint32()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToUint64() uint64 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadUint64()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToFloat32() float32 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadFloat32()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToFloat64() float64 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadFloat64()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToString() string {
|
||||
return *(*string)(unsafe.Pointer(&any.buf))
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) WriteTo(stream *Stream) {
|
||||
stream.Write(any.buf)
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) GetInterface() interface{} {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.Read()
|
||||
}
|
374
vendor/github.com/json-iterator/go/any_object.go
generated
vendored
Normal file
|
@ -0,0 +1,374 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type objectLazyAny struct {
|
||||
baseAny
|
||||
cfg *frozenConfig
|
||||
buf []byte
|
||||
err error
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ValueType() ValueType {
|
||||
return ObjectValue
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToBool() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToString() string {
|
||||
return *(*string)(unsafe.Pointer(&any.buf))
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToVal(obj interface{}) {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadVal(obj)
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case string:
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
valueBytes := locateObjectField(iter, firstPath)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
return locatePath(iter, path[1:])
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
mappedAll := map[string]Any{}
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadMapCB(func(iter *Iterator, field string) bool {
|
||||
mapped := locatePath(iter, path[1:])
|
||||
if mapped.ValueType() != InvalidValue {
|
||||
mappedAll[field] = mapped
|
||||
}
|
||||
return true
|
||||
})
|
||||
return wrapMap(mappedAll)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) Keys() []string {
|
||||
keys := []string{}
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadMapCB(func(iter *Iterator, field string) bool {
|
||||
iter.Skip()
|
||||
keys = append(keys, field)
|
||||
return true
|
||||
})
|
||||
return keys
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) Size() int {
|
||||
size := 0
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
|
||||
iter.Skip()
|
||||
size++
|
||||
return true
|
||||
})
|
||||
return size
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) WriteTo(stream *Stream) {
|
||||
stream.Write(any.buf)
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) GetInterface() interface{} {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.Read()
|
||||
}
|
||||
|
||||
type objectAny struct {
|
||||
baseAny
|
||||
err error
|
||||
val reflect.Value
|
||||
}
|
||||
|
||||
func wrapStruct(val interface{}) *objectAny {
|
||||
return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
|
||||
}
|
||||
|
||||
func (any *objectAny) ValueType() ValueType {
|
||||
return ObjectValue
|
||||
}
|
||||
|
||||
func (any *objectAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *objectAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *objectAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *objectAny) ToBool() bool {
|
||||
return any.val.NumField() != 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToString() string {
|
||||
str, err := MarshalToString(any.val.Interface())
|
||||
any.err = err
|
||||
return str
|
||||
}
|
||||
|
||||
func (any *objectAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case string:
|
||||
field := any.val.FieldByName(firstPath)
|
||||
if !field.IsValid() {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
return Wrap(field.Interface())
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
mappedAll := map[string]Any{}
|
||||
for i := 0; i < any.val.NumField(); i++ {
|
||||
field := any.val.Field(i)
|
||||
if field.CanInterface() {
|
||||
mapped := Wrap(field.Interface()).Get(path[1:]...)
|
||||
if mapped.ValueType() != InvalidValue {
|
||||
mappedAll[any.val.Type().Field(i).Name] = mapped
|
||||
}
|
||||
}
|
||||
}
|
||||
return wrapMap(mappedAll)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *objectAny) Keys() []string {
|
||||
keys := make([]string, 0, any.val.NumField())
|
||||
for i := 0; i < any.val.NumField(); i++ {
|
||||
keys = append(keys, any.val.Type().Field(i).Name)
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
func (any *objectAny) Size() int {
|
||||
return any.val.NumField()
|
||||
}
|
||||
|
||||
func (any *objectAny) WriteTo(stream *Stream) {
|
||||
stream.WriteVal(any.val)
|
||||
}
|
||||
|
||||
func (any *objectAny) GetInterface() interface{} {
|
||||
return any.val.Interface()
|
||||
}
|
||||
|
||||
type mapAny struct {
|
||||
baseAny
|
||||
err error
|
||||
val reflect.Value
|
||||
}
|
||||
|
||||
func wrapMap(val interface{}) *mapAny {
|
||||
return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
|
||||
}
|
||||
|
||||
func (any *mapAny) ValueType() ValueType {
|
||||
return ObjectValue
|
||||
}
|
||||
|
||||
func (any *mapAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *mapAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *mapAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *mapAny) ToBool() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *mapAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToString() string {
|
||||
str, err := MarshalToString(any.val.Interface())
|
||||
any.err = err
|
||||
return str
|
||||
}
|
||||
|
||||
func (any *mapAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
mappedAll := map[string]Any{}
|
||||
for _, key := range any.val.MapKeys() {
|
||||
keyAsStr := key.String()
|
||||
element := Wrap(any.val.MapIndex(key).Interface())
|
||||
mapped := element.Get(path[1:]...)
|
||||
if mapped.ValueType() != InvalidValue {
|
||||
mappedAll[keyAsStr] = mapped
|
||||
}
|
||||
}
|
||||
return wrapMap(mappedAll)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
value := any.val.MapIndex(reflect.ValueOf(firstPath))
|
||||
if !value.IsValid() {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
return Wrap(value.Interface())
|
||||
}
|
||||
}
|
||||
|
||||
func (any *mapAny) Keys() []string {
|
||||
keys := make([]string, 0, any.val.Len())
|
||||
for _, key := range any.val.MapKeys() {
|
||||
keys = append(keys, key.String())
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
func (any *mapAny) Size() int {
|
||||
return any.val.Len()
|
||||
}
|
||||
|
||||
func (any *mapAny) WriteTo(stream *Stream) {
|
||||
stream.WriteVal(any.val)
|
||||
}
|
||||
|
||||
func (any *mapAny) GetInterface() interface{} {
|
||||
return any.val.Interface()
|
||||
}
|
166
vendor/github.com/json-iterator/go/any_str.go
generated
vendored
Normal file
|
@ -0,0 +1,166 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type stringAny struct {
|
||||
baseAny
|
||||
val string
|
||||
}
|
||||
|
||||
func (any *stringAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
|
||||
}
|
||||
|
||||
func (any *stringAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *stringAny) ValueType() ValueType {
|
||||
return StringValue
|
||||
}
|
||||
|
||||
func (any *stringAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *stringAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *stringAny) ToBool() bool {
|
||||
str := any.ToString()
|
||||
if str == "0" {
|
||||
return false
|
||||
}
|
||||
for _, c := range str {
|
||||
switch c {
|
||||
case ' ', '\n', '\r', '\t':
|
||||
default:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *stringAny) ToInt() int {
|
||||
return int(any.ToInt64())
|
||||
|
||||
}
|
||||
|
||||
func (any *stringAny) ToInt32() int32 {
|
||||
return int32(any.ToInt64())
|
||||
}
|
||||
|
||||
func (any *stringAny) ToInt64() int64 {
|
||||
if any.val == "" {
|
||||
return 0
|
||||
}
|
||||
|
||||
flag := 1
|
||||
startPos := 0
|
||||
if any.val[0] == '+' || any.val[0] == '-' {
|
||||
startPos = 1
|
||||
}
|
||||
|
||||
if any.val[0] == '-' {
|
||||
flag = -1
|
||||
}
|
||||
|
||||
endPos := startPos
|
||||
for i := startPos; i < len(any.val); i++ {
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64)
|
||||
return int64(flag) * parsed
|
||||
}
|
||||
|
||||
func (any *stringAny) ToUint() uint {
|
||||
return uint(any.ToUint64())
|
||||
}
|
||||
|
||||
func (any *stringAny) ToUint32() uint32 {
|
||||
return uint32(any.ToUint64())
|
||||
}
|
||||
|
||||
func (any *stringAny) ToUint64() uint64 {
|
||||
if any.val == "" {
|
||||
return 0
|
||||
}
|
||||
|
||||
startPos := 0
|
||||
|
||||
if any.val[0] == '-' {
|
||||
return 0
|
||||
}
|
||||
if any.val[0] == '+' {
|
||||
startPos = 1
|
||||
}
|
||||
|
||||
endPos := startPos
|
||||
for i := startPos; i < len(any.val); i++ {
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64)
|
||||
return parsed
|
||||
}
|
||||
|
||||
func (any *stringAny) ToFloat32() float32 {
|
||||
return float32(any.ToFloat64())
|
||||
}
|
||||
|
||||
func (any *stringAny) ToFloat64() float64 {
|
||||
if len(any.val) == 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
// first char invalid
|
||||
if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') {
|
||||
return 0
|
||||
}
|
||||
|
||||
// extract valid num expression from string
|
||||
// eg 123true => 123, -12.12xxa => -12.12
|
||||
endPos := 1
|
||||
for i := 1; i < len(any.val); i++ {
|
||||
if any.val[i] == '.' || any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' {
|
||||
endPos = i + 1
|
||||
continue
|
||||
}
|
||||
|
||||
// end position is the first char which is not digit
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
} else {
|
||||
endPos = i
|
||||
break
|
||||
}
|
||||
}
|
||||
parsed, _ := strconv.ParseFloat(any.val[:endPos], 64)
|
||||
return parsed
|
||||
}
|
||||
|
||||
func (any *stringAny) ToString() string {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *stringAny) WriteTo(stream *Stream) {
|
||||
stream.WriteString(any.val)
|
||||
}
|
||||
|
||||
func (any *stringAny) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
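The stringAny type above backs the Any API whenever a path resolves to a JSON string; its numeric conversions parse the leading sign and digits and ignore trailing garbage. A minimal usage sketch follows (the JSON literal and field names are illustrative assumptions, not part of the vendored change):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"port": "8080", "debug": ""}`)

	// Get on a JSON string value returns an Any backed by stringAny.
	port := jsoniter.Get(data, "port").ToInt64()  // "8080" -> 8080
	debug := jsoniter.Get(data, "debug").ToBool() // empty string -> false

	fmt.Println(port, debug)
}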
74
vendor/github.com/json-iterator/go/any_uint32.go
generated
vendored
Normal file
|
@@ -0,0 +1,74 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type uint32Any struct {
|
||||
baseAny
|
||||
val uint32
|
||||
}
|
||||
|
||||
func (any *uint32Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *uint32Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *uint32Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToUint32() uint32 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToUint64() uint64 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToString() string {
|
||||
return strconv.FormatInt(int64(any.val), 10)
|
||||
}
|
||||
|
||||
func (any *uint32Any) WriteTo(stream *Stream) {
|
||||
stream.WriteUint32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *uint32Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
74
vendor/github.com/json-iterator/go/any_uint64.go
generated
vendored
Normal file
|
@@ -0,0 +1,74 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type uint64Any struct {
|
||||
baseAny
|
||||
val uint64
|
||||
}
|
||||
|
||||
func (any *uint64Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *uint64Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *uint64Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToUint32() uint32 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToUint64() uint64 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToString() string {
|
||||
return strconv.FormatUint(any.val, 10)
|
||||
}
|
||||
|
||||
func (any *uint64Any) WriteTo(stream *Stream) {
|
||||
stream.WriteUint64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *uint64Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
375
vendor/github.com/json-iterator/go/config.go
generated
vendored
Normal file
|
@@ -0,0 +1,375 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"reflect"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/modern-go/concurrent"
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
// Config customizes how the API should behave.
|
||||
// The API is created from Config by Froze.
|
||||
type Config struct {
|
||||
IndentionStep int
|
||||
MarshalFloatWith6Digits bool
|
||||
EscapeHTML bool
|
||||
SortMapKeys bool
|
||||
UseNumber bool
|
||||
DisallowUnknownFields bool
|
||||
TagKey string
|
||||
OnlyTaggedField bool
|
||||
ValidateJsonRawMessage bool
|
||||
ObjectFieldMustBeSimpleString bool
|
||||
CaseSensitive bool
|
||||
}
|
||||
|
||||
// API the public interface of this package.
|
||||
// Its primary functions are Marshal and Unmarshal.
|
||||
type API interface {
|
||||
IteratorPool
|
||||
StreamPool
|
||||
MarshalToString(v interface{}) (string, error)
|
||||
Marshal(v interface{}) ([]byte, error)
|
||||
MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
|
||||
UnmarshalFromString(str string, v interface{}) error
|
||||
Unmarshal(data []byte, v interface{}) error
|
||||
Get(data []byte, path ...interface{}) Any
|
||||
NewEncoder(writer io.Writer) *Encoder
|
||||
NewDecoder(reader io.Reader) *Decoder
|
||||
Valid(data []byte) bool
|
||||
RegisterExtension(extension Extension)
|
||||
DecoderOf(typ reflect2.Type) ValDecoder
|
||||
EncoderOf(typ reflect2.Type) ValEncoder
|
||||
}
|
||||
|
||||
// ConfigDefault the default API
|
||||
var ConfigDefault = Config{
|
||||
EscapeHTML: true,
|
||||
}.Froze()
|
||||
|
||||
// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior
|
||||
var ConfigCompatibleWithStandardLibrary = Config{
|
||||
EscapeHTML: true,
|
||||
SortMapKeys: true,
|
||||
ValidateJsonRawMessage: true,
|
||||
}.Froze()
|
||||
|
||||
// ConfigFastest marshals floats with only 6 digits of precision
|
||||
var ConfigFastest = Config{
|
||||
EscapeHTML: false,
|
||||
MarshalFloatWith6Digits: true, // will lose precision
|
||||
ObjectFieldMustBeSimpleString: true, // do not unescape object field
|
||||
}.Froze()
|
||||
|
||||
type frozenConfig struct {
|
||||
configBeforeFrozen Config
|
||||
sortMapKeys bool
|
||||
indentionStep int
|
||||
objectFieldMustBeSimpleString bool
|
||||
onlyTaggedField bool
|
||||
disallowUnknownFields bool
|
||||
decoderCache *concurrent.Map
|
||||
encoderCache *concurrent.Map
|
||||
encoderExtension Extension
|
||||
decoderExtension Extension
|
||||
extraExtensions []Extension
|
||||
streamPool *sync.Pool
|
||||
iteratorPool *sync.Pool
|
||||
caseSensitive bool
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) initCache() {
|
||||
cfg.decoderCache = concurrent.NewMap()
|
||||
cfg.encoderCache = concurrent.NewMap()
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
|
||||
cfg.decoderCache.Store(cacheKey, decoder)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
|
||||
cfg.encoderCache.Store(cacheKey, encoder)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
|
||||
decoder, found := cfg.decoderCache.Load(cacheKey)
|
||||
if found {
|
||||
return decoder.(ValDecoder)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
|
||||
encoder, found := cfg.encoderCache.Load(cacheKey)
|
||||
if found {
|
||||
return encoder.(ValEncoder)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var cfgCache = concurrent.NewMap()
|
||||
|
||||
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
|
||||
obj, found := cfgCache.Load(cfg)
|
||||
if found {
|
||||
return obj.(*frozenConfig)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
|
||||
cfgCache.Store(cfg, frozenConfig)
|
||||
}
|
||||
|
||||
// Froze forges an API from the config
|
||||
func (cfg Config) Froze() API {
|
||||
api := &frozenConfig{
|
||||
sortMapKeys: cfg.SortMapKeys,
|
||||
indentionStep: cfg.IndentionStep,
|
||||
objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
|
||||
onlyTaggedField: cfg.OnlyTaggedField,
|
||||
disallowUnknownFields: cfg.DisallowUnknownFields,
|
||||
caseSensitive: cfg.CaseSensitive,
|
||||
}
|
||||
api.streamPool = &sync.Pool{
|
||||
New: func() interface{} {
|
||||
return NewStream(api, nil, 512)
|
||||
},
|
||||
}
|
||||
api.iteratorPool = &sync.Pool{
|
||||
New: func() interface{} {
|
||||
return NewIterator(api)
|
||||
},
|
||||
}
|
||||
api.initCache()
|
||||
encoderExtension := EncoderExtension{}
|
||||
decoderExtension := DecoderExtension{}
|
||||
if cfg.MarshalFloatWith6Digits {
|
||||
api.marshalFloatWith6Digits(encoderExtension)
|
||||
}
|
||||
if cfg.EscapeHTML {
|
||||
api.escapeHTML(encoderExtension)
|
||||
}
|
||||
if cfg.UseNumber {
|
||||
api.useNumber(decoderExtension)
|
||||
}
|
||||
if cfg.ValidateJsonRawMessage {
|
||||
api.validateJsonRawMessage(encoderExtension)
|
||||
}
|
||||
api.encoderExtension = encoderExtension
|
||||
api.decoderExtension = decoderExtension
|
||||
api.configBeforeFrozen = cfg
|
||||
return api
|
||||
}
|
||||
|
||||
func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
|
||||
api := getFrozenConfigFromCache(cfg)
|
||||
if api != nil {
|
||||
return api
|
||||
}
|
||||
api = cfg.Froze().(*frozenConfig)
|
||||
for _, extension := range extraExtensions {
|
||||
api.RegisterExtension(extension)
|
||||
}
|
||||
addFrozenConfigToCache(cfg, api)
|
||||
return api
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
|
||||
encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
|
||||
rawMessage := *(*json.RawMessage)(ptr)
|
||||
iter := cfg.BorrowIterator([]byte(rawMessage))
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.Read()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
stream.WriteRaw("null")
|
||||
} else {
|
||||
stream.WriteRaw(string(rawMessage))
|
||||
}
|
||||
}, func(ptr unsafe.Pointer) bool {
|
||||
return len(*((*json.RawMessage)(ptr))) == 0
|
||||
}}
|
||||
extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
|
||||
extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
|
||||
extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
|
||||
exitingValue := *((*interface{})(ptr))
|
||||
if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
|
||||
iter.ReadVal(exitingValue)
|
||||
return
|
||||
}
|
||||
if iter.WhatIsNext() == NumberValue {
|
||||
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
|
||||
} else {
|
||||
*((*interface{})(ptr)) = iter.Read()
|
||||
}
|
||||
}}
|
||||
}
|
||||
func (cfg *frozenConfig) getTagKey() string {
|
||||
tagKey := cfg.configBeforeFrozen.TagKey
|
||||
if tagKey == "" {
|
||||
return "json"
|
||||
}
|
||||
return tagKey
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) RegisterExtension(extension Extension) {
|
||||
cfg.extraExtensions = append(cfg.extraExtensions, extension)
|
||||
copied := cfg.configBeforeFrozen
|
||||
cfg.configBeforeFrozen = copied
|
||||
}
|
||||
|
||||
type lossyFloat32Encoder struct {
|
||||
}
|
||||
|
||||
func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteFloat32Lossy(*((*float32)(ptr)))
|
||||
}
|
||||
|
||||
func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*float32)(ptr)) == 0
|
||||
}
|
||||
|
||||
type lossyFloat64Encoder struct {
|
||||
}
|
||||
|
||||
func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteFloat64Lossy(*((*float64)(ptr)))
|
||||
}
|
||||
|
||||
func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*float64)(ptr)) == 0
|
||||
}
|
||||
|
||||
// marshalFloatWith6Digits keeps 10**(-6) precision
|
||||
// for float variables for better performance.
|
||||
func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) {
|
||||
// for better performance
|
||||
extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{}
|
||||
extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{}
|
||||
}
|
||||
|
||||
type htmlEscapedStringEncoder struct {
|
||||
}
|
||||
|
||||
func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
str := *((*string)(ptr))
|
||||
stream.WriteStringWithHTMLEscaped(str)
|
||||
}
|
||||
|
||||
func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*string)(ptr)) == ""
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) {
|
||||
encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) cleanDecoders() {
|
||||
typeDecoders = map[string]ValDecoder{}
|
||||
fieldDecoders = map[string]ValDecoder{}
|
||||
*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) cleanEncoders() {
|
||||
typeEncoders = map[string]ValEncoder{}
|
||||
fieldEncoders = map[string]ValEncoder{}
|
||||
*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
|
||||
stream := cfg.BorrowStream(nil)
|
||||
defer cfg.ReturnStream(stream)
|
||||
stream.WriteVal(v)
|
||||
if stream.Error != nil {
|
||||
return "", stream.Error
|
||||
}
|
||||
return string(stream.Buffer()), nil
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
|
||||
stream := cfg.BorrowStream(nil)
|
||||
defer cfg.ReturnStream(stream)
|
||||
stream.WriteVal(v)
|
||||
if stream.Error != nil {
|
||||
return nil, stream.Error
|
||||
}
|
||||
result := stream.Buffer()
|
||||
copied := make([]byte, len(result))
|
||||
copy(copied, result)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
|
||||
if prefix != "" {
|
||||
panic("prefix is not supported")
|
||||
}
|
||||
for _, r := range indent {
|
||||
if r != ' ' {
|
||||
panic("indent can only be space")
|
||||
}
|
||||
}
|
||||
newCfg := cfg.configBeforeFrozen
|
||||
newCfg.IndentionStep = len(indent)
|
||||
return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
|
||||
data := []byte(str)
|
||||
iter := cfg.BorrowIterator(data)
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.ReadVal(v)
|
||||
c := iter.nextToken()
|
||||
if c == 0 {
|
||||
if iter.Error == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return iter.Error
|
||||
}
|
||||
iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
|
||||
return iter.Error
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
|
||||
iter := cfg.BorrowIterator(data)
|
||||
defer cfg.ReturnIterator(iter)
|
||||
return locatePath(iter, path)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
|
||||
iter := cfg.BorrowIterator(data)
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.ReadVal(v)
|
||||
c := iter.nextToken()
|
||||
if c == 0 {
|
||||
if iter.Error == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return iter.Error
|
||||
}
|
||||
iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
|
||||
return iter.Error
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
|
||||
stream := NewStream(cfg, writer, 512)
|
||||
return &Encoder{stream}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
|
||||
iter := Parse(cfg, reader, 512)
|
||||
return &Decoder{iter}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) Valid(data []byte) bool {
|
||||
iter := cfg.BorrowIterator(data)
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.Skip()
|
||||
return iter.Error == nil
|
||||
}
|
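config.go above is the heart of the package: a Config value is frozen by Froze() into an immutable API whose encoder and decoder caches are shared across calls, and the predeclared ConfigDefault, ConfigCompatibleWithStandardLibrary and ConfigFastest are just frozen presets. A minimal sketch of typical use (the server struct and its values are illustrative assumptions):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type server struct {
	Host string `json:"host"`
	Port int    `json:"port"`
}

func main() {
	// A frozen preset that mimics encoding/json behavior.
	json := jsoniter.ConfigCompatibleWithStandardLibrary

	out, err := json.MarshalToString(server{Host: "localhost", Port: 8080})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // {"host":"localhost","port":8080}

	// A custom API; Froze() builds the caches and extensions once.
	custom := jsoniter.Config{EscapeHTML: false, SortMapKeys: true}.Froze()

	var s server
	if err := custom.UnmarshalFromString(out, &s); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", s) // {Host:localhost Port:8080}
}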
11
vendor/github.com/json-iterator/go/go.mod
generated
vendored
Normal file
|
@@ -0,0 +1,11 @@
|
|||
module github.com/json-iterator/go
|
||||
|
||||
go 1.12
|
||||
|
||||
require (
|
||||
github.com/davecgh/go-spew v1.1.1
|
||||
github.com/google/gofuzz v1.0.0
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742
|
||||
github.com/stretchr/testify v1.3.0
|
||||
)
|
349
vendor/github.com/json-iterator/go/iter.go
generated
vendored
Normal file
|
@@ -0,0 +1,349 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
// ValueType the type for JSON element
|
||||
type ValueType int
|
||||
|
||||
const (
|
||||
// InvalidValue invalid JSON element
|
||||
InvalidValue ValueType = iota
|
||||
// StringValue JSON element "string"
|
||||
StringValue
|
||||
// NumberValue JSON element 100 or 0.10
|
||||
NumberValue
|
||||
// NilValue JSON element null
|
||||
NilValue
|
||||
// BoolValue JSON element true or false
|
||||
BoolValue
|
||||
// ArrayValue JSON element []
|
||||
ArrayValue
|
||||
// ObjectValue JSON element {}
|
||||
ObjectValue
|
||||
)
|
||||
|
||||
var hexDigits []byte
|
||||
var valueTypes []ValueType
|
||||
|
||||
func init() {
|
||||
hexDigits = make([]byte, 256)
|
||||
for i := 0; i < len(hexDigits); i++ {
|
||||
hexDigits[i] = 255
|
||||
}
|
||||
for i := '0'; i <= '9'; i++ {
|
||||
hexDigits[i] = byte(i - '0')
|
||||
}
|
||||
for i := 'a'; i <= 'f'; i++ {
|
||||
hexDigits[i] = byte((i - 'a') + 10)
|
||||
}
|
||||
for i := 'A'; i <= 'F'; i++ {
|
||||
hexDigits[i] = byte((i - 'A') + 10)
|
||||
}
|
||||
valueTypes = make([]ValueType, 256)
|
||||
for i := 0; i < len(valueTypes); i++ {
|
||||
valueTypes[i] = InvalidValue
|
||||
}
|
||||
valueTypes['"'] = StringValue
|
||||
valueTypes['-'] = NumberValue
|
||||
valueTypes['0'] = NumberValue
|
||||
valueTypes['1'] = NumberValue
|
||||
valueTypes['2'] = NumberValue
|
||||
valueTypes['3'] = NumberValue
|
||||
valueTypes['4'] = NumberValue
|
||||
valueTypes['5'] = NumberValue
|
||||
valueTypes['6'] = NumberValue
|
||||
valueTypes['7'] = NumberValue
|
||||
valueTypes['8'] = NumberValue
|
||||
valueTypes['9'] = NumberValue
|
||||
valueTypes['t'] = BoolValue
|
||||
valueTypes['f'] = BoolValue
|
||||
valueTypes['n'] = NilValue
|
||||
valueTypes['['] = ArrayValue
|
||||
valueTypes['{'] = ObjectValue
|
||||
}
|
||||
|
||||
// Iterator is an io.Reader-like object with JSON-specific read functions.
|
||||
// Errors are not returned as return values, but stored in the Error member on this iterator instance.
|
||||
type Iterator struct {
|
||||
cfg *frozenConfig
|
||||
reader io.Reader
|
||||
buf []byte
|
||||
head int
|
||||
tail int
|
||||
depth int
|
||||
captureStartedAt int
|
||||
captured []byte
|
||||
Error error
|
||||
Attachment interface{} // open for customized decoder
|
||||
}
|
||||
|
||||
// NewIterator creates an empty Iterator instance
|
||||
func NewIterator(cfg API) *Iterator {
|
||||
return &Iterator{
|
||||
cfg: cfg.(*frozenConfig),
|
||||
reader: nil,
|
||||
buf: nil,
|
||||
head: 0,
|
||||
tail: 0,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Parse creates an Iterator instance from io.Reader
|
||||
func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
|
||||
return &Iterator{
|
||||
cfg: cfg.(*frozenConfig),
|
||||
reader: reader,
|
||||
buf: make([]byte, bufSize),
|
||||
head: 0,
|
||||
tail: 0,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// ParseBytes creates an Iterator instance from byte array
|
||||
func ParseBytes(cfg API, input []byte) *Iterator {
|
||||
return &Iterator{
|
||||
cfg: cfg.(*frozenConfig),
|
||||
reader: nil,
|
||||
buf: input,
|
||||
head: 0,
|
||||
tail: len(input),
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// ParseString creates an Iterator instance from string
|
||||
func ParseString(cfg API, input string) *Iterator {
|
||||
return ParseBytes(cfg, []byte(input))
|
||||
}
|
||||
|
||||
// Pool returns a pool that can provide more iterators with the same configuration
|
||||
func (iter *Iterator) Pool() IteratorPool {
|
||||
return iter.cfg
|
||||
}
|
||||
|
||||
// Reset reuses the iterator instance by specifying another reader
|
||||
func (iter *Iterator) Reset(reader io.Reader) *Iterator {
|
||||
iter.reader = reader
|
||||
iter.head = 0
|
||||
iter.tail = 0
|
||||
iter.depth = 0
|
||||
return iter
|
||||
}
|
||||
|
||||
// ResetBytes reuses the iterator instance by specifying another byte array as input
|
||||
func (iter *Iterator) ResetBytes(input []byte) *Iterator {
|
||||
iter.reader = nil
|
||||
iter.buf = input
|
||||
iter.head = 0
|
||||
iter.tail = len(input)
|
||||
iter.depth = 0
|
||||
return iter
|
||||
}
|
||||
|
||||
// WhatIsNext gets the ValueType of the next JSON element without consuming it
|
||||
func (iter *Iterator) WhatIsNext() ValueType {
|
||||
valueType := valueTypes[iter.nextToken()]
|
||||
iter.unreadByte()
|
||||
return valueType
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
switch c {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
continue
|
||||
}
|
||||
iter.head = i
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (iter *Iterator) isObjectEnd() bool {
|
||||
c := iter.nextToken()
|
||||
if c == ',' {
|
||||
return false
|
||||
}
|
||||
if c == '}' {
|
||||
return true
|
||||
}
|
||||
iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
|
||||
return true
|
||||
}
|
||||
|
||||
func (iter *Iterator) nextToken() byte {
|
||||
// a variation of skip whitespaces, returning the next non-whitespace token
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
switch c {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
continue
|
||||
}
|
||||
iter.head = i + 1
|
||||
return c
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
return 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ReportError records an error in the iterator instance with the current position.
|
||||
func (iter *Iterator) ReportError(operation string, msg string) {
|
||||
if iter.Error != nil {
|
||||
if iter.Error != io.EOF {
|
||||
return
|
||||
}
|
||||
}
|
||||
peekStart := iter.head - 10
|
||||
if peekStart < 0 {
|
||||
peekStart = 0
|
||||
}
|
||||
peekEnd := iter.head + 10
|
||||
if peekEnd > iter.tail {
|
||||
peekEnd = iter.tail
|
||||
}
|
||||
parsing := string(iter.buf[peekStart:peekEnd])
|
||||
contextStart := iter.head - 50
|
||||
if contextStart < 0 {
|
||||
contextStart = 0
|
||||
}
|
||||
contextEnd := iter.head + 50
|
||||
if contextEnd > iter.tail {
|
||||
contextEnd = iter.tail
|
||||
}
|
||||
context := string(iter.buf[contextStart:contextEnd])
|
||||
iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
|
||||
operation, msg, iter.head-peekStart, parsing, context)
|
||||
}
|
||||
|
||||
// CurrentBuffer gets the current buffer as a string for debugging purposes
|
||||
func (iter *Iterator) CurrentBuffer() string {
|
||||
peekStart := iter.head - 10
|
||||
if peekStart < 0 {
|
||||
peekStart = 0
|
||||
}
|
||||
return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
|
||||
string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
|
||||
}
|
||||
|
||||
func (iter *Iterator) readByte() (ret byte) {
|
||||
if iter.head == iter.tail {
|
||||
if iter.loadMore() {
|
||||
ret = iter.buf[iter.head]
|
||||
iter.head++
|
||||
return ret
|
||||
}
|
||||
return 0
|
||||
}
|
||||
ret = iter.buf[iter.head]
|
||||
iter.head++
|
||||
return ret
|
||||
}
|
||||
|
||||
func (iter *Iterator) loadMore() bool {
|
||||
if iter.reader == nil {
|
||||
if iter.Error == nil {
|
||||
iter.head = iter.tail
|
||||
iter.Error = io.EOF
|
||||
}
|
||||
return false
|
||||
}
|
||||
if iter.captured != nil {
|
||||
iter.captured = append(iter.captured,
|
||||
iter.buf[iter.captureStartedAt:iter.tail]...)
|
||||
iter.captureStartedAt = 0
|
||||
}
|
||||
for {
|
||||
n, err := iter.reader.Read(iter.buf)
|
||||
if n == 0 {
|
||||
if err != nil {
|
||||
if iter.Error == nil {
|
||||
iter.Error = err
|
||||
}
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
iter.head = 0
|
||||
iter.tail = n
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) unreadByte() {
|
||||
if iter.Error != nil {
|
||||
return
|
||||
}
|
||||
iter.head--
|
||||
return
|
||||
}
|
||||
|
||||
// Read reads the next JSON element as a generic interface{}.
|
||||
func (iter *Iterator) Read() interface{} {
|
||||
valueType := iter.WhatIsNext()
|
||||
switch valueType {
|
||||
case StringValue:
|
||||
return iter.ReadString()
|
||||
case NumberValue:
|
||||
if iter.cfg.configBeforeFrozen.UseNumber {
|
||||
return json.Number(iter.readNumberAsString())
|
||||
}
|
||||
return iter.ReadFloat64()
|
||||
case NilValue:
|
||||
iter.skipFourBytes('n', 'u', 'l', 'l')
|
||||
return nil
|
||||
case BoolValue:
|
||||
return iter.ReadBool()
|
||||
case ArrayValue:
|
||||
arr := []interface{}{}
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
var elem interface{}
|
||||
iter.ReadVal(&elem)
|
||||
arr = append(arr, elem)
|
||||
return true
|
||||
})
|
||||
return arr
|
||||
case ObjectValue:
|
||||
obj := map[string]interface{}{}
|
||||
iter.ReadMapCB(func(Iter *Iterator, field string) bool {
|
||||
var elem interface{}
|
||||
iter.ReadVal(&elem)
|
||||
obj[field] = elem
|
||||
return true
|
||||
})
|
||||
return obj
|
||||
default:
|
||||
iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
|
||||
const maxDepth = 10000
|
||||
|
||||
func (iter *Iterator) incrementDepth() (success bool) {
|
||||
iter.depth++
|
||||
if iter.depth <= maxDepth {
|
||||
return true
|
||||
}
|
||||
iter.ReportError("incrementDepth", "exceeded max depth")
|
||||
return false
|
||||
}
|
||||
|
||||
func (iter *Iterator) decrementDepth() (success bool) {
|
||||
iter.depth--
|
||||
if iter.depth >= 0 {
|
||||
return true
|
||||
}
|
||||
iter.ReportError("decrementDepth", "unexpected negative nesting")
|
||||
return false
|
||||
}
|
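iter.go above defines the pull-style Iterator: nextToken/WhatIsNext peek at the next element, Read decodes it generically, and incrementDepth/decrementDepth enforce the maxDepth limit. A small sketch of direct iterator use (the input literal is an illustrative assumption):

package main

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1, "two", null, true]`)

	// Read decodes the next element into interface{}, dispatching on WhatIsNext;
	// numbers come back as float64 unless Config.UseNumber is set.
	v := iter.Read()
	if iter.Error != nil && iter.Error != io.EOF {
		panic(iter.Error)
	}
	fmt.Printf("%#v\n", v)
}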
64
vendor/github.com/json-iterator/go/iter_array.go
generated
vendored
Normal file
|
@@ -0,0 +1,64 @@
|
|||
package jsoniter
|
||||
|
||||
// ReadArray reads an array element and tells whether the array has more elements to read.
|
||||
func (iter *Iterator) ReadArray() (ret bool) {
|
||||
c := iter.nextToken()
|
||||
switch c {
|
||||
case 'n':
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
return false // null
|
||||
case '[':
|
||||
c = iter.nextToken()
|
||||
if c != ']' {
|
||||
iter.unreadByte()
|
||||
return true
|
||||
}
|
||||
return false
|
||||
case ']':
|
||||
return false
|
||||
case ',':
|
||||
return true
|
||||
default:
|
||||
iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// ReadArrayCB reads an array with a callback
|
||||
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
|
||||
c := iter.nextToken()
|
||||
if c == '[' {
|
||||
if !iter.incrementDepth() {
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c != ']' {
|
||||
iter.unreadByte()
|
||||
if !callback(iter) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
for c == ',' {
|
||||
if !callback(iter) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
}
|
||||
if c != ']' {
|
||||
iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
return true // null
|
||||
}
|
||||
iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
|
||||
return false
|
||||
}
|
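ReadArray above returns true while elements remain, so callers alternate it with a typed read; ReadArrayCB is the callback variant that also tracks nesting depth. A usage sketch (the input literal is an illustrative assumption):

package main

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[3, 1, 4, 1, 5]`)

	sum := 0
	// ReadArray reports whether another element follows; ReadInt consumes it.
	for iter.ReadArray() {
		sum += iter.ReadInt()
	}
	if iter.Error != nil && iter.Error != io.EOF {
		panic(iter.Error)
	}
	fmt.Println(sum) // 14
}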
339
vendor/github.com/json-iterator/go/iter_float.go
generated
vendored
Normal file
|
@@ -0,0 +1,339 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"math/big"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
var floatDigits []int8
|
||||
|
||||
const invalidCharForNumber = int8(-1)
|
||||
const endOfNumber = int8(-2)
|
||||
const dotInNumber = int8(-3)
|
||||
|
||||
func init() {
|
||||
floatDigits = make([]int8, 256)
|
||||
for i := 0; i < len(floatDigits); i++ {
|
||||
floatDigits[i] = invalidCharForNumber
|
||||
}
|
||||
for i := int8('0'); i <= int8('9'); i++ {
|
||||
floatDigits[i] = i - int8('0')
|
||||
}
|
||||
floatDigits[','] = endOfNumber
|
||||
floatDigits[']'] = endOfNumber
|
||||
floatDigits['}'] = endOfNumber
|
||||
floatDigits[' '] = endOfNumber
|
||||
floatDigits['\t'] = endOfNumber
|
||||
floatDigits['\n'] = endOfNumber
|
||||
floatDigits['.'] = dotInNumber
|
||||
}
|
||||
|
||||
// ReadBigFloat read big.Float
|
||||
func (iter *Iterator) ReadBigFloat() (ret *big.Float) {
|
||||
str := iter.readNumberAsString()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return nil
|
||||
}
|
||||
prec := 64
|
||||
if len(str) > prec {
|
||||
prec = len(str)
|
||||
}
|
||||
val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero)
|
||||
if err != nil {
|
||||
iter.Error = err
|
||||
return nil
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
// ReadBigInt read big.Int
|
||||
func (iter *Iterator) ReadBigInt() (ret *big.Int) {
|
||||
str := iter.readNumberAsString()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return nil
|
||||
}
|
||||
ret = big.NewInt(0)
|
||||
var success bool
|
||||
ret, success = ret.SetString(str, 10)
|
||||
if !success {
|
||||
iter.ReportError("ReadBigInt", "invalid big int")
|
||||
return nil
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
//ReadFloat32 read float32
|
||||
func (iter *Iterator) ReadFloat32() (ret float32) {
|
||||
c := iter.nextToken()
|
||||
if c == '-' {
|
||||
return -iter.readPositiveFloat32()
|
||||
}
|
||||
iter.unreadByte()
|
||||
return iter.readPositiveFloat32()
|
||||
}
|
||||
|
||||
func (iter *Iterator) readPositiveFloat32() (ret float32) {
|
||||
i := iter.head
|
||||
// first char
|
||||
if i == iter.tail {
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
c := iter.buf[i]
|
||||
i++
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
case invalidCharForNumber:
|
||||
return iter.readFloat32SlowPath()
|
||||
case endOfNumber:
|
||||
iter.ReportError("readFloat32", "empty number")
|
||||
return
|
||||
case dotInNumber:
|
||||
iter.ReportError("readFloat32", "leading dot is invalid")
|
||||
return
|
||||
case 0:
|
||||
if i == iter.tail {
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
c = iter.buf[i]
|
||||
switch c {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
iter.ReportError("readFloat32", "leading zero is invalid")
|
||||
return
|
||||
}
|
||||
}
|
||||
value := uint64(ind)
|
||||
// chars before dot
|
||||
non_decimal_loop:
|
||||
for ; i < iter.tail; i++ {
|
||||
c = iter.buf[i]
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
case invalidCharForNumber:
|
||||
return iter.readFloat32SlowPath()
|
||||
case endOfNumber:
|
||||
iter.head = i
|
||||
return float32(value)
|
||||
case dotInNumber:
|
||||
break non_decimal_loop
|
||||
}
|
||||
if value > uint64SafeToMultiple10 {
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
|
||||
}
|
||||
// chars after dot
|
||||
if c == '.' {
|
||||
i++
|
||||
decimalPlaces := 0
|
||||
if i == iter.tail {
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
for ; i < iter.tail; i++ {
|
||||
c = iter.buf[i]
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
case endOfNumber:
|
||||
if decimalPlaces > 0 && decimalPlaces < len(pow10) {
|
||||
iter.head = i
|
||||
return float32(float64(value) / float64(pow10[decimalPlaces]))
|
||||
}
|
||||
// too many decimal places
|
||||
return iter.readFloat32SlowPath()
|
||||
case invalidCharForNumber, dotInNumber:
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
decimalPlaces++
|
||||
if value > uint64SafeToMultiple10 {
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
value = (value << 3) + (value << 1) + uint64(ind)
|
||||
}
|
||||
}
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
|
||||
func (iter *Iterator) readNumberAsString() (ret string) {
|
||||
strBuf := [16]byte{}
|
||||
str := strBuf[0:0]
|
||||
load_loop:
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
switch c {
|
||||
case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
str = append(str, c)
|
||||
continue
|
||||
default:
|
||||
iter.head = i
|
||||
break load_loop
|
||||
}
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return
|
||||
}
|
||||
if len(str) == 0 {
|
||||
iter.ReportError("readNumberAsString", "invalid number")
|
||||
}
|
||||
return *(*string)(unsafe.Pointer(&str))
|
||||
}
|
||||
|
||||
func (iter *Iterator) readFloat32SlowPath() (ret float32) {
|
||||
str := iter.readNumberAsString()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return
|
||||
}
|
||||
errMsg := validateFloat(str)
|
||||
if errMsg != "" {
|
||||
iter.ReportError("readFloat32SlowPath", errMsg)
|
||||
return
|
||||
}
|
||||
val, err := strconv.ParseFloat(str, 32)
|
||||
if err != nil {
|
||||
iter.Error = err
|
||||
return
|
||||
}
|
||||
return float32(val)
|
||||
}
|
||||
|
||||
// ReadFloat64 read float64
|
||||
func (iter *Iterator) ReadFloat64() (ret float64) {
|
||||
c := iter.nextToken()
|
||||
if c == '-' {
|
||||
return -iter.readPositiveFloat64()
|
||||
}
|
||||
iter.unreadByte()
|
||||
return iter.readPositiveFloat64()
|
||||
}
|
||||
|
||||
func (iter *Iterator) readPositiveFloat64() (ret float64) {
|
||||
i := iter.head
|
||||
// first char
|
||||
if i == iter.tail {
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
c := iter.buf[i]
|
||||
i++
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
case invalidCharForNumber:
|
||||
return iter.readFloat64SlowPath()
|
||||
case endOfNumber:
|
||||
iter.ReportError("readFloat64", "empty number")
|
||||
return
|
||||
case dotInNumber:
|
||||
iter.ReportError("readFloat64", "leading dot is invalid")
|
||||
return
|
||||
case 0:
|
||||
if i == iter.tail {
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
c = iter.buf[i]
|
||||
switch c {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
iter.ReportError("readFloat64", "leading zero is invalid")
|
||||
return
|
||||
}
|
||||
}
|
||||
value := uint64(ind)
|
||||
// chars before dot
|
||||
non_decimal_loop:
|
||||
for ; i < iter.tail; i++ {
|
||||
c = iter.buf[i]
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
case invalidCharForNumber:
|
||||
return iter.readFloat64SlowPath()
|
||||
case endOfNumber:
|
||||
iter.head = i
|
||||
return float64(value)
|
||||
case dotInNumber:
|
||||
break non_decimal_loop
|
||||
}
|
||||
if value > uint64SafeToMultiple10 {
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
|
||||
}
|
||||
// chars after dot
|
||||
if c == '.' {
|
||||
i++
|
||||
decimalPlaces := 0
|
||||
if i == iter.tail {
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
for ; i < iter.tail; i++ {
|
||||
c = iter.buf[i]
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
case endOfNumber:
|
||||
if decimalPlaces > 0 && decimalPlaces < len(pow10) {
|
||||
iter.head = i
|
||||
return float64(value) / float64(pow10[decimalPlaces])
|
||||
}
|
||||
// too many decimal places
|
||||
return iter.readFloat64SlowPath()
|
||||
case invalidCharForNumber, dotInNumber:
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
decimalPlaces++
|
||||
if value > uint64SafeToMultiple10 {
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
value = (value << 3) + (value << 1) + uint64(ind)
|
||||
}
|
||||
}
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
|
||||
func (iter *Iterator) readFloat64SlowPath() (ret float64) {
|
||||
str := iter.readNumberAsString()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return
|
||||
}
|
||||
errMsg := validateFloat(str)
|
||||
if errMsg != "" {
|
||||
iter.ReportError("readFloat64SlowPath", errMsg)
|
||||
return
|
||||
}
|
||||
val, err := strconv.ParseFloat(str, 64)
|
||||
if err != nil {
|
||||
iter.Error = err
|
||||
return
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func validateFloat(str string) string {
|
||||
// strconv.ParseFloat does not validate `1.` or `1.e1`
|
||||
if len(str) == 0 {
|
||||
return "empty number"
|
||||
}
|
||||
if str[0] == '-' {
|
||||
return "-- is not valid"
|
||||
}
|
||||
dotPos := strings.IndexByte(str, '.')
|
||||
if dotPos != -1 {
|
||||
if dotPos == len(str)-1 {
|
||||
return "dot can not be last character"
|
||||
}
|
||||
switch str[dotPos+1] {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
default:
|
||||
return "missing digit after dot"
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// ReadNumber read json.Number
|
||||
func (iter *Iterator) ReadNumber() (ret json.Number) {
|
||||
return json.Number(iter.readNumberAsString())
|
||||
}
|
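The float reader above takes a fast integer/decimal path and falls back to readFloat64SlowPath (strconv.ParseFloat) for exponents, long fractions, or buffer boundaries, while ReadBigFloat keeps full precision. A usage sketch (the literals are illustrative assumptions):

package main

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Fast path: sign, digits and a short fraction are decoded without strconv.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `-12.25`)
	fmt.Println(iter.ReadFloat64()) // -12.25

	// ReadBigFloat keeps full precision for long literals.
	iter = jsoniter.ParseString(jsoniter.ConfigDefault, `3.14159265358979323846264338327950288`)
	pi := iter.ReadBigFloat()
	if iter.Error != nil && iter.Error != io.EOF {
		panic(iter.Error)
	}
	fmt.Println(pi.Text('g', 30))
}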
345
vendor/github.com/json-iterator/go/iter_int.go
generated
vendored
Normal file
|
@@ -0,0 +1,345 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"math"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
var intDigits []int8
|
||||
|
||||
const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
|
||||
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
|
||||
|
||||
func init() {
|
||||
intDigits = make([]int8, 256)
|
||||
for i := 0; i < len(intDigits); i++ {
|
||||
intDigits[i] = invalidCharForNumber
|
||||
}
|
||||
for i := int8('0'); i <= int8('9'); i++ {
|
||||
intDigits[i] = i - int8('0')
|
||||
}
|
||||
}
|
||||
|
||||
// ReadUint read uint
|
||||
func (iter *Iterator) ReadUint() uint {
|
||||
if strconv.IntSize == 32 {
|
||||
return uint(iter.ReadUint32())
|
||||
}
|
||||
return uint(iter.ReadUint64())
|
||||
}
|
||||
|
||||
// ReadInt read int
|
||||
func (iter *Iterator) ReadInt() int {
|
||||
if strconv.IntSize == 32 {
|
||||
return int(iter.ReadInt32())
|
||||
}
|
||||
return int(iter.ReadInt64())
|
||||
}
|
||||
|
||||
// ReadInt8 read int8
|
||||
func (iter *Iterator) ReadInt8() (ret int8) {
|
||||
c := iter.nextToken()
|
||||
if c == '-' {
|
||||
val := iter.readUint32(iter.readByte())
|
||||
if val > math.MaxInt8+1 {
|
||||
iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
|
||||
return
|
||||
}
|
||||
return -int8(val)
|
||||
}
|
||||
val := iter.readUint32(c)
|
||||
if val > math.MaxInt8 {
|
||||
iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
|
||||
return
|
||||
}
|
||||
return int8(val)
|
||||
}
|
||||
|
||||
// ReadUint8 read uint8
|
||||
func (iter *Iterator) ReadUint8() (ret uint8) {
|
||||
val := iter.readUint32(iter.nextToken())
|
||||
if val > math.MaxUint8 {
|
||||
iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
|
||||
return
|
||||
}
|
||||
return uint8(val)
|
||||
}
|
||||
|
||||
// ReadInt16 read int16
|
||||
func (iter *Iterator) ReadInt16() (ret int16) {
|
||||
c := iter.nextToken()
|
||||
if c == '-' {
|
||||
val := iter.readUint32(iter.readByte())
|
||||
if val > math.MaxInt16+1 {
|
||||
iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
|
||||
return
|
||||
}
|
||||
return -int16(val)
|
||||
}
|
||||
val := iter.readUint32(c)
|
||||
if val > math.MaxInt16 {
|
||||
iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
|
||||
return
|
||||
}
|
||||
return int16(val)
|
||||
}
|
||||
|
||||
// ReadUint16 read uint16
|
||||
func (iter *Iterator) ReadUint16() (ret uint16) {
|
||||
val := iter.readUint32(iter.nextToken())
|
||||
if val > math.MaxUint16 {
|
||||
iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
|
||||
return
|
||||
}
|
||||
return uint16(val)
|
||||
}
|
||||
|
||||
// ReadInt32 read int32
|
||||
func (iter *Iterator) ReadInt32() (ret int32) {
|
||||
c := iter.nextToken()
|
||||
if c == '-' {
|
||||
val := iter.readUint32(iter.readByte())
|
||||
if val > math.MaxInt32+1 {
|
||||
iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
|
||||
return
|
||||
}
|
||||
return -int32(val)
|
||||
}
|
||||
val := iter.readUint32(c)
|
||||
if val > math.MaxInt32 {
|
||||
iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
|
||||
return
|
||||
}
|
||||
return int32(val)
|
||||
}
|
||||
|
||||
// ReadUint32 read uint32
|
||||
func (iter *Iterator) ReadUint32() (ret uint32) {
|
||||
return iter.readUint32(iter.nextToken())
|
||||
}
|
||||
|
||||
func (iter *Iterator) readUint32(c byte) (ret uint32) {
|
||||
ind := intDigits[c]
|
||||
if ind == 0 {
|
||||
iter.assertInteger()
|
||||
return 0 // single zero
|
||||
}
|
||||
if ind == invalidCharForNumber {
|
||||
iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
|
||||
return
|
||||
}
|
||||
value := uint32(ind)
|
||||
if iter.tail-iter.head > 10 {
|
||||
i := iter.head
|
||||
ind2 := intDigits[iter.buf[i]]
|
||||
if ind2 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value
|
||||
}
|
||||
i++
|
||||
ind3 := intDigits[iter.buf[i]]
|
||||
if ind3 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*10 + uint32(ind2)
|
||||
}
|
||||
//iter.head = i + 1
|
||||
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
|
||||
i++
|
||||
ind4 := intDigits[iter.buf[i]]
|
||||
if ind4 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*100 + uint32(ind2)*10 + uint32(ind3)
|
||||
}
|
||||
i++
|
||||
ind5 := intDigits[iter.buf[i]]
|
||||
if ind5 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
|
||||
}
|
||||
i++
|
||||
ind6 := intDigits[iter.buf[i]]
|
||||
if ind6 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
|
||||
}
|
||||
i++
|
||||
ind7 := intDigits[iter.buf[i]]
|
||||
if ind7 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
|
||||
}
|
||||
i++
|
||||
ind8 := intDigits[iter.buf[i]]
|
||||
if ind8 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
|
||||
}
|
||||
i++
|
||||
ind9 := intDigits[iter.buf[i]]
|
||||
value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
|
||||
iter.head = i
|
||||
if ind9 == invalidCharForNumber {
|
||||
iter.assertInteger()
|
||||
return value
|
||||
}
|
||||
}
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
ind = intDigits[iter.buf[i]]
|
||||
if ind == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value
|
||||
}
|
||||
if value > uint32SafeToMultiply10 {
|
||||
value2 := (value << 3) + (value << 1) + uint32(ind)
|
||||
if value2 < value {
|
||||
iter.ReportError("readUint32", "overflow")
|
||||
return
|
||||
}
|
||||
value = value2
|
||||
continue
|
||||
}
|
||||
value = (value << 3) + (value << 1) + uint32(ind)
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
iter.assertInteger()
|
||||
return value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ReadInt64 read int64
|
||||
func (iter *Iterator) ReadInt64() (ret int64) {
|
||||
c := iter.nextToken()
|
||||
if c == '-' {
|
||||
val := iter.readUint64(iter.readByte())
|
||||
if val > math.MaxInt64+1 {
|
||||
iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
|
||||
return
|
||||
}
|
||||
return -int64(val)
|
||||
}
|
||||
val := iter.readUint64(c)
|
||||
if val > math.MaxInt64 {
|
||||
iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
|
||||
return
|
||||
}
|
||||
return int64(val)
|
||||
}
|
||||
|
||||
// ReadUint64 read uint64
|
||||
func (iter *Iterator) ReadUint64() uint64 {
|
||||
return iter.readUint64(iter.nextToken())
|
||||
}
|
||||
|
||||
func (iter *Iterator) readUint64(c byte) (ret uint64) {
|
||||
ind := intDigits[c]
|
||||
if ind == 0 {
|
||||
iter.assertInteger()
|
||||
return 0 // single zero
|
||||
}
|
||||
if ind == invalidCharForNumber {
|
||||
iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
|
||||
return
|
||||
}
|
||||
value := uint64(ind)
|
||||
if iter.tail-iter.head > 10 {
|
||||
i := iter.head
|
||||
ind2 := intDigits[iter.buf[i]]
|
||||
if ind2 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value
|
||||
}
|
||||
i++
|
||||
ind3 := intDigits[iter.buf[i]]
|
||||
if ind3 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*10 + uint64(ind2)
|
||||
}
|
||||
//iter.head = i + 1
|
||||
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
|
||||
i++
|
||||
ind4 := intDigits[iter.buf[i]]
|
||||
if ind4 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*100 + uint64(ind2)*10 + uint64(ind3)
|
||||
}
|
||||
i++
|
||||
ind5 := intDigits[iter.buf[i]]
|
||||
if ind5 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
|
||||
}
|
||||
i++
|
||||
ind6 := intDigits[iter.buf[i]]
|
||||
if ind6 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
|
||||
}
|
||||
i++
|
||||
ind7 := intDigits[iter.buf[i]]
|
||||
if ind7 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
|
||||
}
|
||||
i++
|
||||
ind8 := intDigits[iter.buf[i]]
|
||||
if ind8 == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
|
||||
}
|
||||
i++
|
||||
ind9 := intDigits[iter.buf[i]]
|
||||
value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
|
||||
iter.head = i
|
||||
if ind9 == invalidCharForNumber {
|
||||
iter.assertInteger()
|
||||
return value
|
||||
}
|
||||
}
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
ind = intDigits[iter.buf[i]]
|
||||
if ind == invalidCharForNumber {
|
||||
iter.head = i
|
||||
iter.assertInteger()
|
||||
return value
|
||||
}
|
||||
if value > uint64SafeToMultiple10 {
|
||||
value2 := (value << 3) + (value << 1) + uint64(ind)
|
||||
if value2 < value {
|
||||
iter.ReportError("readUint64", "overflow")
|
||||
return
|
||||
}
|
||||
value = value2
|
||||
continue
|
||||
}
|
||||
value = (value << 3) + (value << 1) + uint64(ind)
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
iter.assertInteger()
|
||||
return value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) assertInteger() {
|
||||
if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
|
||||
iter.ReportError("assertInteger", "can not decode float as int")
|
||||
}
|
||||
}
|
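The integer readers above unroll up to nine digits from the buffer, then continue with explicit overflow checks, so large integers are preserved exactly and a trailing '.' is rejected by assertInteger. A usage sketch (the literal is an illustrative assumption):

package main

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// 2^53+1 cannot be represented exactly as a float64, but ReadInt64
	// decodes it digit by digit with overflow checks.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `9007199254740993`)
	n := iter.ReadInt64()
	if iter.Error != nil && iter.Error != io.EOF {
		panic(iter.Error)
	}
	fmt.Println(n) // 9007199254740993
}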
267
vendor/github.com/json-iterator/go/iter_object.go
generated
vendored
Normal file
|
@@ -0,0 +1,267 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ReadObject reads one field from the object.
|
||||
// If object ended, returns empty string.
|
||||
// Otherwise, returns the field name.
|
||||
func (iter *Iterator) ReadObject() (ret string) {
|
||||
c := iter.nextToken()
|
||||
switch c {
|
||||
case 'n':
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
return "" // null
|
||||
case '{':
|
||||
c = iter.nextToken()
|
||||
if c == '"' {
|
||||
iter.unreadByte()
|
||||
field := iter.ReadString()
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
|
||||
}
|
||||
return field
|
||||
}
|
||||
if c == '}' {
|
||||
return "" // end of object
|
||||
}
|
||||
iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
|
||||
return
|
||||
case ',':
|
||||
field := iter.ReadString()
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
|
||||
}
|
||||
return field
|
||||
case '}':
|
||||
return "" // end of object
|
||||
default:
|
||||
iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// readFieldHash computes an FNV-1a hash of the next object field name, lowercasing ASCII letters unless the config is case-sensitive.
|
||||
func (iter *Iterator) readFieldHash() int64 {
|
||||
hash := int64(0x811c9dc5)
|
||||
c := iter.nextToken()
|
||||
if c != '"' {
|
||||
iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c}))
|
||||
return 0
|
||||
}
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
// require ascii string and no escape
|
||||
b := iter.buf[i]
|
||||
if b == '\\' {
|
||||
iter.head = i
|
||||
for _, b := range iter.readStringSlowPath() {
|
||||
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
|
||||
b += 'a' - 'A'
|
||||
}
|
||||
hash ^= int64(b)
|
||||
hash *= 0x1000193
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
|
||||
return 0
|
||||
}
|
||||
return hash
|
||||
}
|
||||
if b == '"' {
|
||||
iter.head = i + 1
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
|
||||
return 0
|
||||
}
|
||||
return hash
|
||||
}
|
||||
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
|
||||
b += 'a' - 'A'
|
||||
}
|
||||
hash ^= int64(b)
|
||||
hash *= 0x1000193
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
iter.ReportError("readFieldHash", `incomplete field name`)
|
||||
return 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func calcHash(str string, caseSensitive bool) int64 {
|
||||
if !caseSensitive {
|
||||
str = strings.ToLower(str)
|
||||
}
|
||||
hash := int64(0x811c9dc5)
|
||||
for _, b := range []byte(str) {
|
||||
hash ^= int64(b)
|
||||
hash *= 0x1000193
|
||||
}
|
||||
return int64(hash)
|
||||
}
|
||||
|
||||
// ReadObjectCB reads an object with a callback; the key is ASCII only and the field name is not copied
|
||||
func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
|
||||
c := iter.nextToken()
|
||||
var field string
|
||||
if c == '{' {
|
||||
if !iter.incrementDepth() {
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == '"' {
|
||||
iter.unreadByte()
|
||||
field = iter.ReadString()
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
for c == ',' {
|
||||
field = iter.ReadString()
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
}
|
||||
if c != '}' {
|
||||
iter.ReportError("ReadObjectCB", `object not ended with }`)
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
if c == '}' {
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
return true // null
|
||||
}
|
||||
iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
|
||||
return false
|
||||
}
|
||||
|
||||
// ReadMapCB reads a map with a callback; the key can be any string
|
||||
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
|
||||
c := iter.nextToken()
|
||||
if c == '{' {
|
||||
if !iter.incrementDepth() {
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == '"' {
|
||||
iter.unreadByte()
|
||||
field := iter.ReadString()
|
||||
if iter.nextToken() != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
for c == ',' {
|
||||
field = iter.ReadString()
|
||||
if iter.nextToken() != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
}
|
||||
if c != '}' {
|
||||
iter.ReportError("ReadMapCB", `object not ended with }`)
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
if c == '}' {
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
return true // null
|
||||
}
|
||||
iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
|
||||
return false
|
||||
}
|
||||
|
||||
func (iter *Iterator) readObjectStart() bool {
|
||||
c := iter.nextToken()
|
||||
if c == '{' {
|
||||
c = iter.nextToken()
|
||||
if c == '}' {
|
||||
return false
|
||||
}
|
||||
iter.unreadByte()
|
||||
return true
|
||||
} else if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
return false
|
||||
}
|
||||
iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
|
||||
return false
|
||||
}
|
||||
|
||||
func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
|
||||
str := iter.ReadStringAsSlice()
|
||||
if iter.skipWhitespacesWithoutLoadMore() {
|
||||
if ret == nil {
|
||||
ret = make([]byte, len(str))
|
||||
copy(ret, str)
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
return
|
||||
}
|
||||
}
|
||||
if iter.buf[iter.head] != ':' {
|
||||
iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
|
||||
return
|
||||
}
|
||||
iter.head++
|
||||
if iter.skipWhitespacesWithoutLoadMore() {
|
||||
if ret == nil {
|
||||
ret = make([]byte, len(str))
|
||||
copy(ret, str)
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
return
|
||||
}
|
||||
}
|
||||
if ret == nil {
|
||||
return str
|
||||
}
|
||||
return ret
|
||||
}
|
130
vendor/github.com/json-iterator/go/iter_skip.go
generated
vendored
Normal file
|
@@ -0,0 +1,130 @@
|
|||
package jsoniter
|
||||
|
||||
import "fmt"
|
||||
|
||||
// ReadNil reads a json object as nil and
|
||||
// returns whether it's a nil or not
|
||||
func (iter *Iterator) ReadNil() (ret bool) {
|
||||
c := iter.nextToken()
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l') // null
|
||||
return true
|
||||
}
|
||||
iter.unreadByte()
|
||||
return false
|
||||
}
|
||||
|
||||
// ReadBool reads a json object as BoolValue
|
||||
func (iter *Iterator) ReadBool() (ret bool) {
|
||||
c := iter.nextToken()
|
||||
if c == 't' {
|
||||
iter.skipThreeBytes('r', 'u', 'e')
|
||||
return true
|
||||
}
|
||||
if c == 'f' {
|
||||
iter.skipFourBytes('a', 'l', 's', 'e')
|
||||
return false
|
||||
}
|
||||
iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
|
||||
// SkipAndReturnBytes skips the next JSON element and returns its content as []byte.
|
||||
// The []byte can be kept; it is a copy of the data.
|
||||
func (iter *Iterator) SkipAndReturnBytes() []byte {
|
||||
iter.startCapture(iter.head)
|
||||
iter.Skip()
|
||||
return iter.stopCapture()
|
||||
}
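// Editor's note: an illustrative sketch, not part of the upstream file. It
// captures one array element as raw bytes while skipping past it; ParseString
// and ConfigDefault are assumed from elsewhere in this package.
func exampleSkipAndReturnBytes() []byte {
	iter := ParseString(ConfigDefault, `[{"a":1},{"b":2}]`)
	iter.ReadArray()                 // position on the first element
	raw := iter.SkipAndReturnBytes() // raw now holds `{"a":1}` and may be kept
	return raw
}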
|
||||
|
||||
// SkipAndAppendBytes skips next JSON element and appends its content to
|
||||
// buffer, returning the result.
|
||||
func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
|
||||
iter.startCaptureTo(buf, iter.head)
|
||||
iter.Skip()
|
||||
return iter.stopCapture()
|
||||
}
|
||||
|
||||
func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
|
||||
if iter.captured != nil {
|
||||
panic("already in capture mode")
|
||||
}
|
||||
iter.captureStartedAt = captureStartedAt
|
||||
iter.captured = buf
|
||||
}
|
||||
|
||||
func (iter *Iterator) startCapture(captureStartedAt int) {
|
||||
iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
|
||||
}
|
||||
|
||||
func (iter *Iterator) stopCapture() []byte {
|
||||
if iter.captured == nil {
|
||||
panic("not in capture mode")
|
||||
}
|
||||
captured := iter.captured
|
||||
remaining := iter.buf[iter.captureStartedAt:iter.head]
|
||||
iter.captureStartedAt = -1
|
||||
iter.captured = nil
|
||||
return append(captured, remaining...)
|
||||
}
|
||||
|
||||
// Skip skips a json value and positions the iterator at the start of the next json value.
|
||||
func (iter *Iterator) Skip() {
|
||||
c := iter.nextToken()
|
||||
switch c {
|
||||
case '"':
|
||||
iter.skipString()
|
||||
case 'n':
|
||||
iter.skipThreeBytes('u', 'l', 'l') // null
|
||||
case 't':
|
||||
iter.skipThreeBytes('r', 'u', 'e') // true
|
||||
case 'f':
|
||||
iter.skipFourBytes('a', 'l', 's', 'e') // false
|
||||
case '0':
|
||||
iter.unreadByte()
|
||||
iter.ReadFloat32()
|
||||
case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
iter.skipNumber()
|
||||
case '[':
|
||||
iter.skipArray()
|
||||
case '{':
|
||||
iter.skipObject()
|
||||
default:
|
||||
iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) {
|
||||
if iter.readByte() != b1 {
|
||||
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
|
||||
return
|
||||
}
|
||||
if iter.readByte() != b2 {
|
||||
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
|
||||
return
|
||||
}
|
||||
if iter.readByte() != b3 {
|
||||
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
|
||||
return
|
||||
}
|
||||
if iter.readByte() != b4 {
|
||||
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) {
|
||||
if iter.readByte() != b1 {
|
||||
iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
|
||||
return
|
||||
}
|
||||
if iter.readByte() != b2 {
|
||||
iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
|
||||
return
|
||||
}
|
||||
if iter.readByte() != b3 {
|
||||
iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
|
||||
return
|
||||
}
|
||||
}
|
163
vendor/github.com/json-iterator/go/iter_skip_sloppy.go
generated
vendored
Normal file
|
@@ -0,0 +1,163 @@
|
|||
//+build jsoniter_sloppy
|
||||
|
||||
package jsoniter
|
||||
|
||||
// sloppy but faster implementation, do not validate the input json
|
||||
|
||||
func (iter *Iterator) skipNumber() {
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
switch c {
|
||||
case ' ', '\n', '\r', '\t', ',', '}', ']':
|
||||
iter.head = i
|
||||
return
|
||||
}
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipArray() {
|
||||
level := 1
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
switch iter.buf[i] {
|
||||
case '"': // If inside string, skip it
|
||||
iter.head = i + 1
|
||||
iter.skipString()
|
||||
i = iter.head - 1 // it will be i++ soon
|
||||
case '[': // If open symbol, increase level
|
||||
level++
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
case ']': // If close symbol, decrease level
|
||||
level--
|
||||
if !iter.decrementDepth() {
|
||||
return
|
||||
}
|
||||
|
||||
// If we have returned to the original level, we're done
|
||||
if level == 0 {
|
||||
iter.head = i + 1
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
iter.ReportError("skipObject", "incomplete array")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipObject() {
|
||||
level := 1
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
switch iter.buf[i] {
|
||||
case '"': // If inside string, skip it
|
||||
iter.head = i + 1
|
||||
iter.skipString()
|
||||
i = iter.head - 1 // it will be i++ soon
|
||||
case '{': // If open symbol, increase level
|
||||
level++
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
case '}': // If close symbol, decrease level
|
||||
level--
|
||||
if !iter.decrementDepth() {
|
||||
return
|
||||
}
|
||||
|
||||
// If we have returned to the original level, we're done
|
||||
if level == 0 {
|
||||
iter.head = i + 1
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
iter.ReportError("skipObject", "incomplete object")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipString() {
|
||||
for {
|
||||
end, escaped := iter.findStringEnd()
|
||||
if end == -1 {
|
||||
if !iter.loadMore() {
|
||||
iter.ReportError("skipString", "incomplete string")
|
||||
return
|
||||
}
|
||||
if escaped {
|
||||
iter.head = 1 // skip the first char as last char read is \
|
||||
}
|
||||
} else {
|
||||
iter.head = end
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go
|
||||
// findStringEnd tries to find the end of a string,
|
||||
// supporting strings that contain escaped quote symbols.
|
||||
func (iter *Iterator) findStringEnd() (int, bool) {
|
||||
escaped := false
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
if c == '"' {
|
||||
if !escaped {
|
||||
return i + 1, false
|
||||
}
|
||||
j := i - 1
|
||||
for {
|
||||
if j < iter.head || iter.buf[j] != '\\' {
|
||||
// even number of backslashes
|
||||
// either end of buffer, or " found
|
||||
return i + 1, true
|
||||
}
|
||||
j--
|
||||
if j < iter.head || iter.buf[j] != '\\' {
|
||||
// odd number of backslashes
|
||||
// it is \" or \\\"
|
||||
break
|
||||
}
|
||||
j--
|
||||
}
|
||||
} else if c == '\\' {
|
||||
escaped = true
|
||||
}
|
||||
}
|
||||
j := iter.tail - 1
|
||||
for {
|
||||
if j < iter.head || iter.buf[j] != '\\' {
|
||||
// even number of backslashes
|
||||
// either end of buffer, or " found
|
||||
return -1, false // do not end with \
|
||||
}
|
||||
j--
|
||||
if j < iter.head || iter.buf[j] != '\\' {
|
||||
// odd number of backslashes
|
||||
// it is \" or \\\"
|
||||
break
|
||||
}
|
||||
j--
|
||||
|
||||
}
|
||||
return -1, true // end with \
|
||||
}
|
99
vendor/github.com/json-iterator/go/iter_skip_strict.go
generated
vendored
Normal file
|
@@ -0,0 +1,99 @@
|
|||
//+build !jsoniter_sloppy
|
||||
|
||||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
func (iter *Iterator) skipNumber() {
|
||||
if !iter.trySkipNumber() {
|
||||
iter.unreadByte()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return
|
||||
}
|
||||
iter.ReadFloat64()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
iter.Error = nil
|
||||
iter.ReadBigFloat()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) trySkipNumber() bool {
|
||||
dotFound := false
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
switch c {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
case '.':
|
||||
if dotFound {
|
||||
iter.ReportError("validateNumber", `more than one dot found in number`)
|
||||
return true // already failed
|
||||
}
|
||||
if i+1 == iter.tail {
|
||||
return false
|
||||
}
|
||||
c = iter.buf[i+1]
|
||||
switch c {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
default:
|
||||
iter.ReportError("validateNumber", `missing digit after dot`)
|
||||
return true // already failed
|
||||
}
|
||||
dotFound = true
|
||||
default:
|
||||
switch c {
|
||||
case ',', ']', '}', ' ', '\t', '\n', '\r':
|
||||
if iter.head == i {
|
||||
return false // if - without following digits
|
||||
}
|
||||
iter.head = i
|
||||
return true // must be valid
|
||||
}
|
||||
return false // may be invalid
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipString() {
|
||||
if !iter.trySkipString() {
|
||||
iter.unreadByte()
|
||||
iter.ReadString()
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) trySkipString() bool {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
if c == '"' {
|
||||
iter.head = i + 1
|
||||
return true // valid
|
||||
} else if c == '\\' {
|
||||
return false
|
||||
} else if c < ' ' {
|
||||
iter.ReportError("trySkipString",
|
||||
fmt.Sprintf(`invalid control character found: %d`, c))
|
||||
return true // already failed
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipObject() {
|
||||
iter.unreadByte()
|
||||
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
|
||||
iter.Skip()
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipArray() {
|
||||
iter.unreadByte()
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
iter.Skip()
|
||||
return true
|
||||
})
|
||||
}
|
215
vendor/github.com/json-iterator/go/iter_str.go
generated
vendored
Normal file
|
@@ -0,0 +1,215 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode/utf16"
|
||||
)
|
||||
|
||||
// ReadString reads a string from the iterator.
|
||||
func (iter *Iterator) ReadString() (ret string) {
|
||||
c := iter.nextToken()
|
||||
if c == '"' {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
if c == '"' {
|
||||
ret = string(iter.buf[iter.head:i])
|
||||
iter.head = i + 1
|
||||
return ret
|
||||
} else if c == '\\' {
|
||||
break
|
||||
} else if c < ' ' {
|
||||
iter.ReportError("ReadString",
|
||||
fmt.Sprintf(`invalid control character found: %d`, c))
|
||||
return
|
||||
}
|
||||
}
|
||||
return iter.readStringSlowPath()
|
||||
} else if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
return ""
|
||||
}
|
||||
iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
|
||||
func (iter *Iterator) readStringSlowPath() (ret string) {
|
||||
var str []byte
|
||||
var c byte
|
||||
for iter.Error == nil {
|
||||
c = iter.readByte()
|
||||
if c == '"' {
|
||||
return string(str)
|
||||
}
|
||||
if c == '\\' {
|
||||
c = iter.readByte()
|
||||
str = iter.readEscapedChar(c, str)
|
||||
} else {
|
||||
str = append(str, c)
|
||||
}
|
||||
}
|
||||
iter.ReportError("readStringSlowPath", "unexpected end of input")
|
||||
return
|
||||
}
|
||||
|
||||
func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte {
|
||||
switch c {
|
||||
case 'u':
|
||||
r := iter.readU4()
|
||||
if utf16.IsSurrogate(r) {
|
||||
c = iter.readByte()
|
||||
if iter.Error != nil {
|
||||
return nil
|
||||
}
|
||||
if c != '\\' {
|
||||
iter.unreadByte()
|
||||
str = appendRune(str, r)
|
||||
return str
|
||||
}
|
||||
c = iter.readByte()
|
||||
if iter.Error != nil {
|
||||
return nil
|
||||
}
|
||||
if c != 'u' {
|
||||
str = appendRune(str, r)
|
||||
return iter.readEscapedChar(c, str)
|
||||
}
|
||||
r2 := iter.readU4()
|
||||
if iter.Error != nil {
|
||||
return nil
|
||||
}
|
||||
combined := utf16.DecodeRune(r, r2)
|
||||
if combined == '\uFFFD' {
|
||||
str = appendRune(str, r)
|
||||
str = appendRune(str, r2)
|
||||
} else {
|
||||
str = appendRune(str, combined)
|
||||
}
|
||||
} else {
|
||||
str = appendRune(str, r)
|
||||
}
|
||||
case '"':
|
||||
str = append(str, '"')
|
||||
case '\\':
|
||||
str = append(str, '\\')
|
||||
case '/':
|
||||
str = append(str, '/')
|
||||
case 'b':
|
||||
str = append(str, '\b')
|
||||
case 'f':
|
||||
str = append(str, '\f')
|
||||
case 'n':
|
||||
str = append(str, '\n')
|
||||
case 'r':
|
||||
str = append(str, '\r')
|
||||
case 't':
|
||||
str = append(str, '\t')
|
||||
default:
|
||||
iter.ReportError("readEscapedChar",
|
||||
`invalid escape char after \`)
|
||||
return nil
|
||||
}
|
||||
return str
|
||||
}
|
||||
|
||||
// ReadStringAsSlice reads a string from the iterator without copying it into string form.
|
||||
// The []byte cannot be kept, as it will change after the next iterator call.
|
||||
func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
|
||||
c := iter.nextToken()
|
||||
if c == '"' {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
// require ascii string and no escape
|
||||
// for: field name, base64, number
|
||||
if iter.buf[i] == '"' {
|
||||
// fast path: reuse the underlying buffer
|
||||
ret = iter.buf[iter.head:i]
|
||||
iter.head = i + 1
|
||||
return ret
|
||||
}
|
||||
}
|
||||
readLen := iter.tail - iter.head
|
||||
copied := make([]byte, readLen, readLen*2)
|
||||
copy(copied, iter.buf[iter.head:iter.tail])
|
||||
iter.head = iter.tail
|
||||
for iter.Error == nil {
|
||||
c := iter.readByte()
|
||||
if c == '"' {
|
||||
return copied
|
||||
}
|
||||
copied = append(copied, c)
|
||||
}
|
||||
return copied
|
||||
}
|
||||
iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
|
||||
return
|
||||
}
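// Editor's note: an illustrative sketch, not part of the upstream file. The
// slice returned by ReadStringAsSlice aliases the iterator's buffer, so it has
// to be copied before the iterator is used again if it needs to be kept;
// ParseString and ConfigDefault are assumed from elsewhere in this package.
func exampleReadStringAsSlice() []byte {
	iter := ParseString(ConfigDefault, `"field"`)
	transient := iter.ReadStringAsSlice()
	kept := make([]byte, len(transient))
	copy(kept, transient) // copy before the next iterator call
	return kept
}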
|
||||
|
||||
func (iter *Iterator) readU4() (ret rune) {
|
||||
for i := 0; i < 4; i++ {
|
||||
c := iter.readByte()
|
||||
if iter.Error != nil {
|
||||
return
|
||||
}
|
||||
if c >= '0' && c <= '9' {
|
||||
ret = ret*16 + rune(c-'0')
|
||||
} else if c >= 'a' && c <= 'f' {
|
||||
ret = ret*16 + rune(c-'a'+10)
|
||||
} else if c >= 'A' && c <= 'F' {
|
||||
ret = ret*16 + rune(c-'A'+10)
|
||||
} else {
|
||||
iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
const (
|
||||
t1 = 0x00 // 0000 0000
|
||||
tx = 0x80 // 1000 0000
|
||||
t2 = 0xC0 // 1100 0000
|
||||
t3 = 0xE0 // 1110 0000
|
||||
t4 = 0xF0 // 1111 0000
|
||||
t5 = 0xF8 // 1111 1000
|
||||
|
||||
maskx = 0x3F // 0011 1111
|
||||
mask2 = 0x1F // 0001 1111
|
||||
mask3 = 0x0F // 0000 1111
|
||||
mask4 = 0x07 // 0000 0111
|
||||
|
||||
rune1Max = 1<<7 - 1
|
||||
rune2Max = 1<<11 - 1
|
||||
rune3Max = 1<<16 - 1
|
||||
|
||||
surrogateMin = 0xD800
|
||||
surrogateMax = 0xDFFF
|
||||
|
||||
maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
|
||||
runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
|
||||
)
|
||||
|
||||
func appendRune(p []byte, r rune) []byte {
|
||||
// Negative values are erroneous. Making it unsigned addresses the problem.
|
||||
switch i := uint32(r); {
|
||||
case i <= rune1Max:
|
||||
p = append(p, byte(r))
|
||||
return p
|
||||
case i <= rune2Max:
|
||||
p = append(p, t2|byte(r>>6))
|
||||
p = append(p, tx|byte(r)&maskx)
|
||||
return p
|
||||
case i > maxRune, surrogateMin <= i && i <= surrogateMax:
|
||||
r = runeError
|
||||
fallthrough
|
||||
case i <= rune3Max:
|
||||
p = append(p, t3|byte(r>>12))
|
||||
p = append(p, tx|byte(r>>6)&maskx)
|
||||
p = append(p, tx|byte(r)&maskx)
|
||||
return p
|
||||
default:
|
||||
p = append(p, t4|byte(r>>18))
|
||||
p = append(p, tx|byte(r>>12)&maskx)
|
||||
p = append(p, tx|byte(r>>6)&maskx)
|
||||
p = append(p, tx|byte(r)&maskx)
|
||||
return p
|
||||
}
|
||||
}
|
18
vendor/github.com/json-iterator/go/jsoniter.go
generated
vendored
Normal file
|
@@ -0,0 +1,18 @@
|
|||
// Package jsoniter implements encoding and decoding of JSON as defined in
|
||||
// RFC 4627 and provides interfaces with syntax identical to the standard lib encoding/json.
|
||||
// Converting from encoding/json to jsoniter is no more than replacing the package with jsoniter
|
||||
// and variable type declarations (if any).
|
||||
// jsoniter interfaces give 100% compatibility with code using the standard lib.
|
||||
//
|
||||
// "JSON and Go"
|
||||
// (https://golang.org/doc/articles/json_and_go.html)
|
||||
// gives a description of how Marshal/Unmarshal operate
|
||||
// between arbitrary or predefined json objects and bytes,
|
||||
// and it applies to jsoniter.Marshal/Unmarshal as well.
|
||||
//
|
||||
// Besides, jsoniter.Iterator provides a different set of interfaces
|
||||
// iterating given bytes/string/reader
|
||||
// and yielding parsed elements one by one.
|
||||
// This set of interfaces reads input as required and gives
|
||||
// better performance.
|
||||
package jsoniter
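// Editor's note: an illustrative sketch, not part of the upstream file. It
// shows the drop-in usage described above, assuming the package-level Marshal
// and Unmarshal adapters defined elsewhere in this package.
func exampleDropInUsage() (map[string]int, error) {
	data, err := Marshal(map[string]int{"a": 1}) // same signature as encoding/json
	if err != nil {
		return nil, err
	}
	var out map[string]int
	err = Unmarshal(data, &out)
	return out, err
}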
|
42
vendor/github.com/json-iterator/go/pool.go
generated
vendored
Normal file
|
@@ -0,0 +1,42 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// IteratorPool is a thread-safe pool of iterators that share the same configuration.
|
||||
type IteratorPool interface {
|
||||
BorrowIterator(data []byte) *Iterator
|
||||
ReturnIterator(iter *Iterator)
|
||||
}
|
||||
|
||||
// StreamPool is a thread-safe pool of streams that share the same configuration.
|
||||
type StreamPool interface {
|
||||
BorrowStream(writer io.Writer) *Stream
|
||||
ReturnStream(stream *Stream)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
|
||||
stream := cfg.streamPool.Get().(*Stream)
|
||||
stream.Reset(writer)
|
||||
return stream
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) ReturnStream(stream *Stream) {
|
||||
stream.out = nil
|
||||
stream.Error = nil
|
||||
stream.Attachment = nil
|
||||
cfg.streamPool.Put(stream)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
|
||||
iter := cfg.iteratorPool.Get().(*Iterator)
|
||||
iter.ResetBytes(data)
|
||||
return iter
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
|
||||
iter.Error = nil
|
||||
iter.Attachment = nil
|
||||
cfg.iteratorPool.Put(iter)
|
||||
}
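// Editor's note: an illustrative sketch, not part of the upstream file. It
// borrows an iterator from the pool, decodes with it, and returns it so the
// buffer can be reused; ConfigDefault is assumed from elsewhere in this package.
func exampleBorrowIterator(data []byte) int {
	iter := ConfigDefault.BorrowIterator(data)
	defer ConfigDefault.ReturnIterator(iter)
	return iter.ReadInt() // e.g. data == []byte("42") yields 42
}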
|
337
vendor/github.com/json-iterator/go/reflect.go
generated
vendored
Normal file
|
@@ -0,0 +1,337 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
// ValDecoder is an internal type registered to cache as needed.
|
||||
// Don't confuse jsoniter.ValDecoder with json.Decoder.
|
||||
// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
|
||||
//
|
||||
// Reflection on the type is used to create decoders, which are then cached.
|
||||
// Reflection on values is avoided where possible, since reflect.Value itself allocates, with the following exceptions:
|
||||
// 1. creating an instance of a new value, for example *int needs an int to be allocated
|
||||
// 2. appending to a slice: if the existing cap is not enough, allocation is done using reflect.New
|
||||
// 3. assigning to a map: both key and value will be reflect.Value
|
||||
// For a simple struct binding, decoding is reflect.Value free and allocation free.
|
||||
type ValDecoder interface {
|
||||
Decode(ptr unsafe.Pointer, iter *Iterator)
|
||||
}
|
||||
|
||||
// ValEncoder is an internal type registered to cache as needed.
|
||||
// Don't confuse jsoniter.ValEncoder with json.Encoder.
|
||||
// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
|
||||
type ValEncoder interface {
|
||||
IsEmpty(ptr unsafe.Pointer) bool
|
||||
Encode(ptr unsafe.Pointer, stream *Stream)
|
||||
}
|
||||
|
||||
type checkIsEmpty interface {
|
||||
IsEmpty(ptr unsafe.Pointer) bool
|
||||
}
|
||||
|
||||
type ctx struct {
|
||||
*frozenConfig
|
||||
prefix string
|
||||
encoders map[reflect2.Type]ValEncoder
|
||||
decoders map[reflect2.Type]ValDecoder
|
||||
}
|
||||
|
||||
func (b *ctx) caseSensitive() bool {
|
||||
if b.frozenConfig == nil {
|
||||
// default is case-insensitive
|
||||
return false
|
||||
}
|
||||
return b.frozenConfig.caseSensitive
|
||||
}
|
||||
|
||||
func (b *ctx) append(prefix string) *ctx {
|
||||
return &ctx{
|
||||
frozenConfig: b.frozenConfig,
|
||||
prefix: b.prefix + " " + prefix,
|
||||
encoders: b.encoders,
|
||||
decoders: b.decoders,
|
||||
}
|
||||
}
|
||||
|
||||
// ReadVal copies the underlying JSON into a go interface, same as json.Unmarshal.
|
||||
func (iter *Iterator) ReadVal(obj interface{}) {
|
||||
depth := iter.depth
|
||||
cacheKey := reflect2.RTypeOf(obj)
|
||||
decoder := iter.cfg.getDecoderFromCache(cacheKey)
|
||||
if decoder == nil {
|
||||
typ := reflect2.TypeOf(obj)
|
||||
if typ.Kind() != reflect.Ptr {
|
||||
iter.ReportError("ReadVal", "can only unmarshal into pointer")
|
||||
return
|
||||
}
|
||||
decoder = iter.cfg.DecoderOf(typ)
|
||||
}
|
||||
ptr := reflect2.PtrOf(obj)
|
||||
if ptr == nil {
|
||||
iter.ReportError("ReadVal", "can not read into nil pointer")
|
||||
return
|
||||
}
|
||||
decoder.Decode(ptr, iter)
|
||||
if iter.depth != depth {
|
||||
iter.ReportError("ReadVal", "unexpected mismatched nesting")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// WriteVal copies the go interface into the underlying JSON, same as json.Marshal.
|
||||
func (stream *Stream) WriteVal(val interface{}) {
|
||||
if nil == val {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
cacheKey := reflect2.RTypeOf(val)
|
||||
encoder := stream.cfg.getEncoderFromCache(cacheKey)
|
||||
if encoder == nil {
|
||||
typ := reflect2.TypeOf(val)
|
||||
encoder = stream.cfg.EncoderOf(typ)
|
||||
}
|
||||
encoder.Encode(reflect2.PtrOf(val), stream)
|
||||
}
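// Editor's note: an illustrative sketch, not part of the upstream file. It
// round-trips a value through WriteVal and ReadVal, mirroring
// json.Marshal/Unmarshal; ConfigDefault, ParseString and Stream.Buffer are
// assumed from elsewhere in this package.
func exampleReadWriteVal() (string, error) {
	stream := ConfigDefault.BorrowStream(nil)
	defer ConfigDefault.ReturnStream(stream)
	stream.WriteVal([]string{"x", "y"}) // encode onto the stream's buffer
	encoded := string(stream.Buffer())

	var decoded []string
	iter := ParseString(ConfigDefault, encoded)
	iter.ReadVal(&decoded) // decode into a pointer, as required above
	return encoded, iter.Error
}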
|
||||
|
||||
func (cfg *frozenConfig) DecoderOf(typ reflect2.Type) ValDecoder {
|
||||
cacheKey := typ.RType()
|
||||
decoder := cfg.getDecoderFromCache(cacheKey)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
ctx := &ctx{
|
||||
frozenConfig: cfg,
|
||||
prefix: "",
|
||||
decoders: map[reflect2.Type]ValDecoder{},
|
||||
encoders: map[reflect2.Type]ValEncoder{},
|
||||
}
|
||||
ptrType := typ.(*reflect2.UnsafePtrType)
|
||||
decoder = decoderOfType(ctx, ptrType.Elem())
|
||||
cfg.addDecoderToCache(cacheKey, decoder)
|
||||
return decoder
|
||||
}
|
||||
|
||||
func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := getTypeDecoderFromExtension(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfType(ctx, typ)
|
||||
for _, extension := range extensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
return decoder
|
||||
}
|
||||
|
||||
func createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := ctx.decoders[typ]
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
placeholder := &placeholderDecoder{}
|
||||
ctx.decoders[typ] = placeholder
|
||||
decoder = _createDecoderOfType(ctx, typ)
|
||||
placeholder.decoder = decoder
|
||||
return decoder
|
||||
}
|
||||
|
||||
func _createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := createDecoderOfJsonRawMessage(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfJsonNumber(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfMarshaler(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfAny(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfNative(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Interface:
|
||||
ifaceType, isIFace := typ.(*reflect2.UnsafeIFaceType)
|
||||
if isIFace {
|
||||
return &ifaceDecoder{valType: ifaceType}
|
||||
}
|
||||
return &efaceDecoder{}
|
||||
case reflect.Struct:
|
||||
return decoderOfStruct(ctx, typ)
|
||||
case reflect.Array:
|
||||
return decoderOfArray(ctx, typ)
|
||||
case reflect.Slice:
|
||||
return decoderOfSlice(ctx, typ)
|
||||
case reflect.Map:
|
||||
return decoderOfMap(ctx, typ)
|
||||
case reflect.Ptr:
|
||||
return decoderOfOptional(ctx, typ)
|
||||
default:
|
||||
return &lazyErrorDecoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
|
||||
}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) EncoderOf(typ reflect2.Type) ValEncoder {
|
||||
cacheKey := typ.RType()
|
||||
encoder := cfg.getEncoderFromCache(cacheKey)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
ctx := &ctx{
|
||||
frozenConfig: cfg,
|
||||
prefix: "",
|
||||
decoders: map[reflect2.Type]ValDecoder{},
|
||||
encoders: map[reflect2.Type]ValEncoder{},
|
||||
}
|
||||
encoder = encoderOfType(ctx, typ)
|
||||
if typ.LikePtr() {
|
||||
encoder = &onePtrEncoder{encoder}
|
||||
}
|
||||
cfg.addEncoderToCache(cacheKey, encoder)
|
||||
return encoder
|
||||
}
|
||||
|
||||
type onePtrEncoder struct {
|
||||
encoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *onePtrEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
|
||||
}
|
||||
|
||||
func (encoder *onePtrEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
|
||||
}
|
||||
|
||||
func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := getTypeEncoderFromExtension(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfType(ctx, typ)
|
||||
for _, extension := range extensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
|
||||
func createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := ctx.encoders[typ]
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
placeholder := &placeholderEncoder{}
|
||||
ctx.encoders[typ] = placeholder
|
||||
encoder = _createEncoderOfType(ctx, typ)
|
||||
placeholder.encoder = encoder
|
||||
return encoder
|
||||
}
|
||||
func _createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := createEncoderOfJsonRawMessage(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfJsonNumber(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfMarshaler(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfAny(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfNative(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
kind := typ.Kind()
|
||||
switch kind {
|
||||
case reflect.Interface:
|
||||
return &dynamicEncoder{typ}
|
||||
case reflect.Struct:
|
||||
return encoderOfStruct(ctx, typ)
|
||||
case reflect.Array:
|
||||
return encoderOfArray(ctx, typ)
|
||||
case reflect.Slice:
|
||||
return encoderOfSlice(ctx, typ)
|
||||
case reflect.Map:
|
||||
return encoderOfMap(ctx, typ)
|
||||
case reflect.Ptr:
|
||||
return encoderOfOptional(ctx, typ)
|
||||
default:
|
||||
return &lazyErrorEncoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
|
||||
}
|
||||
}
|
||||
|
||||
type lazyErrorDecoder struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func (decoder *lazyErrorDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if iter.WhatIsNext() != NilValue {
|
||||
if iter.Error == nil {
|
||||
iter.Error = decoder.err
|
||||
}
|
||||
} else {
|
||||
iter.Skip()
|
||||
}
|
||||
}
|
||||
|
||||
type lazyErrorEncoder struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func (encoder *lazyErrorEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if ptr == nil {
|
||||
stream.WriteNil()
|
||||
} else if stream.Error == nil {
|
||||
stream.Error = encoder.err
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *lazyErrorEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type placeholderDecoder struct {
|
||||
decoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
decoder.decoder.Decode(ptr, iter)
|
||||
}
|
||||
|
||||
type placeholderEncoder struct {
|
||||
encoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
encoder.encoder.Encode(ptr, stream)
|
||||
}
|
||||
|
||||
func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.encoder.IsEmpty(ptr)
|
||||
}
|
104
vendor/github.com/json-iterator/go/reflect_array.go
generated
vendored
Normal file
|
@@ -0,0 +1,104 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
arrayType := typ.(*reflect2.UnsafeArrayType)
|
||||
decoder := decoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
|
||||
return &arrayDecoder{arrayType, decoder}
|
||||
}
|
||||
|
||||
func encoderOfArray(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
arrayType := typ.(*reflect2.UnsafeArrayType)
|
||||
if arrayType.Len() == 0 {
|
||||
return emptyArrayEncoder{}
|
||||
}
|
||||
encoder := encoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
|
||||
return &arrayEncoder{arrayType, encoder}
|
||||
}
|
||||
|
||||
type emptyArrayEncoder struct{}
|
||||
|
||||
func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteEmptyArray()
|
||||
}
|
||||
|
||||
func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
type arrayEncoder struct {
|
||||
arrayType *reflect2.UnsafeArrayType
|
||||
elemEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteArrayStart()
|
||||
elemPtr := unsafe.Pointer(ptr)
|
||||
encoder.elemEncoder.Encode(elemPtr, stream)
|
||||
for i := 1; i < encoder.arrayType.Len(); i++ {
|
||||
stream.WriteMore()
|
||||
elemPtr = encoder.arrayType.UnsafeGetIndex(ptr, i)
|
||||
encoder.elemEncoder.Encode(elemPtr, stream)
|
||||
}
|
||||
stream.WriteArrayEnd()
|
||||
if stream.Error != nil && stream.Error != io.EOF {
|
||||
stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type arrayDecoder struct {
|
||||
arrayType *reflect2.UnsafeArrayType
|
||||
elemDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
decoder.doDecode(ptr, iter)
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
c := iter.nextToken()
|
||||
arrayType := decoder.arrayType
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
return
|
||||
}
|
||||
if c != '[' {
|
||||
iter.ReportError("decode array", "expect [ or n, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == ']' {
|
||||
return
|
||||
}
|
||||
iter.unreadByte()
|
||||
elemPtr := arrayType.UnsafeGetIndex(ptr, 0)
|
||||
decoder.elemDecoder.Decode(elemPtr, iter)
|
||||
length := 1
|
||||
for c = iter.nextToken(); c == ','; c = iter.nextToken() {
|
||||
if length >= arrayType.Len() {
|
||||
iter.Skip()
|
||||
continue
|
||||
}
|
||||
idx := length
|
||||
length += 1
|
||||
elemPtr = arrayType.UnsafeGetIndex(ptr, idx)
|
||||
decoder.elemDecoder.Decode(elemPtr, iter)
|
||||
}
|
||||
if c != ']' {
|
||||
iter.ReportError("decode array", "expect ], but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
}
|
70
vendor/github.com/json-iterator/go/reflect_dynamic.go
generated
vendored
Normal file
|
@@ -0,0 +1,70 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"github.com/modern-go/reflect2"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type dynamicEncoder struct {
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (encoder *dynamicEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := encoder.valType.UnsafeIndirect(ptr)
|
||||
stream.WriteVal(obj)
|
||||
}
|
||||
|
||||
func (encoder *dynamicEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.valType.UnsafeIndirect(ptr) == nil
|
||||
}
|
||||
|
||||
type efaceDecoder struct {
|
||||
}
|
||||
|
||||
func (decoder *efaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
pObj := (*interface{})(ptr)
|
||||
obj := *pObj
|
||||
if obj == nil {
|
||||
*pObj = iter.Read()
|
||||
return
|
||||
}
|
||||
typ := reflect2.TypeOf(obj)
|
||||
if typ.Kind() != reflect.Ptr {
|
||||
*pObj = iter.Read()
|
||||
return
|
||||
}
|
||||
ptrType := typ.(*reflect2.UnsafePtrType)
|
||||
ptrElemType := ptrType.Elem()
|
||||
if iter.WhatIsNext() == NilValue {
|
||||
if ptrElemType.Kind() != reflect.Ptr {
|
||||
iter.skipFourBytes('n', 'u', 'l', 'l')
|
||||
*pObj = nil
|
||||
return
|
||||
}
|
||||
}
|
||||
if reflect2.IsNil(obj) {
|
||||
obj := ptrElemType.New()
|
||||
iter.ReadVal(obj)
|
||||
*pObj = obj
|
||||
return
|
||||
}
|
||||
iter.ReadVal(obj)
|
||||
}
|
||||
|
||||
type ifaceDecoder struct {
|
||||
valType *reflect2.UnsafeIFaceType
|
||||
}
|
||||
|
||||
func (decoder *ifaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if iter.ReadNil() {
|
||||
decoder.valType.UnsafeSet(ptr, decoder.valType.UnsafeNew())
|
||||
return
|
||||
}
|
||||
obj := decoder.valType.UnsafeIndirect(ptr)
|
||||
if reflect2.IsNil(obj) {
|
||||
iter.ReportError("decode non empty interface", "can not unmarshal into nil")
|
||||
return
|
||||
}
|
||||
iter.ReadVal(obj)
|
||||
}
|
483
vendor/github.com/json-iterator/go/reflect_extension.go
generated
vendored
Normal file
|
@@ -0,0 +1,483 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
var typeDecoders = map[string]ValDecoder{}
|
||||
var fieldDecoders = map[string]ValDecoder{}
|
||||
var typeEncoders = map[string]ValEncoder{}
|
||||
var fieldEncoders = map[string]ValEncoder{}
|
||||
var extensions = []Extension{}
|
||||
|
||||
// StructDescriptor describes how the struct should be encoded/decoded.
|
||||
type StructDescriptor struct {
|
||||
Type reflect2.Type
|
||||
Fields []*Binding
|
||||
}
|
||||
|
||||
// GetField gets one field from the descriptor by its name.
|
||||
// A map is not used here so that field order is preserved.
|
||||
func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding {
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
if binding.Field.Name() == fieldName {
|
||||
return binding
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Binding describes how the struct field should be encoded/decoded.
|
||||
type Binding struct {
|
||||
levels []int
|
||||
Field reflect2.StructField
|
||||
FromNames []string
|
||||
ToNames []string
|
||||
Encoder ValEncoder
|
||||
Decoder ValDecoder
|
||||
}
|
||||
|
||||
// Extension is the one-for-all SPI. Customize encoding/decoding by specifying an alternate encoder/decoder.
|
||||
// Can also rename fields by UpdateStructDescriptor.
|
||||
type Extension interface {
|
||||
UpdateStructDescriptor(structDescriptor *StructDescriptor)
|
||||
CreateMapKeyDecoder(typ reflect2.Type) ValDecoder
|
||||
CreateMapKeyEncoder(typ reflect2.Type) ValEncoder
|
||||
CreateDecoder(typ reflect2.Type) ValDecoder
|
||||
CreateEncoder(typ reflect2.Type) ValEncoder
|
||||
DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder
|
||||
DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder
|
||||
}
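// Editor's note: an illustrative sketch, not part of the upstream file. By
// embedding DummyExtension a custom extension only overrides the hooks it
// needs; here UpdateStructDescriptor rewrites every binding so the lower-cased
// field name is used on both encode and decode. The type name is hypothetical;
// it would be installed with RegisterExtension(&lowerCaseExtension{}).
type lowerCaseExtension struct {
	DummyExtension
}

func (extension *lowerCaseExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
	for _, binding := range structDescriptor.Fields {
		name := strings.ToLower(binding.Field.Name())
		binding.FromNames = []string{name} // names accepted when decoding
		binding.ToNames = []string{name}   // name emitted when encoding
	}
}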
|
||||
|
||||
// DummyExtension can be embedded to get a dummy implementation of all Extension methods.
|
||||
type DummyExtension struct {
|
||||
}
|
||||
|
||||
// UpdateStructDescriptor No-op
|
||||
func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
|
||||
}
|
||||
|
||||
// CreateMapKeyDecoder No-op
|
||||
func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateMapKeyEncoder No-op
|
||||
func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateDecoder No-op
|
||||
func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateEncoder No-op
|
||||
func (extension *DummyExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// DecorateDecoder No-op
|
||||
func (extension *DummyExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
|
||||
return decoder
|
||||
}
|
||||
|
||||
// DecorateEncoder No-op
|
||||
func (extension *DummyExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
|
||||
return encoder
|
||||
}
|
||||
|
||||
type EncoderExtension map[reflect2.Type]ValEncoder
|
||||
|
||||
// UpdateStructDescriptor No-op
|
||||
func (extension EncoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
|
||||
}
|
||||
|
||||
// CreateDecoder No-op
|
||||
func (extension EncoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateEncoder get encoder from map
|
||||
func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
|
||||
return extension[typ]
|
||||
}
|
||||
|
||||
// CreateMapKeyDecoder No-op
|
||||
func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateMapKeyEncoder No-op
|
||||
func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// DecorateDecoder No-op
|
||||
func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
|
||||
return decoder
|
||||
}
|
||||
|
||||
// DecorateEncoder No-op
|
||||
func (extension EncoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
|
||||
return encoder
|
||||
}
|
||||
|
||||
type DecoderExtension map[reflect2.Type]ValDecoder
|
||||
|
||||
// UpdateStructDescriptor No-op
|
||||
func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
|
||||
}
|
||||
|
||||
// CreateMapKeyDecoder No-op
|
||||
func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateMapKeyEncoder No-op
|
||||
func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateDecoder get decoder from map
|
||||
func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
|
||||
return extension[typ]
|
||||
}
|
||||
|
||||
// CreateEncoder No-op
|
||||
func (extension DecoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// DecorateDecoder No-op
|
||||
func (extension DecoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
|
||||
return decoder
|
||||
}
|
||||
|
||||
// DecorateEncoder No-op
|
||||
func (extension DecoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
|
||||
return encoder
|
||||
}
|
||||
|
||||
type funcDecoder struct {
|
||||
fun DecoderFunc
|
||||
}
|
||||
|
||||
func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
decoder.fun(ptr, iter)
|
||||
}
|
||||
|
||||
type funcEncoder struct {
|
||||
fun EncoderFunc
|
||||
isEmptyFunc func(ptr unsafe.Pointer) bool
|
||||
}
|
||||
|
||||
func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
encoder.fun(ptr, stream)
|
||||
}
|
||||
|
||||
func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
if encoder.isEmptyFunc == nil {
|
||||
return false
|
||||
}
|
||||
return encoder.isEmptyFunc(ptr)
|
||||
}
|
||||
|
||||
// DecoderFunc the function form of TypeDecoder
|
||||
type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
|
||||
|
||||
// EncoderFunc the function form of TypeEncoder
|
||||
type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
|
||||
|
||||
// RegisterTypeDecoderFunc register TypeDecoder for a type with function
|
||||
func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) {
|
||||
typeDecoders[typ] = &funcDecoder{fun}
|
||||
}
|
||||
|
||||
// RegisterTypeDecoder registers a TypeDecoder for a type.
|
||||
func RegisterTypeDecoder(typ string, decoder ValDecoder) {
|
||||
typeDecoders[typ] = decoder
|
||||
}
|
||||
|
||||
// RegisterFieldDecoderFunc register TypeDecoder for a struct field with function
|
||||
func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) {
|
||||
RegisterFieldDecoder(typ, field, &funcDecoder{fun})
|
||||
}
|
||||
|
||||
// RegisterFieldDecoder register TypeDecoder for a struct field
|
||||
func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) {
|
||||
fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder
|
||||
}
|
||||
|
||||
// RegisterTypeEncoderFunc register TypeEncoder for a type with encode/isEmpty function
|
||||
func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
|
||||
typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc}
|
||||
}
|
||||
|
||||
// RegisterTypeEncoder register TypeEncoder for a type
|
||||
func RegisterTypeEncoder(typ string, encoder ValEncoder) {
|
||||
typeEncoders[typ] = encoder
|
||||
}
|
||||
|
||||
// RegisterFieldEncoderFunc register TypeEncoder for a struct field with encode/isEmpty function
|
||||
func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
|
||||
RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc})
|
||||
}
|
||||
|
||||
// RegisterFieldEncoder register TypeEncoder for a struct field
|
||||
func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) {
|
||||
fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder
|
||||
}
|
||||
|
||||
// RegisterExtension register extension
|
||||
func RegisterExtension(extension Extension) {
|
||||
extensions = append(extensions, extension)
|
||||
}
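// Editor's note: an illustrative sketch, not part of the upstream file. It
// shows RegisterTypeEncoderFunc with an encode callback and the matching
// isEmpty callback for a throwaway type; both the type and the function are
// hypothetical, and the type-name string must match reflect2's Type.String().
type celsius float64

func exampleRegisterTypeEncoder() {
	RegisterTypeEncoderFunc("jsoniter.celsius",
		func(ptr unsafe.Pointer, stream *Stream) {
			stream.WriteFloat64(float64(*(*celsius)(ptr))) // encode as a plain number
		},
		func(ptr unsafe.Pointer) bool {
			return *(*celsius)(ptr) == 0 // omitempty treats zero as empty
		})
}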
|
||||
|
||||
func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := _getTypeDecoderFromExtension(ctx, typ)
|
||||
if decoder != nil {
|
||||
for _, extension := range extensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
}
|
||||
return decoder
|
||||
}
|
||||
func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
for _, extension := range extensions {
|
||||
decoder := extension.CreateDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
}
|
||||
decoder := ctx.decoderExtension.CreateDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
decoder := extension.CreateDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
}
|
||||
typeName := typ.String()
|
||||
decoder = typeDecoders[typeName]
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
if typ.Kind() == reflect.Ptr {
|
||||
ptrType := typ.(*reflect2.UnsafePtrType)
|
||||
decoder := typeDecoders[ptrType.Elem().String()]
|
||||
if decoder != nil {
|
||||
return &OptionalDecoder{ptrType.Elem(), decoder}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := _getTypeEncoderFromExtension(ctx, typ)
|
||||
if encoder != nil {
|
||||
for _, extension := range extensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
|
||||
func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
for _, extension := range extensions {
|
||||
encoder := extension.CreateEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
}
|
||||
encoder := ctx.encoderExtension.CreateEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
encoder := extension.CreateEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
}
|
||||
typeName := typ.String()
|
||||
encoder = typeEncoders[typeName]
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
if typ.Kind() == reflect.Ptr {
|
||||
typePtr := typ.(*reflect2.UnsafePtrType)
|
||||
encoder := typeEncoders[typePtr.Elem().String()]
|
||||
if encoder != nil {
|
||||
return &OptionalEncoder{encoder}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
|
||||
structType := typ.(*reflect2.UnsafeStructType)
|
||||
embeddedBindings := []*Binding{}
|
||||
bindings := []*Binding{}
|
||||
for i := 0; i < structType.NumField(); i++ {
|
||||
field := structType.Field(i)
|
||||
tag, hastag := field.Tag().Lookup(ctx.getTagKey())
|
||||
if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
|
||||
continue
|
||||
}
|
||||
if tag == "-" || field.Name() == "_" {
|
||||
continue
|
||||
}
|
||||
tagParts := strings.Split(tag, ",")
|
||||
if field.Anonymous() && (tag == "" || tagParts[0] == "") {
|
||||
if field.Type().Kind() == reflect.Struct {
|
||||
structDescriptor := describeStruct(ctx, field.Type())
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
binding.levels = append([]int{i}, binding.levels...)
|
||||
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
|
||||
binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
|
||||
binding.Decoder = &structFieldDecoder{field, binding.Decoder}
|
||||
embeddedBindings = append(embeddedBindings, binding)
|
||||
}
|
||||
continue
|
||||
} else if field.Type().Kind() == reflect.Ptr {
|
||||
ptrType := field.Type().(*reflect2.UnsafePtrType)
|
||||
if ptrType.Elem().Kind() == reflect.Struct {
|
||||
structDescriptor := describeStruct(ctx, ptrType.Elem())
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
binding.levels = append([]int{i}, binding.levels...)
|
||||
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
|
||||
binding.Encoder = &dereferenceEncoder{binding.Encoder}
|
||||
binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
|
||||
binding.Decoder = &dereferenceDecoder{ptrType.Elem(), binding.Decoder}
|
||||
binding.Decoder = &structFieldDecoder{field, binding.Decoder}
|
||||
embeddedBindings = append(embeddedBindings, binding)
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
fieldNames := calcFieldNames(field.Name(), tagParts[0], tag)
|
||||
fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name())
|
||||
decoder := fieldDecoders[fieldCacheKey]
|
||||
if decoder == nil {
|
||||
decoder = decoderOfType(ctx.append(field.Name()), field.Type())
|
||||
}
|
||||
encoder := fieldEncoders[fieldCacheKey]
|
||||
if encoder == nil {
|
||||
encoder = encoderOfType(ctx.append(field.Name()), field.Type())
|
||||
}
|
||||
binding := &Binding{
|
||||
Field: field,
|
||||
FromNames: fieldNames,
|
||||
ToNames: fieldNames,
|
||||
Decoder: decoder,
|
||||
Encoder: encoder,
|
||||
}
|
||||
binding.levels = []int{i}
|
||||
bindings = append(bindings, binding)
|
||||
}
|
||||
return createStructDescriptor(ctx, typ, bindings, embeddedBindings)
|
||||
}
|
||||
func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
|
||||
structDescriptor := &StructDescriptor{
|
||||
Type: typ,
|
||||
Fields: bindings,
|
||||
}
|
||||
for _, extension := range extensions {
|
||||
extension.UpdateStructDescriptor(structDescriptor)
|
||||
}
|
||||
ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
|
||||
ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
extension.UpdateStructDescriptor(structDescriptor)
|
||||
}
|
||||
processTags(structDescriptor, ctx.frozenConfig)
|
||||
// merge normal & embedded bindings & sort with original order
|
||||
allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))
|
||||
sort.Sort(allBindings)
|
||||
structDescriptor.Fields = allBindings
|
||||
return structDescriptor
|
||||
}
|
||||
|
||||
type sortableBindings []*Binding
|
||||
|
||||
func (bindings sortableBindings) Len() int {
|
||||
return len(bindings)
|
||||
}
|
||||
|
||||
func (bindings sortableBindings) Less(i, j int) bool {
|
||||
left := bindings[i].levels
|
||||
right := bindings[j].levels
|
||||
k := 0
|
||||
for {
|
||||
if left[k] < right[k] {
|
||||
return true
|
||||
} else if left[k] > right[k] {
|
||||
return false
|
||||
}
|
||||
k++
|
||||
}
|
||||
}
|
||||
|
||||
func (bindings sortableBindings) Swap(i, j int) {
|
||||
bindings[i], bindings[j] = bindings[j], bindings[i]
|
||||
}
|
||||
|
||||
func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) {
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
shouldOmitEmpty := false
|
||||
tagParts := strings.Split(binding.Field.Tag().Get(cfg.getTagKey()), ",")
|
||||
for _, tagPart := range tagParts[1:] {
|
||||
if tagPart == "omitempty" {
|
||||
shouldOmitEmpty = true
|
||||
} else if tagPart == "string" {
|
||||
if binding.Field.Type().Kind() == reflect.String {
|
||||
binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg}
|
||||
binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg}
|
||||
} else {
|
||||
binding.Decoder = &stringModeNumberDecoder{binding.Decoder}
|
||||
binding.Encoder = &stringModeNumberEncoder{binding.Encoder}
|
||||
}
|
||||
}
|
||||
}
|
||||
binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder}
|
||||
binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty}
|
||||
}
|
||||
}
|
||||
|
||||
func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
|
||||
// ignore?
|
||||
if wholeTag == "-" {
|
||||
return []string{}
|
||||
}
|
||||
// rename?
|
||||
var fieldNames []string
|
||||
if tagProvidedFieldName == "" {
|
||||
fieldNames = []string{originalFieldName}
|
||||
} else {
|
||||
fieldNames = []string{tagProvidedFieldName}
|
||||
}
|
||||
// private?
|
||||
isNotExported := unicode.IsLower(rune(originalFieldName[0])) || originalFieldName[0] == '_'
|
||||
if isNotExported {
|
||||
fieldNames = []string{}
|
||||
}
|
||||
return fieldNames
|
||||
}
|
112
vendor/github.com/json-iterator/go/reflect_json_number.go
generated
vendored
Normal file
|
@@ -0,0 +1,112 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/modern-go/reflect2"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type Number string
|
||||
|
||||
// String returns the literal text of the number.
|
||||
func (n Number) String() string { return string(n) }
|
||||
|
||||
// Float64 returns the number as a float64.
|
||||
func (n Number) Float64() (float64, error) {
|
||||
return strconv.ParseFloat(string(n), 64)
|
||||
}
|
||||
|
||||
// Int64 returns the number as an int64.
|
||||
func (n Number) Int64() (int64, error) {
|
||||
return strconv.ParseInt(string(n), 10, 64)
|
||||
}
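// Editor's note: an illustrative sketch, not part of the upstream file. Number
// keeps the literal text of a JSON number and converts it on demand.
func exampleNumber() (int64, float64) {
	n := Number("3")
	i, _ := n.Int64()   // 3
	f, _ := n.Float64() // 3.0
	return i, f
}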
|
||||
|
||||
func CastJsonNumber(val interface{}) (string, bool) {
|
||||
switch typedVal := val.(type) {
|
||||
case json.Number:
|
||||
return string(typedVal), true
|
||||
case Number:
|
||||
return string(typedVal), true
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
var jsonNumberType = reflect2.TypeOfPtr((*json.Number)(nil)).Elem()
|
||||
var jsoniterNumberType = reflect2.TypeOfPtr((*Number)(nil)).Elem()
|
||||
|
||||
func createDecoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
if typ.AssignableTo(jsonNumberType) {
|
||||
return &jsonNumberCodec{}
|
||||
}
|
||||
if typ.AssignableTo(jsoniterNumberType) {
|
||||
return &jsoniterNumberCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func createEncoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
if typ.AssignableTo(jsonNumberType) {
|
||||
return &jsonNumberCodec{}
|
||||
}
|
||||
if typ.AssignableTo(jsoniterNumberType) {
|
||||
return &jsoniterNumberCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type jsonNumberCodec struct {
|
||||
}
|
||||
|
||||
func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
switch iter.WhatIsNext() {
|
||||
case StringValue:
|
||||
*((*json.Number)(ptr)) = json.Number(iter.ReadString())
|
||||
case NilValue:
|
||||
iter.skipFourBytes('n', 'u', 'l', 'l')
|
||||
*((*json.Number)(ptr)) = ""
|
||||
default:
|
||||
*((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
number := *((*json.Number)(ptr))
|
||||
if len(number) == 0 {
|
||||
stream.writeByte('0')
|
||||
} else {
|
||||
stream.WriteRaw(string(number))
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return len(*((*json.Number)(ptr))) == 0
|
||||
}
|
||||
|
||||
type jsoniterNumberCodec struct {
|
||||
}
|
||||
|
||||
func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
switch iter.WhatIsNext() {
|
||||
case StringValue:
|
||||
*((*Number)(ptr)) = Number(iter.ReadString())
|
||||
case NilValue:
|
||||
iter.skipFourBytes('n', 'u', 'l', 'l')
|
||||
*((*Number)(ptr)) = ""
|
||||
default:
|
||||
*((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
number := *((*Number)(ptr))
|
||||
if len(number) == 0 {
|
||||
stream.writeByte('0')
|
||||
} else {
|
||||
stream.WriteRaw(string(number))
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return len(*((*Number)(ptr))) == 0
|
||||
}
|
60
vendor/github.com/json-iterator/go/reflect_json_raw_message.go
generated
vendored
Normal file
60
vendor/github.com/json-iterator/go/reflect_json_raw_message.go
generated
vendored
Normal file
|
@ -0,0 +1,60 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/modern-go/reflect2"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()
|
||||
var jsoniterRawMessageType = reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()
|
||||
|
||||
func createEncoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
if typ == jsonRawMessageType {
|
||||
return &jsonRawMessageCodec{}
|
||||
}
|
||||
if typ == jsoniterRawMessageType {
|
||||
return &jsoniterRawMessageCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func createDecoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
if typ == jsonRawMessageType {
|
||||
return &jsonRawMessageCodec{}
|
||||
}
|
||||
if typ == jsoniterRawMessageType {
|
||||
return &jsoniterRawMessageCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type jsonRawMessageCodec struct {
|
||||
}
|
||||
|
||||
func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
*((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
|
||||
}
|
||||
|
||||
func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
|
||||
}
|
||||
|
||||
func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return len(*((*json.RawMessage)(ptr))) == 0
|
||||
}
|
||||
|
||||
type jsoniterRawMessageCodec struct {
|
||||
}
|
||||
|
||||
func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
*((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
|
||||
}
|
||||
|
||||
func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteRaw(string(*((*RawMessage)(ptr))))
|
||||
}
|
||||
|
||||
func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return len(*((*RawMessage)(ptr))) == 0
|
||||
}
|
346
vendor/github.com/json-iterator/go/reflect_map.go
generated
vendored
Normal file
346
vendor/github.com/json-iterator/go/reflect_map.go
generated
vendored
Normal file
|
@ -0,0 +1,346 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"reflect"
|
||||
"sort"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
mapType := typ.(*reflect2.UnsafeMapType)
|
||||
keyDecoder := decoderOfMapKey(ctx.append("[mapKey]"), mapType.Key())
|
||||
elemDecoder := decoderOfType(ctx.append("[mapElem]"), mapType.Elem())
|
||||
return &mapDecoder{
|
||||
mapType: mapType,
|
||||
keyType: mapType.Key(),
|
||||
elemType: mapType.Elem(),
|
||||
keyDecoder: keyDecoder,
|
||||
elemDecoder: elemDecoder,
|
||||
}
|
||||
}
|
||||
|
||||
func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
mapType := typ.(*reflect2.UnsafeMapType)
|
||||
if ctx.sortMapKeys {
|
||||
return &sortKeysMapEncoder{
|
||||
mapType: mapType,
|
||||
keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
|
||||
elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
|
||||
}
|
||||
}
|
||||
return &mapEncoder{
|
||||
mapType: mapType,
|
||||
keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
|
||||
elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
|
||||
}
|
||||
}
|
||||
|
||||
func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
decoder := extension.CreateMapKeyDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
}
|
||||
|
||||
ptrType := reflect2.PtrTo(typ)
|
||||
if ptrType.Implements(unmarshalerType) {
|
||||
return &referenceDecoder{
|
||||
&unmarshalerDecoder{
|
||||
valType: ptrType,
|
||||
},
|
||||
}
|
||||
}
|
||||
if typ.Implements(unmarshalerType) {
|
||||
return &unmarshalerDecoder{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
if ptrType.Implements(textUnmarshalerType) {
|
||||
return &referenceDecoder{
|
||||
&textUnmarshalerDecoder{
|
||||
valType: ptrType,
|
||||
},
|
||||
}
|
||||
}
|
||||
if typ.Implements(textUnmarshalerType) {
|
||||
return &textUnmarshalerDecoder{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
|
||||
switch typ.Kind() {
|
||||
case reflect.String:
|
||||
return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
|
||||
case reflect.Bool,
|
||||
reflect.Uint8, reflect.Int8,
|
||||
reflect.Uint16, reflect.Int16,
|
||||
reflect.Uint32, reflect.Int32,
|
||||
reflect.Uint64, reflect.Int64,
|
||||
reflect.Uint, reflect.Int,
|
||||
reflect.Float32, reflect.Float64,
|
||||
reflect.Uintptr:
|
||||
typ = reflect2.DefaultTypeOfKind(typ.Kind())
|
||||
return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
|
||||
default:
|
||||
return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
|
||||
}
|
||||
}
|
||||
|
||||
func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
encoder := extension.CreateMapKeyEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
}
|
||||
|
||||
if typ == textMarshalerType {
|
||||
return &directTextMarshalerEncoder{
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
}
|
||||
}
|
||||
if typ.Implements(textMarshalerType) {
|
||||
return &textMarshalerEncoder{
|
||||
valType: typ,
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
}
|
||||
}
|
||||
|
||||
switch typ.Kind() {
|
||||
case reflect.String:
|
||||
return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
|
||||
case reflect.Bool,
|
||||
reflect.Uint8, reflect.Int8,
|
||||
reflect.Uint16, reflect.Int16,
|
||||
reflect.Uint32, reflect.Int32,
|
||||
reflect.Uint64, reflect.Int64,
|
||||
reflect.Uint, reflect.Int,
|
||||
reflect.Float32, reflect.Float64,
|
||||
reflect.Uintptr:
|
||||
typ = reflect2.DefaultTypeOfKind(typ.Kind())
|
||||
return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
|
||||
default:
|
||||
if typ.Kind() == reflect.Interface {
|
||||
return &dynamicMapKeyEncoder{ctx, typ}
|
||||
}
|
||||
return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
|
||||
}
|
||||
}
|
||||
|
||||
type mapDecoder struct {
|
||||
mapType *reflect2.UnsafeMapType
|
||||
keyType reflect2.Type
|
||||
elemType reflect2.Type
|
||||
keyDecoder ValDecoder
|
||||
elemDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
mapType := decoder.mapType
|
||||
c := iter.nextToken()
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
*(*unsafe.Pointer)(ptr) = nil
|
||||
mapType.UnsafeSet(ptr, mapType.UnsafeNew())
|
||||
return
|
||||
}
|
||||
if mapType.UnsafeIsNil(ptr) {
|
||||
mapType.UnsafeSet(ptr, mapType.UnsafeMakeMap(0))
|
||||
}
|
||||
if c != '{' {
|
||||
iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == '}' {
|
||||
return
|
||||
}
|
||||
iter.unreadByte()
|
||||
key := decoder.keyType.UnsafeNew()
|
||||
decoder.keyDecoder.Decode(key, iter)
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
elem := decoder.elemType.UnsafeNew()
|
||||
decoder.elemDecoder.Decode(elem, iter)
|
||||
decoder.mapType.UnsafeSetIndex(ptr, key, elem)
|
||||
for c = iter.nextToken(); c == ','; c = iter.nextToken() {
|
||||
key := decoder.keyType.UnsafeNew()
|
||||
decoder.keyDecoder.Decode(key, iter)
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
elem := decoder.elemType.UnsafeNew()
|
||||
decoder.elemDecoder.Decode(elem, iter)
|
||||
decoder.mapType.UnsafeSetIndex(ptr, key, elem)
|
||||
}
|
||||
if c != '}' {
|
||||
iter.ReportError("ReadMapCB", `expect }, but found `+string([]byte{c}))
|
||||
}
|
||||
}
|
||||
|
||||
type numericMapKeyDecoder struct {
|
||||
decoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *numericMapKeyDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
c := iter.nextToken()
|
||||
if c != '"' {
|
||||
iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
decoder.decoder.Decode(ptr, iter)
|
||||
c = iter.nextToken()
|
||||
if c != '"' {
|
||||
iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
type numericMapKeyEncoder struct {
|
||||
encoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *numericMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.writeByte('"')
|
||||
encoder.encoder.Encode(ptr, stream)
|
||||
stream.writeByte('"')
|
||||
}
|
||||
|
||||
func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type dynamicMapKeyEncoder struct {
|
||||
ctx *ctx
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := encoder.valType.UnsafeIndirect(ptr)
|
||||
encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream)
|
||||
}
|
||||
|
||||
func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
obj := encoder.valType.UnsafeIndirect(ptr)
|
||||
return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj))
|
||||
}
|
||||
|
||||
type mapEncoder struct {
|
||||
mapType *reflect2.UnsafeMapType
|
||||
keyEncoder ValEncoder
|
||||
elemEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if *(*unsafe.Pointer)(ptr) == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
stream.WriteObjectStart()
|
||||
iter := encoder.mapType.UnsafeIterate(ptr)
|
||||
for i := 0; iter.HasNext(); i++ {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
key, elem := iter.UnsafeNext()
|
||||
encoder.keyEncoder.Encode(key, stream)
|
||||
if stream.indention > 0 {
|
||||
stream.writeTwoBytes(byte(':'), byte(' '))
|
||||
} else {
|
||||
stream.writeByte(':')
|
||||
}
|
||||
encoder.elemEncoder.Encode(elem, stream)
|
||||
}
|
||||
stream.WriteObjectEnd()
|
||||
}
|
||||
|
||||
func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
iter := encoder.mapType.UnsafeIterate(ptr)
|
||||
return !iter.HasNext()
|
||||
}
|
||||
|
||||
type sortKeysMapEncoder struct {
|
||||
mapType *reflect2.UnsafeMapType
|
||||
keyEncoder ValEncoder
|
||||
elemEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if *(*unsafe.Pointer)(ptr) == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
stream.WriteObjectStart()
|
||||
mapIter := encoder.mapType.UnsafeIterate(ptr)
|
||||
subStream := stream.cfg.BorrowStream(nil)
|
||||
subStream.Attachment = stream.Attachment
|
||||
subIter := stream.cfg.BorrowIterator(nil)
|
||||
keyValues := encodedKeyValues{}
|
||||
for mapIter.HasNext() {
|
||||
key, elem := mapIter.UnsafeNext()
|
||||
subStreamIndex := subStream.Buffered()
|
||||
encoder.keyEncoder.Encode(key, subStream)
|
||||
if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
|
||||
stream.Error = subStream.Error
|
||||
}
|
||||
encodedKey := subStream.Buffer()[subStreamIndex:]
|
||||
subIter.ResetBytes(encodedKey)
|
||||
decodedKey := subIter.ReadString()
|
||||
if stream.indention > 0 {
|
||||
subStream.writeTwoBytes(byte(':'), byte(' '))
|
||||
} else {
|
||||
subStream.writeByte(':')
|
||||
}
|
||||
encoder.elemEncoder.Encode(elem, subStream)
|
||||
keyValues = append(keyValues, encodedKV{
|
||||
key: decodedKey,
|
||||
keyValue: subStream.Buffer()[subStreamIndex:],
|
||||
})
|
||||
}
|
||||
sort.Sort(keyValues)
|
||||
for i, keyValue := range keyValues {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.Write(keyValue.keyValue)
|
||||
}
|
||||
if subStream.Error != nil && stream.Error == nil {
|
||||
stream.Error = subStream.Error
|
||||
}
|
||||
stream.WriteObjectEnd()
|
||||
stream.cfg.ReturnStream(subStream)
|
||||
stream.cfg.ReturnIterator(subIter)
|
||||
}
|
||||
|
||||
func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
iter := encoder.mapType.UnsafeIterate(ptr)
|
||||
return !iter.HasNext()
|
||||
}
|
||||
|
||||
type encodedKeyValues []encodedKV
|
||||
|
||||
type encodedKV struct {
|
||||
key string
|
||||
keyValue []byte
|
||||
}
|
||||
|
||||
func (sv encodedKeyValues) Len() int { return len(sv) }
|
||||
func (sv encodedKeyValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
|
||||
func (sv encodedKeyValues) Less(i, j int) bool { return sv[i].key < sv[j].key }
|
225
vendor/github.com/json-iterator/go/reflect_marshaler.go
generated
vendored
Normal file
225
vendor/github.com/json-iterator/go/reflect_marshaler.go
generated
vendored
Normal file
|
@ -0,0 +1,225 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"encoding/json"
|
||||
"unsafe"
|
||||
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
|
||||
var unmarshalerType = reflect2.TypeOfPtr((*json.Unmarshaler)(nil)).Elem()
|
||||
var textMarshalerType = reflect2.TypeOfPtr((*encoding.TextMarshaler)(nil)).Elem()
|
||||
var textUnmarshalerType = reflect2.TypeOfPtr((*encoding.TextUnmarshaler)(nil)).Elem()
|
||||
|
||||
func createDecoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
ptrType := reflect2.PtrTo(typ)
|
||||
if ptrType.Implements(unmarshalerType) {
|
||||
return &referenceDecoder{
|
||||
&unmarshalerDecoder{ptrType},
|
||||
}
|
||||
}
|
||||
if ptrType.Implements(textUnmarshalerType) {
|
||||
return &referenceDecoder{
|
||||
&textUnmarshalerDecoder{ptrType},
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func createEncoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
if typ == marshalerType {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, typ)
|
||||
var encoder ValEncoder = &directMarshalerEncoder{
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
if typ.Implements(marshalerType) {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, typ)
|
||||
var encoder ValEncoder = &marshalerEncoder{
|
||||
valType: typ,
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
ptrType := reflect2.PtrTo(typ)
|
||||
if ctx.prefix != "" && ptrType.Implements(marshalerType) {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
|
||||
var encoder ValEncoder = &marshalerEncoder{
|
||||
valType: ptrType,
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return &referenceEncoder{encoder}
|
||||
}
|
||||
if typ == textMarshalerType {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, typ)
|
||||
var encoder ValEncoder = &directTextMarshalerEncoder{
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
if typ.Implements(textMarshalerType) {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, typ)
|
||||
var encoder ValEncoder = &textMarshalerEncoder{
|
||||
valType: typ,
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
// if prefix is empty, the type is the root type
|
||||
if ctx.prefix != "" && ptrType.Implements(textMarshalerType) {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
|
||||
var encoder ValEncoder = &textMarshalerEncoder{
|
||||
valType: ptrType,
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return &referenceEncoder{encoder}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type marshalerEncoder struct {
|
||||
checkIsEmpty checkIsEmpty
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := encoder.valType.UnsafeIndirect(ptr)
|
||||
if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
marshaler := obj.(json.Marshaler)
|
||||
bytes, err := marshaler.MarshalJSON()
|
||||
if err != nil {
|
||||
stream.Error = err
|
||||
} else {
|
||||
// html escape was already done by jsoniter
|
||||
// but the extra '\n' should be trimed
|
||||
l := len(bytes)
|
||||
if l > 0 && bytes[l-1] == '\n' {
|
||||
bytes = bytes[:l-1]
|
||||
}
|
||||
stream.Write(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.checkIsEmpty.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type directMarshalerEncoder struct {
|
||||
checkIsEmpty checkIsEmpty
|
||||
}
|
||||
|
||||
func (encoder *directMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
marshaler := *(*json.Marshaler)(ptr)
|
||||
if marshaler == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
bytes, err := marshaler.MarshalJSON()
|
||||
if err != nil {
|
||||
stream.Error = err
|
||||
} else {
|
||||
stream.Write(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *directMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.checkIsEmpty.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type textMarshalerEncoder struct {
|
||||
valType reflect2.Type
|
||||
stringEncoder ValEncoder
|
||||
checkIsEmpty checkIsEmpty
|
||||
}
|
||||
|
||||
func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := encoder.valType.UnsafeIndirect(ptr)
|
||||
if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
marshaler := (obj).(encoding.TextMarshaler)
|
||||
bytes, err := marshaler.MarshalText()
|
||||
if err != nil {
|
||||
stream.Error = err
|
||||
} else {
|
||||
str := string(bytes)
|
||||
encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.checkIsEmpty.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type directTextMarshalerEncoder struct {
|
||||
stringEncoder ValEncoder
|
||||
checkIsEmpty checkIsEmpty
|
||||
}
|
||||
|
||||
func (encoder *directTextMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
marshaler := *(*encoding.TextMarshaler)(ptr)
|
||||
if marshaler == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
bytes, err := marshaler.MarshalText()
|
||||
if err != nil {
|
||||
stream.Error = err
|
||||
} else {
|
||||
str := string(bytes)
|
||||
encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *directTextMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.checkIsEmpty.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type unmarshalerDecoder struct {
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
valType := decoder.valType
|
||||
obj := valType.UnsafeIndirect(ptr)
|
||||
unmarshaler := obj.(json.Unmarshaler)
|
||||
iter.nextToken()
|
||||
iter.unreadByte() // skip spaces
|
||||
bytes := iter.SkipAndReturnBytes()
|
||||
err := unmarshaler.UnmarshalJSON(bytes)
|
||||
if err != nil {
|
||||
iter.ReportError("unmarshalerDecoder", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
type textUnmarshalerDecoder struct {
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
valType := decoder.valType
|
||||
obj := valType.UnsafeIndirect(ptr)
|
||||
if reflect2.IsNil(obj) {
|
||||
ptrType := valType.(*reflect2.UnsafePtrType)
|
||||
elemType := ptrType.Elem()
|
||||
elem := elemType.UnsafeNew()
|
||||
ptrType.UnsafeSet(ptr, unsafe.Pointer(&elem))
|
||||
obj = valType.UnsafeIndirect(ptr)
|
||||
}
|
||||
unmarshaler := (obj).(encoding.TextUnmarshaler)
|
||||
str := iter.ReadString()
|
||||
err := unmarshaler.UnmarshalText([]byte(str))
|
||||
if err != nil {
|
||||
iter.ReportError("textUnmarshalerDecoder", err.Error())
|
||||
}
|
||||
}
|
453
vendor/github.com/json-iterator/go/reflect_native.go
generated
vendored
Normal file
453
vendor/github.com/json-iterator/go/reflect_native.go
generated
vendored
Normal file
|
@ -0,0 +1,453 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
const ptrSize = 32 << uintptr(^uintptr(0)>>63)
|
||||
|
||||
func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
|
||||
sliceDecoder := decoderOfSlice(ctx, typ)
|
||||
return &base64Codec{sliceDecoder: sliceDecoder}
|
||||
}
|
||||
typeName := typ.String()
|
||||
kind := typ.Kind()
|
||||
switch kind {
|
||||
case reflect.String:
|
||||
if typeName != "string" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
|
||||
}
|
||||
return &stringCodec{}
|
||||
case reflect.Int:
|
||||
if typeName != "int" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
|
||||
}
|
||||
if strconv.IntSize == 32 {
|
||||
return &int32Codec{}
|
||||
}
|
||||
return &int64Codec{}
|
||||
case reflect.Int8:
|
||||
if typeName != "int8" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
|
||||
}
|
||||
return &int8Codec{}
|
||||
case reflect.Int16:
|
||||
if typeName != "int16" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
|
||||
}
|
||||
return &int16Codec{}
|
||||
case reflect.Int32:
|
||||
if typeName != "int32" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
|
||||
}
|
||||
return &int32Codec{}
|
||||
case reflect.Int64:
|
||||
if typeName != "int64" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
|
||||
}
|
||||
return &int64Codec{}
|
||||
case reflect.Uint:
|
||||
if typeName != "uint" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
|
||||
}
|
||||
if strconv.IntSize == 32 {
|
||||
return &uint32Codec{}
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Uint8:
|
||||
if typeName != "uint8" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
|
||||
}
|
||||
return &uint8Codec{}
|
||||
case reflect.Uint16:
|
||||
if typeName != "uint16" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
|
||||
}
|
||||
return &uint16Codec{}
|
||||
case reflect.Uint32:
|
||||
if typeName != "uint32" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
|
||||
}
|
||||
return &uint32Codec{}
|
||||
case reflect.Uintptr:
|
||||
if typeName != "uintptr" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
|
||||
}
|
||||
if ptrSize == 32 {
|
||||
return &uint32Codec{}
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Uint64:
|
||||
if typeName != "uint64" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Float32:
|
||||
if typeName != "float32" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
|
||||
}
|
||||
return &float32Codec{}
|
||||
case reflect.Float64:
|
||||
if typeName != "float64" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
|
||||
}
|
||||
return &float64Codec{}
|
||||
case reflect.Bool:
|
||||
if typeName != "bool" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
|
||||
}
|
||||
return &boolCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
|
||||
sliceDecoder := decoderOfSlice(ctx, typ)
|
||||
return &base64Codec{sliceDecoder: sliceDecoder}
|
||||
}
|
||||
typeName := typ.String()
|
||||
switch typ.Kind() {
|
||||
case reflect.String:
|
||||
if typeName != "string" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
|
||||
}
|
||||
return &stringCodec{}
|
||||
case reflect.Int:
|
||||
if typeName != "int" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
|
||||
}
|
||||
if strconv.IntSize == 32 {
|
||||
return &int32Codec{}
|
||||
}
|
||||
return &int64Codec{}
|
||||
case reflect.Int8:
|
||||
if typeName != "int8" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
|
||||
}
|
||||
return &int8Codec{}
|
||||
case reflect.Int16:
|
||||
if typeName != "int16" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
|
||||
}
|
||||
return &int16Codec{}
|
||||
case reflect.Int32:
|
||||
if typeName != "int32" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
|
||||
}
|
||||
return &int32Codec{}
|
||||
case reflect.Int64:
|
||||
if typeName != "int64" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
|
||||
}
|
||||
return &int64Codec{}
|
||||
case reflect.Uint:
|
||||
if typeName != "uint" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
|
||||
}
|
||||
if strconv.IntSize == 32 {
|
||||
return &uint32Codec{}
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Uint8:
|
||||
if typeName != "uint8" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
|
||||
}
|
||||
return &uint8Codec{}
|
||||
case reflect.Uint16:
|
||||
if typeName != "uint16" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
|
||||
}
|
||||
return &uint16Codec{}
|
||||
case reflect.Uint32:
|
||||
if typeName != "uint32" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
|
||||
}
|
||||
return &uint32Codec{}
|
||||
case reflect.Uintptr:
|
||||
if typeName != "uintptr" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
|
||||
}
|
||||
if ptrSize == 32 {
|
||||
return &uint32Codec{}
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Uint64:
|
||||
if typeName != "uint64" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Float32:
|
||||
if typeName != "float32" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
|
||||
}
|
||||
return &float32Codec{}
|
||||
case reflect.Float64:
|
||||
if typeName != "float64" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
|
||||
}
|
||||
return &float64Codec{}
|
||||
case reflect.Bool:
|
||||
if typeName != "bool" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
|
||||
}
|
||||
return &boolCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type stringCodec struct {
|
||||
}
|
||||
|
||||
func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
*((*string)(ptr)) = iter.ReadString()
|
||||
}
|
||||
|
||||
func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
str := *((*string)(ptr))
|
||||
stream.WriteString(str)
|
||||
}
|
||||
|
||||
func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*string)(ptr)) == ""
|
||||
}
|
||||
|
||||
type int8Codec struct {
|
||||
}
|
||||
|
||||
func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*int8)(ptr)) = iter.ReadInt8()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteInt8(*((*int8)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*int8)(ptr)) == 0
|
||||
}
|
||||
|
||||
type int16Codec struct {
|
||||
}
|
||||
|
||||
func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*int16)(ptr)) = iter.ReadInt16()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteInt16(*((*int16)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*int16)(ptr)) == 0
|
||||
}
|
||||
|
||||
type int32Codec struct {
|
||||
}
|
||||
|
||||
func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*int32)(ptr)) = iter.ReadInt32()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteInt32(*((*int32)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*int32)(ptr)) == 0
|
||||
}
|
||||
|
||||
type int64Codec struct {
|
||||
}
|
||||
|
||||
func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*int64)(ptr)) = iter.ReadInt64()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteInt64(*((*int64)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*int64)(ptr)) == 0
|
||||
}
|
||||
|
||||
type uint8Codec struct {
|
||||
}
|
||||
|
||||
func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*uint8)(ptr)) = iter.ReadUint8()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteUint8(*((*uint8)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*uint8)(ptr)) == 0
|
||||
}
|
||||
|
||||
type uint16Codec struct {
|
||||
}
|
||||
|
||||
func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*uint16)(ptr)) = iter.ReadUint16()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteUint16(*((*uint16)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*uint16)(ptr)) == 0
|
||||
}
|
||||
|
||||
type uint32Codec struct {
|
||||
}
|
||||
|
||||
func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*uint32)(ptr)) = iter.ReadUint32()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteUint32(*((*uint32)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*uint32)(ptr)) == 0
|
||||
}
|
||||
|
||||
type uint64Codec struct {
|
||||
}
|
||||
|
||||
func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*uint64)(ptr)) = iter.ReadUint64()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteUint64(*((*uint64)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*uint64)(ptr)) == 0
|
||||
}
|
||||
|
||||
type float32Codec struct {
|
||||
}
|
||||
|
||||
func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*float32)(ptr)) = iter.ReadFloat32()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteFloat32(*((*float32)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*float32)(ptr)) == 0
|
||||
}
|
||||
|
||||
type float64Codec struct {
|
||||
}
|
||||
|
||||
func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*float64)(ptr)) = iter.ReadFloat64()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteFloat64(*((*float64)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*float64)(ptr)) == 0
|
||||
}
|
||||
|
||||
type boolCodec struct {
|
||||
}
|
||||
|
||||
func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*bool)(ptr)) = iter.ReadBool()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteBool(*((*bool)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return !(*((*bool)(ptr)))
|
||||
}
|
||||
|
||||
type base64Codec struct {
|
||||
sliceType *reflect2.UnsafeSliceType
|
||||
sliceDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if iter.ReadNil() {
|
||||
codec.sliceType.UnsafeSetNil(ptr)
|
||||
return
|
||||
}
|
||||
switch iter.WhatIsNext() {
|
||||
case StringValue:
|
||||
src := iter.ReadString()
|
||||
dst, err := base64.StdEncoding.DecodeString(src)
|
||||
if err != nil {
|
||||
iter.ReportError("decode base64", err.Error())
|
||||
} else {
|
||||
codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
|
||||
}
|
||||
case ArrayValue:
|
||||
codec.sliceDecoder.Decode(ptr, iter)
|
||||
default:
|
||||
iter.ReportError("base64Codec", "invalid input")
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if codec.sliceType.UnsafeIsNil(ptr) {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
src := *((*[]byte)(ptr))
|
||||
encoding := base64.StdEncoding
|
||||
stream.writeByte('"')
|
||||
if len(src) != 0 {
|
||||
size := encoding.EncodedLen(len(src))
|
||||
buf := make([]byte, size)
|
||||
encoding.Encode(buf, src)
|
||||
stream.buf = append(stream.buf, buf...)
|
||||
}
|
||||
stream.writeByte('"')
|
||||
}
|
||||
|
||||
func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return len(*((*[]byte)(ptr))) == 0
|
||||
}
|
129
vendor/github.com/json-iterator/go/reflect_optional.go
generated
vendored
Normal file
129
vendor/github.com/json-iterator/go/reflect_optional.go
generated
vendored
Normal file
|
@ -0,0 +1,129 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"github.com/modern-go/reflect2"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
ptrType := typ.(*reflect2.UnsafePtrType)
|
||||
elemType := ptrType.Elem()
|
||||
decoder := decoderOfType(ctx, elemType)
|
||||
return &OptionalDecoder{elemType, decoder}
|
||||
}
|
||||
|
||||
func encoderOfOptional(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
ptrType := typ.(*reflect2.UnsafePtrType)
|
||||
elemType := ptrType.Elem()
|
||||
elemEncoder := encoderOfType(ctx, elemType)
|
||||
encoder := &OptionalEncoder{elemEncoder}
|
||||
return encoder
|
||||
}
|
||||
|
||||
type OptionalDecoder struct {
|
||||
ValueType reflect2.Type
|
||||
ValueDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if iter.ReadNil() {
|
||||
*((*unsafe.Pointer)(ptr)) = nil
|
||||
} else {
|
||||
if *((*unsafe.Pointer)(ptr)) == nil {
|
||||
//pointer to null, we have to allocate memory to hold the value
|
||||
newPtr := decoder.ValueType.UnsafeNew()
|
||||
decoder.ValueDecoder.Decode(newPtr, iter)
|
||||
*((*unsafe.Pointer)(ptr)) = newPtr
|
||||
} else {
|
||||
//reuse existing instance
|
||||
decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type dereferenceDecoder struct {
|
||||
// only to deference a pointer
|
||||
valueType reflect2.Type
|
||||
valueDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if *((*unsafe.Pointer)(ptr)) == nil {
|
||||
//pointer to null, we have to allocate memory to hold the value
|
||||
newPtr := decoder.valueType.UnsafeNew()
|
||||
decoder.valueDecoder.Decode(newPtr, iter)
|
||||
*((*unsafe.Pointer)(ptr)) = newPtr
|
||||
} else {
|
||||
//reuse existing instance
|
||||
decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
|
||||
}
|
||||
}
|
||||
|
||||
type OptionalEncoder struct {
|
||||
ValueEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if *((*unsafe.Pointer)(ptr)) == nil {
|
||||
stream.WriteNil()
|
||||
} else {
|
||||
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*unsafe.Pointer)(ptr)) == nil
|
||||
}
|
||||
|
||||
type dereferenceEncoder struct {
|
||||
ValueEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if *((*unsafe.Pointer)(ptr)) == nil {
|
||||
stream.WriteNil()
|
||||
} else {
|
||||
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
dePtr := *((*unsafe.Pointer)(ptr))
|
||||
if dePtr == nil {
|
||||
return true
|
||||
}
|
||||
return encoder.ValueEncoder.IsEmpty(dePtr)
|
||||
}
|
||||
|
||||
func (encoder *dereferenceEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
|
||||
deReferenced := *((*unsafe.Pointer)(ptr))
|
||||
if deReferenced == nil {
|
||||
return true
|
||||
}
|
||||
isEmbeddedPtrNil, converted := encoder.ValueEncoder.(IsEmbeddedPtrNil)
|
||||
if !converted {
|
||||
return false
|
||||
}
|
||||
fieldPtr := unsafe.Pointer(deReferenced)
|
||||
return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
|
||||
}
|
||||
|
||||
type referenceEncoder struct {
|
||||
encoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *referenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
|
||||
}
|
||||
|
||||
func (encoder *referenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
|
||||
}
|
||||
|
||||
type referenceDecoder struct {
|
||||
decoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *referenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
decoder.decoder.Decode(unsafe.Pointer(&ptr), iter)
|
||||
}
|
99
vendor/github.com/json-iterator/go/reflect_slice.go
generated
vendored
Normal file
99
vendor/github.com/json-iterator/go/reflect_slice.go
generated
vendored
Normal file
|
@ -0,0 +1,99 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
sliceType := typ.(*reflect2.UnsafeSliceType)
|
||||
decoder := decoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
|
||||
return &sliceDecoder{sliceType, decoder}
|
||||
}
|
||||
|
||||
func encoderOfSlice(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
sliceType := typ.(*reflect2.UnsafeSliceType)
|
||||
encoder := encoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
|
||||
return &sliceEncoder{sliceType, encoder}
|
||||
}
|
||||
|
||||
type sliceEncoder struct {
|
||||
sliceType *reflect2.UnsafeSliceType
|
||||
elemEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if encoder.sliceType.UnsafeIsNil(ptr) {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
length := encoder.sliceType.UnsafeLengthOf(ptr)
|
||||
if length == 0 {
|
||||
stream.WriteEmptyArray()
|
||||
return
|
||||
}
|
||||
stream.WriteArrayStart()
|
||||
encoder.elemEncoder.Encode(encoder.sliceType.UnsafeGetIndex(ptr, 0), stream)
|
||||
for i := 1; i < length; i++ {
|
||||
stream.WriteMore()
|
||||
elemPtr := encoder.sliceType.UnsafeGetIndex(ptr, i)
|
||||
encoder.elemEncoder.Encode(elemPtr, stream)
|
||||
}
|
||||
stream.WriteArrayEnd()
|
||||
if stream.Error != nil && stream.Error != io.EOF {
|
||||
stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.sliceType.UnsafeLengthOf(ptr) == 0
|
||||
}
|
||||
|
||||
type sliceDecoder struct {
|
||||
sliceType *reflect2.UnsafeSliceType
|
||||
elemDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
decoder.doDecode(ptr, iter)
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
c := iter.nextToken()
|
||||
sliceType := decoder.sliceType
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
sliceType.UnsafeSetNil(ptr)
|
||||
return
|
||||
}
|
||||
if c != '[' {
|
||||
iter.ReportError("decode slice", "expect [ or n, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == ']' {
|
||||
sliceType.UnsafeSet(ptr, sliceType.UnsafeMakeSlice(0, 0))
|
||||
return
|
||||
}
|
||||
iter.unreadByte()
|
||||
sliceType.UnsafeGrow(ptr, 1)
|
||||
elemPtr := sliceType.UnsafeGetIndex(ptr, 0)
|
||||
decoder.elemDecoder.Decode(elemPtr, iter)
|
||||
length := 1
|
||||
for c = iter.nextToken(); c == ','; c = iter.nextToken() {
|
||||
idx := length
|
||||
length += 1
|
||||
sliceType.UnsafeGrow(ptr, length)
|
||||
elemPtr = sliceType.UnsafeGetIndex(ptr, idx)
|
||||
decoder.elemDecoder.Decode(elemPtr, iter)
|
||||
}
|
||||
if c != ']' {
|
||||
iter.ReportError("decode slice", "expect ], but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
}
|
1092
vendor/github.com/json-iterator/go/reflect_struct_decoder.go
generated
vendored
Normal file
1092
vendor/github.com/json-iterator/go/reflect_struct_decoder.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
211
vendor/github.com/json-iterator/go/reflect_struct_encoder.go
generated
vendored
Normal file
211
vendor/github.com/json-iterator/go/reflect_struct_encoder.go
generated
vendored
Normal file
|
@ -0,0 +1,211 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
type bindingTo struct {
|
||||
binding *Binding
|
||||
toName string
|
||||
ignored bool
|
||||
}
|
||||
orderedBindings := []*bindingTo{}
|
||||
structDescriptor := describeStruct(ctx, typ)
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
for _, toName := range binding.ToNames {
|
||||
new := &bindingTo{
|
||||
binding: binding,
|
||||
toName: toName,
|
||||
}
|
||||
for _, old := range orderedBindings {
|
||||
if old.toName != toName {
|
||||
continue
|
||||
}
|
||||
old.ignored, new.ignored = resolveConflictBinding(ctx.frozenConfig, old.binding, new.binding)
|
||||
}
|
||||
orderedBindings = append(orderedBindings, new)
|
||||
}
|
||||
}
|
||||
if len(orderedBindings) == 0 {
|
||||
return &emptyStructEncoder{}
|
||||
}
|
||||
finalOrderedFields := []structFieldTo{}
|
||||
for _, bindingTo := range orderedBindings {
|
||||
if !bindingTo.ignored {
|
||||
finalOrderedFields = append(finalOrderedFields, structFieldTo{
|
||||
encoder: bindingTo.binding.Encoder.(*structFieldEncoder),
|
||||
toName: bindingTo.toName,
|
||||
})
|
||||
}
|
||||
}
|
||||
return &structEncoder{typ, finalOrderedFields}
|
||||
}
|
||||
|
||||
func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty {
|
||||
encoder := createEncoderOfNative(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
kind := typ.Kind()
|
||||
switch kind {
|
||||
case reflect.Interface:
|
||||
return &dynamicEncoder{typ}
|
||||
case reflect.Struct:
|
||||
return &structEncoder{typ: typ}
|
||||
case reflect.Array:
|
||||
return &arrayEncoder{}
|
||||
case reflect.Slice:
|
||||
return &sliceEncoder{}
|
||||
case reflect.Map:
|
||||
return encoderOfMap(ctx, typ)
|
||||
case reflect.Ptr:
|
||||
return &OptionalEncoder{}
|
||||
default:
|
||||
return &lazyErrorEncoder{err: fmt.Errorf("unsupported type: %v", typ)}
|
||||
}
|
||||
}
|
||||
|
||||
func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
|
||||
newTagged := new.Field.Tag().Get(cfg.getTagKey()) != ""
|
||||
oldTagged := old.Field.Tag().Get(cfg.getTagKey()) != ""
|
||||
if newTagged {
|
||||
if oldTagged {
|
||||
if len(old.levels) > len(new.levels) {
|
||||
return true, false
|
||||
} else if len(new.levels) > len(old.levels) {
|
||||
return false, true
|
||||
} else {
|
||||
return true, true
|
||||
}
|
||||
} else {
|
||||
return true, false
|
||||
}
|
||||
} else {
|
||||
if oldTagged {
|
||||
return true, false
|
||||
}
|
||||
if len(old.levels) > len(new.levels) {
|
||||
return true, false
|
||||
} else if len(new.levels) > len(old.levels) {
|
||||
return false, true
|
||||
} else {
|
||||
return true, true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type structFieldEncoder struct {
|
||||
field reflect2.StructField
|
||||
fieldEncoder ValEncoder
|
||||
omitempty bool
|
||||
}
|
||||
|
||||
func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
fieldPtr := encoder.field.UnsafeGet(ptr)
|
||||
encoder.fieldEncoder.Encode(fieldPtr, stream)
|
||||
if stream.Error != nil && stream.Error != io.EOF {
|
||||
stream.Error = fmt.Errorf("%s: %s", encoder.field.Name(), stream.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
fieldPtr := encoder.field.UnsafeGet(ptr)
|
||||
return encoder.fieldEncoder.IsEmpty(fieldPtr)
|
||||
}
|
||||
|
||||
func (encoder *structFieldEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
|
||||
isEmbeddedPtrNil, converted := encoder.fieldEncoder.(IsEmbeddedPtrNil)
|
||||
if !converted {
|
||||
return false
|
||||
}
|
||||
fieldPtr := encoder.field.UnsafeGet(ptr)
|
||||
return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
|
||||
}
|
||||
|
||||
type IsEmbeddedPtrNil interface {
|
||||
IsEmbeddedPtrNil(ptr unsafe.Pointer) bool
|
||||
}
|
||||
|
||||
type structEncoder struct {
|
||||
typ reflect2.Type
|
||||
fields []structFieldTo
|
||||
}
|
||||
|
||||
type structFieldTo struct {
|
||||
encoder *structFieldEncoder
|
||||
toName string
|
||||
}
|
||||
|
||||
func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteObjectStart()
|
||||
isNotFirst := false
|
||||
for _, field := range encoder.fields {
|
||||
if field.encoder.omitempty && field.encoder.IsEmpty(ptr) {
|
||||
continue
|
||||
}
|
||||
if field.encoder.IsEmbeddedPtrNil(ptr) {
|
||||
continue
|
||||
}
|
||||
if isNotFirst {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.WriteObjectField(field.toName)
|
||||
field.encoder.Encode(ptr, stream)
|
||||
isNotFirst = true
|
||||
}
|
||||
stream.WriteObjectEnd()
|
||||
if stream.Error != nil && stream.Error != io.EOF {
|
||||
stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type emptyStructEncoder struct {
|
||||
}
|
||||
|
||||
func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteEmptyObject()
|
||||
}
|
||||
|
||||
func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type stringModeNumberEncoder struct {
|
||||
elemEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.writeByte('"')
|
||||
encoder.elemEncoder.Encode(ptr, stream)
|
||||
stream.writeByte('"')
|
||||
}
|
||||
|
||||
func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.elemEncoder.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type stringModeStringEncoder struct {
|
||||
elemEncoder ValEncoder
|
||||
cfg *frozenConfig
|
||||
}
|
||||
|
||||
func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
tempStream := encoder.cfg.BorrowStream(nil)
|
||||
tempStream.Attachment = stream.Attachment
|
||||
defer encoder.cfg.ReturnStream(tempStream)
|
||||
encoder.elemEncoder.Encode(ptr, tempStream)
|
||||
stream.WriteString(string(tempStream.Buffer()))
|
||||
}
|
||||
|
||||
func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.elemEncoder.IsEmpty(ptr)
|
||||
}
|
210
vendor/github.com/json-iterator/go/stream.go
generated
vendored
Normal file
210
vendor/github.com/json-iterator/go/stream.go
generated
vendored
Normal file
|
@ -0,0 +1,210 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// stream is a io.Writer like object, with JSON specific write functions.
|
||||
// Error is not returned as return value, but stored as Error member on this stream instance.
|
||||
type Stream struct {
|
||||
cfg *frozenConfig
|
||||
out io.Writer
|
||||
buf []byte
|
||||
Error error
|
||||
indention int
|
||||
Attachment interface{} // open for customized encoder
|
||||
}
|
||||
|
||||
// NewStream create new stream instance.
|
||||
// cfg can be jsoniter.ConfigDefault.
|
||||
// out can be nil if write to internal buffer.
|
||||
// bufSize is the initial size for the internal buffer in bytes.
|
||||
func NewStream(cfg API, out io.Writer, bufSize int) *Stream {
|
||||
return &Stream{
|
||||
cfg: cfg.(*frozenConfig),
|
||||
out: out,
|
||||
buf: make([]byte, 0, bufSize),
|
||||
Error: nil,
|
||||
indention: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Pool returns a pool can provide more stream with same configuration
|
||||
func (stream *Stream) Pool() StreamPool {
|
||||
return stream.cfg
|
||||
}
|
||||
|
||||
// Reset reuse this stream instance by assign a new writer
|
||||
func (stream *Stream) Reset(out io.Writer) {
|
||||
stream.out = out
|
||||
stream.buf = stream.buf[:0]
|
||||
}
|
||||
|
||||
// Available returns how many bytes are unused in the buffer.
|
||||
func (stream *Stream) Available() int {
|
||||
return cap(stream.buf) - len(stream.buf)
|
||||
}
|
||||
|
||||
// Buffered returns the number of bytes that have been written into the current buffer.
|
||||
func (stream *Stream) Buffered() int {
|
||||
return len(stream.buf)
|
||||
}
|
||||
|
||||
// Buffer if writer is nil, use this method to take the result
|
||||
func (stream *Stream) Buffer() []byte {
|
||||
return stream.buf
|
||||
}
|
||||
|
||||
// SetBuffer allows to append to the internal buffer directly
|
||||
func (stream *Stream) SetBuffer(buf []byte) {
|
||||
stream.buf = buf
|
||||
}
|
||||
|
||||
// Write writes the contents of p into the buffer.
|
||||
// It returns the number of bytes written.
|
||||
// If nn < len(p), it also returns an error explaining
|
||||
// why the write is short.
|
||||
func (stream *Stream) Write(p []byte) (nn int, err error) {
|
||||
stream.buf = append(stream.buf, p...)
|
||||
if stream.out != nil {
|
||||
nn, err = stream.out.Write(stream.buf)
|
||||
stream.buf = stream.buf[nn:]
|
||||
return
|
||||
}
|
||||
return len(p), nil
|
||||
}
|
||||
|
||||
// WriteByte writes a single byte.
|
||||
func (stream *Stream) writeByte(c byte) {
|
||||
stream.buf = append(stream.buf, c)
|
||||
}
|
||||
|
||||
func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) {
|
||||
stream.buf = append(stream.buf, c1, c2)
|
||||
}
|
||||
|
||||
func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
|
||||
stream.buf = append(stream.buf, c1, c2, c3)
|
||||
}
|
||||
|
||||
func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
    stream.buf = append(stream.buf, c1, c2, c3, c4)
}

func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
    stream.buf = append(stream.buf, c1, c2, c3, c4, c5)
}

// Flush writes any buffered data to the underlying io.Writer.
func (stream *Stream) Flush() error {
    if stream.out == nil {
        return nil
    }
    if stream.Error != nil {
        return stream.Error
    }
    _, err := stream.out.Write(stream.buf)
    if err != nil {
        if stream.Error == nil {
            stream.Error = err
        }
        return err
    }
    stream.buf = stream.buf[:0]
    return nil
}

// WriteRaw write string out without quotes, just like []byte
func (stream *Stream) WriteRaw(s string) {
    stream.buf = append(stream.buf, s...)
}

// WriteNil write null to stream
func (stream *Stream) WriteNil() {
    stream.writeFourBytes('n', 'u', 'l', 'l')
}

// WriteTrue write true to stream
func (stream *Stream) WriteTrue() {
    stream.writeFourBytes('t', 'r', 'u', 'e')
}

// WriteFalse write false to stream
func (stream *Stream) WriteFalse() {
    stream.writeFiveBytes('f', 'a', 'l', 's', 'e')
}

// WriteBool write true or false into stream
func (stream *Stream) WriteBool(val bool) {
    if val {
        stream.WriteTrue()
    } else {
        stream.WriteFalse()
    }
}

// WriteObjectStart write { with possible indention
func (stream *Stream) WriteObjectStart() {
    stream.indention += stream.cfg.indentionStep
    stream.writeByte('{')
    stream.writeIndention(0)
}

// WriteObjectField write "field": with possible indention
func (stream *Stream) WriteObjectField(field string) {
    stream.WriteString(field)
    if stream.indention > 0 {
        stream.writeTwoBytes(':', ' ')
    } else {
        stream.writeByte(':')
    }
}

// WriteObjectEnd write } with possible indention
func (stream *Stream) WriteObjectEnd() {
    stream.writeIndention(stream.cfg.indentionStep)
    stream.indention -= stream.cfg.indentionStep
    stream.writeByte('}')
}

// WriteEmptyObject write {}
func (stream *Stream) WriteEmptyObject() {
    stream.writeByte('{')
    stream.writeByte('}')
}

// WriteMore write , with possible indention
func (stream *Stream) WriteMore() {
    stream.writeByte(',')
    stream.writeIndention(0)
}

// WriteArrayStart write [ with possible indention
func (stream *Stream) WriteArrayStart() {
    stream.indention += stream.cfg.indentionStep
    stream.writeByte('[')
    stream.writeIndention(0)
}

// WriteEmptyArray write []
func (stream *Stream) WriteEmptyArray() {
    stream.writeTwoBytes('[', ']')
}

// WriteArrayEnd write ] with possible indention
func (stream *Stream) WriteArrayEnd() {
    stream.writeIndention(stream.cfg.indentionStep)
    stream.indention -= stream.cfg.indentionStep
    stream.writeByte(']')
}

func (stream *Stream) writeIndention(delta int) {
    if stream.indention == 0 {
        return
    }
    stream.writeByte('\n')
    toWrite := stream.indention - delta
    for i := 0; i < toWrite; i++ {
        stream.buf = append(stream.buf, ' ')
    }
}
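Taken together, these low-level writers are what jsoniter's encoders drive: WriteObjectStart/WriteObjectField/WriteMore/WriteObjectEnd emit the punctuation (with optional indention) while the typed writers fill in values. A minimal sketch of driving the public Stream API directly, assuming the package is imported as `jsoniter` from github.com/json-iterator/go:

```go
package main

import (
    "fmt"
    "os"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    // Writes accumulate in the stream's buffer until Flush pushes them to the io.Writer.
    stream := jsoniter.NewStream(jsoniter.ConfigDefault, os.Stdout, 64)
    stream.WriteObjectStart()
    stream.WriteObjectField("name")
    stream.WriteString("etcd")
    stream.WriteMore()
    stream.WriteObjectField("replicas")
    stream.WriteInt(3)
    stream.WriteObjectEnd()
    if err := stream.Flush(); err != nil {
        fmt.Fprintln(os.Stderr, err)
    }
    // Output: {"name":"etcd","replicas":3}
}
```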
111 vendor/github.com/json-iterator/go/stream_float.go generated vendored Normal file
@@ -0,0 +1,111 @@
package jsoniter

import (
    "fmt"
    "math"
    "strconv"
)

var pow10 []uint64

func init() {
    pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
}

// WriteFloat32 write float32 to stream
func (stream *Stream) WriteFloat32(val float32) {
    if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
        stream.Error = fmt.Errorf("unsupported value: %f", val)
        return
    }
    abs := math.Abs(float64(val))
    fmt := byte('f')
    // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
    if abs != 0 {
        if float32(abs) < 1e-6 || float32(abs) >= 1e21 {
            fmt = 'e'
        }
    }
    stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32)
}

// WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat32Lossy(val float32) {
    if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
        stream.Error = fmt.Errorf("unsupported value: %f", val)
        return
    }
    if val < 0 {
        stream.writeByte('-')
        val = -val
    }
    if val > 0x4ffffff {
        stream.WriteFloat32(val)
        return
    }
    precision := 6
    exp := uint64(1000000) // 6
    lval := uint64(float64(val)*float64(exp) + 0.5)
    stream.WriteUint64(lval / exp)
    fval := lval % exp
    if fval == 0 {
        return
    }
    stream.writeByte('.')
    for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
        stream.writeByte('0')
    }
    stream.WriteUint64(fval)
    for stream.buf[len(stream.buf)-1] == '0' {
        stream.buf = stream.buf[:len(stream.buf)-1]
    }
}

// WriteFloat64 write float64 to stream
func (stream *Stream) WriteFloat64(val float64) {
    if math.IsInf(val, 0) || math.IsNaN(val) {
        stream.Error = fmt.Errorf("unsupported value: %f", val)
        return
    }
    abs := math.Abs(val)
    fmt := byte('f')
    // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
    if abs != 0 {
        if abs < 1e-6 || abs >= 1e21 {
            fmt = 'e'
        }
    }
    stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 64)
}

// WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat64Lossy(val float64) {
    if math.IsInf(val, 0) || math.IsNaN(val) {
        stream.Error = fmt.Errorf("unsupported value: %f", val)
        return
    }
    if val < 0 {
        stream.writeByte('-')
        val = -val
    }
    if val > 0x4ffffff {
        stream.WriteFloat64(val)
        return
    }
    precision := 6
    exp := uint64(1000000) // 6
    lval := uint64(val*float64(exp) + 0.5)
    stream.WriteUint64(lval / exp)
    fval := lval % exp
    if fval == 0 {
        return
    }
    stream.writeByte('.')
    for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
        stream.writeByte('0')
    }
    stream.WriteUint64(fval)
    for stream.buf[len(stream.buf)-1] == '0' {
        stream.buf = stream.buf[:len(stream.buf)-1]
    }
}
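The Lossy variants trade exact round-tripping for speed: values up to 0x4ffffff are formatted as fixed-point with at most six fractional digits, and larger values fall back to the exact writer. A hedged sketch of the observable difference, assuming these writers back jsoniter's `MarshalFloatWith6Digits` config option (as they do upstream):

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    exact := jsoniter.ConfigCompatibleWithStandardLibrary
    lossy := jsoniter.Config{MarshalFloatWith6Digits: true}.Froze()

    v := 1.0 / 3.0
    a, _ := exact.MarshalToString(v)
    b, _ := lossy.MarshalToString(v)
    fmt.Println(a) // 0.3333333333333333
    fmt.Println(b) // 0.333333  (six fractional digits, trailing zeros trimmed)
}
```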
190 vendor/github.com/json-iterator/go/stream_int.go generated vendored Normal file
@@ -0,0 +1,190 @@
package jsoniter

var digits []uint32

func init() {
    digits = make([]uint32, 1000)
    for i := uint32(0); i < 1000; i++ {
        digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
        if i < 10 {
            digits[i] += 2 << 24
        } else if i < 100 {
            digits[i] += 1 << 24
        }
    }
}

func writeFirstBuf(space []byte, v uint32) []byte {
    start := v >> 24
    if start == 0 {
        space = append(space, byte(v>>16), byte(v>>8))
    } else if start == 1 {
        space = append(space, byte(v>>8))
    }
    space = append(space, byte(v))
    return space
}

func writeBuf(buf []byte, v uint32) []byte {
    return append(buf, byte(v>>16), byte(v>>8), byte(v))
}

// WriteUint8 write uint8 to stream
func (stream *Stream) WriteUint8(val uint8) {
    stream.buf = writeFirstBuf(stream.buf, digits[val])
}

// WriteInt8 write int8 to stream
func (stream *Stream) WriteInt8(nval int8) {
    var val uint8
    if nval < 0 {
        val = uint8(-nval)
        stream.buf = append(stream.buf, '-')
    } else {
        val = uint8(nval)
    }
    stream.buf = writeFirstBuf(stream.buf, digits[val])
}

// WriteUint16 write uint16 to stream
func (stream *Stream) WriteUint16(val uint16) {
    q1 := val / 1000
    if q1 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[val])
        return
    }
    r1 := val - q1*1000
    stream.buf = writeFirstBuf(stream.buf, digits[q1])
    stream.buf = writeBuf(stream.buf, digits[r1])
    return
}

// WriteInt16 write int16 to stream
func (stream *Stream) WriteInt16(nval int16) {
    var val uint16
    if nval < 0 {
        val = uint16(-nval)
        stream.buf = append(stream.buf, '-')
    } else {
        val = uint16(nval)
    }
    stream.WriteUint16(val)
}

// WriteUint32 write uint32 to stream
func (stream *Stream) WriteUint32(val uint32) {
    q1 := val / 1000
    if q1 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[val])
        return
    }
    r1 := val - q1*1000
    q2 := q1 / 1000
    if q2 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[q1])
        stream.buf = writeBuf(stream.buf, digits[r1])
        return
    }
    r2 := q1 - q2*1000
    q3 := q2 / 1000
    if q3 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[q2])
    } else {
        r3 := q2 - q3*1000
        stream.buf = append(stream.buf, byte(q3+'0'))
        stream.buf = writeBuf(stream.buf, digits[r3])
    }
    stream.buf = writeBuf(stream.buf, digits[r2])
    stream.buf = writeBuf(stream.buf, digits[r1])
}

// WriteInt32 write int32 to stream
func (stream *Stream) WriteInt32(nval int32) {
    var val uint32
    if nval < 0 {
        val = uint32(-nval)
        stream.buf = append(stream.buf, '-')
    } else {
        val = uint32(nval)
    }
    stream.WriteUint32(val)
}

// WriteUint64 write uint64 to stream
func (stream *Stream) WriteUint64(val uint64) {
    q1 := val / 1000
    if q1 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[val])
        return
    }
    r1 := val - q1*1000
    q2 := q1 / 1000
    if q2 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[q1])
        stream.buf = writeBuf(stream.buf, digits[r1])
        return
    }
    r2 := q1 - q2*1000
    q3 := q2 / 1000
    if q3 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[q2])
        stream.buf = writeBuf(stream.buf, digits[r2])
        stream.buf = writeBuf(stream.buf, digits[r1])
        return
    }
    r3 := q2 - q3*1000
    q4 := q3 / 1000
    if q4 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[q3])
        stream.buf = writeBuf(stream.buf, digits[r3])
        stream.buf = writeBuf(stream.buf, digits[r2])
        stream.buf = writeBuf(stream.buf, digits[r1])
        return
    }
    r4 := q3 - q4*1000
    q5 := q4 / 1000
    if q5 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[q4])
        stream.buf = writeBuf(stream.buf, digits[r4])
        stream.buf = writeBuf(stream.buf, digits[r3])
        stream.buf = writeBuf(stream.buf, digits[r2])
        stream.buf = writeBuf(stream.buf, digits[r1])
        return
    }
    r5 := q4 - q5*1000
    q6 := q5 / 1000
    if q6 == 0 {
        stream.buf = writeFirstBuf(stream.buf, digits[q5])
    } else {
        stream.buf = writeFirstBuf(stream.buf, digits[q6])
        r6 := q5 - q6*1000
        stream.buf = writeBuf(stream.buf, digits[r6])
    }
    stream.buf = writeBuf(stream.buf, digits[r5])
    stream.buf = writeBuf(stream.buf, digits[r4])
    stream.buf = writeBuf(stream.buf, digits[r3])
    stream.buf = writeBuf(stream.buf, digits[r2])
    stream.buf = writeBuf(stream.buf, digits[r1])
}

// WriteInt64 write int64 to stream
func (stream *Stream) WriteInt64(nval int64) {
    var val uint64
    if nval < 0 {
        val = uint64(-nval)
        stream.buf = append(stream.buf, '-')
    } else {
        val = uint64(nval)
    }
    stream.WriteUint64(val)
}

// WriteInt write int to stream
func (stream *Stream) WriteInt(val int) {
    stream.WriteInt64(int64(val))
}

// WriteUint write uint to stream
func (stream *Stream) WriteUint(val uint) {
    stream.WriteUint64(uint64(val))
}
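The `digits` table packs three ASCII digits plus a leading-zero count into each uint32 so integers are emitted three digits per lookup rather than one divide per digit. A standalone sketch of the same idea (illustration only, not the vendored code; `appendUint32` and `appendFirst` are hypothetical names):

```go
package main

import "fmt"

// digits[i] packs the three ASCII digits of i (0..999) into the low 24 bits
// and the number of leading digits to drop (2, 1 or 0) into bits 24+.
var digits [1000]uint32

func init() {
    for i := uint32(0); i < 1000; i++ {
        digits[i] = ((i/100 + '0') << 16) | ((i/10%10 + '0') << 8) | (i%10 + '0')
        if i < 10 {
            digits[i] |= 2 << 24
        } else if i < 100 {
            digits[i] |= 1 << 24
        }
    }
}

// appendUint32 writes val in decimal, three digits per table lookup.
func appendUint32(buf []byte, val uint32) []byte {
    q := val / 1000
    if q == 0 {
        return appendFirst(buf, digits[val])
    }
    r := val - q*1000
    buf = appendUint32(buf, q) // at most four levels deep for a uint32
    return append(buf, byte(digits[r]>>16), byte(digits[r]>>8), byte(digits[r]))
}

// appendFirst drops the leading-zero digits encoded in the high byte.
func appendFirst(buf []byte, v uint32) []byte {
    switch v >> 24 {
    case 0:
        buf = append(buf, byte(v>>16), byte(v>>8))
    case 1:
        buf = append(buf, byte(v>>8))
    }
    return append(buf, byte(v))
}

func main() {
    fmt.Println(string(appendUint32(nil, 0)))       // 0
    fmt.Println(string(appendUint32(nil, 42)))      // 42
    fmt.Println(string(appendUint32(nil, 1987654))) // 1987654
}
```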
372 vendor/github.com/json-iterator/go/stream_str.go generated vendored Normal file
@@ -0,0 +1,372 @@
package jsoniter

import (
    "unicode/utf8"
)

// htmlSafeSet holds the value true if the ASCII character with the given
// array position can be safely represented inside a JSON string, embedded
// inside of HTML <script> tags, without any additional escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), the backslash character ("\"), HTML opening and closing
// tags ("<" and ">"), and the ampersand ("&").
var htmlSafeSet = [utf8.RuneSelf]bool{
    ' ':      true,
    '!':      true,
    '"':      false,
    '#':      true,
    '$':      true,
    '%':      true,
    '&':      false,
    '\'':     true,
    '(':      true,
    ')':      true,
    '*':      true,
    '+':      true,
    ',':      true,
    '-':      true,
    '.':      true,
    '/':      true,
    '0':      true,
    '1':      true,
    '2':      true,
    '3':      true,
    '4':      true,
    '5':      true,
    '6':      true,
    '7':      true,
    '8':      true,
    '9':      true,
    ':':      true,
    ';':      true,
    '<':      false,
    '=':      true,
    '>':      false,
    '?':      true,
    '@':      true,
    'A':      true,
    'B':      true,
    'C':      true,
    'D':      true,
    'E':      true,
    'F':      true,
    'G':      true,
    'H':      true,
    'I':      true,
    'J':      true,
    'K':      true,
    'L':      true,
    'M':      true,
    'N':      true,
    'O':      true,
    'P':      true,
    'Q':      true,
    'R':      true,
    'S':      true,
    'T':      true,
    'U':      true,
    'V':      true,
    'W':      true,
    'X':      true,
    'Y':      true,
    'Z':      true,
    '[':      true,
    '\\':     false,
    ']':      true,
    '^':      true,
    '_':      true,
    '`':      true,
    'a':      true,
    'b':      true,
    'c':      true,
    'd':      true,
    'e':      true,
    'f':      true,
    'g':      true,
    'h':      true,
    'i':      true,
    'j':      true,
    'k':      true,
    'l':      true,
    'm':      true,
    'n':      true,
    'o':      true,
    'p':      true,
    'q':      true,
    'r':      true,
    's':      true,
    't':      true,
    'u':      true,
    'v':      true,
    'w':      true,
    'x':      true,
    'y':      true,
    'z':      true,
    '{':      true,
    '|':      true,
    '}':      true,
    '~':      true,
    '\u007f': true,
}

// safeSet holds the value true if the ASCII character with the given array
// position can be represented inside a JSON string without any further
// escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), and the backslash character ("\").
var safeSet = [utf8.RuneSelf]bool{
    ' ':      true,
    '!':      true,
    '"':      false,
    '#':      true,
    '$':      true,
    '%':      true,
    '&':      true,
    '\'':     true,
    '(':      true,
    ')':      true,
    '*':      true,
    '+':      true,
    ',':      true,
    '-':      true,
    '.':      true,
    '/':      true,
    '0':      true,
    '1':      true,
    '2':      true,
    '3':      true,
    '4':      true,
    '5':      true,
    '6':      true,
    '7':      true,
    '8':      true,
    '9':      true,
    ':':      true,
    ';':      true,
    '<':      true,
    '=':      true,
    '>':      true,
    '?':      true,
    '@':      true,
    'A':      true,
    'B':      true,
    'C':      true,
    'D':      true,
    'E':      true,
    'F':      true,
    'G':      true,
    'H':      true,
    'I':      true,
    'J':      true,
    'K':      true,
    'L':      true,
    'M':      true,
    'N':      true,
    'O':      true,
    'P':      true,
    'Q':      true,
    'R':      true,
    'S':      true,
    'T':      true,
    'U':      true,
    'V':      true,
    'W':      true,
    'X':      true,
    'Y':      true,
    'Z':      true,
    '[':      true,
    '\\':     false,
    ']':      true,
    '^':      true,
    '_':      true,
    '`':      true,
    'a':      true,
    'b':      true,
    'c':      true,
    'd':      true,
    'e':      true,
    'f':      true,
    'g':      true,
    'h':      true,
    'i':      true,
    'j':      true,
    'k':      true,
    'l':      true,
    'm':      true,
    'n':      true,
    'o':      true,
    'p':      true,
    'q':      true,
    'r':      true,
    's':      true,
    't':      true,
    'u':      true,
    'v':      true,
    'w':      true,
    'x':      true,
    'y':      true,
    'z':      true,
    '{':      true,
    '|':      true,
    '}':      true,
    '~':      true,
    '\u007f': true,
}

var hex = "0123456789abcdef"

// WriteStringWithHTMLEscaped write string to stream with html special characters escaped
func (stream *Stream) WriteStringWithHTMLEscaped(s string) {
    valLen := len(s)
    stream.buf = append(stream.buf, '"')
    // write string, the fast path, without utf8 and escape support
    i := 0
    for ; i < valLen; i++ {
        c := s[i]
        if c < utf8.RuneSelf && htmlSafeSet[c] {
            stream.buf = append(stream.buf, c)
        } else {
            break
        }
    }
    if i == valLen {
        stream.buf = append(stream.buf, '"')
        return
    }
    writeStringSlowPathWithHTMLEscaped(stream, i, s, valLen)
}

func writeStringSlowPathWithHTMLEscaped(stream *Stream, i int, s string, valLen int) {
    start := i
    // for the remaining parts, we process them char by char
    for i < valLen {
        if b := s[i]; b < utf8.RuneSelf {
            if htmlSafeSet[b] {
                i++
                continue
            }
            if start < i {
                stream.WriteRaw(s[start:i])
            }
            switch b {
            case '\\', '"':
                stream.writeTwoBytes('\\', b)
            case '\n':
                stream.writeTwoBytes('\\', 'n')
            case '\r':
                stream.writeTwoBytes('\\', 'r')
            case '\t':
                stream.writeTwoBytes('\\', 't')
            default:
                // This encodes bytes < 0x20 except for \t, \n and \r.
                // If escapeHTML is set, it also escapes <, >, and &
                // because they can lead to security holes when
                // user-controlled strings are rendered into JSON
                // and served to some browsers.
                stream.WriteRaw(`\u00`)
                stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
            }
            i++
            start = i
            continue
        }
        c, size := utf8.DecodeRuneInString(s[i:])
        if c == utf8.RuneError && size == 1 {
            if start < i {
                stream.WriteRaw(s[start:i])
            }
            stream.WriteRaw(`\ufffd`)
            i++
            start = i
            continue
        }
        // U+2028 is LINE SEPARATOR.
        // U+2029 is PARAGRAPH SEPARATOR.
        // They are both technically valid characters in JSON strings,
        // but don't work in JSONP, which has to be evaluated as JavaScript,
        // and can lead to security holes there. It is valid JSON to
        // escape them, so we do so unconditionally.
        // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
        if c == '\u2028' || c == '\u2029' {
            if start < i {
                stream.WriteRaw(s[start:i])
            }
            stream.WriteRaw(`\u202`)
            stream.writeByte(hex[c&0xF])
            i += size
            start = i
            continue
        }
        i += size
    }
    if start < len(s) {
        stream.WriteRaw(s[start:])
    }
    stream.writeByte('"')
}

// WriteString write string to stream without html escape
func (stream *Stream) WriteString(s string) {
    valLen := len(s)
    stream.buf = append(stream.buf, '"')
    // write string, the fast path, without utf8 and escape support
    i := 0
    for ; i < valLen; i++ {
        c := s[i]
        if c > 31 && c != '"' && c != '\\' {
            stream.buf = append(stream.buf, c)
        } else {
            break
        }
    }
    if i == valLen {
        stream.buf = append(stream.buf, '"')
        return
    }
    writeStringSlowPath(stream, i, s, valLen)
}

func writeStringSlowPath(stream *Stream, i int, s string, valLen int) {
    start := i
    // for the remaining parts, we process them char by char
    for i < valLen {
        if b := s[i]; b < utf8.RuneSelf {
            if safeSet[b] {
                i++
                continue
            }
            if start < i {
                stream.WriteRaw(s[start:i])
            }
            switch b {
            case '\\', '"':
                stream.writeTwoBytes('\\', b)
            case '\n':
                stream.writeTwoBytes('\\', 'n')
            case '\r':
                stream.writeTwoBytes('\\', 'r')
            case '\t':
                stream.writeTwoBytes('\\', 't')
            default:
                // This encodes bytes < 0x20 except for \t, \n and \r.
                // If escapeHTML is set, it also escapes <, >, and &
                // because they can lead to security holes when
                // user-controlled strings are rendered into JSON
                // and served to some browsers.
                stream.WriteRaw(`\u00`)
                stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
            }
            i++
            start = i
            continue
        }
        i++
        continue
    }
    if start < len(s) {
        stream.WriteRaw(s[start:])
    }
    stream.writeByte('"')
}
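The two lookup tables differ only in whether `<`, `>` and `&` count as safe, which is what separates HTML-escaped output from plain output. A sketch of the visible effect, assuming jsoniter's `Config.EscapeHTML` switch selects between these two paths as it does upstream:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    escaped := jsoniter.Config{EscapeHTML: true}.Froze()
    plain := jsoniter.Config{EscapeHTML: false}.Froze()

    s := `<b>fish & chips</b>`
    a, _ := escaped.MarshalToString(s)
    b, _ := plain.MarshalToString(s)
    fmt.Println(a) // "\u003cb\u003efish \u0026 chips\u003c/b\u003e"
    fmt.Println(b) // "<b>fish & chips</b>"
}
```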
201 vendor/github.com/modern-go/concurrent/LICENSE generated vendored Normal file
@@ -0,0 +1,201 @@
Apache License, Version 2.0 (standard Apache-2.0 license text)
49 vendor/github.com/modern-go/concurrent/README.md generated vendored Normal file
@@ -0,0 +1,49 @@
# concurrent

[![Sourcegraph](https://sourcegraph.com/github.com/modern-go/concurrent/-/badge.svg)](https://sourcegraph.com/github.com/modern-go/concurrent?badge)
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/modern-go/concurrent)
[![Build Status](https://travis-ci.org/modern-go/concurrent.svg?branch=master)](https://travis-ci.org/modern-go/concurrent)
[![codecov](https://codecov.io/gh/modern-go/concurrent/branch/master/graph/badge.svg)](https://codecov.io/gh/modern-go/concurrent)
[![rcard](https://goreportcard.com/badge/github.com/modern-go/concurrent)](https://goreportcard.com/report/github.com/modern-go/concurrent)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://raw.githubusercontent.com/modern-go/concurrent/master/LICENSE)

* concurrent.Map: backport sync.Map for go below 1.9
* concurrent.Executor: goroutine with explicit ownership and cancellable

# concurrent.Map

because sync.Map is only available in go 1.9, we can use concurrent.Map to make code portable

```go
m := concurrent.NewMap()
m.Store("hello", "world")
elem, found := m.Load("hello")
// elem will be "world"
// found will be true
```

# concurrent.Executor

```go
executor := concurrent.NewUnboundedExecutor()
executor.Go(func(ctx context.Context) {
    everyMillisecond := time.NewTicker(time.Millisecond)
    for {
        select {
        case <-ctx.Done():
            fmt.Println("goroutine exited")
            return
        case <-everyMillisecond.C:
            // do something
        }
    }
})
time.Sleep(time.Second)
executor.StopAndWaitForever()
fmt.Println("executor stopped")
```

attach goroutine to executor instance, so that we can

* cancel it by stop the executor with Stop/StopAndWait/StopAndWaitForever
* handle panic by callback: the default behavior will no longer crash your application
14 vendor/github.com/modern-go/concurrent/executor.go generated vendored Normal file
@@ -0,0 +1,14 @@
package concurrent

import "context"

// Executor replace go keyword to start a new goroutine
// the goroutine should cancel itself if the context passed in has been cancelled
// the goroutine started by the executor, is owned by the executor
// we can cancel all executors owned by the executor just by stop the executor itself
// however Executor interface does not Stop method, the one starting and owning executor
// should use the concrete type of executor, instead of this interface.
type Executor interface {
    // Go starts a new goroutine controlled by the context
    Go(handler func(ctx context.Context))
}
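Since the interface only exposes `Go`, any type that starts the handler with a shared context satisfies it; the tracked implementation is UnboundedExecutor in unbounded_executor.go below. A minimal hypothetical implementation, for illustration only (`naiveExecutor` is not part of the package):

```go
package main

import (
    "context"
    "fmt"
    "time"

    "github.com/modern-go/concurrent"
)

// naiveExecutor is a hypothetical, minimal concurrent.Executor: it starts the
// goroutine with a shared context but does no tracking or panic recovery.
type naiveExecutor struct {
    ctx context.Context
}

func (e naiveExecutor) Go(handler func(ctx context.Context)) {
    go handler(e.ctx)
}

// compile-time check that naiveExecutor satisfies the interface
var _ concurrent.Executor = naiveExecutor{}

func main() {
    ctx, cancel := context.WithCancel(context.Background())
    var exec concurrent.Executor = naiveExecutor{ctx: ctx}
    exec.Go(func(ctx context.Context) {
        <-ctx.Done()
        fmt.Println("worker cancelled")
    })
    cancel()
    time.Sleep(10 * time.Millisecond) // give the goroutine a moment to observe cancellation
}
```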
15 vendor/github.com/modern-go/concurrent/go_above_19.go generated vendored Normal file
@@ -0,0 +1,15 @@
//+build go1.9

package concurrent

import "sync"

// Map is a wrapper for sync.Map introduced in go1.9
type Map struct {
    sync.Map
}

// NewMap creates a thread safe Map
func NewMap() *Map {
    return &Map{}
}
33 vendor/github.com/modern-go/concurrent/go_below_19.go generated vendored Normal file
@@ -0,0 +1,33 @@
//+build !go1.9

package concurrent

import "sync"

// Map implements a thread safe map for go version below 1.9 using mutex
type Map struct {
    lock sync.RWMutex
    data map[interface{}]interface{}
}

// NewMap creates a thread safe map
func NewMap() *Map {
    return &Map{
        data: make(map[interface{}]interface{}, 32),
    }
}

// Load is same as sync.Map Load
func (m *Map) Load(key interface{}) (elem interface{}, found bool) {
    m.lock.RLock()
    elem, found = m.data[key]
    m.lock.RUnlock()
    return
}

// Store is same as sync.Map Store
func (m *Map) Store(key interface{}, elem interface{}) {
    m.lock.Lock()
    m.data[key] = elem
    m.lock.Unlock()
}
13 vendor/github.com/modern-go/concurrent/log.go generated vendored Normal file
@@ -0,0 +1,13 @@
package concurrent

import (
    "os"
    "log"
    "io/ioutil"
)

// ErrorLogger is used to print out error, can be set to writer other than stderr
var ErrorLogger = log.New(os.Stderr, "", 0)

// InfoLogger is used to print informational message, default to off
var InfoLogger = log.New(ioutil.Discard, "", 0)
119 vendor/github.com/modern-go/concurrent/unbounded_executor.go generated vendored Normal file
@@ -0,0 +1,119 @@
package concurrent

import (
    "context"
    "fmt"
    "runtime"
    "runtime/debug"
    "sync"
    "time"
    "reflect"
)

// HandlePanic logs goroutine panic by default
var HandlePanic = func(recovered interface{}, funcName string) {
    ErrorLogger.Println(fmt.Sprintf("%s panic: %v", funcName, recovered))
    ErrorLogger.Println(string(debug.Stack()))
}

// UnboundedExecutor is a executor without limits on counts of alive goroutines
// it tracks the goroutine started by it, and can cancel them when shutdown
type UnboundedExecutor struct {
    ctx                   context.Context
    cancel                context.CancelFunc
    activeGoroutinesMutex *sync.Mutex
    activeGoroutines      map[string]int
    HandlePanic           func(recovered interface{}, funcName string)
}

// GlobalUnboundedExecutor has the life cycle of the program itself
// any goroutine want to be shutdown before main exit can be started from this executor
// GlobalUnboundedExecutor expects the main function to call stop
// it does not magically knows the main function exits
var GlobalUnboundedExecutor = NewUnboundedExecutor()

// NewUnboundedExecutor creates a new UnboundedExecutor,
// UnboundedExecutor can not be created by &UnboundedExecutor{}
// HandlePanic can be set with a callback to override global HandlePanic
func NewUnboundedExecutor() *UnboundedExecutor {
    ctx, cancel := context.WithCancel(context.TODO())
    return &UnboundedExecutor{
        ctx:                   ctx,
        cancel:                cancel,
        activeGoroutinesMutex: &sync.Mutex{},
        activeGoroutines:      map[string]int{},
    }
}

// Go starts a new goroutine and tracks its lifecycle.
// Panic will be recovered and logged automatically, except for StopSignal
func (executor *UnboundedExecutor) Go(handler func(ctx context.Context)) {
    pc := reflect.ValueOf(handler).Pointer()
    f := runtime.FuncForPC(pc)
    funcName := f.Name()
    file, line := f.FileLine(pc)
    executor.activeGoroutinesMutex.Lock()
    defer executor.activeGoroutinesMutex.Unlock()
    startFrom := fmt.Sprintf("%s:%d", file, line)
    executor.activeGoroutines[startFrom] += 1
    go func() {
        defer func() {
            recovered := recover()
            // if you want to quit a goroutine without trigger HandlePanic
            // use runtime.Goexit() to quit
            if recovered != nil {
                if executor.HandlePanic == nil {
                    HandlePanic(recovered, funcName)
                } else {
                    executor.HandlePanic(recovered, funcName)
                }
            }
            executor.activeGoroutinesMutex.Lock()
            executor.activeGoroutines[startFrom] -= 1
            executor.activeGoroutinesMutex.Unlock()
        }()
        handler(executor.ctx)
    }()
}

// Stop cancel all goroutines started by this executor without wait
func (executor *UnboundedExecutor) Stop() {
    executor.cancel()
}

// StopAndWaitForever cancel all goroutines started by this executor and
// wait until all goroutines exited
func (executor *UnboundedExecutor) StopAndWaitForever() {
    executor.StopAndWait(context.Background())
}

// StopAndWait cancel all goroutines started by this executor and wait.
// Wait can be cancelled by the context passed in.
func (executor *UnboundedExecutor) StopAndWait(ctx context.Context) {
    executor.cancel()
    for {
        oneHundredMilliseconds := time.NewTimer(time.Millisecond * 100)
        select {
        case <-oneHundredMilliseconds.C:
            if executor.checkNoActiveGoroutines() {
                return
            }
        case <-ctx.Done():
            return
        }
    }
}

func (executor *UnboundedExecutor) checkNoActiveGoroutines() bool {
    executor.activeGoroutinesMutex.Lock()
    defer executor.activeGoroutinesMutex.Unlock()
    for startFrom, count := range executor.activeGoroutines {
        if count > 0 {
            InfoLogger.Println("UnboundedExecutor is still waiting goroutines to quit",
                "startFrom", startFrom,
                "count", count)
            return false
        }
    }
    return true
}
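Putting the pieces together: `Go` records where each goroutine was started, recovers panics into `HandlePanic` (or the per-executor override), and `StopAndWait` polls every 100ms until all counters reach zero. A usage sketch based only on the API shown above:

```go
package main

import (
    "context"
    "fmt"
    "time"

    "github.com/modern-go/concurrent"
)

func main() {
    executor := concurrent.NewUnboundedExecutor()
    // Override the package-level HandlePanic for goroutines started by this executor.
    executor.HandlePanic = func(recovered interface{}, funcName string) {
        fmt.Println("recovered from", funcName, ":", recovered)
    }

    executor.Go(func(ctx context.Context) {
        panic("boom") // recovered and reported; the process keeps running
    })

    executor.Go(func(ctx context.Context) {
        ticker := time.NewTicker(10 * time.Millisecond)
        defer ticker.Stop()
        for {
            select {
            case <-ctx.Done():
                return // exit when the executor is stopped
            case <-ticker.C:
            }
        }
    })

    time.Sleep(50 * time.Millisecond)
    // Cancel the shared context and wait (up to one second here) for goroutines to exit.
    ctx, cancel := context.WithTimeout(context.Background(), time.Second)
    defer cancel()
    executor.StopAndWait(ctx)
}
```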
201 vendor/github.com/modern-go/reflect2/LICENSE generated vendored Normal file
@@ -0,0 +1,201 @@
Apache License, Version 2.0 (identical to the concurrent LICENSE above)
71 vendor/github.com/modern-go/reflect2/README.md generated vendored Normal file
@@ -0,0 +1,71 @@
# reflect2

[![Sourcegraph](https://sourcegraph.com/github.com/modern-go/reflect2/-/badge.svg)](https://sourcegraph.com/github.com/modern-go/reflect2?badge)
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/modern-go/reflect2)
[![Build Status](https://travis-ci.org/modern-go/reflect2.svg?branch=master)](https://travis-ci.org/modern-go/reflect2)
[![codecov](https://codecov.io/gh/modern-go/reflect2/branch/master/graph/badge.svg)](https://codecov.io/gh/modern-go/reflect2)
[![rcard](https://goreportcard.com/badge/github.com/modern-go/reflect2)](https://goreportcard.com/report/github.com/modern-go/reflect2)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://raw.githubusercontent.com/modern-go/reflect2/master/LICENSE)

reflect api that avoids runtime reflect.Value cost

* reflect get/set interface{}, with type checking
* reflect get/set unsafe.Pointer, without type checking
* `reflect2.TypeByName` works like `Class.forName` found in java

[json-iterator](https://github.com/json-iterator/go) use this package to save runtime dispatching cost.
This package is designed for low level libraries to optimize reflection performance.
General application should still use reflect standard library.

# reflect2.TypeByName

```go
// given package is github.com/your/awesome-package
type MyStruct struct {
    // ...
}

// will return the type
reflect2.TypeByName("awesome-package.MyStruct")
// however, if the type has not been used
// it will be eliminated by compiler, so we can not get it in runtime
```

# reflect2 get/set interface{}

```go
valType := reflect2.TypeOf(1)
i := 1
j := 10
valType.Set(&i, &j)
// i will be 10
```

to get set `type`, always use its pointer `*type`

# reflect2 get/set unsafe.Pointer

```go
valType := reflect2.TypeOf(1)
i := 1
j := 10
valType.UnsafeSet(unsafe.Pointer(&i), unsafe.Pointer(&j))
// i will be 10
```

to get set `type`, always use its pointer `*type`

# benchmark

Benchmark is not necessary for this package. It does nothing actually.
As it is just a thin wrapper to make go runtime public.
Both `reflect2` and `reflect` call same function
provided by `runtime` package exposed by go language.

# unsafe safety

Instead of casting `[]byte` to `sliceHeader` in your application using unsafe.
We can use reflect2 instead. This way, if `sliceHeader` changes in the future,
only reflect2 need to be upgraded.

reflect2 tries its best to keep the implementation same as reflect (by testing).
8 vendor/github.com/modern-go/reflect2/go_above_17.go generated vendored Normal file
@@ -0,0 +1,8 @@
//+build go1.7

package reflect2

import "unsafe"

//go:linkname resolveTypeOff reflect.resolveTypeOff
func resolveTypeOff(rtype unsafe.Pointer, off int32) unsafe.Pointer
14 vendor/github.com/modern-go/reflect2/go_above_19.go generated vendored Normal file
@@ -0,0 +1,14 @@
//+build go1.9

package reflect2

import (
    "unsafe"
)

//go:linkname makemap reflect.makemap
func makemap(rtype unsafe.Pointer, cap int) (m unsafe.Pointer)

func makeMapWithSize(rtype unsafe.Pointer, cap int) unsafe.Pointer {
    return makemap(rtype, cap)
}
9 vendor/github.com/modern-go/reflect2/go_below_17.go generated vendored Normal file
@@ -0,0 +1,9 @@
//+build !go1.7

package reflect2

import "unsafe"

func resolveTypeOff(rtype unsafe.Pointer, off int32) unsafe.Pointer {
    return nil
}
|
14 vendor/github.com/modern-go/reflect2/go_below_19.go generated vendored Normal file
@@ -0,0 +1,14 @@
//+build !go1.9

package reflect2

import (
	"unsafe"
)

//go:linkname makemap reflect.makemap
func makemap(rtype unsafe.Pointer) (m unsafe.Pointer)

func makeMapWithSize(rtype unsafe.Pointer, cap int) unsafe.Pointer {
	return makemap(rtype)
}
298 vendor/github.com/modern-go/reflect2/reflect2.go generated vendored Normal file
@@ -0,0 +1,298 @@
package reflect2

import (
	"github.com/modern-go/concurrent"
	"reflect"
	"unsafe"
)

type Type interface {
	Kind() reflect.Kind
	// New return pointer to data of this type
	New() interface{}
	// UnsafeNew return the allocated space pointed by unsafe.Pointer
	UnsafeNew() unsafe.Pointer
	// PackEFace cast a unsafe pointer to object represented pointer
	PackEFace(ptr unsafe.Pointer) interface{}
	// Indirect dereference object represented pointer to this type
	Indirect(obj interface{}) interface{}
	// UnsafeIndirect dereference pointer to this type
	UnsafeIndirect(ptr unsafe.Pointer) interface{}
	// Type1 returns reflect.Type
	Type1() reflect.Type
	Implements(thatType Type) bool
	String() string
	RType() uintptr
	// interface{} of this type has pointer like behavior
	LikePtr() bool
	IsNullable() bool
	IsNil(obj interface{}) bool
	UnsafeIsNil(ptr unsafe.Pointer) bool
	Set(obj interface{}, val interface{})
	UnsafeSet(ptr unsafe.Pointer, val unsafe.Pointer)
	AssignableTo(anotherType Type) bool
}

type ListType interface {
	Type
	Elem() Type
	SetIndex(obj interface{}, index int, elem interface{})
	UnsafeSetIndex(obj unsafe.Pointer, index int, elem unsafe.Pointer)
	GetIndex(obj interface{}, index int) interface{}
	UnsafeGetIndex(obj unsafe.Pointer, index int) unsafe.Pointer
}

type ArrayType interface {
	ListType
	Len() int
}

type SliceType interface {
	ListType
	MakeSlice(length int, cap int) interface{}
	UnsafeMakeSlice(length int, cap int) unsafe.Pointer
	Grow(obj interface{}, newLength int)
	UnsafeGrow(ptr unsafe.Pointer, newLength int)
	Append(obj interface{}, elem interface{})
	UnsafeAppend(obj unsafe.Pointer, elem unsafe.Pointer)
	LengthOf(obj interface{}) int
	UnsafeLengthOf(ptr unsafe.Pointer) int
	SetNil(obj interface{})
	UnsafeSetNil(ptr unsafe.Pointer)
	Cap(obj interface{}) int
	UnsafeCap(ptr unsafe.Pointer) int
}

type StructType interface {
	Type
	NumField() int
	Field(i int) StructField
	FieldByName(name string) StructField
	FieldByIndex(index []int) StructField
	FieldByNameFunc(match func(string) bool) StructField
}

type StructField interface {
	Offset() uintptr
	Name() string
	PkgPath() string
	Type() Type
	Tag() reflect.StructTag
	Index() []int
	Anonymous() bool
	Set(obj interface{}, value interface{})
	UnsafeSet(obj unsafe.Pointer, value unsafe.Pointer)
	Get(obj interface{}) interface{}
	UnsafeGet(obj unsafe.Pointer) unsafe.Pointer
}

type MapType interface {
	Type
	Key() Type
	Elem() Type
	MakeMap(cap int) interface{}
	UnsafeMakeMap(cap int) unsafe.Pointer
	SetIndex(obj interface{}, key interface{}, elem interface{})
	UnsafeSetIndex(obj unsafe.Pointer, key unsafe.Pointer, elem unsafe.Pointer)
	TryGetIndex(obj interface{}, key interface{}) (interface{}, bool)
	GetIndex(obj interface{}, key interface{}) interface{}
	UnsafeGetIndex(obj unsafe.Pointer, key unsafe.Pointer) unsafe.Pointer
	Iterate(obj interface{}) MapIterator
	UnsafeIterate(obj unsafe.Pointer) MapIterator
}

type MapIterator interface {
	HasNext() bool
	Next() (key interface{}, elem interface{})
	UnsafeNext() (key unsafe.Pointer, elem unsafe.Pointer)
}

type PtrType interface {
	Type
	Elem() Type
}

type InterfaceType interface {
	NumMethod() int
}

type Config struct {
	UseSafeImplementation bool
}

type API interface {
	TypeOf(obj interface{}) Type
	Type2(type1 reflect.Type) Type
}

var ConfigUnsafe = Config{UseSafeImplementation: false}.Froze()
var ConfigSafe = Config{UseSafeImplementation: true}.Froze()

type frozenConfig struct {
	useSafeImplementation bool
	cache                 *concurrent.Map
}

func (cfg Config) Froze() *frozenConfig {
	return &frozenConfig{
		useSafeImplementation: cfg.UseSafeImplementation,
		cache:                 concurrent.NewMap(),
	}
}

func (cfg *frozenConfig) TypeOf(obj interface{}) Type {
	cacheKey := uintptr(unpackEFace(obj).rtype)
	typeObj, found := cfg.cache.Load(cacheKey)
	if found {
		return typeObj.(Type)
	}
	return cfg.Type2(reflect.TypeOf(obj))
}

func (cfg *frozenConfig) Type2(type1 reflect.Type) Type {
	if type1 == nil {
		return nil
	}
	cacheKey := uintptr(unpackEFace(type1).data)
	typeObj, found := cfg.cache.Load(cacheKey)
	if found {
		return typeObj.(Type)
	}
	type2 := cfg.wrapType(type1)
	cfg.cache.Store(cacheKey, type2)
	return type2
}

func (cfg *frozenConfig) wrapType(type1 reflect.Type) Type {
	safeType := safeType{Type: type1, cfg: cfg}
	switch type1.Kind() {
	case reflect.Struct:
		if cfg.useSafeImplementation {
			return &safeStructType{safeType}
		}
		return newUnsafeStructType(cfg, type1)
	case reflect.Array:
		if cfg.useSafeImplementation {
			return &safeSliceType{safeType}
		}
		return newUnsafeArrayType(cfg, type1)
	case reflect.Slice:
		if cfg.useSafeImplementation {
			return &safeSliceType{safeType}
		}
		return newUnsafeSliceType(cfg, type1)
	case reflect.Map:
		if cfg.useSafeImplementation {
			return &safeMapType{safeType}
		}
		return newUnsafeMapType(cfg, type1)
	case reflect.Ptr, reflect.Chan, reflect.Func:
		if cfg.useSafeImplementation {
			return &safeMapType{safeType}
		}
		return newUnsafePtrType(cfg, type1)
	case reflect.Interface:
		if cfg.useSafeImplementation {
			return &safeMapType{safeType}
		}
		if type1.NumMethod() == 0 {
			return newUnsafeEFaceType(cfg, type1)
		}
		return newUnsafeIFaceType(cfg, type1)
	default:
		if cfg.useSafeImplementation {
			return &safeType
		}
		return newUnsafeType(cfg, type1)
	}
}

func TypeOf(obj interface{}) Type {
	return ConfigUnsafe.TypeOf(obj)
}

func TypeOfPtr(obj interface{}) PtrType {
	return TypeOf(obj).(PtrType)
}

func Type2(type1 reflect.Type) Type {
	if type1 == nil {
		return nil
	}
	return ConfigUnsafe.Type2(type1)
}

func PtrTo(typ Type) Type {
	return Type2(reflect.PtrTo(typ.Type1()))
}

func PtrOf(obj interface{}) unsafe.Pointer {
	return unpackEFace(obj).data
}

func RTypeOf(obj interface{}) uintptr {
	return uintptr(unpackEFace(obj).rtype)
}

func IsNil(obj interface{}) bool {
	if obj == nil {
		return true
	}
	return unpackEFace(obj).data == nil
}

func IsNullable(kind reflect.Kind) bool {
	switch kind {
	case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Func, reflect.Slice, reflect.Interface:
		return true
	}
	return false
}

func likePtrKind(kind reflect.Kind) bool {
	switch kind {
	case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Func:
		return true
	}
	return false
}

func likePtrType(typ reflect.Type) bool {
	if likePtrKind(typ.Kind()) {
		return true
	}
	if typ.Kind() == reflect.Struct {
		if typ.NumField() != 1 {
			return false
		}
		return likePtrType(typ.Field(0).Type)
	}
	if typ.Kind() == reflect.Array {
		if typ.Len() != 1 {
			return false
		}
		return likePtrType(typ.Elem())
	}
	return false
}

// NoEscape hides a pointer from escape analysis. noescape is
// the identity function but escape analysis doesn't think the
// output depends on the input. noescape is inlined and currently
// compiles down to zero instructions.
// USE CAREFULLY!
//go:nosplit
func NoEscape(p unsafe.Pointer) unsafe.Pointer {
	x := uintptr(p)
	return unsafe.Pointer(x ^ 0)
}

func UnsafeCastString(str string) []byte {
	stringHeader := (*reflect.StringHeader)(unsafe.Pointer(&str))
	sliceHeader := &reflect.SliceHeader{
		Data: stringHeader.Data,
		Cap:  stringHeader.Len,
		Len:  stringHeader.Len,
	}
	return *(*[]byte)(unsafe.Pointer(sliceHeader))
}
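Not part of the vendored file: a minimal, hedged usage sketch of the Type/MapType/MapIterator interfaces defined above, assuming the default (unsafe) implementation behaves as those interfaces describe; `Iterate` takes a pointer to the map, and `Next` returns pointers to the key and element.

```go
package main

import (
	"fmt"

	"github.com/modern-go/reflect2"
)

func main() {
	m := map[string]int{"a": 1, "b": 2}
	// Maps are wrapped as reflect2.MapType by TypeOf.
	mapType := reflect2.TypeOf(m).(reflect2.MapType)
	// Iterate expects a pointer to the map value.
	iter := mapType.Iterate(&m)
	for iter.HasNext() {
		k, v := iter.Next() // k is *string, v is *int
		fmt.Println(*(k.(*string)), *(v.(*int)))
	}
}
```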
0 vendor/github.com/modern-go/reflect2/reflect2_amd64.s generated vendored Normal file
30 vendor/github.com/modern-go/reflect2/reflect2_kind.go generated vendored Normal file
@@ -0,0 +1,30 @@
package reflect2

import (
	"reflect"
	"unsafe"
)

// DefaultTypeOfKind return the non aliased default type for the kind
func DefaultTypeOfKind(kind reflect.Kind) Type {
	return kindTypes[kind]
}

var kindTypes = map[reflect.Kind]Type{
	reflect.Bool:          TypeOf(true),
	reflect.Uint8:         TypeOf(uint8(0)),
	reflect.Int8:          TypeOf(int8(0)),
	reflect.Uint16:        TypeOf(uint16(0)),
	reflect.Int16:         TypeOf(int16(0)),
	reflect.Uint32:        TypeOf(uint32(0)),
	reflect.Int32:         TypeOf(int32(0)),
	reflect.Uint64:        TypeOf(uint64(0)),
	reflect.Int64:         TypeOf(int64(0)),
	reflect.Uint:          TypeOf(uint(0)),
	reflect.Int:           TypeOf(int(0)),
	reflect.Float32:       TypeOf(float32(0)),
	reflect.Float64:       TypeOf(float64(0)),
	reflect.Uintptr:       TypeOf(uintptr(0)),
	reflect.String:        TypeOf(""),
	reflect.UnsafePointer: TypeOf(unsafe.Pointer(nil)),
}
0 vendor/github.com/modern-go/reflect2/relfect2_386.s generated vendored Normal file
0 vendor/github.com/modern-go/reflect2/relfect2_amd64p32.s generated vendored Normal file
0 vendor/github.com/modern-go/reflect2/relfect2_arm.s generated vendored Normal file
0 vendor/github.com/modern-go/reflect2/relfect2_arm64.s generated vendored Normal file
0 vendor/github.com/modern-go/reflect2/relfect2_mips64x.s generated vendored Normal file
0 vendor/github.com/modern-go/reflect2/relfect2_mipsx.s generated vendored Normal file
0 vendor/github.com/modern-go/reflect2/relfect2_ppc64x.s generated vendored Normal file
0 vendor/github.com/modern-go/reflect2/relfect2_s390x.s generated vendored Normal file
58 vendor/github.com/modern-go/reflect2/safe_field.go generated vendored Normal file
@@ -0,0 +1,58 @@
package reflect2

import (
	"reflect"
	"unsafe"
)

type safeField struct {
	reflect.StructField
}

func (field *safeField) Offset() uintptr {
	return field.StructField.Offset
}

func (field *safeField) Name() string {
	return field.StructField.Name
}

func (field *safeField) PkgPath() string {
	return field.StructField.PkgPath
}

func (field *safeField) Type() Type {
	panic("not implemented")
}

func (field *safeField) Tag() reflect.StructTag {
	return field.StructField.Tag
}

func (field *safeField) Index() []int {
	return field.StructField.Index
}

func (field *safeField) Anonymous() bool {
	return field.StructField.Anonymous
}

func (field *safeField) Set(obj interface{}, value interface{}) {
	val := reflect.ValueOf(obj).Elem()
	val.FieldByIndex(field.Index()).Set(reflect.ValueOf(value).Elem())
}

func (field *safeField) UnsafeSet(obj unsafe.Pointer, value unsafe.Pointer) {
	panic("unsafe operation is not supported")
}

func (field *safeField) Get(obj interface{}) interface{} {
	val := reflect.ValueOf(obj).Elem().FieldByIndex(field.Index())
	ptr := reflect.New(val.Type())
	ptr.Elem().Set(val)
	return ptr.Interface()
}

func (field *safeField) UnsafeGet(obj unsafe.Pointer) unsafe.Pointer {
	panic("does not support unsafe operation")
}
101 vendor/github.com/modern-go/reflect2/safe_map.go generated vendored Normal file
@@ -0,0 +1,101 @@
package reflect2

import (
	"reflect"
	"unsafe"
)

type safeMapType struct {
	safeType
}

func (type2 *safeMapType) Key() Type {
	return type2.safeType.cfg.Type2(type2.Type.Key())
}

func (type2 *safeMapType) MakeMap(cap int) interface{} {
	ptr := reflect.New(type2.Type)
	ptr.Elem().Set(reflect.MakeMap(type2.Type))
	return ptr.Interface()
}

func (type2 *safeMapType) UnsafeMakeMap(cap int) unsafe.Pointer {
	panic("does not support unsafe operation")
}

func (type2 *safeMapType) SetIndex(obj interface{}, key interface{}, elem interface{}) {
	keyVal := reflect.ValueOf(key)
	elemVal := reflect.ValueOf(elem)
	val := reflect.ValueOf(obj)
	val.Elem().SetMapIndex(keyVal.Elem(), elemVal.Elem())
}

func (type2 *safeMapType) UnsafeSetIndex(obj unsafe.Pointer, key unsafe.Pointer, elem unsafe.Pointer) {
	panic("does not support unsafe operation")
}

func (type2 *safeMapType) TryGetIndex(obj interface{}, key interface{}) (interface{}, bool) {
	keyVal := reflect.ValueOf(key)
	if key == nil {
		keyVal = reflect.New(type2.Type.Key()).Elem()
	}
	val := reflect.ValueOf(obj).MapIndex(keyVal)
	if !val.IsValid() {
		return nil, false
	}
	return val.Interface(), true
}

func (type2 *safeMapType) GetIndex(obj interface{}, key interface{}) interface{} {
	val := reflect.ValueOf(obj).Elem()
	keyVal := reflect.ValueOf(key).Elem()
	elemVal := val.MapIndex(keyVal)
	if !elemVal.IsValid() {
		ptr := reflect.New(reflect.PtrTo(val.Type().Elem()))
		return ptr.Elem().Interface()
	}
	ptr := reflect.New(elemVal.Type())
	ptr.Elem().Set(elemVal)
	return ptr.Interface()
}

func (type2 *safeMapType) UnsafeGetIndex(obj unsafe.Pointer, key unsafe.Pointer) unsafe.Pointer {
	panic("does not support unsafe operation")
}

func (type2 *safeMapType) Iterate(obj interface{}) MapIterator {
	m := reflect.ValueOf(obj).Elem()
	return &safeMapIterator{
		m:    m,
		keys: m.MapKeys(),
	}
}

func (type2 *safeMapType) UnsafeIterate(obj unsafe.Pointer) MapIterator {
	panic("does not support unsafe operation")
}

type safeMapIterator struct {
	i    int
	m    reflect.Value
	keys []reflect.Value
}

func (iter *safeMapIterator) HasNext() bool {
	return iter.i != len(iter.keys)
}

func (iter *safeMapIterator) Next() (interface{}, interface{}) {
	key := iter.keys[iter.i]
	elem := iter.m.MapIndex(key)
	iter.i += 1
	keyPtr := reflect.New(key.Type())
	keyPtr.Elem().Set(key)
	elemPtr := reflect.New(elem.Type())
	elemPtr.Elem().Set(elem)
	return keyPtr.Interface(), elemPtr.Interface()
}

func (iter *safeMapIterator) UnsafeNext() (unsafe.Pointer, unsafe.Pointer) {
	panic("does not support unsafe operation")
}
92 vendor/github.com/modern-go/reflect2/safe_slice.go generated vendored Normal file
@@ -0,0 +1,92 @@
package reflect2

import (
	"reflect"
	"unsafe"
)

type safeSliceType struct {
	safeType
}

func (type2 *safeSliceType) SetIndex(obj interface{}, index int, value interface{}) {
	val := reflect.ValueOf(obj).Elem()
	elem := reflect.ValueOf(value).Elem()
	val.Index(index).Set(elem)
}

func (type2 *safeSliceType) UnsafeSetIndex(obj unsafe.Pointer, index int, value unsafe.Pointer) {
	panic("does not support unsafe operation")
}

func (type2 *safeSliceType) GetIndex(obj interface{}, index int) interface{} {
	val := reflect.ValueOf(obj).Elem()
	elem := val.Index(index)
	ptr := reflect.New(elem.Type())
	ptr.Elem().Set(elem)
	return ptr.Interface()
}

func (type2 *safeSliceType) UnsafeGetIndex(obj unsafe.Pointer, index int) unsafe.Pointer {
	panic("does not support unsafe operation")
}

func (type2 *safeSliceType) MakeSlice(length int, cap int) interface{} {
	val := reflect.MakeSlice(type2.Type, length, cap)
	ptr := reflect.New(val.Type())
	ptr.Elem().Set(val)
	return ptr.Interface()
}

func (type2 *safeSliceType) UnsafeMakeSlice(length int, cap int) unsafe.Pointer {
	panic("does not support unsafe operation")
}

func (type2 *safeSliceType) Grow(obj interface{}, newLength int) {
	oldCap := type2.Cap(obj)
	oldSlice := reflect.ValueOf(obj).Elem()
	delta := newLength - oldCap
	deltaVals := make([]reflect.Value, delta)
	newSlice := reflect.Append(oldSlice, deltaVals...)
	oldSlice.Set(newSlice)
}

func (type2 *safeSliceType) UnsafeGrow(ptr unsafe.Pointer, newLength int) {
	panic("does not support unsafe operation")
}

func (type2 *safeSliceType) Append(obj interface{}, elem interface{}) {
	val := reflect.ValueOf(obj).Elem()
	elemVal := reflect.ValueOf(elem).Elem()
	newVal := reflect.Append(val, elemVal)
	val.Set(newVal)
}

func (type2 *safeSliceType) UnsafeAppend(obj unsafe.Pointer, elem unsafe.Pointer) {
	panic("does not support unsafe operation")
}

func (type2 *safeSliceType) SetNil(obj interface{}) {
	val := reflect.ValueOf(obj).Elem()
	val.Set(reflect.Zero(val.Type()))
}

func (type2 *safeSliceType) UnsafeSetNil(ptr unsafe.Pointer) {
	panic("does not support unsafe operation")
}

func (type2 *safeSliceType) LengthOf(obj interface{}) int {
	return reflect.ValueOf(obj).Elem().Len()
}

func (type2 *safeSliceType) UnsafeLengthOf(ptr unsafe.Pointer) int {
	panic("does not support unsafe operation")
}

func (type2 *safeSliceType) Cap(obj interface{}) int {
	return reflect.ValueOf(obj).Elem().Cap()
}

func (type2 *safeSliceType) UnsafeCap(ptr unsafe.Pointer) int {
	panic("does not support unsafe operation")
}
29 vendor/github.com/modern-go/reflect2/safe_struct.go generated vendored Normal file
@@ -0,0 +1,29 @@
package reflect2

type safeStructType struct {
	safeType
}

func (type2 *safeStructType) FieldByName(name string) StructField {
	field, found := type2.Type.FieldByName(name)
	if !found {
		panic("field " + name + " not found")
	}
	return &safeField{StructField: field}
}

func (type2 *safeStructType) Field(i int) StructField {
	return &safeField{StructField: type2.Type.Field(i)}
}

func (type2 *safeStructType) FieldByIndex(index []int) StructField {
	return &safeField{StructField: type2.Type.FieldByIndex(index)}
}

func (type2 *safeStructType) FieldByNameFunc(match func(string) bool) StructField {
	field, found := type2.Type.FieldByNameFunc(match)
	if !found {
		panic("field match condition not found in " + type2.Type.String())
	}
	return &safeField{StructField: field}
}
78 vendor/github.com/modern-go/reflect2/safe_type.go generated vendored Normal file
@@ -0,0 +1,78 @@
package reflect2

import (
	"reflect"
	"unsafe"
)

type safeType struct {
	reflect.Type
	cfg *frozenConfig
}

func (type2 *safeType) New() interface{} {
	return reflect.New(type2.Type).Interface()
}

func (type2 *safeType) UnsafeNew() unsafe.Pointer {
	panic("does not support unsafe operation")
}

func (type2 *safeType) Elem() Type {
	return type2.cfg.Type2(type2.Type.Elem())
}

func (type2 *safeType) Type1() reflect.Type {
	return type2.Type
}

func (type2 *safeType) PackEFace(ptr unsafe.Pointer) interface{} {
	panic("does not support unsafe operation")
}

func (type2 *safeType) Implements(thatType Type) bool {
	return type2.Type.Implements(thatType.Type1())
}

func (type2 *safeType) RType() uintptr {
	panic("does not support unsafe operation")
}

func (type2 *safeType) Indirect(obj interface{}) interface{} {
	return reflect.Indirect(reflect.ValueOf(obj)).Interface()
}

func (type2 *safeType) UnsafeIndirect(ptr unsafe.Pointer) interface{} {
	panic("does not support unsafe operation")
}

func (type2 *safeType) LikePtr() bool {
	panic("does not support unsafe operation")
}

func (type2 *safeType) IsNullable() bool {
	return IsNullable(type2.Kind())
}

func (type2 *safeType) IsNil(obj interface{}) bool {
	if obj == nil {
		return true
	}
	return reflect.ValueOf(obj).Elem().IsNil()
}

func (type2 *safeType) UnsafeIsNil(ptr unsafe.Pointer) bool {
	panic("does not support unsafe operation")
}

func (type2 *safeType) Set(obj interface{}, val interface{}) {
	reflect.ValueOf(obj).Elem().Set(reflect.ValueOf(val).Elem())
}

func (type2 *safeType) UnsafeSet(ptr unsafe.Pointer, val unsafe.Pointer) {
	panic("does not support unsafe operation")
}

func (type2 *safeType) AssignableTo(anotherType Type) bool {
	return type2.Type1().AssignableTo(anotherType.Type1())
}
113 vendor/github.com/modern-go/reflect2/type_map.go generated vendored Normal file
@@ -0,0 +1,113 @@
package reflect2

import (
	"reflect"
	"runtime"
	"strings"
	"sync"
	"unsafe"
)

// typelinks1 for 1.5 ~ 1.6
//go:linkname typelinks1 reflect.typelinks
func typelinks1() [][]unsafe.Pointer

// typelinks2 for 1.7 ~
//go:linkname typelinks2 reflect.typelinks
func typelinks2() (sections []unsafe.Pointer, offset [][]int32)

// initOnce guards initialization of types and packages
var initOnce sync.Once

var types map[string]reflect.Type
var packages map[string]map[string]reflect.Type

// discoverTypes initializes types and packages
func discoverTypes() {
	types = make(map[string]reflect.Type)
	packages = make(map[string]map[string]reflect.Type)

	ver := runtime.Version()
	if ver == "go1.5" || strings.HasPrefix(ver, "go1.5.") {
		loadGo15Types()
	} else if ver == "go1.6" || strings.HasPrefix(ver, "go1.6.") {
		loadGo15Types()
	} else {
		loadGo17Types()
	}
}

func loadGo15Types() {
	var obj interface{} = reflect.TypeOf(0)
	typePtrss := typelinks1()
	for _, typePtrs := range typePtrss {
		for _, typePtr := range typePtrs {
			(*emptyInterface)(unsafe.Pointer(&obj)).word = typePtr
			typ := obj.(reflect.Type)
			if typ.Kind() == reflect.Ptr && typ.Elem().Kind() == reflect.Struct {
				loadedType := typ.Elem()
				pkgTypes := packages[loadedType.PkgPath()]
				if pkgTypes == nil {
					pkgTypes = map[string]reflect.Type{}
					packages[loadedType.PkgPath()] = pkgTypes
				}
				types[loadedType.String()] = loadedType
				pkgTypes[loadedType.Name()] = loadedType
			}
			if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Ptr &&
				typ.Elem().Elem().Kind() == reflect.Struct {
				loadedType := typ.Elem().Elem()
				pkgTypes := packages[loadedType.PkgPath()]
				if pkgTypes == nil {
					pkgTypes = map[string]reflect.Type{}
					packages[loadedType.PkgPath()] = pkgTypes
				}
				types[loadedType.String()] = loadedType
				pkgTypes[loadedType.Name()] = loadedType
			}
		}
	}
}

func loadGo17Types() {
	var obj interface{} = reflect.TypeOf(0)
	sections, offset := typelinks2()
	for i, offs := range offset {
		rodata := sections[i]
		for _, off := range offs {
			(*emptyInterface)(unsafe.Pointer(&obj)).word = resolveTypeOff(unsafe.Pointer(rodata), off)
			typ := obj.(reflect.Type)
			if typ.Kind() == reflect.Ptr && typ.Elem().Kind() == reflect.Struct {
				loadedType := typ.Elem()
				pkgTypes := packages[loadedType.PkgPath()]
				if pkgTypes == nil {
					pkgTypes = map[string]reflect.Type{}
					packages[loadedType.PkgPath()] = pkgTypes
				}
				types[loadedType.String()] = loadedType
				pkgTypes[loadedType.Name()] = loadedType
			}
		}
	}
}

type emptyInterface struct {
	typ  unsafe.Pointer
	word unsafe.Pointer
}

// TypeByName return the type by its name, just like Class.forName in java
func TypeByName(typeName string) Type {
	initOnce.Do(discoverTypes)
	return Type2(types[typeName])
}

// TypeByPackageName return the type by its package and name
func TypeByPackageName(pkgPath string, name string) Type {
	initOnce.Do(discoverTypes)
	pkgTypes := packages[pkgPath]
	if pkgTypes == nil {
		return nil
	}
	return Type2(pkgTypes[name])
}
65 vendor/github.com/modern-go/reflect2/unsafe_array.go generated vendored Normal file
@@ -0,0 +1,65 @@
package reflect2

import (
	"reflect"
	"unsafe"
)

type UnsafeArrayType struct {
	unsafeType
	elemRType  unsafe.Pointer
	pElemRType unsafe.Pointer
	elemSize   uintptr
	likePtr    bool
}

func newUnsafeArrayType(cfg *frozenConfig, type1 reflect.Type) *UnsafeArrayType {
	return &UnsafeArrayType{
		unsafeType: *newUnsafeType(cfg, type1),
		elemRType:  unpackEFace(type1.Elem()).data,
		pElemRType: unpackEFace(reflect.PtrTo(type1.Elem())).data,
		elemSize:   type1.Elem().Size(),
		likePtr:    likePtrType(type1),
	}
}

func (type2 *UnsafeArrayType) LikePtr() bool {
	return type2.likePtr
}

func (type2 *UnsafeArrayType) Indirect(obj interface{}) interface{} {
	objEFace := unpackEFace(obj)
	assertType("Type.Indirect argument 1", type2.ptrRType, objEFace.rtype)
	return type2.UnsafeIndirect(objEFace.data)
}

func (type2 *UnsafeArrayType) UnsafeIndirect(ptr unsafe.Pointer) interface{} {
	if type2.likePtr {
		return packEFace(type2.rtype, *(*unsafe.Pointer)(ptr))
	}
	return packEFace(type2.rtype, ptr)
}

func (type2 *UnsafeArrayType) SetIndex(obj interface{}, index int, elem interface{}) {
	objEFace := unpackEFace(obj)
	assertType("ArrayType.SetIndex argument 1", type2.ptrRType, objEFace.rtype)
	elemEFace := unpackEFace(elem)
	assertType("ArrayType.SetIndex argument 3", type2.pElemRType, elemEFace.rtype)
	type2.UnsafeSetIndex(objEFace.data, index, elemEFace.data)
}

func (type2 *UnsafeArrayType) UnsafeSetIndex(obj unsafe.Pointer, index int, elem unsafe.Pointer) {
	elemPtr := arrayAt(obj, index, type2.elemSize, "i < s.Len")
	typedmemmove(type2.elemRType, elemPtr, elem)
}

func (type2 *UnsafeArrayType) GetIndex(obj interface{}, index int) interface{} {
	objEFace := unpackEFace(obj)
	assertType("ArrayType.GetIndex argument 1", type2.ptrRType, objEFace.rtype)
	elemPtr := type2.UnsafeGetIndex(objEFace.data, index)
	return packEFace(type2.pElemRType, elemPtr)
}

func (type2 *UnsafeArrayType) UnsafeGetIndex(obj unsafe.Pointer, index int) unsafe.Pointer {
	return arrayAt(obj, index, type2.elemSize, "i < s.Len")
}
59 vendor/github.com/modern-go/reflect2/unsafe_eface.go generated vendored Normal file
@@ -0,0 +1,59 @@
package reflect2

import (
	"reflect"
	"unsafe"
)

type eface struct {
	rtype unsafe.Pointer
	data  unsafe.Pointer
}

func unpackEFace(obj interface{}) *eface {
	return (*eface)(unsafe.Pointer(&obj))
}

func packEFace(rtype unsafe.Pointer, data unsafe.Pointer) interface{} {
	var i interface{}
	e := (*eface)(unsafe.Pointer(&i))
	e.rtype = rtype
	e.data = data
	return i
}

type UnsafeEFaceType struct {
	unsafeType
}

func newUnsafeEFaceType(cfg *frozenConfig, type1 reflect.Type) *UnsafeEFaceType {
	return &UnsafeEFaceType{
		unsafeType: *newUnsafeType(cfg, type1),
	}
}

func (type2 *UnsafeEFaceType) IsNil(obj interface{}) bool {
	if obj == nil {
		return true
	}
	objEFace := unpackEFace(obj)
	assertType("Type.IsNil argument 1", type2.ptrRType, objEFace.rtype)
	return type2.UnsafeIsNil(objEFace.data)
}

func (type2 *UnsafeEFaceType) UnsafeIsNil(ptr unsafe.Pointer) bool {
	if ptr == nil {
		return true
	}
	return unpackEFace(*(*interface{})(ptr)).data == nil
}

func (type2 *UnsafeEFaceType) Indirect(obj interface{}) interface{} {
	objEFace := unpackEFace(obj)
	assertType("Type.Indirect argument 1", type2.ptrRType, objEFace.rtype)
	return type2.UnsafeIndirect(objEFace.data)
}

func (type2 *UnsafeEFaceType) UnsafeIndirect(ptr unsafe.Pointer) interface{} {
	return *(*interface{})(ptr)
}
74 vendor/github.com/modern-go/reflect2/unsafe_field.go generated vendored Normal file
@@ -0,0 +1,74 @@
package reflect2

import (
	"reflect"
	"unsafe"
)

type UnsafeStructField struct {
	reflect.StructField
	structType *UnsafeStructType
	rtype      unsafe.Pointer
	ptrRType   unsafe.Pointer
}

func newUnsafeStructField(structType *UnsafeStructType, structField reflect.StructField) *UnsafeStructField {
	return &UnsafeStructField{
		StructField: structField,
		rtype:       unpackEFace(structField.Type).data,
		ptrRType:    unpackEFace(reflect.PtrTo(structField.Type)).data,
		structType:  structType,
	}
}

func (field *UnsafeStructField) Offset() uintptr {
	return field.StructField.Offset
}

func (field *UnsafeStructField) Name() string {
	return field.StructField.Name
}

func (field *UnsafeStructField) PkgPath() string {
	return field.StructField.PkgPath
}

func (field *UnsafeStructField) Type() Type {
	return field.structType.cfg.Type2(field.StructField.Type)
}

func (field *UnsafeStructField) Tag() reflect.StructTag {
	return field.StructField.Tag
}

func (field *UnsafeStructField) Index() []int {
	return field.StructField.Index
}

func (field *UnsafeStructField) Anonymous() bool {
	return field.StructField.Anonymous
}

func (field *UnsafeStructField) Set(obj interface{}, value interface{}) {
	objEFace := unpackEFace(obj)
	assertType("StructField.SetIndex argument 1", field.structType.ptrRType, objEFace.rtype)
	valueEFace := unpackEFace(value)
	assertType("StructField.SetIndex argument 2", field.ptrRType, valueEFace.rtype)
	field.UnsafeSet(objEFace.data, valueEFace.data)
}

func (field *UnsafeStructField) UnsafeSet(obj unsafe.Pointer, value unsafe.Pointer) {
	fieldPtr := add(obj, field.StructField.Offset, "same as non-reflect &v.field")
	typedmemmove(field.rtype, fieldPtr, value)
}

func (field *UnsafeStructField) Get(obj interface{}) interface{} {
	objEFace := unpackEFace(obj)
	assertType("StructField.GetIndex argument 1", field.structType.ptrRType, objEFace.rtype)
	value := field.UnsafeGet(objEFace.data)
	return packEFace(field.ptrRType, value)
}

func (field *UnsafeStructField) UnsafeGet(obj unsafe.Pointer) unsafe.Pointer {
	return add(obj, field.StructField.Offset, "same as non-reflect &v.field")
}
Some files were not shown because too many files have changed in this diff.