Compare commits

26 Commits

| SHA1 |
|---|
| 3365611f06 |
| ceae89c8c7 |
| 8f300781ad |
| 8b132bdec6 |
| e70b679d21 |
| 02de43236a |
| f949f9e9c5 |
| cbe3ca5d0b |
| 3ac1f35349 |
| af1fd56d8a |
| 0274933c22 |
| 67776372d6 |
| c54639b8ee |
| 49a71a6461 |
| 58f4a4114e |
| b31307c41c |
| c5193a8481 |
| 1790f01dd9 |
| 0c7927fe48 |
| 695e8ae81d |
| 2148b27bfa |
| ab7e36e3a5 |
| 63178b5654 |
| 2d15126de6 |
| 31e4e8205a |
| 37e4cdbbe6 |
29 CHANGELOG.md

@@ -4,6 +4,35 @@ This changelog goes through all the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.io).

## [1.15.1](https://github.com/go-gitea/gitea/releases/tag/v1.15.1) - 2021-09-02

* BUGFIXES
  * Allow BASIC authentication access to /:owner/:repo/releases/download/* (#16916) (#16923)
  * Prevent leave changes dialogs due to autofill fields (#16912) (#16920)
  * Ignore review comment when ref commit is missed (#16905) (#16919)
  * Fix wrong attachment removal (#16915) (#16917)
  * Gitlab Migrator: dont ignore reactions of last request (#16903) (#16913)
  * Correctly return the number of Repositories for Organizations (#16807) (#16911)
  * Test if LFS object is accessible (#16865) (#16904)
  * Fix git.Blob.DataAsync(): close pipe since we return a NopCloser (#16899) (#16900)
  * Fix dump and restore respository (#16698) (#16898)
  * Repare and Improve GetDiffRangeWithWhitespaceBehavior (#16894) (#16895)
  * Fix wiki raw commit diff/patch view (#16891) (#16892)
  * Ensure wiki repos are all closed (#16886) (#16888)
  * List limited and private orgs if authenticated on API (#16866) (#16879)
  * Simplify split diff view generation and remove JS dependency (#16775) (#16863)
  * Ensure that the default visibility is set on the user create page (#16845) (#16862)
  * In Render tolerate not being passed a context (#16842) (#16858)
  * Upgrade xorm to v1.2.2 (#16663) & Add test to ensure that dumping of login sources remains correct (#16847) (#16848)
  * Report the correct number of pushes on the feeds (#16811) (#16822)
  * Add primary_key to issue_index (#16813) (#16820)
  * Prevent NPE on empty commit (#16812) (#16819)
  * Fix branch pagination error (#16805) (#16816)
  * Add missing return to handleSettingRemoteAddrError (#16794) (#16795)
  * Remove spurious / from issues.opened_by (#16793)
  * Ensure that template compilation panics are sent to the logs (#16788) (#16792)
  * Update caddyserver/certmagic (#16789) (#16790)

## [1.15.0](https://github.com/go-gitea/gitea/releases/tag/v1.15.0) - 2021-08-21

* BREAKING
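The first bugfix above re-enables HTTP BASIC authentication on the release-asset download route. A hedged usage sketch with curl — the host, credentials, repository and asset names are placeholders, not values taken from this PR:

```sh
# Download a release asset as an authenticated user via BASIC auth.
# Replace host, owner, repo, tag and asset name with real values;
# an access token may be supplied in place of the password.
curl -u "username:password_or_token" -L -O \
  "https://gitea.example.com/owner/repo/releases/download/v1.0.0/asset.tar.gz"
```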
@@ -86,6 +86,11 @@ func runWeb(ctx *cli.Context) error {
		_ = log.DelLogger("console")
		log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "fatal", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout))
	}
	defer func() {
		if panicked := recover(); panicked != nil {
			log.Fatal("PANIC: %v\n%s", panicked, string(log.Stack(2)))
		}
	}()

	managerCtx, cancel := context.WithCancel(context.Background())
	graceful.InitManager(managerCtx)
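The added lines follow Go's standard defer/recover idiom — log the panic (with a stack trace) before the process dies — and create a cancellable context that is handed to the graceful-shutdown manager. A minimal standalone sketch of the same pattern, using only the standard library in place of Gitea's log and graceful packages (run and the timed cancel are illustrative stand-ins, not Gitea code):

```go
package main

import (
	"context"
	"log"
	"runtime/debug"
	"time"
)

func run() error {
	// Log any panic (with a stack trace) before the process exits,
	// mirroring the recover block added to runWeb in the hunk above.
	defer func() {
		if panicked := recover(); panicked != nil {
			log.Fatalf("PANIC: %v\n%s", panicked, debug.Stack())
		}
	}()

	// A cancellable context lets a shutdown manager stop background
	// work when the process is asked to terminate.
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Stand-in for the real server loop: request shutdown after a moment.
	go func() {
		time.Sleep(100 * time.Millisecond)
		cancel()
	}()
	<-ctx.Done()
	return nil
}

func main() {
	if err := run(); err != nil {
		log.Fatal(err)
	}
}
```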
6 go.mod

@@ -22,7 +22,7 @@ require (
	github.com/blevesearch/bleve/v2 v2.0.6
	github.com/boombuler/barcode v1.0.1 // indirect
	github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b // indirect
	github.com/caddyserver/certmagic v0.14.0
	github.com/caddyserver/certmagic v0.14.1
	github.com/chi-middleware/proxy v1.1.1
	github.com/couchbase/go-couchbase v0.0.0-20210224140812-5740cd35f448 // indirect
	github.com/couchbase/gomemcached v0.1.2 // indirect

@@ -78,7 +78,7 @@ require (
	github.com/markbates/goth v1.68.0
	github.com/mattn/go-isatty v0.0.13
	github.com/mattn/go-runewidth v0.0.13 // indirect
	github.com/mattn/go-sqlite3 v1.14.7
	github.com/mattn/go-sqlite3 v1.14.8
	github.com/mholt/archiver/v3 v3.5.0
	github.com/microcosm-cc/bluemonday v1.0.15
	github.com/miekg/dns v1.1.43 // indirect

@@ -139,7 +139,7 @@ require (
	mvdan.cc/xurls/v2 v2.2.0
	strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251
	xorm.io/builder v0.3.9
	xorm.io/xorm v1.1.2
	xorm.io/xorm v1.2.2
)

replace github.com/hashicorp/go-version => github.com/6543/go-version v1.3.1
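The three bumps above (certmagic v0.14.0 → v0.14.1, go-sqlite3 v1.14.7 → v1.14.8, xorm v1.1.2 → v1.2.2) are the kind of change normally produced with the Go toolchain; the exact commands used for this PR aren't recorded here, but a sketch of the usual workflow looks like this:

```sh
go get github.com/caddyserver/certmagic@v0.14.1
go get github.com/mattn/go-sqlite3@v1.14.8
go get xorm.io/xorm@v1.2.2
go mod tidy   # rewrites go.mod and go.sum with the new versions and checksums
```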
206 go.sum

(The go.sum hunks carry the checksum updates — h1: and go.mod hashes — that accompany the dependency changes above: entries for github.com/caddyserver/certmagic v0.14.1, github.com/mattn/go-sqlite3 v1.14.8, newer github.com/jackc pgconn/pgproto3/pgtype/pgx/v4 versions, and other transitive modules.)
|
||||
go.uber.org/multierr v1.7.0 h1:zaiO/rmgFjbmCXdSYJWQcdvOCsthmdaHfr3Gm2Kx4Ec=
|
||||
@@ -1115,6 +1238,7 @@ go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95a
|
||||
go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA=
|
||||
go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
|
||||
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
|
||||
go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM=
|
||||
go.uber.org/zap v1.15.0/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc=
|
||||
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
|
||||
go.uber.org/zap v1.18.1 h1:CSUJ2mjFszzEWt4CdKISEuChVIXGBn3lAPwkRGyVrc4=
|
||||
@@ -1133,13 +1257,16 @@ golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8U
|
||||
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200604202706-70a84ac30bf9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
|
||||
golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
|
||||
golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
|
||||
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
|
||||
@@ -1192,6 +1319,7 @@ golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73r
|
||||
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190320064053-1272bf9dcd53/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
@@ -1271,6 +1399,7 @@ golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5h
|
||||
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -1300,6 +1429,7 @@ golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -1355,12 +1485,14 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20210611083556-38a9dc6acbc6 h1:Vv0JUPWTyeqUq42B2WJ1FeIDjjvGKoA2Ss+Ts0lAVbs=
|
||||
golang.org/x/time v0.0.0-20210611083556-38a9dc6acbc6/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
@@ -1398,6 +1530,7 @@ golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtn
|
||||
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
@@ -1435,6 +1568,7 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
|
||||
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
|
||||
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
|
||||
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||
@@ -1456,6 +1590,7 @@ google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ
|
||||
google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=
|
||||
google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
@@ -1469,6 +1604,7 @@ google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRn
|
||||
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s=
|
||||
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
|
||||
@@ -1500,10 +1636,15 @@ google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6D
|
||||
google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM=
|
||||
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||
google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
|
||||
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
@@ -1540,8 +1681,10 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
|
||||
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE=
|
||||
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw=
|
||||
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
|
||||
@@ -1571,6 +1714,7 @@ gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C
|
||||
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
@@ -1578,31 +1722,34 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
modernc.org/cc/v3 v3.31.5-0.20210308123301-7a3e9dab9009 h1:u0oCo5b9wyLr++HF3AN9JicGhkUxJhMz51+8TIZH9N0=
|
||||
modernc.org/cc/v3 v3.31.5-0.20210308123301-7a3e9dab9009/go.mod h1:0R6jl1aZlIl2avnYfbfHBS1QB6/f+16mihBObaBC878=
|
||||
modernc.org/ccgo/v3 v3.9.0 h1:JbcEIqjw4Agf+0g3Tc85YvfYqkkFOv6xBwS4zkfqSoA=
|
||||
modernc.org/ccgo/v3 v3.9.0/go.mod h1:nQbgkn8mwzPdp4mm6BT6+p85ugQ7FrGgIcYaE7nSrpY=
|
||||
lukechampine.com/uint128 v1.1.1 h1:pnxCASz787iMf+02ssImqk6OLt+Z5QHMoZyUXR4z6JU=
|
||||
lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk=
|
||||
modernc.org/cc/v3 v3.33.6 h1:r63dgSzVzRxUpAJFPQWHy1QeZeY1ydNENUDaBx1GqYc=
|
||||
modernc.org/cc/v3 v3.33.6/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g=
|
||||
modernc.org/ccgo/v3 v3.9.5 h1:dEuUSf8WN51rDkprFuAqjfchKEzN0WttP/Py3enBwjk=
|
||||
modernc.org/ccgo/v3 v3.9.5/go.mod h1:umuo2EP2oDSBnD3ckjaVUXMrmeAw8C8OSICVa0iFf60=
|
||||
modernc.org/httpfs v1.0.6 h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM=
|
||||
modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM=
|
||||
modernc.org/libc v1.7.13-0.20210308123627-12f642a52bb8/go.mod h1:U1eq8YWr/Kc1RWCMFUWEdkTg8OTcfLw2kY8EDwl039w=
|
||||
modernc.org/libc v1.8.0 h1:Pp4uv9g0csgBMpGPABKtkieF6O5MGhfGo6ZiOdlYfR8=
|
||||
modernc.org/libc v1.8.0/go.mod h1:U1eq8YWr/Kc1RWCMFUWEdkTg8OTcfLw2kY8EDwl039w=
|
||||
modernc.org/libc v1.9.8/go.mod h1:U1eq8YWr/Kc1RWCMFUWEdkTg8OTcfLw2kY8EDwl039w=
|
||||
modernc.org/libc v1.9.11 h1:QUxZMs48Ahg2F7SN41aERvMfGLY2HU/ADnB9DC4Yts8=
|
||||
modernc.org/libc v1.9.11/go.mod h1:NyF3tsA5ArIjJ83XB0JlqhjTabTCHm9aX4XMPHyQn0Q=
|
||||
modernc.org/mathutil v1.1.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
|
||||
modernc.org/mathutil v1.2.2 h1:+yFk8hBprV+4c0U9GjFtL+dV3N8hOJ8JCituQcMShFY=
|
||||
modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
|
||||
modernc.org/mathutil v1.4.0 h1:GCjoRaBew8ECCKINQA2nYjzvufFW9YiEuuB+rQ9bn2E=
|
||||
modernc.org/mathutil v1.4.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
|
||||
modernc.org/memory v1.0.4 h1:utMBrFcpnQDdNsmM6asmyH/FM9TqLPS7XF7otpJmrwM=
|
||||
modernc.org/memory v1.0.4/go.mod h1:nV2OApxradM3/OVbs2/0OsP6nPfakXpi50C7dcoHXlc=
|
||||
modernc.org/opt v0.1.1 h1:/0RX92k9vwVeDXj+Xn23DKp2VJubL7k8qNffND6qn3A=
|
||||
modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
|
||||
modernc.org/sqlite v1.10.1-0.20210314190707-798bbeb9bb84 h1:rgEUzE849tFlHSoeCrKyS9cZAljC+DY7MdMHKq6R6sY=
|
||||
modernc.org/sqlite v1.10.1-0.20210314190707-798bbeb9bb84/go.mod h1:PGzq6qlhyYjL6uVbSgS6WoF7ZopTW/sI7+7p+mb4ZVU=
|
||||
modernc.org/strutil v1.1.0 h1:+1/yCzZxY2pZwwrsbH+4T7BQMoLQ9QiBshRC9eicYsc=
|
||||
modernc.org/strutil v1.1.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs=
|
||||
modernc.org/tcl v1.5.0 h1:euZSUNfE0Fd4W8VqXI1Ly1v7fqDJoBuAV88Ea+SnaSs=
|
||||
modernc.org/tcl v1.5.0/go.mod h1:gb57hj4pO8fRrK54zveIfFXBaMHK3SKJNWcmRw1cRzc=
|
||||
modernc.org/sqlite v1.11.2 h1:ShWQpeD3ag/bmx6TqidBlIWonWmQaSQKls3aenCbt+w=
|
||||
modernc.org/sqlite v1.11.2/go.mod h1:+mhs/P1ONd+6G7hcAs6irwDi/bjTQ7nLW6LHRBsEa3A=
|
||||
modernc.org/strutil v1.1.1 h1:xv+J1BXY3Opl2ALrBwyfEikFAj8pmqcpnfmuwUwcozs=
|
||||
modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw=
|
||||
modernc.org/tcl v1.5.5 h1:N03RwthgTR/l/eQvz3UjfYnvVVj1G2sZqzFGfoD4HE4=
|
||||
modernc.org/tcl v1.5.5/go.mod h1:ADkaTUuwukkrlhqwERyq0SM8OvyXo7+TjFz7yAF56EI=
|
||||
modernc.org/token v1.0.0 h1:a0jaWiNMDhDUtqOj09wvjWWAqd3q7WpBulmL9H2egsk=
|
||||
modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
||||
modernc.org/z v1.0.1-0.20210308123920-1f282aa71362/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA=
|
||||
modernc.org/z v1.0.1 h1:WyIDpEpAIx4Hel6q/Pcgj/VhaQV5XPJ2I6ryIYbjnpc=
|
||||
modernc.org/z v1.0.1/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA=
|
||||
mvdan.cc/xurls/v2 v2.2.0 h1:NSZPykBXJFCetGZykLAxaL6SIpvbVy/UFEniIfHAa8A=
|
||||
@@ -1610,12 +1757,13 @@ mvdan.cc/xurls/v2 v2.2.0/go.mod h1:EV1RMtya9D6G5DMYPGD8zTQzaHet6Jh8gFlRgGRJeO8=
|
||||
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
|
||||
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
|
||||
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
|
||||
sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU=
|
||||
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 h1:mUcz5b3FJbP5Cvdq7Khzn6J9OCUQJaBwgBkCR+MOwSs=
|
||||
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251/go.mod h1:FJGmPh3vz9jSos1L/F91iAgnC/aejc0wIIrF2ZwJxdY=
|
||||
xorm.io/builder v0.3.7/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
|
||||
xorm.io/builder v0.3.8/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
|
||||
xorm.io/builder v0.3.9 h1:Sd65/LdWyO7LR8+Cbd+e7mm3sK/7U9k0jS3999IDHMc=
|
||||
xorm.io/builder v0.3.9/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
|
||||
xorm.io/xorm v1.0.6/go.mod h1:uF9EtbhODq5kNWxMbnBEj8hRRZnlcNSz2t2N7HW/+A4=
|
||||
xorm.io/xorm v1.1.2 h1:bje+1KZvK3m5AHtZNfUDlKEEyuw/IRHT+an0CLIG5TU=
|
||||
xorm.io/xorm v1.1.2/go.mod h1:Cb0DKYTHbyECMaSfgRnIZp5aiUgQozxcJJ0vzcLGJSg=
|
||||
xorm.io/xorm v1.2.2 h1:FFBOEvJ++8fYBA9cywf2sxDVmFktl1SpJzTAG1ab06Y=
|
||||
xorm.io/xorm v1.2.2/go.mod h1:fTG8tSjk6O1BYxwuohZUK+S1glnRycsCF05L1qQyEU0=
|
||||
|
||||
@@ -254,6 +254,10 @@ func TestAPILFSBatch(t *testing.T) {
assert.NoError(t, err)
assert.True(t, exist)

repo2 := createLFSTestRepository(t, "batch2")
content := []byte("dummy0")
storeObjectInRepo(t, repo2.ID, &content)

meta, err := repo.GetLFSMetaObjectByOid(p.Oid)
assert.Nil(t, meta)
assert.Equal(t, models.ErrLFSObjectNotExist, err)
@@ -359,13 +363,19 @@ func TestAPILFSUpload(t *testing.T) {
assert.Nil(t, meta)
assert.Equal(t, models.ErrLFSObjectNotExist, err)

req := newRequest(t, p, "")
t.Run("InvalidAccess", func(t *testing.T) {
req := newRequest(t, p, "invalid")
session.MakeRequest(t, req, http.StatusUnprocessableEntity)
})

session.MakeRequest(t, req, http.StatusOK)
t.Run("ValidAccess", func(t *testing.T) {
req := newRequest(t, p, "dummy5")

meta, err = repo.GetLFSMetaObjectByOid(p.Oid)
assert.NoError(t, err)
assert.NotNil(t, meta)
session.MakeRequest(t, req, http.StatusOK)
meta, err = repo.GetLFSMetaObjectByOid(p.Oid)
assert.NoError(t, err)
assert.NotNil(t, meta)
})
})

t.Run("MetaAlreadyExists", func(t *testing.T) {
@@ -14,7 +14,7 @@ import (
// ResourceIndex represents a resource index which could be used as issue/release and others
// We can create different tables i.e. issue_index, release_index and etc.
type ResourceIndex struct {
GroupID int64 `xorm:"unique"`
GroupID int64 `xorm:"pk"`
MaxIndex int64 `xorm:"index"`
}

@@ -10,8 +10,8 @@ import (

func addIssueResourceIndexTable(x *xorm.Engine) error {
type ResourceIndex struct {
GroupID int64 `xorm:"index unique(s)"`
MaxIndex int64 `xorm:"index unique(s)"`
GroupID int64 `xorm:"pk"`
MaxIndex int64 `xorm:"index"`
}

sess := x.NewSession()

@@ -33,8 +33,8 @@ func Test_addIssueResourceIndexTable(t *testing.T) {
}

type ResourceIndex struct {
GroupID int64 `xorm:"index unique(s)"`
MaxIndex int64 `xorm:"index unique(s)"`
GroupID int64 `xorm:"pk"`
MaxIndex int64 `xorm:"index"`
}

var start = 0
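The three hunks above move the resource index table from a unique index on group_id to a real primary key (model, migration, and migration test). Below is a minimal sketch of how a per-group counter can be driven once group_id is the primary key; the table name, helper name and the read-back step are illustrative assumptions, not Gitea's actual implementation.

package indexsketch

import "xorm.io/xorm"

// ResourceIndex mirrors the layout the migration moves to: group_id is the
// primary key rather than a plain unique index, so each group has exactly one row.
type ResourceIndex struct {
	GroupID  int64 `xorm:"pk"`
	MaxIndex int64 `xorm:"index"`
}

// nextIndex bumps the counter for one group and returns the new value.
// Hypothetical helper: it first tries an in-place UPDATE and only inserts the
// initial row when nothing was touched.
func nextIndex(e *xorm.Engine, groupID int64) (int64, error) {
	res, err := e.Exec("UPDATE `resource_index` SET max_index = max_index + 1 WHERE group_id = ?", groupID)
	if err != nil {
		return 0, err
	}
	if affected, _ := res.RowsAffected(); affected == 0 {
		if _, err := e.Insert(&ResourceIndex{GroupID: groupID, MaxIndex: 1}); err != nil {
			return 0, err
		}
		return 1, nil
	}
	// Read the value back; a real implementation would run all of this inside
	// one session or transaction to avoid racing with other writers.
	idx := ResourceIndex{GroupID: groupID}
	if _, err := e.Get(&idx); err != nil {
		return 0, err
	}
	return idx.MaxIndex, nil
}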
@@ -33,7 +33,7 @@ type Engine interface {
Table(tableNameOrBean interface{}) *xorm.Session
Count(...interface{}) (int64, error)
Decr(column string, arg ...interface{}) *xorm.Session
Delete(interface{}) (int64, error)
Delete(...interface{}) (int64, error)
Exec(...interface{}) (sql.Result, error)
Find(interface{}, ...interface{}) error
Get(interface{}) (bool, error)
@@ -8,9 +8,12 @@ import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/modules/auth/oauth2"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"xorm.io/xorm/schemas"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
@@ -32,3 +35,26 @@ func TestDumpDatabase(t *testing.T) {
|
||||
assert.NoError(t, DumpDatabase(filepath.Join(dir, dbType+".sql"), dbType))
|
||||
}
|
||||
}
|
||||
|
||||
func TestDumpLoginSource(t *testing.T) {
|
||||
assert.NoError(t, PrepareTestDatabase())
|
||||
|
||||
loginSourceSchema, err := x.TableInfo(new(LoginSource))
|
||||
assert.NoError(t, err)
|
||||
|
||||
CreateLoginSource(&LoginSource{
|
||||
Type: LoginOAuth2,
|
||||
Name: "TestSource",
|
||||
IsActived: false,
|
||||
Cfg: &OAuth2Config{
|
||||
Provider: "TestSourceProvider",
|
||||
CustomURLMapping: &oauth2.CustomURLMapping{},
|
||||
},
|
||||
})
|
||||
|
||||
sb := new(strings.Builder)
|
||||
|
||||
x.DumpTables([]*schemas.Table{loginSourceSchema}, sb)
|
||||
|
||||
assert.Contains(t, sb.String(), `"Provider":"TestSourceProvider"`)
|
||||
}
|
||||
|
||||
@@ -425,23 +425,67 @@ func GetOrgsByUserID(userID int64, showAll bool) ([]*User, error) {
|
||||
return getOrgsByUserID(sess, userID, showAll)
|
||||
}
|
||||
|
||||
// queryUserOrgIDs returns a condition to return user's organization id
|
||||
func queryUserOrgIDs(uid int64) *builder.Builder {
|
||||
return builder.Select("team.org_id").
|
||||
From("team_user").InnerJoin("team", "team.id = team_user.team_id").
|
||||
Where(builder.Eq{"team_user.uid": uid})
|
||||
}
|
||||
|
||||
// MinimalOrg represents a simple orgnization with only needed columns
|
||||
type MinimalOrg = User
|
||||
|
||||
// GetUserOrgsList returns one user's all orgs list
|
||||
func GetUserOrgsList(uid int64) ([]*MinimalOrg, error) {
|
||||
var orgs = make([]*MinimalOrg, 0, 20)
|
||||
return orgs, x.Select("id, name, full_name, visibility, avatar, avatar_email, use_custom_avatar").
|
||||
func GetUserOrgsList(user *User) ([]*MinimalOrg, error) {
|
||||
sess := x.NewSession()
|
||||
defer sess.Close()
|
||||
|
||||
schema, err := x.TableInfo(new(User))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
outputCols := []string{
|
||||
"id",
|
||||
"name",
|
||||
"full_name",
|
||||
"visibility",
|
||||
"avatar",
|
||||
"avatar_email",
|
||||
"use_custom_avatar",
|
||||
}
|
||||
|
||||
groupByCols := &strings.Builder{}
|
||||
for _, col := range outputCols {
|
||||
fmt.Fprintf(groupByCols, "`%s`.%s,", schema.Name, col)
|
||||
}
|
||||
groupByStr := groupByCols.String()
|
||||
groupByStr = groupByStr[0 : len(groupByStr)-1]
|
||||
|
||||
sess.Select(groupByStr+", count(repo_id) as org_count").
|
||||
Table("user").
|
||||
In("id", queryUserOrgIDs(uid)).
|
||||
Find(&orgs)
|
||||
Join("INNER", "team", "`team`.org_id = `user`.id").
|
||||
Join("INNER", "team_user", "`team`.id = `team_user`.team_id").
|
||||
Join("LEFT", builder.
|
||||
Select("id as repo_id, owner_id as repo_owner_id").
|
||||
From("repository").
|
||||
Where(accessibleRepositoryCondition(user)), "`repository`.repo_owner_id = `team`.org_id").
|
||||
Where("`team_user`.uid = ?", user.ID).
|
||||
GroupBy(groupByStr)
|
||||
|
||||
type OrgCount struct {
|
||||
User `xorm:"extends"`
|
||||
OrgCount int
|
||||
}
|
||||
|
||||
orgCounts := make([]*OrgCount, 0, 10)
|
||||
|
||||
if err := sess.
|
||||
Asc("`user`.name").
|
||||
Find(&orgCounts); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
orgs := make([]*MinimalOrg, len(orgCounts))
|
||||
for i, orgCount := range orgCounts {
|
||||
orgCount.User.NumRepos = orgCount.OrgCount
|
||||
orgs[i] = &orgCount.User
|
||||
}
|
||||
|
||||
return orgs, nil
|
||||
}
|
||||
|
||||
func getOwnedOrgsByUserID(sess *xorm.Session, userID int64) ([]*User, error) {
|
||||
|
||||
@@ -1490,6 +1490,11 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
|
||||
releaseAttachments = append(releaseAttachments, attachments[i].RelativePath())
|
||||
}
|
||||
|
||||
if _, err = sess.In("release_id", builder.Select("id").From("`release`").Where(builder.Eq{"`release`.repo_id": repoID})).
|
||||
Delete(&Attachment{}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if _, err := sess.Exec("UPDATE `user` SET num_stars=num_stars-1 WHERE id IN (SELECT `uid` FROM `star` WHERE repo_id = ?)", repo.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -46,8 +46,8 @@ func (b *Blob) DataAsync() (io.ReadCloser, error) {

if size < 4096 {
bs, err := ioutil.ReadAll(io.LimitReader(rd, size))
defer cancel()
if err != nil {
cancel()
return nil, err
}
_, err = rd.Discard(1)
@@ -105,12 +105,12 @@ func (b *blobReader) Read(p []byte) (n int, err error) {

// Close implements io.Closer
func (b *blobReader) Close() error {
defer b.cancel()
if b.n > 0 {
for b.n > math.MaxInt32 {
n, err := b.rd.Discard(math.MaxInt32)
b.n -= int64(n)
if err != nil {
b.cancel()
return err
}
b.n -= math.MaxInt32
@@ -118,14 +118,12 @@ func (b *blobReader) Close() error {
n, err := b.rd.Discard(int(b.n))
b.n -= int64(n)
if err != nil {
b.cancel()
return err
}
}
if b.n == 0 {
_, err := b.rd.Discard(1)
b.n--
b.cancel()
return err
}
return nil
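The two hunks above change who owns the cancel of the underlying git command: the small-blob path defers it as soon as everything is buffered, and blobReader.Close now defers it unconditionally. A rough, self-contained illustration of that ownership rule follows; the bytes-backed reader and the function names are stand-ins, not Gitea's code.

package blobsketch

import (
	"bytes"
	"io"
	"io/ioutil"
)

// wrappedCloser shows the pattern the fix relies on: whatever reader is handed
// back to the caller must own the cancel/cleanup function, otherwise the
// underlying pipe or process is leaked.
type wrappedCloser struct {
	io.Reader
	cancel func()
}

func (w *wrappedCloser) Close() error {
	w.cancel() // always release the underlying resource
	return nil
}

// openBlob sketches the two paths from DataAsync: small blobs are read fully
// and returned via NopCloser, so cancel must be deferred right here (it runs on
// every return from this branch, success or error); large blobs return a
// ReadCloser whose Close performs the cancellation.
func openBlob(data []byte, cancel func(), small bool) (io.ReadCloser, error) {
	rd := bytes.NewReader(data)
	if small {
		defer cancel()
		buf, err := ioutil.ReadAll(rd)
		if err != nil {
			return nil, err
		}
		return ioutil.NopCloser(bytes.NewReader(buf)), nil
	}
	return &wrappedCloser{Reader: rd, cancel: cancel}, nil
}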
@@ -167,6 +167,9 @@ func (g *LogNameStatusRepoParser) Next(treepath string, paths2ids map[string]int
return nil, err
}
}
if len(g.next) == 0 {
return &ret, nil
}
if g.next[0] == '\x00' {
g.buffull = false
g.next, err = g.rd.ReadSlice('\x00')
modules/markup/external/external.go (vendored, 8 changed lines)
@@ -14,6 +14,7 @@ import (
"runtime"
"strings"

"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/process"
@@ -99,7 +100,12 @@ func (p *Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.
}

if ctx == nil || ctx.Ctx == nil {
return fmt.Errorf("RenderContext did not provide context")
if ctx == nil {
log.Warn("RenderContext not provided defaulting to empty ctx")
ctx = &markup.RenderContext{}
}
log.Warn("RenderContext did not provide context, defaulting to Shutdown context")
ctx.Ctx = graceful.GetManager().ShutdownContext()
}

processCtx, cancel := context.WithCancel(ctx.Ctx)
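The hunk above makes the external renderer tolerate a missing RenderContext or context instead of aborting, falling back to the graceful shutdown context. A small sketch of the same defaulting pattern with stand-in types; context.Background replaces the shutdown context only to keep the example self-contained.

package rendersketch

import "context"

// RenderContext is a stand-in for the markup.RenderContext used above; only
// the context field matters for this sketch.
type RenderContext struct {
	Ctx context.Context
}

// ensureCtx applies the defaulting shown in the hunk: never fail the render
// just because the caller forgot to supply a context.
func ensureCtx(ctx *RenderContext) *RenderContext {
	if ctx == nil {
		ctx = &RenderContext{}
	}
	if ctx.Ctx == nil {
		ctx.Ctx = context.Background() // Gitea uses its shutdown context here
	}
	return ctx
}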
@@ -11,6 +11,7 @@ import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
@@ -481,7 +482,9 @@ func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
|
||||
if err != nil {
|
||||
log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err)
|
||||
} else {
|
||||
headBranch := filepath.Join(g.gitPath(), "refs", "heads", pr.Head.OwnerName, pr.Head.Ref)
|
||||
// a new branch name with <original_owner_name/original_branchname> will be created to as new head branch
|
||||
ref := path.Join(pr.Head.OwnerName, pr.Head.Ref)
|
||||
headBranch := filepath.Join(g.gitPath(), "refs", "heads", ref)
|
||||
if err := os.MkdirAll(filepath.Dir(headBranch), os.ModePerm); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -494,10 +497,14 @@ func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
pr.Head.Ref = ref
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// whatever it's a forked repo PR, we have to change head info as the same as the base info
|
||||
pr.Head.OwnerName = pr.Base.OwnerName
|
||||
pr.Head.RepoName = pr.Base.RepoName
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
@@ -254,7 +254,7 @@ func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error {
if !release.Draft {
commit, err := g.gitRepo.GetTagCommit(rel.TagName)
if err != nil {
return fmt.Errorf("GetCommit: %v", err)
return fmt.Errorf("GetTagCommit[%v]: %v", rel.TagName, err)
}
rel.NumCommits, err = commit.CommitsCount()
if err != nil {
@@ -808,7 +808,8 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
}
headCommitID, err := g.gitRepo.GetRefCommitID(pr.GetGitRefName())
if err != nil {
return fmt.Errorf("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err)
log.Warn("GetRefCommitID[%s]: %v, the review comment will be ignored", pr.GetGitRefName(), err)
continue
}

var patch string
@@ -396,12 +396,15 @@ func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er
if err != nil {
return nil, false, fmt.Errorf("error while listing issue awards: %v", err)
}
if len(awards) < perPage {
break
}

for i := range awards {
reactions = append(reactions, g.awardToReaction(awards[i]))
}

if len(awards) < perPage {
break
}

awardPage++
}

@@ -558,12 +561,15 @@ func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
if err != nil {
return nil, false, fmt.Errorf("error while listing merge requests awards: %v", err)
}
if len(awards) < perPage {
break
}

for i := range awards {
reactions = append(reactions, g.awardToReaction(awards[i]))
}

if len(awards) < perPage {
break
}

awardPage++
}
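Both hunks above move the short-page check below the append, so the awards returned by the final request are no longer dropped. The loop shape they restore looks roughly like this; the fetch callback and the string items are placeholders rather than the go-gitlab API.

package migratesketch

// fetchPage stands in for a paginated API call such as the award listing; it
// returns at most perPage items for the given 1-based page.
type fetchPage func(page, perPage int) ([]string, error)

// collectAll always appends the page that was just fetched and only then
// decides whether it was the last (short) page. Breaking before the append is
// exactly the bug that dropped the reactions of the final request.
func collectAll(fetch fetchPage, perPage int) ([]string, error) {
	var all []string
	for page := 1; ; page++ {
		items, err := fetch(page, perPage)
		if err != nil {
			return nil, err
		}
		all = append(all, items...)
		if len(items) < perPage {
			break
		}
	}
	return all, nil
}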
@@ -55,6 +55,7 @@ func RestoreRepo(ctx context.Context, repoDir, ownerName, repoName string, units
if err := json.Unmarshal(body, &ret); err != nil {
return http.StatusInternalServerError, fmt.Sprintf("Response body Unmarshal error: %v", err.Error())
}
return http.StatusInternalServerError, ret.Err
}

return http.StatusOK, fmt.Sprintf("Restore repo %s/%s successfully", ownerName, repoName)
@@ -83,6 +83,7 @@ func TestGetDiffPreview(t *testing.T) {
|
||||
{
|
||||
LeftIdx: 3,
|
||||
RightIdx: 0,
|
||||
Match: 4,
|
||||
Type: 3,
|
||||
Content: "-Description for repo1",
|
||||
Comments: nil,
|
||||
@@ -90,6 +91,7 @@ func TestGetDiffPreview(t *testing.T) {
|
||||
{
|
||||
LeftIdx: 0,
|
||||
RightIdx: 3,
|
||||
Match: 3,
|
||||
Type: 2,
|
||||
Content: "+Description for repo1",
|
||||
Comments: nil,
|
||||
@@ -97,6 +99,7 @@ func TestGetDiffPreview(t *testing.T) {
|
||||
{
|
||||
LeftIdx: 0,
|
||||
RightIdx: 4,
|
||||
Match: -1,
|
||||
Type: 2,
|
||||
Content: "+this is a new line",
|
||||
Comments: nil,
|
||||
|
||||
@@ -31,6 +31,7 @@ type PushCommits struct {
Commits []*PushCommit
HeadCommit *PushCommit
CompareURL string
Len int

avatars map[string]string
emailUsers map[string]*models.User
@@ -182,5 +183,12 @@ func ListToPushCommits(l *list.List) *PushCommits {
commit := CommitToPushCommit(e.Value.(*git.Commit))
commits = append(commits, commit)
}
return &PushCommits{commits, nil, "", make(map[string]string), make(map[string]*models.User)}
return &PushCommits{
Commits: commits,
HeadCommit: nil,
CompareURL: "",
Len: len(commits),
avatars: make(map[string]string),
emailUsers: make(map[string]*models.User),
}
}

@@ -848,6 +848,11 @@ func ActionContent2Commits(act Actioner) *repository.PushCommits {
if err := json.Unmarshal([]byte(act.GetContent()), push); err != nil {
log.Error("json.Unmarshal:\n%s\nERROR: %v", act.GetContent(), err)
}

if push.Len == 0 {
push.Len = len(push.Commits)
}

return push
}
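The struct gains an explicit Len field, and ActionContent2Commits backfills it for action rows that were serialized before the field existed. A small standalone demonstration of that backfill; the payload and the cut-down struct are made up for the example.

package main

import (
	"encoding/json"
	"fmt"
)

// pushPayload is a reduced stand-in for repository.PushCommits: old action
// rows were stored without the Len field, so it decodes as zero.
type pushPayload struct {
	Commits []struct{ Sha1 string }
	Len     int
}

func main() {
	// Content as an older release would have stored it: no "Len" key at all.
	old := `{"Commits":[{"Sha1":"abc1234"},{"Sha1":"def5678"}]}`

	var push pushPayload
	if err := json.Unmarshal([]byte(old), &push); err != nil {
		fmt.Println("unmarshal:", err)
		return
	}
	// Same backfill as in the hunk above: trust Len only if it was set.
	if push.Len == 0 {
		push.Len = len(push.Commits)
	}
	fmt.Println(push.Len) // prints 2
}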
@@ -1057,7 +1057,7 @@ issues.action_milestone=Pietra Miliare
issues.action_milestone_no_select=Nessuna pietra miliare
issues.action_assignee=Assegnatario
issues.action_assignee_no_select=Nessun assegnatario
issues.opened_by=aperto %[1]s da <a href="/%[2]s">%[3]s</a>
issues.opened_by=aperto %[1]s da <a href="%[2]s">%[3]s</a>
issues.closed_by=del <a href="%[2]s">%[3]s</a> chiuso %[1]s
issues.closed_by_fake=della %[2]s chiusa %[1]s
issues.previous=Pagina precedente

@@ -1046,7 +1046,7 @@ issues.action_milestone=Mijlpaal
issues.action_milestone_no_select=Geen mijlpaal
issues.action_assignee=Toegewezene
issues.action_assignee_no_select=Geen verantwoordelijke
issues.opened_by=%[1]s werd geopend door <a href="/%[2]s">%[3]s</a>
issues.opened_by=%[1]s geopend door <a href="%[2]s">%[3]s</a>
issues.closed_by=door <a href="%[2]s">%[3]s</a> gesloten %[1]s
issues.closed_by_fake=met %[2]gesloten %[1]s
issues.previous=Vorige
@@ -106,6 +106,7 @@ func GetAllOrgs(ctx *context.APIContext) {
|
||||
listOptions := utils.GetListOptions(ctx)
|
||||
|
||||
users, maxResults, err := models.SearchUsers(&models.SearchUserOptions{
|
||||
Actor: ctx.User,
|
||||
Type: models.UserTypeOrganization,
|
||||
OrderBy: models.SearchOrderByAlphabetically,
|
||||
ListOptions: listOptions,
|
||||
|
||||
@@ -130,6 +130,7 @@ func GetAll(ctx *context.APIContext) {
|
||||
listOptions := utils.GetListOptions(ctx)
|
||||
|
||||
publicOrgs, maxResults, err := models.SearchUsers(&models.SearchUserOptions{
|
||||
Actor: ctx.User,
|
||||
ListOptions: listOptions,
|
||||
Type: models.UserTypeOrganization,
|
||||
OrderBy: models.SearchOrderByAlphabetically,
|
||||
|
||||
@@ -56,15 +56,13 @@ func Search(ctx *context.APIContext) {
|
||||
|
||||
listOptions := utils.GetListOptions(ctx)
|
||||
|
||||
opts := &models.SearchUserOptions{
|
||||
users, maxResults, err := models.SearchUsers(&models.SearchUserOptions{
|
||||
Actor: ctx.User,
|
||||
Keyword: strings.Trim(ctx.Query("q"), " "),
|
||||
UID: ctx.QueryInt64("uid"),
|
||||
Type: models.UserTypeIndividual,
|
||||
ListOptions: listOptions,
|
||||
}
|
||||
|
||||
users, maxResults, err := models.SearchUsers(opts)
|
||||
})
|
||||
if err != nil {
|
||||
ctx.JSON(http.StatusInternalServerError, map[string]interface{}{
|
||||
"ok": false,
|
||||
|
||||
@@ -217,7 +217,7 @@ func loadBranches(ctx *context.Context, skip, limit int) ([]*Branch, int) {
branches = append(branches, deletedBranches...)
}

return branches, totalNumOfBranches - 1
return branches, totalNumOfBranches
}

func loadOneBranch(ctx *context.Context, rawBranch *git.Branch, protectedBranches []*models.ProtectedBranch,
@@ -268,9 +268,8 @@ func Diff(ctx *context.Context) {
|
||||
repoName := ctx.Repo.Repository.Name
|
||||
commitID := ctx.Params(":sha")
|
||||
var (
|
||||
gitRepo *git.Repository
|
||||
err error
|
||||
repoPath string
|
||||
gitRepo *git.Repository
|
||||
err error
|
||||
)
|
||||
|
||||
if ctx.Data["PageIsWiki"] != nil {
|
||||
@@ -279,10 +278,9 @@ func Diff(ctx *context.Context) {
|
||||
ctx.ServerError("Repo.GitRepo.GetCommit", err)
|
||||
return
|
||||
}
|
||||
repoPath = ctx.Repo.Repository.WikiPath()
|
||||
defer gitRepo.Close()
|
||||
} else {
|
||||
gitRepo = ctx.Repo.GitRepo
|
||||
repoPath = models.RepoPath(userName, repoName)
|
||||
}
|
||||
|
||||
commit, err := gitRepo.GetCommit(commitID)
|
||||
@@ -306,7 +304,7 @@ func Diff(ctx *context.Context) {
|
||||
ctx.Data["CommitStatus"] = models.CalcCommitStatus(statuses)
|
||||
ctx.Data["CommitStatuses"] = statuses
|
||||
|
||||
diff, err := gitdiff.GetDiffCommitWithWhitespaceBehavior(repoPath,
|
||||
diff, err := gitdiff.GetDiffCommitWithWhitespaceBehavior(gitRepo,
|
||||
commitID, setting.Git.MaxGitDiffLines,
|
||||
setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles,
|
||||
gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)))
|
||||
|
||||
@@ -526,7 +526,7 @@ func PrepareCompareDiff(
|
||||
return true
|
||||
}
|
||||
|
||||
diff, err := gitdiff.GetDiffRangeWithWhitespaceBehavior(models.RepoPath(headUser.Name, headRepo.Name),
|
||||
diff, err := gitdiff.GetDiffRangeWithWhitespaceBehavior(headGitRepo,
|
||||
compareInfo.MergeBase, headCommitID, setting.Git.MaxGitDiffLines,
|
||||
setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, whitespaceBehavior)
|
||||
if err != nil {
|
||||
@@ -618,11 +618,14 @@ func getBranchesAndTagsForRepo(user *models.User, repo *models.Repository) (bool
|
||||
// CompareDiff show different from one commit to another commit
|
||||
func CompareDiff(ctx *context.Context) {
|
||||
headUser, headRepo, headGitRepo, compareInfo, baseBranch, headBranch := ParseCompareInfo(ctx)
|
||||
|
||||
defer func() {
|
||||
if headGitRepo != nil {
|
||||
headGitRepo.Close()
|
||||
}
|
||||
}()
|
||||
if ctx.Written() {
|
||||
return
|
||||
}
|
||||
defer headGitRepo.Close()
|
||||
|
||||
nothingToCompare := PrepareCompareDiff(ctx, headUser, headRepo, headGitRepo, compareInfo, baseBranch, headBranch,
|
||||
gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)))
|
||||
|
||||
@@ -419,9 +419,6 @@ func RetrieveRepoMilestonesAndAssignees(ctx *context.Context, repo *models.Repos
|
||||
}
|
||||
|
||||
handleTeamMentions(ctx)
|
||||
if ctx.Written() {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func retrieveProjects(ctx *context.Context, repo *models.Repository) {
|
||||
@@ -1138,6 +1135,7 @@ func ViewIssue(ctx *context.Context) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, issue.Content)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
@@ -1303,6 +1301,7 @@ func ViewIssue(ctx *context.Context) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, comment.Content)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
@@ -1379,6 +1378,7 @@ func ViewIssue(ctx *context.Context) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, comment.Content)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
@@ -1740,6 +1740,7 @@ func UpdateIssueContent(ctx *context.Context) {
|
||||
URLPrefix: ctx.Query("context"),
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, issue.Content)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
@@ -2170,6 +2171,7 @@ func UpdateCommentContent(ctx *context.Context) {
|
||||
URLPrefix: ctx.Query("context"),
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, comment.Content)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
|
||||
@@ -89,6 +89,7 @@ func Milestones(ctx *context.Context) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, m.Content)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
@@ -282,6 +283,7 @@ func MilestoneIssuesAndPulls(ctx *context.Context) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, milestone.Content)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
|
||||
@@ -82,6 +82,7 @@ func Projects(ctx *context.Context) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, projects[i].Description)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
@@ -324,6 +325,7 @@ func ViewProject(ctx *context.Context) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, project.Description)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
|
||||
@@ -587,10 +587,9 @@ func ViewPullFiles(ctx *context.Context) {
|
||||
pull := issue.PullRequest
|
||||
|
||||
var (
|
||||
diffRepoPath string
|
||||
startCommitID string
|
||||
endCommitID string
|
||||
gitRepo *git.Repository
|
||||
gitRepo = ctx.Repo.GitRepo
|
||||
)
|
||||
|
||||
var prInfo *git.CompareInfo
|
||||
@@ -607,9 +606,6 @@ func ViewPullFiles(ctx *context.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
diffRepoPath = ctx.Repo.GitRepo.Path
|
||||
gitRepo = ctx.Repo.GitRepo
|
||||
|
||||
headCommitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
|
||||
if err != nil {
|
||||
ctx.ServerError("GetRefCommitID", err)
|
||||
@@ -623,7 +619,7 @@ func ViewPullFiles(ctx *context.Context) {
|
||||
ctx.Data["Reponame"] = ctx.Repo.Repository.Name
|
||||
ctx.Data["AfterCommitID"] = endCommitID
|
||||
|
||||
diff, err := gitdiff.GetDiffRangeWithWhitespaceBehavior(diffRepoPath,
|
||||
diff, err := gitdiff.GetDiffRangeWithWhitespaceBehavior(gitRepo,
|
||||
startCommitID, endCommitID, setting.Git.MaxGitDiffLines,
|
||||
setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles,
|
||||
gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)))
|
||||
@@ -1016,10 +1012,14 @@ func CompareAndPullRequestPost(ctx *context.Context) {
|
||||
)
|
||||
|
||||
headUser, headRepo, headGitRepo, prInfo, baseBranch, headBranch := ParseCompareInfo(ctx)
|
||||
defer func() {
|
||||
if headGitRepo != nil {
|
||||
headGitRepo.Close()
|
||||
}
|
||||
}()
|
||||
if ctx.Written() {
|
||||
return
|
||||
}
|
||||
defer headGitRepo.Close()
|
||||
|
||||
labelIDs, assigneeIDs, milestoneID, _ := ValidateRepoMetas(ctx, *form, true)
|
||||
if ctx.Written() {
|
||||
|
||||
@@ -146,6 +146,7 @@ func releasesOrTags(ctx *context.Context, isTagList bool) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, r.Note)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
@@ -215,6 +216,7 @@ func SingleRelease(ctx *context.Context) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: ctx.Repo.Repository.ComposeMetas(),
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, release.Note)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
|
||||
@@ -742,6 +742,7 @@ func handleSettingRemoteAddrError(ctx *context.Context, err error, form *forms.R
default:
ctx.ServerError("Unknown error", err)
}
return
}
ctx.RenderWithErr(ctx.Tr("repo.mirror_address_url_invalid"), tplSettingsOptions, form)
}
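The hunk above only adds a return after the error switch, so the generic mirror-address message is not rendered on top of a response that has already been written. Schematically, with plain functions standing in for the context methods:

package settingssketch

// respond stands in for ctx.ServerError / ctx.RenderWithErr; writing twice to
// one response is the problem the added "return" prevents.
func handleRemoteAddrError(unknown bool, respond func(status int, msg string)) {
	if unknown {
		respond(500, "Unknown error")
		return // added by the fix: stop here, the response is already written
	}
	respond(422, "repo.mirror_address_url_invalid")
}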
@@ -135,6 +135,9 @@ func wikiContentsByName(ctx *context.Context, commit *git.Commit, wikiName strin
|
||||
func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
|
||||
wikiRepo, commit, err := findWikiRepoCommit(ctx)
|
||||
if err != nil {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
if !git.IsErrNotExist(err) {
|
||||
ctx.ServerError("GetBranchCommit", err)
|
||||
}
|
||||
@@ -222,6 +225,9 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
|
||||
|
||||
var buf strings.Builder
|
||||
if err := markdown.Render(rctx, bytes.NewReader(data), &buf); err != nil {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
ctx.ServerError("Render", err)
|
||||
return nil, nil
|
||||
}
|
||||
@@ -229,6 +235,9 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
|
||||
|
||||
buf.Reset()
|
||||
if err := markdown.Render(rctx, bytes.NewReader(sidebarContent), &buf); err != nil {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
ctx.ServerError("Render", err)
|
||||
return nil, nil
|
||||
}
|
||||
@@ -237,6 +246,9 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
|
||||
|
||||
buf.Reset()
|
||||
if err := markdown.Render(rctx, bytes.NewReader(footerContent), &buf); err != nil {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
ctx.ServerError("Render", err)
|
||||
return nil, nil
|
||||
}
|
||||
@@ -380,17 +392,14 @@ func Wiki(ctx *context.Context) {
|
||||
}
|
||||
|
||||
wikiRepo, entry := renderViewPage(ctx)
|
||||
if ctx.Written() {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
}()
|
||||
if ctx.Written() {
|
||||
return
|
||||
}
|
||||
if entry == nil {
|
||||
ctx.Data["Title"] = ctx.Tr("repo.wiki")
|
||||
ctx.HTML(http.StatusOK, tplWikiStart)
|
||||
@@ -425,17 +434,15 @@ func WikiRevision(ctx *context.Context) {
|
||||
}
|
||||
|
||||
wikiRepo, entry := renderRevisionPage(ctx)
|
||||
if ctx.Written() {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
}()
|
||||
|
||||
if ctx.Written() {
|
||||
return
|
||||
}
|
||||
if entry == nil {
|
||||
ctx.Data["Title"] = ctx.Tr("repo.wiki")
|
||||
ctx.HTML(http.StatusOK, tplWikiStart)
|
||||
@@ -472,13 +479,14 @@ func WikiPages(ctx *context.Context) {
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
entries, err := commit.ListEntries()
|
||||
if err != nil {
|
||||
defer func() {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
}()
|
||||
|
||||
entries, err := commit.ListEntries()
|
||||
if err != nil {
|
||||
ctx.ServerError("ListEntries", err)
|
||||
return
|
||||
}
|
||||
@@ -489,10 +497,6 @@ func WikiPages(ctx *context.Context) {
|
||||
}
|
||||
c, err := wikiRepo.GetCommitByPath(entry.Name())
|
||||
if err != nil {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
|
||||
ctx.ServerError("GetCommit", err)
|
||||
return
|
||||
}
|
||||
@@ -501,10 +505,6 @@ func WikiPages(ctx *context.Context) {
|
||||
if models.IsErrWikiInvalidFileName(err) {
|
||||
continue
|
||||
}
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
|
||||
ctx.ServerError("WikiFilenameToName", err)
|
||||
return
|
||||
}
|
||||
@@ -516,21 +516,25 @@ func WikiPages(ctx *context.Context) {
|
||||
}
|
||||
ctx.Data["Pages"] = pages
|
||||
|
||||
defer func() {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
}()
|
||||
ctx.HTML(http.StatusOK, tplWikiPages)
|
||||
}
|
||||
|
||||
// WikiRaw outputs raw blob requested by user (image for example)
|
||||
func WikiRaw(ctx *context.Context) {
|
||||
wikiRepo, commit, err := findWikiRepoCommit(ctx)
|
||||
if err != nil {
|
||||
defer func() {
|
||||
if wikiRepo != nil {
|
||||
wikiRepo.Close()
|
||||
}
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
if git.IsErrNotExist(err) {
|
||||
ctx.NotFound("findEntryForFile", nil)
|
||||
return
|
||||
}
|
||||
ctx.ServerError("findEntryForfile", err)
|
||||
return
|
||||
}
|
||||
|
||||
providedPath := ctx.Params("*")
|
||||
@@ -546,9 +550,7 @@ func WikiRaw(ctx *context.Context) {
|
||||
|
||||
if entry == nil {
|
||||
// Try to find a wiki page with that name
|
||||
if strings.HasSuffix(providedPath, ".md") {
|
||||
providedPath = providedPath[:len(providedPath)-3]
|
||||
}
|
||||
providedPath = strings.TrimSuffix(providedPath, ".md")
|
||||
|
||||
wikiPath := wiki_service.NameToFilename(providedPath)
|
||||
entry, err = findEntryForFile(commit, wikiPath)
|
||||
|
||||
@@ -49,7 +49,7 @@ func getDashboardContextUser(ctx *context.Context) *models.User {
|
||||
}
|
||||
ctx.Data["ContextUser"] = ctxUser
|
||||
|
||||
orgs, err := models.GetUserOrgsList(ctx.User.ID)
|
||||
orgs, err := models.GetUserOrgsList(ctx.User)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetUserOrgsList", err)
|
||||
return nil
|
||||
@@ -272,6 +272,7 @@ func Milestones(ctx *context.Context) {
|
||||
milestones[i].RenderedContent, err = markdown.RenderString(&markup.RenderContext{
|
||||
URLPrefix: milestones[i].Repo.Link(),
|
||||
Metas: milestones[i].Repo.ComposeMetas(),
|
||||
Ctx: ctx,
|
||||
}, milestones[i].Content)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
|
||||
@@ -124,6 +124,7 @@ func Profile(ctx *context.Context) {
|
||||
URLPrefix: ctx.Repo.RepoLink,
|
||||
Metas: map[string]string{"mode": "document"},
|
||||
GitRepo: ctx.Repo.GitRepo,
|
||||
Ctx: ctx,
|
||||
}, ctxUser.Description)
|
||||
if err != nil {
|
||||
ctx.ServerError("RenderString", err)
|
||||
|
||||
@@ -873,7 +873,7 @@ func RegisterRoutes(m *web.Route) {
m.Get("/_pages", repo.WikiPages)
m.Get("/{page}/_revision", repo.WikiRevision)
m.Get("/commit/{sha:[a-f0-9]{7,40}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.Diff)
m.Get("/commit/{sha:[a-f0-9]{7,40}}.{:patch|diff}", repo.RawDiff)
m.Get("/commit/{sha:[a-f0-9]{7,40}}.{ext:patch|diff}", repo.RawDiff)

m.Group("", func() {
m.Combo("/_new").Get(repo.NewWiki).
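The route fix above gives the extension capture a name, ext, so handlers can read it back. Outside of Gitea's web.Route wrapper the same pattern looks like this with chi, which supports named regex parameters; the import path, port and handler body are assumptions for a standalone example.

package main

import (
	"fmt"
	"net/http"

	"github.com/go-chi/chi/v5"
)

func main() {
	r := chi.NewRouter()
	// Same pattern as the fixed route: the capture group needs a name ("ext")
	// before the handler can ask which suffix was requested.
	r.Get("/commit/{sha:[a-f0-9]{7,40}}.{ext:patch|diff}", func(w http.ResponseWriter, req *http.Request) {
		fmt.Fprintf(w, "sha=%s ext=%s\n", chi.URLParam(req, "sha"), chi.URLParam(req, "ext"))
	})
	_ = http.ListenAndServe(":8080", r)
}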
@@ -80,11 +80,11 @@ func isAttachmentDownload(req *http.Request) bool {
return strings.HasPrefix(req.URL.Path, "/attachments/") && req.Method == "GET"
}

var gitRawPathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/(?:(?:git-(?:(?:upload)|(?:receive))-pack$)|(?:info/refs$)|(?:HEAD$)|(?:objects/)|raw/)`)
var gitRawReleasePathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/(?:(?:git-(?:(?:upload)|(?:receive))-pack$)|(?:info/refs$)|(?:HEAD$)|(?:objects/)|(?:raw/)|(?:releases/download/))`)
var lfsPathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/info/lfs/`)

func isGitRawOrLFSPath(req *http.Request) bool {
if gitRawPathRe.MatchString(req.URL.Path) {
func isGitRawReleaseOrLFSPath(req *http.Request) bool {
if gitRawReleasePathRe.MatchString(req.URL.Path) {
return true
}
if setting.LFS.StartServer {
@@ -83,6 +83,10 @@ func Test_isGitRawOrLFSPath(t *testing.T) {
|
||||
"/owner/repo/commit/123456789012345678921234567893124567894",
|
||||
false,
|
||||
},
|
||||
{
|
||||
"/owner/repo/releases/download/tag/repo.tar.gz",
|
||||
true,
|
||||
},
|
||||
}
|
||||
lfsTests := []string{
|
||||
"/owner/repo/info/lfs/",
|
||||
@@ -102,11 +106,11 @@ func Test_isGitRawOrLFSPath(t *testing.T) {
|
||||
t.Run(tt.path, func(t *testing.T) {
|
||||
req, _ := http.NewRequest("POST", "http://localhost"+tt.path, nil)
|
||||
setting.LFS.StartServer = false
|
||||
if got := isGitRawOrLFSPath(req); got != tt.want {
|
||||
if got := isGitRawReleaseOrLFSPath(req); got != tt.want {
|
||||
t.Errorf("isGitOrLFSPath() = %v, want %v", got, tt.want)
|
||||
}
|
||||
setting.LFS.StartServer = true
|
||||
if got := isGitRawOrLFSPath(req); got != tt.want {
|
||||
if got := isGitRawReleaseOrLFSPath(req); got != tt.want {
|
||||
t.Errorf("isGitOrLFSPath() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
@@ -115,11 +119,11 @@ func Test_isGitRawOrLFSPath(t *testing.T) {
|
||||
t.Run(tt, func(t *testing.T) {
|
||||
req, _ := http.NewRequest("POST", tt, nil)
|
||||
setting.LFS.StartServer = false
|
||||
if got := isGitRawOrLFSPath(req); got != setting.LFS.StartServer {
|
||||
t.Errorf("isGitOrLFSPath(%q) = %v, want %v, %v", tt, got, setting.LFS.StartServer, gitRawPathRe.MatchString(tt))
|
||||
if got := isGitRawReleaseOrLFSPath(req); got != setting.LFS.StartServer {
|
||||
t.Errorf("isGitOrLFSPath(%q) = %v, want %v, %v", tt, got, setting.LFS.StartServer, gitRawReleasePathRe.MatchString(tt))
|
||||
}
|
||||
setting.LFS.StartServer = true
|
||||
if got := isGitRawOrLFSPath(req); got != setting.LFS.StartServer {
|
||||
if got := isGitRawReleaseOrLFSPath(req); got != setting.LFS.StartServer {
|
||||
t.Errorf("isGitOrLFSPath(%q) = %v, want %v", tt, got, setting.LFS.StartServer)
|
||||
}
|
||||
})
|
||||
|
||||
@@ -49,7 +49,7 @@ func (b *Basic) Free() error {
// Returns nil if header is empty or validation fails.
func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *models.User {
// Basic authentication should only fire on API, Download or on Git or LFSPaths
if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) && !isGitRawOrLFSPath(req) {
if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) && !isGitRawReleaseOrLFSPath(req) {
return nil
}

@@ -78,7 +78,7 @@ func (r *ReverseProxy) Verify(req *http.Request, w http.ResponseWriter, store Da
}

// Make sure requests to API paths, attachment downloads, git and LFS do not create a new session
if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) && !isGitRawOrLFSPath(req) {
if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) && !isGitRawReleaseOrLFSPath(req) {
if sess != nil && (sess.Get("uid") == nil || sess.Get("uid").(int64) != user.ID) {
handleSignIn(w, req, sess, user)
}

@@ -20,6 +20,7 @@ import (
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/models"
|
||||
"code.gitea.io/gitea/modules/charset"
|
||||
@@ -75,6 +76,7 @@ const (
|
||||
type DiffLine struct {
|
||||
LeftIdx int
|
||||
RightIdx int
|
||||
Match int
|
||||
Type DiffLineType
|
||||
Content string
|
||||
Comments []*models.Comment
|
||||
@@ -943,6 +945,7 @@ func parseHunks(curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio
|
||||
curFileLFSPrefix bool
|
||||
)
|
||||
|
||||
lastLeftIdx := -1
|
||||
leftLine, rightLine := 1, 1
|
||||
|
||||
for {
|
||||
@@ -1027,13 +1030,21 @@ func parseHunks(curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio
|
||||
curFile.IsIncomplete = true
|
||||
continue
|
||||
}
|
||||
diffLine := &DiffLine{Type: DiffLineAdd, RightIdx: rightLine}
|
||||
diffLine := &DiffLine{Type: DiffLineAdd, RightIdx: rightLine, Match: -1}
|
||||
rightLine++
|
||||
if curSection == nil {
|
||||
// Create a new section to represent this hunk
|
||||
curSection = &DiffSection{}
|
||||
curFile.Sections = append(curFile.Sections, curSection)
|
||||
}
|
||||
if lastLeftIdx > -1 {
|
||||
diffLine.Match = lastLeftIdx
|
||||
curSection.Lines[lastLeftIdx].Match = len(curSection.Lines)
|
||||
lastLeftIdx++
|
||||
if lastLeftIdx >= len(curSection.Lines) || curSection.Lines[lastLeftIdx].Type != DiffLineDel {
|
||||
lastLeftIdx = -1
|
||||
}
|
||||
}
|
||||
curSection.Lines = append(curSection.Lines, diffLine)
|
||||
case '-':
|
||||
curFileLinesCount++
|
||||
@@ -1042,7 +1053,7 @@ func parseHunks(curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio
|
||||
curFile.IsIncomplete = true
|
||||
continue
|
||||
}
|
||||
diffLine := &DiffLine{Type: DiffLineDel, LeftIdx: leftLine}
|
||||
diffLine := &DiffLine{Type: DiffLineDel, LeftIdx: leftLine, Match: -1}
|
||||
if leftLine > 0 {
|
||||
leftLine++
|
||||
}
|
||||
@@ -1051,6 +1062,9 @@ func parseHunks(curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio
|
||||
curSection = &DiffSection{}
|
||||
curFile.Sections = append(curFile.Sections, curSection)
|
||||
}
|
||||
if len(curSection.Lines) == 0 || curSection.Lines[len(curSection.Lines)-1].Type != DiffLineDel {
|
||||
lastLeftIdx = len(curSection.Lines)
|
||||
}
|
||||
curSection.Lines = append(curSection.Lines, diffLine)
|
||||
case ' ':
|
||||
curFileLinesCount++
|
||||
@@ -1061,6 +1075,7 @@ func parseHunks(curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio
|
||||
diffLine := &DiffLine{Type: DiffLinePlain, LeftIdx: leftLine, RightIdx: rightLine}
|
||||
leftLine++
|
||||
rightLine++
|
||||
lastLeftIdx = -1
|
||||
if curSection == nil {
|
||||
// Create a new section to represent this hunk
|
||||
curSection = &DiffSection{}
|
||||
@@ -1191,31 +1206,20 @@ func readFileName(rd *strings.Reader) (string, bool) {
|
||||
return name[2:], ambiguity
|
||||
}
|
||||
|
||||
// GetDiffRange builds a Diff between two commits of a repository.
|
||||
// passing the empty string as beforeCommitID returns a diff from the
|
||||
// parent commit.
|
||||
func GetDiffRange(repoPath, beforeCommitID, afterCommitID string, maxLines, maxLineCharacters, maxFiles int) (*Diff, error) {
|
||||
return GetDiffRangeWithWhitespaceBehavior(repoPath, beforeCommitID, afterCommitID, maxLines, maxLineCharacters, maxFiles, "")
|
||||
}
|
||||
|
||||
// GetDiffRangeWithWhitespaceBehavior builds a Diff between two commits of a repository.
|
||||
// Passing the empty string as beforeCommitID returns a diff from the parent commit.
|
||||
// The whitespaceBehavior is either an empty string or a git flag
|
||||
func GetDiffRangeWithWhitespaceBehavior(repoPath, beforeCommitID, afterCommitID string, maxLines, maxLineCharacters, maxFiles int, whitespaceBehavior string) (*Diff, error) {
|
||||
gitRepo, err := git.OpenRepository(repoPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer gitRepo.Close()
|
||||
func GetDiffRangeWithWhitespaceBehavior(gitRepo *git.Repository, beforeCommitID, afterCommitID string, maxLines, maxLineCharacters, maxFiles int, whitespaceBehavior string) (*Diff, error) {
|
||||
repoPath := gitRepo.Path
|
||||
|
||||
commit, err := gitRepo.GetCommit(afterCommitID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// FIXME: graceful: These commands should likely have a timeout
|
||||
ctx, cancel := context.WithCancel(git.DefaultContext)
|
||||
ctx, cancel := context.WithTimeout(git.DefaultContext, time.Duration(setting.Git.Timeout.Default)*time.Second)
|
||||
defer cancel()
|
||||
|
||||
var cmd *exec.Cmd
|
||||
if (len(beforeCommitID) == 0 || beforeCommitID == git.EmptySHA) && commit.ParentCount() == 0 {
|
||||
diffArgs := []string{"diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M"}
|
||||
@@ -1289,15 +1293,10 @@ func GetDiffRangeWithWhitespaceBehavior(repoPath, beforeCommitID, afterCommitID
|
||||
return diff, nil
|
||||
}
|
||||
|
||||
// GetDiffCommit builds a Diff representing the given commitID.
|
||||
func GetDiffCommit(repoPath, commitID string, maxLines, maxLineCharacters, maxFiles int) (*Diff, error) {
|
||||
return GetDiffRangeWithWhitespaceBehavior(repoPath, "", commitID, maxLines, maxLineCharacters, maxFiles, "")
|
||||
}
|
||||
|
||||
// GetDiffCommitWithWhitespaceBehavior builds a Diff representing the given commitID.
|
||||
// The whitespaceBehavior is either an empty string or a git flag
|
||||
func GetDiffCommitWithWhitespaceBehavior(repoPath, commitID string, maxLines, maxLineCharacters, maxFiles int, whitespaceBehavior string) (*Diff, error) {
|
||||
return GetDiffRangeWithWhitespaceBehavior(repoPath, "", commitID, maxLines, maxLineCharacters, maxFiles, whitespaceBehavior)
|
||||
func GetDiffCommitWithWhitespaceBehavior(gitRepo *git.Repository, commitID string, maxLines, maxLineCharacters, maxFiles int, whitespaceBehavior string) (*Diff, error) {
|
||||
return GetDiffRangeWithWhitespaceBehavior(gitRepo, "", commitID, maxLines, maxLineCharacters, maxFiles, whitespaceBehavior)
|
||||
}
|
||||
|
||||
// CommentAsDiff returns c.Patch as *Diff
|
||||
|
||||
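The hunks above change `GetDiffRangeWithWhitespaceBehavior` and `GetDiffCommitWithWhitespaceBehavior` to take an already-open `*git.Repository` instead of a repository path, so the caller now owns opening and closing the repository. A rough sketch of the new calling convention follows; the wrapper function, variable names, and the `gitdiff` import path are illustrative assumptions, and the updated test below shows the same pattern.

```go
package example

import (
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/services/gitdiff"
)

// diffIgnoringWhitespace sketches the refactored API: the repository is
// opened (and closed) by the caller and passed in, rather than re-opened
// from a path inside the diff helper.
func diffIgnoringWhitespace(repoPath, beforeCommitID, afterCommitID string) (*gitdiff.Diff, error) {
	gitRepo, err := git.OpenRepository(repoPath)
	if err != nil {
		return nil, err
	}
	defer gitRepo.Close()

	// whitespaceBehavior is either "" or a git flag such as "-w".
	return gitdiff.GetDiffRangeWithWhitespaceBehavior(gitRepo, beforeCommitID, afterCommitID,
		setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles, "-w")
}
```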
@@ -13,6 +13,7 @@ import (
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/models"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/highlight"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
@@ -513,8 +514,13 @@ func TestDiffLine_GetCommentSide(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestGetDiffRangeWithWhitespaceBehavior(t *testing.T) {
|
||||
gitRepo, err := git.OpenRepository("./testdata/academic-module")
|
||||
if !assert.NoError(t, err) {
|
||||
return
|
||||
}
|
||||
defer gitRepo.Close()
|
||||
for _, behavior := range []string{"-w", "--ignore-space-at-eol", "-b", ""} {
|
||||
diffs, err := GetDiffRangeWithWhitespaceBehavior("./testdata/academic-module", "559c156f8e0178b71cb44355428f24001b08fc68", "bd7063cc7c04689c4d082183d32a604ed27a24f9",
|
||||
diffs, err := GetDiffRangeWithWhitespaceBehavior(gitRepo, "559c156f8e0178b71cb44355428f24001b08fc68", "bd7063cc7c04689c4d082183d32a604ed27a24f9",
|
||||
setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles, behavior)
|
||||
assert.NoError(t, err, fmt.Sprintf("Error when diff with %s", behavior))
|
||||
for _, f := range diffs.Files {
|
||||
|
||||
@@ -5,7 +5,9 @@
|
||||
package lfs
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
@@ -213,14 +215,22 @@ func BatchHandler(ctx *context.Context) {
|
||||
}
|
||||
}
|
||||
|
||||
if exists {
|
||||
if meta == nil {
|
||||
if exists && meta == nil {
|
||||
accessible, err := models.LFSObjectAccessible(ctx.User, p.Oid)
|
||||
if err != nil {
|
||||
log.Error("Unable to check if LFS MetaObject [%s] is accessible. Error: %v", p.Oid, err)
|
||||
writeStatus(ctx, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if accessible {
|
||||
_, err := models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: p, RepositoryID: repository.ID})
|
||||
if err != nil {
|
||||
log.Error("Unable to create LFS MetaObject [%s] for %s/%s. Error: %v", p.Oid, rc.User, rc.Repo, err)
|
||||
writeStatus(ctx, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
exists = false
|
||||
}
|
||||
}
|
||||
|
||||
@@ -270,29 +280,50 @@ func UploadHandler(ctx *context.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
meta, err := models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: p, RepositoryID: repository.ID})
|
||||
if err != nil {
|
||||
log.Error("Unable to create LFS MetaObject [%s] for %s/%s. Error: %v", p.Oid, rc.User, rc.Repo, err)
|
||||
writeStatus(ctx, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
contentStore := lfs_module.NewContentStore()
|
||||
|
||||
exists, err := contentStore.Exists(p)
|
||||
if err != nil {
|
||||
log.Error("Unable to check if LFS OID[%s] exist. Error: %v", p.Oid, err)
|
||||
writeStatus(ctx, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if meta.Existing || exists {
|
||||
ctx.Resp.WriteHeader(http.StatusOK)
|
||||
return
|
||||
|
||||
uploadOrVerify := func() error {
|
||||
if exists {
|
||||
accessible, err := models.LFSObjectAccessible(ctx.User, p.Oid)
|
||||
if err != nil {
|
||||
log.Error("Unable to check if LFS MetaObject [%s] is accessible. Error: %v", p.Oid, err)
|
||||
return err
|
||||
}
|
||||
if !accessible {
|
||||
// The file exists but the user has no access to it.
|
||||
// The upload gets verified by hashing and size comparison to prove access to it.
|
||||
hash := sha256.New()
|
||||
written, err := io.Copy(hash, ctx.Req.Body)
|
||||
if err != nil {
|
||||
log.Error("Error creating hash. Error: %v", err)
|
||||
return err
|
||||
}
|
||||
|
||||
if written != p.Size {
|
||||
return lfs_module.ErrSizeMismatch
|
||||
}
|
||||
if hex.EncodeToString(hash.Sum(nil)) != p.Oid {
|
||||
return lfs_module.ErrHashMismatch
|
||||
}
|
||||
}
|
||||
} else if err := contentStore.Put(p, ctx.Req.Body); err != nil {
|
||||
log.Error("Error putting LFS MetaObject [%s] into content store. Error: %v", p.Oid, err)
|
||||
return err
|
||||
}
|
||||
_, err := models.NewLFSMetaObject(&models.LFSMetaObject{Pointer: p, RepositoryID: repository.ID})
|
||||
return err
|
||||
}
|
||||
|
||||
defer ctx.Req.Body.Close()
|
||||
if err := contentStore.Put(meta.Pointer, ctx.Req.Body); err != nil {
|
||||
if err := uploadOrVerify(); err != nil {
|
||||
if errors.Is(err, lfs_module.ErrSizeMismatch) || errors.Is(err, lfs_module.ErrHashMismatch) {
|
||||
log.Error("Upload does not match LFS MetaObject [%s]. Error: %v", p.Oid, err)
|
||||
writeStatusMessage(ctx, http.StatusUnprocessableEntity, err.Error())
|
||||
} else {
|
||||
writeStatus(ctx, http.StatusInternalServerError)
|
||||
|
||||
@@ -126,7 +126,7 @@ func getMergeCommit(pr *models.PullRequest) (*git.Commit, error) {
|
||||
|
||||
commit, err := gitRepo.GetCommit(mergeCommit[:40])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("GetCommit: %v", err)
|
||||
return nil, fmt.Errorf("GetMergeCommit[%v]: %v", mergeCommit[:40], err)
|
||||
}
|
||||
|
||||
return commit, nil
|
||||
|
||||
@@ -44,7 +44,7 @@ func createTag(gitRepo *git.Repository, rel *models.Release, msg string) (bool,
|
||||
|
||||
commit, err := gitRepo.GetCommit(rel.Target)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("GetCommit: %v", err)
|
||||
return false, fmt.Errorf("createTag::GetCommit[%v]: %v", rel.Target, err)
|
||||
}
|
||||
|
||||
// Trim '--' prefix to prevent command line argument vulnerability.
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
<div class="inline field {{if .Err_Visibility}}error{{end}}">
|
||||
<span class="inline required field"><label for="visibility">{{.i18n.Tr "settings.visibility"}}</label></span>
|
||||
<div class="ui selection type dropdown">
|
||||
<input type="hidden" id="visibility" name="visibility" value="{{.visibility}}">
|
||||
<input type="hidden" id="visibility" name="visibility" value="{{if .visibility}}{{.visibility}}{{else}}{{printf "%d" .DefaultUserVisibilityMode}}{{end}}">
|
||||
<div class="text">
|
||||
{{if .DefaultUserVisibilityMode.IsPublic}}{{.i18n.Tr "settings.visibility.public"}}{{end}}
|
||||
{{if .DefaultUserVisibilityMode.IsLimited}}{{.i18n.Tr "settings.visibility.limited"}}{{end}}
|
||||
|
||||
@@ -154,33 +154,5 @@
|
||||
{{end}}
|
||||
|
||||
{{template "repo/issue/view_content/reference_issue_dialog" .}}
|
||||
|
||||
{{if .IsSplitStyle}}
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', () => {
|
||||
$('tr.add-code').each(function() {
|
||||
let prev = $(this).prev();
|
||||
if (prev.is('.del-code') && prev.children().eq(5).text().trim() === '') {
|
||||
while (prev.prev().is('.del-code') && prev.prev().children().eq(5).text().trim() === '') {
|
||||
prev = prev.prev();
|
||||
}
|
||||
prev.children().eq(3).attr('data-line-num', $(this).children().eq(3).attr('data-line-num'));
|
||||
prev.children().eq(3).html($(this).children().eq(3).html());
|
||||
prev.children().eq(4).html($(this).children().eq(4).html());
|
||||
prev.children().eq(5).html($(this).children().eq(5).html());
|
||||
|
||||
prev.children().eq(0).addClass('del-code');
|
||||
prev.children().eq(1).addClass('del-code');
|
||||
prev.children().eq(2).addClass('del-code');
|
||||
prev.children().eq(3).addClass('add-code');
|
||||
prev.children().eq(4).addClass('add-code');
|
||||
prev.children().eq(5).addClass('add-code');
|
||||
|
||||
$(this).remove();
|
||||
}
|
||||
});
|
||||
});
|
||||
</script>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
@@ -1,52 +1,97 @@
|
||||
{{$file := .file}}
|
||||
{{range $j, $section := $file.Sections}}
|
||||
{{range $k, $line := $section.Lines}}
|
||||
<tr class="{{DiffLineTypeToStr .GetType}}-code nl-{{$k}} ol-{{$k}}" data-line-type="{{DiffLineTypeToStr .GetType}}">
|
||||
{{if eq .GetType 4}}
|
||||
<td class="lines-num lines-num-old">
|
||||
{{if or (eq $line.GetExpandDirection 3) (eq $line.GetExpandDirection 5) }}
|
||||
<a role="button" class="blob-excerpt" data-url="{{$.root.RepoLink}}/blob_excerpt/{{$.root.AfterCommitID}}" data-query="{{$line.GetBlobExcerptQuery}}&style=split&direction=down" data-anchor="diff-{{Sha1 $file.Name}}K{{$line.SectionInfo.RightIdx}}">
|
||||
{{svg "octicon-fold-down"}}
|
||||
</a>
|
||||
{{end}}
|
||||
{{if or (eq $line.GetExpandDirection 3) (eq $line.GetExpandDirection 4) }}
|
||||
<a role="button" class="blob-excerpt" data-url="{{$.root.RepoLink}}/blob_excerpt/{{$.root.AfterCommitID}}" data-query="{{$line.GetBlobExcerptQuery}}&style=split&direction=up" data-anchor="diff-{{Sha1 $file.Name}}K{{$line.SectionInfo.RightIdx}}">
|
||||
{{svg "octicon-fold-up"}}
|
||||
</a>
|
||||
{{end}}
|
||||
{{if eq $line.GetExpandDirection 2}}
|
||||
<a role="button" class="blob-excerpt" data-url="{{$.root.RepoLink}}/blob_excerpt/{{$.root.AfterCommitID}}" data-query="{{$line.GetBlobExcerptQuery}}&style=split&direction=" data-anchor="diff-{{Sha1 $file.Name}}K{{$line.SectionInfo.RightIdx}}">
|
||||
{{svg "octicon-fold"}}
|
||||
</a>
|
||||
{{end}}
|
||||
</td>
|
||||
<td colspan="5" class="lines-code lines-code-old "><code class="code-inner">{{$section.GetComputedInlineDiffFor $line}}</span></td>
|
||||
{{else}}
|
||||
<td class="lines-num lines-num-old" data-line-num="{{if $line.LeftIdx}}{{$line.LeftIdx}}{{end}}"><span rel="{{if $line.LeftIdx}}diff-{{Sha1 $file.Name}}L{{$line.LeftIdx}}{{end}}"></span></td>
|
||||
<td class="lines-type-marker lines-type-marker-old">{{if $line.LeftIdx}}<span class="mono" data-type-marker="{{$line.GetLineTypeMarker}}"></span>{{end}}</td>
|
||||
<td class="lines-code lines-code-old halfwidth">{{if and $.root.SignedUserID $.root.PageIsPullFiles (not (eq .GetType 2))}}<a class="ui primary button add-code-comment add-code-comment-left{{if (not $line.CanComment)}} invisible{{end}}" data-path="{{$file.Name}}" data-side="left" data-idx="{{$line.LeftIdx}}" data-new-comment-url="{{$.root.Issue.HTMLURL}}/files/reviews/new_comment">{{svg "octicon-plus"}}</a>{{end}}<code class="code-inner">{{if $line.LeftIdx}}{{$section.GetComputedInlineDiffFor $line}}{{end}}</code></td>
|
||||
<td class="lines-num lines-num-new" data-line-num="{{if $line.RightIdx}}{{$line.RightIdx}}{{end}}"><span rel="{{if $line.RightIdx}}diff-{{Sha1 $file.Name}}R{{$line.RightIdx}}{{end}}"></span></td>
|
||||
<td class="lines-type-marker lines-type-marker-new">{{if $line.RightIdx}}<span class="mono" data-type-marker="{{$line.GetLineTypeMarker}}"></span>{{end}}</td>
|
||||
<td class="lines-code lines-code-new halfwidth">{{if and $.root.SignedUserID $.root.PageIsPullFiles (not (eq .GetType 3))}}<a class="ui primary button add-code-comment add-code-comment-right{{if (not $line.CanComment)}} invisible{{end}}" data-path="{{$file.Name}}" data-side="right" data-idx="{{$line.RightIdx}}" data-new-comment-url="{{$.root.Issue.HTMLURL}}/files/reviews/new_comment">{{svg "octicon-plus"}}</a>{{end}}<code class="code-inner">{{if $line.RightIdx}}{{$section.GetComputedInlineDiffFor $line}}{{end}}</code></td>
|
||||
{{end}}
|
||||
</tr>
|
||||
{{if gt (len $line.Comments) 0}}
|
||||
<tr class="add-comment" data-line-type="{{DiffLineTypeToStr .GetType}}">
|
||||
<td class="lines-num"></td>
|
||||
<td class="lines-type-marker"></td>
|
||||
<td class="add-comment-left">
|
||||
{{if eq $line.GetCommentSide "previous"}}
|
||||
{{template "repo/diff/conversation" mergeinto $.root "comments" $line.Comments}}
|
||||
{{end}}
|
||||
</td>
|
||||
<td class="lines-num"></td>
|
||||
<td class="lines-type-marker"></td>
|
||||
<td class="add-comment-right">
|
||||
{{if eq $line.GetCommentSide "proposed"}}
|
||||
{{template "repo/diff/conversation" mergeinto $.root "comments" $line.Comments}}
|
||||
{{end}}
|
||||
</td>
|
||||
{{$hasmatch := ne $line.Match -1}}
|
||||
{{if or (ne .GetType 2) (not $hasmatch)}}
|
||||
<tr class="{{DiffLineTypeToStr .GetType}}-code nl-{{$k}} ol-{{$k}}" data-line-type="{{DiffLineTypeToStr .GetType}}">
|
||||
{{if eq .GetType 4}}
|
||||
<td class="lines-num lines-num-old">
|
||||
{{if or (eq $line.GetExpandDirection 3) (eq $line.GetExpandDirection 5) }}
|
||||
<a role="button" class="blob-excerpt" data-url="{{$.root.RepoLink}}/blob_excerpt/{{$.root.AfterCommitID}}" data-query="{{$line.GetBlobExcerptQuery}}&style=split&direction=down" data-anchor="diff-{{Sha1 $file.Name}}K{{$line.SectionInfo.RightIdx}}">
|
||||
{{svg "octicon-fold-down"}}
|
||||
</a>
|
||||
{{end}}
|
||||
{{if or (eq $line.GetExpandDirection 3) (eq $line.GetExpandDirection 4) }}
|
||||
<a role="button" class="blob-excerpt" data-url="{{$.root.RepoLink}}/blob_excerpt/{{$.root.AfterCommitID}}" data-query="{{$line.GetBlobExcerptQuery}}&style=split&direction=up" data-anchor="diff-{{Sha1 $file.Name}}K{{$line.SectionInfo.RightIdx}}">
|
||||
{{svg "octicon-fold-up"}}
|
||||
</a>
|
||||
{{end}}
|
||||
{{if eq $line.GetExpandDirection 2}}
|
||||
<a role="button" class="blob-excerpt" data-url="{{$.root.RepoLink}}/blob_excerpt/{{$.root.AfterCommitID}}" data-query="{{$line.GetBlobExcerptQuery}}&style=split&direction=" data-anchor="diff-{{Sha1 $file.Name}}K{{$line.SectionInfo.RightIdx}}">
|
||||
{{svg "octicon-fold"}}
|
||||
</a>
|
||||
{{end}}
|
||||
</td>
|
||||
<td colspan="5" class="lines-code lines-code-old "><code class="code-inner">{{$section.GetComputedInlineDiffFor $line}}</span></td>
|
||||
{{else if and (eq .GetType 3) $hasmatch}}{{/* DEL */}}
|
||||
{{$match := index $section.Lines $line.Match}}
|
||||
<td class="lines-num lines-num-old del-code" data-line-num="{{$line.LeftIdx}}"><span rel="diff-{{Sha1 $file.Name}}L{{$line.LeftIdx}}"></span></td>
|
||||
<td class="lines-type-marker lines-type-marker-old del-code"><span class="mono" data-type-marker="{{$line.GetLineTypeMarker}}"></span></td>
|
||||
<td class="lines-code lines-code-old halfwidth del-code">{{if and $.root.SignedUserID $.root.PageIsPullFiles}}<a class="ui primary button add-code-comment add-code-comment-left{{if (not $line.CanComment)}} invisible{{end}}" data-path="{{$file.Name}}" data-side="left" data-idx="{{$line.LeftIdx}}" data-new-comment-url="{{$.root.Issue.HTMLURL}}/files/reviews/new_comment">{{svg "octicon-plus"}}</a>{{end}}<code class="code-inner">{{if $line.LeftIdx}}{{$section.GetComputedInlineDiffFor $line}}{{end}}</code></td>
|
||||
<td class="lines-num lines-num-new add-code" data-line-num="{{if $match.RightIdx}}{{$match.RightIdx}}{{end}}"><span rel="{{if $match.RightIdx}}diff-{{Sha1 $file.Name}}R{{$match.RightIdx}}{{end}}"></span></td>
|
||||
<td class="lines-type-marker lines-type-marker-new add-code">{{if $match.RightIdx}}<span class="mono" data-type-marker="{{$match.GetLineTypeMarker}}"></span>{{end}}</td>
|
||||
<td class="lines-code lines-code-new halfwidth add-code">{{if and $.root.SignedUserID $.root.PageIsPullFiles}}<a class="ui primary button add-code-comment add-code-comment-right{{if (not $match.CanComment)}} invisible{{end}}" data-path="{{$file.Name}}" data-side="right" data-idx="{{$match.RightIdx}}" data-new-comment-url="{{$.root.Issue.HTMLURL}}/files/reviews/new_comment">{{svg "octicon-plus"}}</a>{{end}}<code class="code-inner">{{if $match.RightIdx}}{{$section.GetComputedInlineDiffFor $match}}{{end}}</code></td>
|
||||
{{else}}
|
||||
<td class="lines-num lines-num-old" data-line-num="{{if $line.LeftIdx}}{{$line.LeftIdx}}{{end}}"><span rel="{{if $line.LeftIdx}}diff-{{Sha1 $file.Name}}L{{$line.LeftIdx}}{{end}}"></span></td>
|
||||
<td class="lines-type-marker lines-type-marker-old">{{if $line.LeftIdx}}<span class="mono" data-type-marker="{{$line.GetLineTypeMarker}}"></span>{{end}}</td>
|
||||
<td class="lines-code lines-code-old halfwidth">{{if and $.root.SignedUserID $.root.PageIsPullFiles (not (eq .GetType 2))}}<a class="ui primary button add-code-comment add-code-comment-left{{if (not $line.CanComment)}} invisible{{end}}" data-path="{{$file.Name}}" data-side="left" data-idx="{{$line.LeftIdx}}" data-new-comment-url="{{$.root.Issue.HTMLURL}}/files/reviews/new_comment">{{svg "octicon-plus"}}</a>{{end}}<code class="code-inner">{{if $line.LeftIdx}}{{$section.GetComputedInlineDiffFor $line}}{{end}}</code></td>
|
||||
<td class="lines-num lines-num-new" data-line-num="{{if $line.RightIdx}}{{$line.RightIdx}}{{end}}"><span rel="{{if $line.RightIdx}}diff-{{Sha1 $file.Name}}R{{$line.RightIdx}}{{end}}"></span></td>
|
||||
<td class="lines-type-marker lines-type-marker-new">{{if $line.RightIdx}}<span class="mono" data-type-marker="{{$line.GetLineTypeMarker}}"></span>{{end}}</td>
|
||||
<td class="lines-code lines-code-new halfwidth">{{if and $.root.SignedUserID $.root.PageIsPullFiles (not (eq .GetType 3))}}<a class="ui primary button add-code-comment add-code-comment-right{{if (not $line.CanComment)}} invisible{{end}}" data-path="{{$file.Name}}" data-side="right" data-idx="{{$line.RightIdx}}" data-new-comment-url="{{$.root.Issue.HTMLURL}}/files/reviews/new_comment">{{svg "octicon-plus"}}</a>{{end}}<code class="code-inner">{{if $line.RightIdx}}{{$section.GetComputedInlineDiffFor $line}}{{end}}</code></td>
|
||||
{{end}}
|
||||
</tr>
|
||||
{{if and (eq .GetType 3) $hasmatch}}
|
||||
{{$match := index $section.Lines $line.Match}}
|
||||
{{if or (gt (len $line.Comments) 0) (gt (len $match.Comments) 0)}}
|
||||
<tr class="add-comment" data-line-type="{{DiffLineTypeToStr .GetType}}">
|
||||
<td class="lines-num"></td>
|
||||
<td class="lines-type-marker"></td>
|
||||
<td class="add-comment-left">
|
||||
{{if gt (len $line.Comments) 0}}
|
||||
{{if eq $line.GetCommentSide "previous"}}
|
||||
{{template "repo/diff/conversation" mergeinto $.root "comments" $line.Comments}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
{{if gt (len $match.Comments) 0}}
|
||||
{{if eq $match.GetCommentSide "previous"}}
|
||||
{{template "repo/diff/conversation" mergeinto $.root "comments" $match.Comments}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
</td>
|
||||
<td class="lines-num"></td>
|
||||
<td class="lines-type-marker"></td>
|
||||
<td class="add-comment-right">
|
||||
{{if eq $line.GetCommentSide "proposed"}}
|
||||
{{template "repo/diff/conversation" mergeinto $.root "comments" $line.Comments}}
|
||||
{{end}}
|
||||
{{if gt (len $match.Comments) 0}}
|
||||
{{if eq $match.GetCommentSide "proposed"}}
|
||||
{{template "repo/diff/conversation" mergeinto $.root "comments" $match.Comments}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
</td>
|
||||
</tr>
|
||||
{{end}}
|
||||
{{else if gt (len $line.Comments) 0}}
|
||||
<tr class="add-comment" data-line-type="{{DiffLineTypeToStr .GetType}}">
|
||||
<td class="lines-num"></td>
|
||||
<td class="lines-type-marker"></td>
|
||||
<td class="add-comment-left">
|
||||
{{if gt (len $line.Comments) 0}}
|
||||
{{if eq $line.GetCommentSide "previous"}}
|
||||
{{template "repo/diff/conversation" mergeinto $.root "comments" $line.Comments}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
</td>
|
||||
<td class="lines-num"></td>
|
||||
<td class="lines-type-marker"></td>
|
||||
<td class="add-comment-right">
|
||||
{{if eq $line.GetCommentSide "proposed"}}
|
||||
{{template "repo/diff/conversation" mergeinto $.root "comments" $line.Comments}}
|
||||
{{end}}
|
||||
</td>
|
||||
</tr>
|
||||
{{end}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
|
||||
@@ -198,7 +198,7 @@
|
||||
<input type="hidden" name="action" value="push-mirror-add">
|
||||
<div class="field {{if .Err_PushMirrorAddress}}error{{end}}">
|
||||
<label for="push_mirror_address">{{.i18n.Tr "repo.settings.mirror_settings.push_mirror.remote_url"}}</label>
|
||||
<input id="push_mirror_address" name="push_mirror_address" value="{{.push_mirror_address}}" required>
|
||||
<input id="push_mirror_address" name="push_mirror_address" value="{{.push_mirror_address}}" autocomplete="off" required>
|
||||
<p class="help">{{.i18n.Tr "repo.mirror_address_desc"}}</p>
|
||||
</div>
|
||||
<details class="ui optional field" {{if or .Err_PushMirrorAuth .push_mirror_username}}open{{end}}>
|
||||
|
||||
@@ -101,7 +101,7 @@
|
||||
</li>
|
||||
{{end}}
|
||||
{{end}}
|
||||
{{if and (gt (len $push.Commits) 1) $push.CompareURL}}<li><a href="{{AppSubUrl}}/{{$push.CompareURL}}">{{$.i18n.Tr "action.compare_commits" (len $push.Commits)}} »</a></li>{{end}}
|
||||
{{if and (gt $push.Len 1) $push.CompareURL}}<li><a href="{{AppSubUrl}}/{{$push.CompareURL}}">{{$.i18n.Tr "action.compare_commits" $push.Len}} »</a></li>{{end}}
|
||||
</ul>
|
||||
</div>
|
||||
{{else if eq .GetOpType 6}}
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
</h4>
|
||||
<div class="ui attached segment">
|
||||
{{if or (.SignedUser.IsLocal) (.SignedUser.IsOAuth2)}}
|
||||
<form class="ui form" action="{{AppSubUrl}}/user/settings/account" method="post">
|
||||
<form class="ui form ignore-dirty" action="{{AppSubUrl}}/user/settings/account" method="post">
|
||||
{{.CsrfTokenHtml}}
|
||||
{{if .SignedUser.IsPasswordSet}}
|
||||
<div class="required field {{if .Err_OldPassword}}error{{end}}">
|
||||
|
||||
26 vendor/github.com/caddyserver/certmagic/config.go (generated, vendored)
@@ -508,8 +508,10 @@ func (cfg *Config) obtainCert(ctx context.Context, name string, interactive bool
|
||||
var issuedCert *IssuedCertificate
|
||||
var issuerUsed Issuer
|
||||
for i, issuer := range issuers {
|
||||
log.Debug(fmt.Sprintf("trying issuer %d/%d", i+1, len(cfg.Issuers)),
|
||||
zap.String("issuer", issuer.IssuerKey()))
|
||||
if log != nil {
|
||||
log.Debug(fmt.Sprintf("trying issuer %d/%d", i+1, len(cfg.Issuers)),
|
||||
zap.String("issuer", issuer.IssuerKey()))
|
||||
}
|
||||
|
||||
if prechecker, ok := issuer.(PreChecker); ok {
|
||||
err = prechecker.PreCheck(ctx, []string{name}, interactive)
|
||||
@@ -531,10 +533,12 @@ func (cfg *Config) obtainCert(ctx context.Context, name string, interactive bool
|
||||
if errors.As(err, &problem) {
|
||||
errToLog = problem
|
||||
}
|
||||
log.Error("could not get certificate from issuer",
|
||||
zap.String("identifier", name),
|
||||
zap.String("issuer", issuer.IssuerKey()),
|
||||
zap.Error(errToLog))
|
||||
if log != nil {
|
||||
log.Error("could not get certificate from issuer",
|
||||
zap.String("identifier", name),
|
||||
zap.String("issuer", issuer.IssuerKey()),
|
||||
zap.Error(errToLog))
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
// only the error from the last issuer will be returned, but we logged the others
|
||||
@@ -745,10 +749,12 @@ func (cfg *Config) renewCert(ctx context.Context, name string, force, interactiv
|
||||
if errors.As(err, &problem) {
|
||||
errToLog = problem
|
||||
}
|
||||
log.Error("could not get certificate from issuer",
|
||||
zap.String("identifier", name),
|
||||
zap.String("issuer", issuer.IssuerKey()),
|
||||
zap.Error(errToLog))
|
||||
if log != nil {
|
||||
log.Error("could not get certificate from issuer",
|
||||
zap.String("identifier", name),
|
||||
zap.String("issuer", issuer.IssuerKey()),
|
||||
zap.Error(errToLog))
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
// only the error from the last issuer will be returned, but we logged the others
|
||||
|
||||
32 vendor/github.com/goccy/go-json/.codecov.yml (generated, vendored, new file)
@@ -0,0 +1,32 @@
|
||||
codecov:
|
||||
require_ci_to_pass: yes
|
||||
|
||||
coverage:
|
||||
precision: 2
|
||||
round: down
|
||||
range: "70...100"
|
||||
|
||||
status:
|
||||
project:
|
||||
default:
|
||||
target: 70%
|
||||
threshold: 2%
|
||||
patch: off
|
||||
changes: no
|
||||
|
||||
parsers:
|
||||
gcov:
|
||||
branch_detection:
|
||||
conditional: yes
|
||||
loop: yes
|
||||
method: no
|
||||
macro: no
|
||||
|
||||
comment:
|
||||
layout: "header,diff"
|
||||
behavior: default
|
||||
require_changes: no
|
||||
|
||||
ignore:
|
||||
- internal/encoder/vm_color
|
||||
- internal/encoder/vm_color_indent
|
||||
2 vendor/github.com/goccy/go-json/.gitignore (generated, vendored, new file)
@@ -0,0 +1,2 @@
|
||||
cover.html
|
||||
cover.out
|
||||
75 vendor/github.com/goccy/go-json/.golangci.yml (generated, vendored, new file)
@@ -0,0 +1,75 @@
|
||||
run:
|
||||
skip-files:
|
||||
- encode_optype.go
|
||||
- ".*_test\\.go$"
|
||||
|
||||
linters-settings:
|
||||
govet:
|
||||
enable-all: true
|
||||
disable:
|
||||
- shadow
|
||||
|
||||
linters:
|
||||
enable-all: true
|
||||
disable:
|
||||
- dogsled
|
||||
- dupl
|
||||
- exhaustive
|
||||
- exhaustivestruct
|
||||
- errorlint
|
||||
- forbidigo
|
||||
- funlen
|
||||
- gci
|
||||
- gochecknoglobals
|
||||
- gochecknoinits
|
||||
- gocognit
|
||||
- gocritic
|
||||
- gocyclo
|
||||
- godot
|
||||
- godox
|
||||
- goerr113
|
||||
- gofumpt
|
||||
- gomnd
|
||||
- gosec
|
||||
- ifshort
|
||||
- lll
|
||||
- makezero
|
||||
- nakedret
|
||||
- nestif
|
||||
- nlreturn
|
||||
- paralleltest
|
||||
- testpackage
|
||||
- thelper
|
||||
- wrapcheck
|
||||
- interfacer
|
||||
- lll
|
||||
- nakedret
|
||||
- nestif
|
||||
- nlreturn
|
||||
- testpackage
|
||||
- wsl
|
||||
|
||||
issues:
|
||||
exclude-rules:
|
||||
# not needed
|
||||
- path: /*.go
|
||||
text: "ST1003: should not use underscores in package names"
|
||||
linters:
|
||||
- stylecheck
|
||||
- path: /*.go
|
||||
text: "don't use an underscore in package name"
|
||||
linters:
|
||||
- golint
|
||||
- path: rtype.go
|
||||
linters:
|
||||
- golint
|
||||
- stylecheck
|
||||
- path: error.go
|
||||
linters:
|
||||
- staticcheck
|
||||
|
||||
# Maximum issues count per one linter. Set to 0 to disable. Default is 50.
|
||||
max-issues-per-linter: 0
|
||||
|
||||
# Maximum count of issues with the same text. Set to 0 to disable. Default is 3.
|
||||
max-same-issues: 0
|
||||
236 vendor/github.com/goccy/go-json/CHANGELOG.md (generated, vendored, new file)
@@ -0,0 +1,236 @@
|
||||
# v0.7.4 - 2021/07/06
|
||||
|
||||
* Fix encoding of indirect layout structure ( #264 )
|
||||
|
||||
# v0.7.3 - 2021/06/29
|
||||
|
||||
* Fix encoding of pointer type in empty interface ( #262 )
|
||||
|
||||
# v0.7.2 - 2021/06/26
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Add decoder for func type to fix decoding of nil function value ( #257 )
|
||||
* Fix stream decoding of []byte type ( #258 )
|
||||
|
||||
### Performance
|
||||
|
||||
* Improve decoding performance of map[string]interface{} type ( use `mapassign_faststr` ) ( #256 )
|
||||
* Improve encoding performance of empty interface type ( remove recursive calling of `vm.Run` ) ( #259 )
|
||||
|
||||
### Benchmark
|
||||
|
||||
* Add bytedance/sonic as benchmark target ( #254 )
|
||||
|
||||
# v0.7.1 - 2021/06/18
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix error when unmarshal empty array ( #253 )
|
||||
|
||||
# v0.7.0 - 2021/06/12
|
||||
|
||||
### Support context for MarshalJSON and UnmarshalJSON ( #248 )
|
||||
|
||||
* json.MarshalContext(context.Context, interface{}, ...json.EncodeOption) ([]byte, error)
|
||||
* json.NewEncoder(io.Writer).EncodeContext(context.Context, interface{}, ...json.EncodeOption) error
|
||||
* json.UnmarshalContext(context.Context, []byte, interface{}, ...json.DecodeOption) error
|
||||
* json.NewDecoder(io.Reader).DecodeContext(context.Context, interface{}) error
|
||||
|
||||
```go
|
||||
type MarshalerContext interface {
|
||||
MarshalJSON(context.Context) ([]byte, error)
|
||||
}
|
||||
|
||||
type UnmarshalerContext interface {
|
||||
UnmarshalJSON(context.Context, []byte) error
|
||||
}
|
||||
```
|
||||
|
||||
### Add DecodeFieldPriorityFirstWin option ( #242 )
|
||||
|
||||
In the default behavior, go-json, like encoding/json, will reflect the result of the last evaluation when a field with the same name exists. I've added new options to allow you to change this behavior. `json.DecodeFieldPriorityFirstWin` option reflects the result of the first evaluation if a field with the same name exists. This behavior has a performance advantage as it allows the subsequent strings to be skipped if all fields have been evaluated.
|
||||
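As a hedged illustration of the option described above (assuming it is exposed as `json.DecodeFieldPriorityFirstWin()` together with `json.UnmarshalWithOption`, as the names in this changelog suggest):

```go
package main

import (
	"fmt"

	"github.com/goccy/go-json"
)

func main() {
	// The input deliberately repeats the "id" key.
	data := []byte(`{"id": 1, "id": 2}`)

	var v struct {
		ID int `json:"id"`
	}

	// Default behavior (same as encoding/json): the last occurrence wins.
	_ = json.Unmarshal(data, &v)
	fmt.Println(v.ID) // 2

	// With DecodeFieldPriorityFirstWin the first occurrence wins, and the
	// remaining occurrences can be skipped once every field has a value.
	_ = json.UnmarshalWithOption(data, &v, json.DecodeFieldPriorityFirstWin())
	fmt.Println(v.ID) // 1
}
```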
|
||||
### Fix encoder
|
||||
|
||||
* Fix indent number contains recursive type ( #249 )
|
||||
* Fix encoding of using empty interface as map key ( #244 )
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix decoding fields containing escaped characters ( #237 )
|
||||
|
||||
### Refactor
|
||||
|
||||
* Move some tests to subdirectory ( #243 )
|
||||
* Refactor package layout for decoder ( #238 )
|
||||
|
||||
# v0.6.1 - 2021/06/02
|
||||
|
||||
### Fix encoder
|
||||
|
||||
* Fix value of totalLength for encoding ( #236 )
|
||||
|
||||
# v0.6.0 - 2021/06/01
|
||||
|
||||
### Support Colorize option for encoding (#233)
|
||||
|
||||
```go
|
||||
b, err := json.MarshalWithOption(v, json.Colorize(json.DefaultColorScheme))
|
||||
if err != nil {
|
||||
...
|
||||
}
|
||||
fmt.Println(string(b)) // print colored json
|
||||
```
|
||||
|
||||
### Refactor
|
||||
|
||||
* Fix opcode layout - Adjust memory layout of the opcode to 128 bytes in a 64-bit environment ( #230 )
|
||||
* Refactor encode option ( #231 )
|
||||
* Refactor escape string ( #232 )
|
||||
|
||||
# v0.5.1 - 2021/5/20
|
||||
|
||||
### Optimization
|
||||
|
||||
* Add type addrShift to enable bigger encoder/decoder cache ( #213 )
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Keep original reference of slice element ( #229 )
|
||||
|
||||
### Refactor
|
||||
|
||||
* Refactor Debug mode for encoding ( #226 )
|
||||
* Generate VM sources for encoding ( #227 )
|
||||
* Refactor validator for null/true/false for decoding ( #221 )
|
||||
|
||||
# v0.5.0 - 2021/5/9
|
||||
|
||||
### Supports using omitempty and string tags at the same time ( #216 )
|
||||
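A small sketch of what combining the two tags looks like (standard `encoding/json` struct-tag syntax, shown here with the go-json import as a drop-in replacement):

```go
package main

import (
	"fmt"

	"github.com/goccy/go-json"
)

type item struct {
	// ",string" encodes the int as a JSON string and ",omitempty" drops the
	// field entirely when it has the zero value; both can now be combined.
	Price int `json:"price,omitempty,string"`
}

func main() {
	b, _ := json.Marshal(item{Price: 100})
	fmt.Println(string(b)) // {"price":"100"}

	b, _ = json.Marshal(item{})
	fmt.Println(string(b)) // {}
}
```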
|
||||
### Fix decoder
|
||||
|
||||
* Fix stream decoder for unicode char ( #215 )
|
||||
* Fix decoding of slice element ( #219 )
|
||||
* Fix calculating of buffer length for stream decoder ( #220 )
|
||||
|
||||
### Refactor
|
||||
|
||||
* replace skipWhiteSpace goto by loop ( #212 )
|
||||
|
||||
# v0.4.14 - 2021/5/4
|
||||
|
||||
### Benchmark
|
||||
|
||||
* Add valyala/fastjson to benchmark ( #193 )
|
||||
* Add benchmark task for CI ( #211 )
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix decoding of slice with unmarshal json type ( #198 )
|
||||
* Fix decoding of null value for interface type that does not implement Unmarshaler ( #205 )
|
||||
* Fix decoding of null value to []byte by json.Unmarshal ( #206 )
|
||||
* Fix decoding of backslash char at the end of string ( #207 )
|
||||
* Fix stream decoder for null/true/false value ( #208 )
|
||||
* Fix stream decoder for slow reader ( #211 )
|
||||
|
||||
### Performance
|
||||
|
||||
* If cap of slice is enough, reuse slice data for compatibility with encoding/json ( #200 )
|
||||
|
||||
# v0.4.13 - 2021/4/20
|
||||
|
||||
### Fix json.Compact and json.Indent
|
||||
|
||||
* Support validation the input buffer for json.Compact and json.Indent ( #189 )
|
||||
* Optimize json.Compact and json.Indent ( improve memory footprint ) ( #190 )
|
||||
|
||||
# v0.4.12 - 2021/4/15
|
||||
|
||||
### Fix encoder
|
||||
|
||||
* Fix unnecessary indent for empty slice type ( #181 )
|
||||
* Fix encoding of omitempty feature for the slice or interface type ( #183 )
|
||||
* Fix encoding custom types zero values with omitempty when marshaller exists ( #187 )
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix decoder for invalid top level value ( #184 )
|
||||
* Fix decoder for invalid number value ( #185 )
|
||||
|
||||
# v0.4.11 - 2021/4/3
|
||||
|
||||
* Improve decoder performance for interface type
|
||||
|
||||
# v0.4.10 - 2021/4/2
|
||||
|
||||
### Fix encoder
|
||||
|
||||
* Fixed a bug when encoding slice and map containing recursive structures
|
||||
* Fixed a logic to determine if indirect reference
|
||||
|
||||
# v0.4.9 - 2021/3/29
|
||||
|
||||
### Add debug mode
|
||||
|
||||
If you use `json.MarshalWithOption(v, json.Debug())` and a `panic` occurs inside `go-json`, debug information is printed to the console.
|
||||
### Support a new feature to compatible with encoding/json
|
||||
|
||||
- invalid UTF-8 is coerced to valid UTF-8 ( without performance down )
|
||||
|
||||
### Fix encoder
|
||||
|
||||
- Fixed handling of MarshalJSON of function type
|
||||
|
||||
### Fix decoding of slice of pointer type
|
||||
|
||||
If a pointer value already exists, go-json uses it (this behavior is needed so that pre-filled values take priority). However, since slices are reused internally, there was a bug where the previous pointer value was referenced. Because the pointer value does not need to be looked up in advance for slice elements, each slice element is now explicitly initialized to `nil`.
|
||||
# v0.4.8 - 2021/3/21
|
||||
|
||||
### Reduce memory usage at compile time
|
||||
|
||||
* go-json used to require about 2GB of memory at compile time, but it now compiles with less than about 550MB.
|
||||
### Fix any encoder's bug
|
||||
|
||||
* Add many test cases for encoder
|
||||
* Fix composite type ( slice/array/map )
|
||||
* Fix pointer types
|
||||
* Fix encoding of MarshalJSON or MarshalText or json.Number type
|
||||
|
||||
### Refactor encoder
|
||||
|
||||
* Change package layout for reducing memory usage at compile
|
||||
* Remove anonymous and only operation
|
||||
* Remove root property from encodeCompileContext and opcode
|
||||
|
||||
### Fix CI
|
||||
|
||||
* Add Go 1.16
|
||||
* Remove Go 1.13
|
||||
* Fix `make cover` task
|
||||
|
||||
### Number/Delim/Token/RawMessage use the types defined in encoding/json by type alias
|
||||
|
||||
# v0.4.7 - 2021/02/22
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix decoding of deep recursive structure
|
||||
* Fix decoding of embedded unexported pointer field
|
||||
* Fix invalid test case
|
||||
* Fix decoding of invalid value
|
||||
* Fix decoding of prefilled value
|
||||
* Fix not being able to return UnmarshalTypeError when it should be returned
|
||||
* Fix decoding of null value
|
||||
* Fix decoding of type of null string
|
||||
* Use pre allocated pointer if exists it at decoding
|
||||
|
||||
### Reduce memory usage at compile
|
||||
|
||||
* Integrate int/int8/int16/int32/int64 and uint/uint8/uint16/uint32/uint64 operation to reduce memory usage at compile
|
||||
|
||||
### Remove unnecessary optype
|
||||
21 vendor/github.com/goccy/go-json/LICENSE (generated, vendored, new file)
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2020 Masaaki Goshima
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
39 vendor/github.com/goccy/go-json/Makefile (generated, vendored, new file)
@@ -0,0 +1,39 @@
|
||||
PKG := github.com/goccy/go-json
|
||||
|
||||
BIN_DIR := $(CURDIR)/bin
|
||||
PKGS := $(shell go list ./... | grep -v internal/cmd|grep -v test)
|
||||
COVER_PKGS := $(foreach pkg,$(PKGS),$(subst $(PKG),.,$(pkg)))
|
||||
|
||||
COMMA := ,
|
||||
EMPTY :=
|
||||
SPACE := $(EMPTY) $(EMPTY)
|
||||
COVERPKG_OPT := $(subst $(SPACE),$(COMMA),$(COVER_PKGS))
|
||||
|
||||
$(BIN_DIR):
|
||||
@mkdir -p $(BIN_DIR)
|
||||
|
||||
.PHONY: cover
|
||||
cover:
|
||||
go test -coverpkg=$(COVERPKG_OPT) -coverprofile=cover.out ./...
|
||||
|
||||
.PHONY: cover-html
|
||||
cover-html: cover
|
||||
go tool cover -html=cover.out
|
||||
|
||||
.PHONY: lint
|
||||
lint: golangci-lint
|
||||
golangci-lint run
|
||||
|
||||
golangci-lint: | $(BIN_DIR)
|
||||
@{ \
|
||||
set -e; \
|
||||
GOLANGCI_LINT_TMP_DIR=$$(mktemp -d); \
|
||||
cd $$GOLANGCI_LINT_TMP_DIR; \
|
||||
go mod init tmp; \
|
||||
GOBIN=$(BIN_DIR) go get github.com/golangci/golangci-lint/cmd/golangci-lint@v1.36.0; \
|
||||
rm -rf $$GOLANGCI_LINT_TMP_DIR; \
|
||||
}
|
||||
|
||||
.PHONY: generate
|
||||
generate:
|
||||
go generate ./internal/...
|
||||
529 vendor/github.com/goccy/go-json/README.md (generated, vendored, new file)
@@ -0,0 +1,529 @@
|
||||
# go-json
|
||||
|
||||

|
||||
[](https://pkg.go.dev/github.com/goccy/go-json?tab=doc)
|
||||
[](https://codecov.io/gh/goccy/go-json)
|
||||
|
||||
Fast JSON encoder/decoder compatible with encoding/json for Go
|
||||
|
||||
<img width="400px" src="https://user-images.githubusercontent.com/209884/92572337-42b42900-f2bf-11ea-973a-c74a359553a5.png"></img>
|
||||
|
||||
# Roadmap
|
||||
|
||||
```
|
||||
* version ( expected release date )
|
||||
|
||||
* v0.7.0
|
||||
|
|
||||
| while maintaining compatibility with encoding/json, we will add convenient APIs
|
||||
|
|
||||
v
|
||||
* v1.0.0
|
||||
```
|
||||
|
||||
We are accepting requests for features that will be implemented between v0.7.0 and v.1.0.0.
|
||||
If you have the API you need, please submit your issue [here](https://github.com/goccy/go-json/issues).
|
||||
For example, I'm thinking of supporting `context.Context` of `json.Marshaler` and decoding using JSON Path.
|
||||
|
||||
# Features
|
||||
|
||||
- Drop-in replacement of `encoding/json`
|
||||
- Fast ( See [Benchmark section](https://github.com/goccy/go-json#benchmarks) )
|
||||
- Flexible customization with options
|
||||
- Coloring the encoded string
|
||||
- Can propagate context.Context to `MarshalJSON` or `UnmarshalJSON`
|
||||
|
||||
# Installation
|
||||
|
||||
```
|
||||
go get github.com/goccy/go-json
|
||||
```
|
||||
|
||||
# How to use
|
||||
|
||||
Replace import statement from `encoding/json` to `github.com/goccy/go-json`
|
||||
|
||||
```
|
||||
-import "encoding/json"
|
||||
+import "github.com/goccy/go-json"
|
||||
```
|
||||
|
||||
# JSON library comparison
|
||||
|
||||
| name | encoder | decoder | compatible with `encoding/json` |
|
||||
| :----: | :------: | :-----: | :-----------------------------: |
|
||||
| encoding/json | yes | yes | N/A |
|
||||
| [json-iterator/go](https://github.com/json-iterator/go) | yes | yes | partial |
|
||||
| [easyjson](https://github.com/mailru/easyjson) | yes | yes | no |
|
||||
| [gojay](https://github.com/francoispqt/gojay) | yes | yes | no |
|
||||
| [segmentio/encoding/json](https://github.com/segmentio/encoding/tree/master/json) | yes | yes | partial |
|
||||
| [jettison](https://github.com/wI2L/jettison) | yes | no | no |
|
||||
| [simdjson-go](https://github.com/minio/simdjson-go) | no | yes | no |
|
||||
| goccy/go-json | yes | yes | yes |
|
||||
|
||||
- `json-iterator/go` isn't compatible with `encoding/json` in many ways (e.g. https://github.com/json-iterator/go/issues/229 ), but it hasn't been supported for a long time.
|
||||
- `segmentio/encoding/json` is well supported for encoders, but some are not supported for decoder APIs such as `Token` ( streaming decode )
|
||||
|
||||
## Other libraries
|
||||
|
||||
- [jingo](https://github.com/bet365/jingo)
|
||||
|
||||
I tried the benchmark but it didn't work.
|
||||
Also, it seems to panic when it receives an unexpected value because there is no error handling...
|
||||
|
||||
- [ffjson](https://github.com/pquerna/ffjson)
|
||||
|
||||
Benchmarking gave very slow results.
|
||||
It seems that it is assumed that the user will use the buffer pool properly.
|
||||
Also, development seems to have already stopped
|
||||
|
||||
# Benchmarks
|
||||
|
||||
```
|
||||
$ cd benchmarks
|
||||
$ go test -bench .
|
||||
```
|
||||
|
||||
## Encode
|
||||
|
||||
<img width="700px" src="https://user-images.githubusercontent.com/209884/107126758-0845cb00-68f5-11eb-8db7-086fcf9bcfaa.png"></img>
|
||||
<img width="700px" src="https://user-images.githubusercontent.com/209884/107126757-07ad3480-68f5-11eb-87aa-858cc5eacfcb.png"></img>
|
||||
|
||||
## Decode
|
||||
|
||||
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979944-bd1d6d80-7002-11eb-944b-9d17b6674e3f.png">
|
||||
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979931-b989e680-7002-11eb-87a0-66fc22d90dd4.png">
|
||||
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979940-bc84d700-7002-11eb-9647-869bbc25c9d9.png">
|
||||
|
||||
|
||||
# Fuzzing
|
||||
|
||||
[go-json-fuzz](https://github.com/goccy/go-json-fuzz) is the repository for fuzzing tests.
|
||||
If you run the test in this repository and find a bug, please commit to corpus to go-json-fuzz and report the issue to [go-json](https://github.com/goccy/go-json/issues).
|
||||
|
||||
# How it works
|
||||
|
||||
`go-json` is very fast in both encoding and decoding compared to other libraries.
|
||||
It would be easier to gain performance by generating code automatically or by exposing a dedicated interface, but `go-json` deliberately sticks to compatibility with `encoding/json` and keeps the same simple interface. Despite this, it is developed with the aim of being the fastest library.
|
||||
Here, we explain the various speed-up techniques implemented by `go-json`.
|
||||
|
||||
## Basic technique
|
||||
|
||||
The techniques listed here are the ones used by most of the libraries listed above.
|
||||
|
||||
### Buffer reuse
|
||||
|
||||
Since the only value required for the result of `json.Marshal(interface{}) ([]byte, error)` is `[]byte`, the only value that must be allocated during encoding is the return value `[]byte` .
|
||||
|
||||
Also, as the number of allocations increases, the performance will be affected, so the number of allocations should be kept as low as possible when creating `[]byte`.
|
||||
|
||||
Therefore, there is a technique to reduce the number of times a new buffer must be allocated by reusing the buffer used for the previous encoding by using `sync.Pool`.
|
||||
|
||||
Finally, by allocating a buffer exactly as long as the result and copying the contents into it, in theory only that one allocation is needed.
|
||||
```go
|
||||
type buffer struct {
|
||||
data []byte
|
||||
}
|
||||
|
||||
var bufPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
return &buffer{data: make([]byte, 0, 1024)}
|
||||
},
|
||||
}
|
||||
|
||||
buf := bufPool.Get().(*buffer)
|
||||
data := encode(buf.data) // reuse buf.data
|
||||
|
||||
newBuf := make([]byte, len(data))
|
||||
copy(newBuf, data)
|
||||
buf.data = data
|
||||
bufPool.Put(buf)
|
||||
```
|
||||
|
||||
### Elimination of reflection
|
||||
|
||||
As you know, the reflection operation is very slow.
|
||||
|
||||
Therefore, using the fact that the address position where the type information is stored is fixed for each binary ( we call this `typeptr` ),
|
||||
we can use the address in the type information to call a pre-built optimized process.
|
||||
|
||||
For example, you can get the address to the type information from `interface{}` as follows and you can use that information to call a process that does not have reflection.
|
||||
|
||||
To process without reflection, pass a pointer (`unsafe.Pointer`) to where the value is stored.
|
||||
```go
|
||||
|
||||
type emptyInterface struct {
|
||||
typ unsafe.Pointer
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
var typeToEncoder = map[uintptr]func(unsafe.Pointer)([]byte, error){}
|
||||
|
||||
func Marshal(v interface{}) ([]byte, error) {
|
||||
iface := (*emptyInterface)(unsafe.Pointer(&v))
typeptr := uintptr(iface.typ)
|
||||
if enc, exists := typeToEncoder[typeptr]; exists {
|
||||
return enc(iface.ptr)
|
||||
}
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
※ In reality, `typeToEncoder` can be referenced by multiple goroutines, so exclusive control is required.
|
||||
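As a generic sketch of that exclusive control (not go-json's actual cache implementation), the `typeptr`-keyed map could be guarded with a `sync.RWMutex`:

```go
package enccache

import (
	"sync"
	"unsafe"
)

type encoderFunc func(unsafe.Pointer) ([]byte, error)

var (
	mu            sync.RWMutex
	typeToEncoder = map[uintptr]encoderFunc{}
)

// lookupEncoder returns the cached encoder for a typeptr, if any.
func lookupEncoder(typeptr uintptr) (encoderFunc, bool) {
	mu.RLock()
	enc, ok := typeToEncoder[typeptr]
	mu.RUnlock()
	return enc, ok
}

// storeEncoder registers a newly built encoder for a typeptr.
func storeEncoder(typeptr uintptr, enc encoderFunc) {
	mu.Lock()
	typeToEncoder[typeptr] = enc
	mu.Unlock()
}
```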
|
||||
## Unique speed-up technique
|
||||
|
||||
## Encoder
|
||||
|
||||
### Do not escape arguments of `Marshal`
|
||||
|
||||
`json.Marshal` and `json.Unmarshal` receive an `interface{}` value and determine its type dynamically in order to process it.
In the normal case you would use the `reflect` library for that dynamic type determination, but since `reflect.Type` is defined as an `interface`, calling a method on a `reflect.Type` causes its argument to escape.

Therefore, the arguments of `Marshal` and `Unmarshal` always escape to the heap.
However, `go-json` can use the features of `reflect.Type` while avoiding that escape.
|
||||
`reflect.Type` is defined as `interface`, but in reality `reflect.Type` is implemented only by the structure `rtype` defined in the `reflect` package.
|
||||
For this reason, to date `reflect.Type` is the same as `*reflect.rtype`.
|
||||
|
||||
Therefore, by directly handling `*reflect.rtype`, which is an implementation of `reflect.Type`, it is possible to avoid escaping because it changes from `interface` to using `struct`.
|
||||
|
||||
The technique for working with `*reflect.rtype` directly from `go-json` is implemented at [rtype.go](https://github.com/goccy/go-json/blob/master/internal/runtime/rtype.go)
|
||||
|
||||
Also, the same technique is cut out as a library ( https://github.com/goccy/go-reflect )
|
||||
|
||||
Initially this feature was the default behavior of `go-json`.
|
||||
But after careful testing it turned out that when a large value is passed to `json.Marshal()` and the argument cannot be kept on the stack, it is not properly escaped to the heap (a bug in the Go compiler).

Therefore, this feature is provided as an **option** until that issue is resolved.

To use it, add the `NoEscape` suffix, as in `MarshalNoEscape()`.
|
||||
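A minimal usage sketch, assuming the API is the `json.MarshalNoEscape` function named above:

```go
package main

import (
	"fmt"

	"github.com/goccy/go-json"
)

type point struct {
	X, Y int
}

func main() {
	p := point{X: 1, Y: 2}

	// MarshalNoEscape avoids forcing its argument to escape to the heap,
	// which can save an allocation when p would otherwise stay on the stack.
	b, err := json.MarshalNoEscape(&p)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"X":1,"Y":2}
}
```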
### Encoding using opcode sequence
|
||||
|
||||
I explained that you can use `typeptr` to call a pre-built process from type information.
|
||||
|
||||
In other libraries this dedicated processing is dispatched through function calls (for example anonymous functions), but function calls are inherently slow and should be avoided as much as possible.
|
||||
Therefore, `go-json` adopted an instruction-based execution model, of the kind also used to implement virtual machines for programming languages.
|
||||
If it is the first type to encode, create the opcode ( instruction ) sequence required for encoding.
|
||||
From the second time onward, use `typeptr` to get the cached pre-built opcode sequence and encode it based on it. An example of the opcode sequence is shown below.
|
||||
|
||||
```go
|
||||
json.Marshal(struct{
|
||||
X int `json:"x"`
|
||||
Y string `json:"y"`
|
||||
}{X: 1, Y: "hello"})
|
||||
```
|
||||
|
||||
When encoding a structure like the one above, create a sequence of opcodes like this:
|
||||
|
||||
```
|
||||
- opStructFieldHead ( `{` )
|
||||
- opStructFieldInt ( `"x": 1,` )
|
||||
- opStructFieldString ( `"y": "hello"` )
|
||||
- opStructEnd ( `}` )
|
||||
- opEnd
|
||||
```
|
||||
|
||||
※ When processing each operation, write the letters on the right.
|
||||
|
||||
In addition, each opcode is managed by the following structure (
|
||||
Pseudo code ).
|
||||
|
||||
```go
|
||||
type opType int
|
||||
const (
|
||||
opStructFieldHead opType = iota
|
||||
opStructFieldInt
|
||||
opStructFieldStirng
|
||||
opStructEnd
|
||||
opEnd
|
||||
)
|
||||
type opcode struct {
|
||||
op opType
|
||||
key []byte
|
||||
next *opcode
|
||||
}
|
||||
```
|
||||
|
||||

The process of encoding with the opcode sequence is implemented roughly as follows (`appendInt` and `appendString` are helpers that append the encoded value to `b`):

```go
func encode(code *opcode, b []byte, p unsafe.Pointer) ([]byte, error) {
	for {
		switch code.op {
		case opStructFieldHead:
			b = append(b, '{')
			code = code.next
		case opStructFieldInt:
			b = append(b, code.key...)
			b = appendInt(b, *(*int)(unsafe.Pointer(uintptr(p)+code.offset)))
			code = code.next
		case opStructFieldString:
			b = append(b, code.key...)
			b = appendString(b, *(*string)(unsafe.Pointer(uintptr(p)+code.offset)))
			code = code.next
		case opStructEnd:
			b = append(b, '}')
			code = code.next
		case opEnd:
			goto END
		}
	}
END:
	return b, nil
}
```

In this way, a single large `switch-case` walks the linked list of opcodes, which avoids unnecessary function calls while encoding.

### Opcode sequence optimization

One advantage of encoding with an opcode sequence is that it is easy to optimize.
The opcode sequence shown above is actually converted into the following optimized operations before use.

```
- opStructFieldHeadInt ( `{"x": 1,` )
- opStructEndString ( `"y": "hello"}` )
- opEnd
```

The sequence has been reduced from 5 opcodes to 3!
Reducing the number of opcodes means reducing the number of `switch-case` branches; in other words, the closer the number of operations gets to 1, the faster the processing becomes.

`go-json` both reduces the number of opcodes as shown above and prepares opcodes for optimized paths, as roughly illustrated below.
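
Continuing the pseudo code from the earlier `encode` loop (these merged opcodes are illustrative, not go-json's actual opcode set), each optimized operation simply does in one `switch` arm what previously took two or three:

```go
case opStructFieldHeadInt:
	// `{` + key + int value emitted by a single operation
	b = append(b, '{')
	b = append(b, code.key...) // `"x":`
	b = appendInt(b, *(*int)(unsafe.Pointer(uintptr(p)+code.offset)))
	code = code.next
case opStructEndString:
	// key + string value + closing `}` emitted by a single operation
	b = append(b, code.key...) // `"y":`
	b = appendString(b, *(*string)(unsafe.Pointer(uintptr(p)+code.offset)))
	b = append(b, '}')
	code = code.next
```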

### Change recursive call from CALL to JMP

Recursive processing is required during encoding if a type is defined recursively, as follows:

```go
type T struct {
	X int
	U *U
}

type U struct {
	T *T
}

b, err := json.Marshal(&T{
	X: 1,
	U: &U{
		T: &T{
			X: 2,
		},
	},
})
fmt.Println(string(b)) // {"X":1,"U":{"T":{"X":2,"U":null}}}
```

In `go-json`, recursion is handled by the dedicated operation type `opStructFieldRecursive`.

In this operation, after the opcode sequence for the recursive part is obtained, the encoder does **not** call a function recursively; instead it saves the values it needs itself and simply moves on to the next operation, as sketched below.

Implementing recursion with a `JMP` operation while avoiding a `CALL` is a well-known technique for building fast virtual machines.

For more details, please refer to [the article](https://engineering.mercari.com/blog/entry/1599563768-081104c850) (Japanese only).
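
The following is a minimal pseudo-code sketch of the idea, layered on the `opcode` structure from earlier; the `jmp` and `end` fields and the `ptrOfField` helper are invented here for illustration and are not go-json's real layout (the real implementation also saves and restores the pointer to the nested value).

```go
case opStructFieldRecursive:
	// A CALL would be: b, _ = encode(code.jmp, b, ptrOfField(p, code))
	// With a JMP we instead link the end of the nested opcode sequence to
	// the opcode after this one, then keep running the same loop.
	recursive := code.jmp          // pre-built opcodes for the nested type
	recursive.end.next = code.next // "return address": continue after this op
	code = recursive
```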

### Dispatch by typeptr from map to slice

When looking up the data cached per type via `typeptr`, a map is normally used.
Because the map is accessed from multiple goroutines, a naive implementation uses `sync.Map`.

However, `sync.Map` is relatively slow, so it is better to synchronize with the `atomic` package instead, as `segmentio/encoding/json` does ( https://github.com/segmentio/encoding/blob/master/json/codec.go#L41-L55 ).

This approach makes stores slower in exchange for faster loads, which suits the nature of a JSON library: the same type is encoded far more often than a new type is registered. A minimal sketch of the pattern follows.
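
The sketch below shows this copy-on-write map guarded only by `atomic` loads and stores; the same pattern appears in the vendored `internal/decoder/compile.go` later in this diff, and the names here are simplified for illustration.

```go
package cache

import (
	"sync/atomic"
	"unsafe"
)

type encoderFunc func(unsafe.Pointer) ([]byte, error)

// The whole map is swapped atomically: readers only perform an atomic load,
// writers copy the map, add the new entry, and store the new pointer.
var cachedEncoderMap unsafe.Pointer // *map[uintptr]encoderFunc

func loadEncoderMap() map[uintptr]encoderFunc {
	p := atomic.LoadPointer(&cachedEncoderMap)
	return *(*map[uintptr]encoderFunc)(unsafe.Pointer(&p))
}

func storeEncoder(typeptr uintptr, enc encoderFunc, m map[uintptr]encoderFunc) {
	newMap := make(map[uintptr]encoderFunc, len(m)+1)
	newMap[typeptr] = enc
	for k, v := range m {
		newMap[k] = v // copying makes the store slow, but stores are rare
	}
	atomic.StorePointer(&cachedEncoderMap, *(*unsafe.Pointer)(unsafe.Pointer(&newMap)))
}
```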

However, profiling showed that `runtime.mapaccess2` still accounts for a significant share of the execution time, so I looked into changing the lookup from a map to a slice.

There is an API named `typelinks` in the `runtime` package, used internally by the `reflect` package, that returns all the type information defined in the binary at runtime.

Because the total number of types can be obtained this way, a slice can be allocated up front with one entry per type, and the value of `typeptr` can then be used as an index without any risk of out-of-range access.

However, if there are too many types this slice would use a lot of memory, so by default this optimization is only used when the slice fits within **2 MiB**.

If this approach is not available, `go-json` falls back to the `atomic`-based approach described above.

If you want to know more, please refer to the implementation [here](https://github.com/goccy/go-json/blob/master/internal/runtime/type.go#L36-L100); a minimal sketch of the idea follows.
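
The sketch below is only an illustration of the slice-based dispatch (the names are made up, and the variables would be filled in at init time from the `typelinks` data); the point is that the lookup becomes a subtraction, a shift, and a slice load, with no map access and no lock.

```go
package dispatch

import "unsafe"

type encoderFunc func(unsafe.Pointer) ([]byte, error)

// Placeholders: in practice these are derived from the runtime's typelinks
// data (smallest type address, address alignment, total number of types).
var (
	baseTypeAddr   uintptr
	addrShift      uintptr
	cachedEncoders []encoderFunc // one slot per type in the binary
)

// encoderForType converts a typeptr into a slice index.
func encoderForType(typeptr uintptr) encoderFunc {
	return cachedEncoders[(typeptr-baseTypeAddr)>>addrShift]
}
```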

## Decoder

### Dispatch by typeptr from map to slice

Like the encoder, the decoder also uses `typeptr` to dispatch to the dedicated routine for each type.

### Faster termination character inspection using NUL character

In order to decode, the input buffer has to be traversed one character at a time.
If you check whether the end of the buffer has been reached on every step, it becomes very slow.

`buf` : a `[]byte` variable that holds the input passed to the decoder
`cursor` : an `int64` variable that holds the current read position

```go
buflen := int64(len(buf))
for ; cursor < buflen; cursor++ { // comparing cursor and buflen on every iteration is slow
	switch buf[cursor] {
	case ' ', '\n', '\r', '\t':
	}
}
```

Therefore, by appending a `NUL` (`\000`) character to the end of the read buffer as shown below, the end of the input can be detected in the same `switch` as every other character.

```go
for {
	switch buf[cursor] {
	case ' ', '\n', '\r', '\t':
	case '\000':
		return nil
	}
	cursor++
}
```

### Use Boundary Check Elimination

With the `NUL`-character optimization the loop no longer compares `cursor` against the buffer length, so the Go compiler inserts a bounds check on every `buf[cursor]` access even though the access can never go out of range.

Therefore, `go-json` eliminates the bounds check on hot paths by fetching characters through pointer arithmetic, as in the following code (`sliceHeader` mirrors the runtime representation of a slice):

```go
func char(ptr unsafe.Pointer, offset int64) byte {
	return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset)))
}

p := (*sliceHeader)(unsafe.Pointer(&buf)).data
for {
	switch char(p, cursor) {
	case ' ', '\n', '\r', '\t':
	case '\000':
		return nil
	}
	cursor++
}
```

### Checking the existence of struct fields using bitmaps

Profiling showed that, when decoding structs, the field lookup was taking a long time.

For example, consider decoding a string like `{"a":1,"b":2,"c":3}` into the following structure:

```go
type T struct {
	A int `json:"a"`
	B int `json:"b"`
	C int `json:"c"`
}
```

During decoding, it turned out that looking up the decoder that corresponds to a field name, as shown below, takes a significant amount of time.

```go
fieldName := decodeKey(buf, cursor) // "a" or "b" or "c"
decoder, exists := fieldToDecoderMap[fieldName] // so slow
if exists {
	decoder(buf, cursor)
} else {
	skipValue(buf, cursor)
}
```

To improve this, `json-iterator/go` branches with a `switch-case` when the struct has 10 or fewer fields (a `switch-case` is faster than a map lookup); however, because the branch keys are values hashed with the FNV algorithm, there is a risk of hash collisions. `gojay` makes this part fast by having the library user write the `switch-case` themselves.

`go-json` designs and implements a new approach that differs from both. I call it **bitmap field optimization**.

The range of values a single character can take fits in `[256]byte`. Also, if the struct has 8 or fewer fields, an `int8` can represent the state of every field.
In other words, the following structure can be built.

- Base ( 8bit ): `00000000`
- Key "a": `00000001` ( assign key "a" to the first bit )
- Key "b": `00000010` ( assign key "b" to the second bit )
- Key "c": `00000100` ( assign key "c" to the third bit )

The bitmap structure is the following:

```
        | key index(0) |
------------------------
 0      | 00000000     |
 1      | 00000000     |
 ~~     |              |
 97 (a) | 00000001     |
 98 (b) | 00000010     |
 99 (c) | 00000100     |
 ~~     |              |
 255    | 00000000     |
```

You can think of this as a bitmap with a height of `256` and a width equal to the maximum field-name length.
In other words, it can be represented by the following type:

```go
[maxFieldKeyLength][256]int8
```

When decoding a field name, each character is checked against the pre-built bitmap as follows:

```go
var curBit int8 = math.MaxInt8 // 01111111

c := char(buf, cursor)
bit := bitmap[keyIdx][c]
curBit &= bit
if curBit == 0 {
	// no field can match this key
}
```

If `curBit` is still non-zero when the end of the key string is reached, the key may match one of the fields.
However, if the decoded key is shorter than a field name, you can get a false hit:

- input: `{"a":1}`
```go
type T struct {
	X int `json:"abc"`
}
```
※ Since `a` is shorter than `abc`, the key can be decoded to its end without `curBit` ever becoming 0.

Rest assured: in this case the false hit is detected simply by comparing the length of the decoded key `a` with the length of the field name `abc`.

Finally, compute the position of the bit that is set to `1`, fetch the corresponding field decoder, and you're done.

With this technique, field lookups require only bitwise operations and slice accesses.

`go-json` uses the same technique for structs with 9 to 16 fields; in that case the bitmap is built as a `[maxKeyLen][256]int16`.

Currently, to limit memory usage, this optimization is skipped not only when there are too many fields but also when the longest field name is long (specifically, 64 bytes or more). A self-contained sketch of the whole lookup follows.
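
The program below is a minimal, self-contained sketch of the idea, not go-json's actual implementation (the names `buildBitmap` and `lookup` are made up): it builds the per-position bitmap for a fixed key set and then matches an incoming key using only AND operations and slice accesses.

```go
package main

import (
	"fmt"
	"math/bits"
)

// buildBitmap assigns bit i to keys[i] and records, for every character
// position, which keys have that character at that position.
func buildBitmap(keys []string) [][256]uint8 {
	maxLen := 0
	for _, k := range keys {
		if len(k) > maxLen {
			maxLen = len(k)
		}
	}
	bitmap := make([][256]uint8, maxLen)
	for i, k := range keys {
		for pos := 0; pos < len(k); pos++ {
			bitmap[pos][k[pos]] |= 1 << uint(i)
		}
	}
	return bitmap
}

// lookup returns the index of the matching key, or -1 if there is none.
func lookup(bitmap [][256]uint8, keys []string, decoded string) int {
	if len(decoded) == 0 || len(decoded) > len(bitmap) {
		return -1
	}
	var curBit uint8 = 0xFF
	for pos := 0; pos < len(decoded); pos++ {
		curBit &= bitmap[pos][decoded[pos]]
		if curBit == 0 {
			return -1 // no field has this prefix
		}
	}
	idx := bits.TrailingZeros8(curBit)  // position of the lowest set bit
	if len(keys[idx]) != len(decoded) { // guard against short-key false hits
		return -1
	}
	return idx
}

func main() {
	keys := []string{"a", "b", "c"}
	bitmap := buildBitmap(keys)
	fmt.Println(lookup(bitmap, keys, "b"))   // 1
	fmt.Println(lookup(bitmap, keys, "abc")) // -1
}
```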

### Others

I have implemented many other optimizations as well, and I will find time to write about them. If you have any questions about what is written here or about other optimizations, please visit the `#go-json` channel on `gophers.slack.com`.

## Reference

The story behind go-json is covered in the following articles (Japanese only).

- https://speakerdeck.com/goccy/zui-su-falsejsonraiburariwoqiu-mete
- https://engineering.mercari.com/blog/entry/1599563768-081104c850/

# Looking for Sponsors

I'm looking for sponsors for this library. It is being developed as a personal project in my spare time. If you want a quick response or problem resolution when using this library in your project, please register as a [sponsor](https://github.com/sponsors/goccy); I will cooperate as much as possible. Of course, this library is developed under the MIT license, so you can keep using it for free.

# License

MIT

68
vendor/github.com/goccy/go-json/color.go
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
package json
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
)
|
||||
|
||||
type (
|
||||
ColorFormat = encoder.ColorFormat
|
||||
ColorScheme = encoder.ColorScheme
|
||||
)
|
||||
|
||||
const escape = "\x1b"
|
||||
|
||||
type colorAttr int
|
||||
|
||||
//nolint:deadcode,varcheck
|
||||
const (
|
||||
fgBlackColor colorAttr = iota + 30
|
||||
fgRedColor
|
||||
fgGreenColor
|
||||
fgYellowColor
|
||||
fgBlueColor
|
||||
fgMagentaColor
|
||||
fgCyanColor
|
||||
fgWhiteColor
|
||||
)
|
||||
|
||||
//nolint:deadcode,varcheck
|
||||
const (
|
||||
fgHiBlackColor colorAttr = iota + 90
|
||||
fgHiRedColor
|
||||
fgHiGreenColor
|
||||
fgHiYellowColor
|
||||
fgHiBlueColor
|
||||
fgHiMagentaColor
|
||||
fgHiCyanColor
|
||||
fgHiWhiteColor
|
||||
)
|
||||
|
||||
func createColorFormat(attr colorAttr) ColorFormat {
|
||||
return ColorFormat{
|
||||
Header: wrapColor(attr),
|
||||
Footer: resetColor(),
|
||||
}
|
||||
}
|
||||
|
||||
func wrapColor(attr colorAttr) string {
|
||||
return fmt.Sprintf("%s[%dm", escape, attr)
|
||||
}
|
||||
|
||||
func resetColor() string {
|
||||
return wrapColor(colorAttr(0))
|
||||
}
|
||||
|
||||
var (
|
||||
DefaultColorScheme = &ColorScheme{
|
||||
Int: createColorFormat(fgHiMagentaColor),
|
||||
Uint: createColorFormat(fgHiMagentaColor),
|
||||
Float: createColorFormat(fgHiMagentaColor),
|
||||
Bool: createColorFormat(fgHiYellowColor),
|
||||
String: createColorFormat(fgHiGreenColor),
|
||||
Binary: createColorFormat(fgHiRedColor),
|
||||
ObjectKey: createColorFormat(fgHiCyanColor),
|
||||
Null: createColorFormat(fgBlueColor),
|
||||
}
|
||||
)
|
||||
232
vendor/github.com/goccy/go-json/decode.go
generated
vendored
Normal file
@@ -0,0 +1,232 @@
|
||||
package json
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/decoder"
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type Decoder struct {
|
||||
s *decoder.Stream
|
||||
}
|
||||
|
||||
const (
|
||||
nul = '\000'
|
||||
)
|
||||
|
||||
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
func unmarshal(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
|
||||
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||
return err
|
||||
}
|
||||
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ctx := decoder.TakeRuntimeContext()
|
||||
ctx.Buf = src
|
||||
ctx.Option.Flags = 0
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
cursor, err := dec.Decode(ctx, 0, 0, header.ptr)
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return validateEndBuf(src, cursor)
|
||||
}
|
||||
|
||||
func unmarshalContext(ctx context.Context, data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
|
||||
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||
return err
|
||||
}
|
||||
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rctx := decoder.TakeRuntimeContext()
|
||||
rctx.Buf = src
|
||||
rctx.Option.Flags = 0
|
||||
rctx.Option.Flags |= decoder.ContextOption
|
||||
rctx.Option.Context = ctx
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(rctx.Option)
|
||||
}
|
||||
cursor, err := dec.Decode(rctx, 0, 0, header.ptr)
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(rctx)
|
||||
return err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(rctx)
|
||||
return validateEndBuf(src, cursor)
|
||||
}
|
||||
|
||||
func unmarshalNoEscape(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
|
||||
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||
return err
|
||||
}
|
||||
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ctx := decoder.TakeRuntimeContext()
|
||||
ctx.Buf = src
|
||||
ctx.Option.Flags = 0
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
cursor, err := dec.Decode(ctx, 0, 0, noescape(header.ptr))
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return validateEndBuf(src, cursor)
|
||||
}
|
||||
|
||||
func validateEndBuf(src []byte, cursor int64) error {
|
||||
for {
|
||||
switch src[cursor] {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case nul:
|
||||
return nil
|
||||
}
|
||||
return errors.ErrSyntax(
|
||||
fmt.Sprintf("invalid character '%c' after top-level value", src[cursor]),
|
||||
cursor+1,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
//nolint:staticcheck
|
||||
//go:nosplit
|
||||
func noescape(p unsafe.Pointer) unsafe.Pointer {
|
||||
x := uintptr(p)
|
||||
return unsafe.Pointer(x ^ 0)
|
||||
}
|
||||
|
||||
func validateType(typ *runtime.Type, p uintptr) error {
|
||||
if typ == nil || typ.Kind() != reflect.Ptr || p == 0 {
|
||||
return &InvalidUnmarshalError{Type: runtime.RType2Type(typ)}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// The decoder introduces its own buffering and may
|
||||
// read data from r beyond the JSON values requested.
|
||||
func NewDecoder(r io.Reader) *Decoder {
|
||||
s := decoder.NewStream(r)
|
||||
return &Decoder{
|
||||
s: s,
|
||||
}
|
||||
}
|
||||
|
||||
// Buffered returns a reader of the data remaining in the Decoder's
|
||||
// buffer. The reader is valid until the next call to Decode.
|
||||
func (d *Decoder) Buffered() io.Reader {
|
||||
return d.s.Buffered()
|
||||
}
|
||||
|
||||
// Decode reads the next JSON-encoded value from its
|
||||
// input and stores it in the value pointed to by v.
|
||||
//
|
||||
// See the documentation for Unmarshal for details about
|
||||
// the conversion of JSON into a Go value.
|
||||
func (d *Decoder) Decode(v interface{}) error {
|
||||
return d.DecodeWithOption(v)
|
||||
}
|
||||
|
||||
// DecodeContext reads the next JSON-encoded value from its
|
||||
// input and stores it in the value pointed to by v with context.Context.
|
||||
func (d *Decoder) DecodeContext(ctx context.Context, v interface{}) error {
|
||||
d.s.Option.Flags |= decoder.ContextOption
|
||||
d.s.Option.Context = ctx
|
||||
return d.DecodeWithOption(v)
|
||||
}
|
||||
|
||||
func (d *Decoder) DecodeWithOption(v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
ptr := uintptr(header.ptr)
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
// noescape trick for header.typ ( reflect.*rtype )
|
||||
copiedType := *(**runtime.Type)(unsafe.Pointer(&typeptr))
|
||||
|
||||
if err := validateType(copiedType, ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
dec, err := decoder.CompileToGetDecoder(typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.s.PrepareForDecode(); err != nil {
|
||||
return err
|
||||
}
|
||||
s := d.s
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(s.Option)
|
||||
}
|
||||
if err := dec.DecodeStream(s, 0, header.ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
s.Reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Decoder) More() bool {
|
||||
return d.s.More()
|
||||
}
|
||||
|
||||
func (d *Decoder) Token() (Token, error) {
|
||||
return d.s.Token()
|
||||
}
|
||||
|
||||
// DisallowUnknownFields causes the Decoder to return an error when the destination
|
||||
// is a struct and the input contains object keys which do not match any
|
||||
// non-ignored, exported fields in the destination.
|
||||
func (d *Decoder) DisallowUnknownFields() {
|
||||
d.s.DisallowUnknownFields = true
|
||||
}
|
||||
|
||||
func (d *Decoder) InputOffset() int64 {
|
||||
return d.s.TotalOffset()
|
||||
}
|
||||
|
||||
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
||||
// Number instead of as a float64.
|
||||
func (d *Decoder) UseNumber() {
|
||||
d.s.UseNumber = true
|
||||
}
|
||||
13
vendor/github.com/goccy/go-json/docker-compose.yml
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
version: '2'
|
||||
services:
|
||||
go-json:
|
||||
image: golang:1.16
|
||||
volumes:
|
||||
- '.:/go/src/go-json'
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 620M
|
||||
working_dir: /go/src/go-json
|
||||
command: |
|
||||
sh -c "go test -c . && ls go-json.test"
|
||||
323
vendor/github.com/goccy/go-json/encode.go
generated
vendored
Normal file
@@ -0,0 +1,323 @@
|
||||
package json
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
"github.com/goccy/go-json/internal/encoder/vm"
|
||||
"github.com/goccy/go-json/internal/encoder/vm_color"
|
||||
"github.com/goccy/go-json/internal/encoder/vm_color_indent"
|
||||
"github.com/goccy/go-json/internal/encoder/vm_indent"
|
||||
)
|
||||
|
||||
// An Encoder writes JSON values to an output stream.
|
||||
type Encoder struct {
|
||||
w io.Writer
|
||||
enabledIndent bool
|
||||
enabledHTMLEscape bool
|
||||
prefix string
|
||||
indentStr string
|
||||
}
|
||||
|
||||
// NewEncoder returns a new encoder that writes to w.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{w: w, enabledHTMLEscape: true}
|
||||
}
|
||||
|
||||
// Encode writes the JSON encoding of v to the stream, followed by a newline character.
|
||||
//
|
||||
// See the documentation for Marshal for details about the conversion of Go values to JSON.
|
||||
func (e *Encoder) Encode(v interface{}) error {
|
||||
return e.EncodeWithOption(v)
|
||||
}
|
||||
|
||||
// EncodeWithOption call Encode with EncodeOption.
|
||||
func (e *Encoder) EncodeWithOption(v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
ctx.Option.Flag = 0
|
||||
|
||||
err := e.encodeWithOption(ctx, v, optFuncs...)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// EncodeContext call Encode with context.Context and EncodeOption.
|
||||
func (e *Encoder) EncodeContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||
rctx := encoder.TakeRuntimeContext()
|
||||
rctx.Option.Flag = 0
|
||||
rctx.Option.Flag |= encoder.ContextOption
|
||||
rctx.Option.Context = ctx
|
||||
|
||||
err := e.encodeWithOption(rctx, v, optFuncs...)
|
||||
|
||||
encoder.ReleaseRuntimeContext(rctx)
|
||||
return err
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeWithOption(ctx *encoder.RuntimeContext, v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||
if e.enabledHTMLEscape {
|
||||
ctx.Option.Flag |= encoder.HTMLEscapeOption
|
||||
}
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
var (
|
||||
buf []byte
|
||||
err error
|
||||
)
|
||||
if e.enabledIndent {
|
||||
buf, err = encodeIndent(ctx, v, e.prefix, e.indentStr)
|
||||
} else {
|
||||
buf, err = encode(ctx, v)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if e.enabledIndent {
|
||||
buf = buf[:len(buf)-2]
|
||||
} else {
|
||||
buf = buf[:len(buf)-1]
|
||||
}
|
||||
buf = append(buf, '\n')
|
||||
if _, err := e.w.Write(buf); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetEscapeHTML specifies whether problematic HTML characters should be escaped inside JSON quoted strings.
|
||||
// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e to avoid certain safety problems that can arise when embedding JSON in HTML.
|
||||
//
|
||||
// In non-HTML settings where the escaping interferes with the readability of the output, SetEscapeHTML(false) disables this behavior.
|
||||
func (e *Encoder) SetEscapeHTML(on bool) {
|
||||
e.enabledHTMLEscape = on
|
||||
}
|
||||
|
||||
// SetIndent instructs the encoder to format each subsequent encoded value as if indented by the package-level function Indent(dst, src, prefix, indent).
|
||||
// Calling SetIndent("", "") disables indentation.
|
||||
func (e *Encoder) SetIndent(prefix, indent string) {
|
||||
if prefix == "" && indent == "" {
|
||||
e.enabledIndent = false
|
||||
return
|
||||
}
|
||||
e.prefix = prefix
|
||||
e.indentStr = indent
|
||||
e.enabledIndent = true
|
||||
}
|
||||
|
||||
func marshalContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||
rctx := encoder.TakeRuntimeContext()
|
||||
rctx.Option.Flag = 0
|
||||
rctx.Option.Flag = encoder.HTMLEscapeOption | encoder.ContextOption
|
||||
rctx.Option.Context = ctx
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(rctx.Option)
|
||||
}
|
||||
|
||||
buf, err := encode(rctx, v)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(rctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// this line exists to escape call of `runtime.makeslicecopy` .
|
||||
// if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
|
||||
// dst buffer size and src buffer size are differrent.
|
||||
// in this case, compiler uses `runtime.makeslicecopy`, but it is slow.
|
||||
buf = buf[:len(buf)-1]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(rctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func marshal(v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
|
||||
ctx.Option.Flag = 0
|
||||
ctx.Option.Flag |= encoder.HTMLEscapeOption
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
|
||||
buf, err := encode(ctx, v)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// this line exists to escape call of `runtime.makeslicecopy` .
|
||||
// if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
|
||||
// dst buffer size and src buffer size are differrent.
|
||||
// in this case, compiler uses `runtime.makeslicecopy`, but it is slow.
|
||||
buf = buf[:len(buf)-1]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func marshalNoEscape(v interface{}) ([]byte, error) {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
|
||||
ctx.Option.Flag = 0
|
||||
ctx.Option.Flag |= encoder.HTMLEscapeOption
|
||||
|
||||
buf, err := encodeNoEscape(ctx, v)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// this line exists to escape call of `runtime.makeslicecopy` .
|
||||
// if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
|
||||
// dst buffer size and src buffer size are differrent.
|
||||
// in this case, compiler uses `runtime.makeslicecopy`, but it is slow.
|
||||
buf = buf[:len(buf)-1]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func marshalIndent(v interface{}, prefix, indent string, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
|
||||
ctx.Option.Flag = 0
|
||||
ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.IndentOption)
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
|
||||
buf, err := encodeIndent(ctx, v, prefix, indent)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
buf = buf[:len(buf)-2]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func encode(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) {
|
||||
b := ctx.Buf[:0]
|
||||
if v == nil {
|
||||
b = encoder.AppendNull(ctx, b)
|
||||
b = encoder.AppendComma(ctx, b)
|
||||
return b, nil
|
||||
}
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := encoder.CompileToGetCodeSet(typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := uintptr(header.ptr)
|
||||
ctx.Init(p, codeSet.CodeLength)
|
||||
ctx.KeepRefs = append(ctx.KeepRefs, header.ptr)
|
||||
|
||||
buf, err := encodeRunCode(ctx, b, codeSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ctx.Buf = buf
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func encodeNoEscape(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) {
|
||||
b := ctx.Buf[:0]
|
||||
if v == nil {
|
||||
b = encoder.AppendNull(ctx, b)
|
||||
b = encoder.AppendComma(ctx, b)
|
||||
return b, nil
|
||||
}
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := encoder.CompileToGetCodeSet(typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := uintptr(header.ptr)
|
||||
ctx.Init(p, codeSet.CodeLength)
|
||||
buf, err := encodeRunCode(ctx, b, codeSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx.Buf = buf
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func encodeIndent(ctx *encoder.RuntimeContext, v interface{}, prefix, indent string) ([]byte, error) {
|
||||
b := ctx.Buf[:0]
|
||||
if v == nil {
|
||||
b = encoder.AppendNull(ctx, b)
|
||||
b = encoder.AppendCommaIndent(ctx, b)
|
||||
return b, nil
|
||||
}
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := encoder.CompileToGetCodeSet(typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := uintptr(header.ptr)
|
||||
ctx.Init(p, codeSet.CodeLength)
|
||||
buf, err := encodeRunIndentCode(ctx, b, codeSet, prefix, indent)
|
||||
|
||||
ctx.KeepRefs = append(ctx.KeepRefs, header.ptr)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx.Buf = buf
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func encodeRunCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||
if (ctx.Option.Flag & encoder.DebugOption) != 0 {
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
return vm.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color.Run(ctx, b, codeSet)
|
||||
}
|
||||
return vm.Run(ctx, b, codeSet)
|
||||
}
|
||||
|
||||
func encodeRunIndentCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet, prefix, indent string) ([]byte, error) {
|
||||
ctx.Prefix = []byte(prefix)
|
||||
ctx.IndentStr = []byte(indent)
|
||||
if (ctx.Option.Flag & encoder.DebugOption) != 0 {
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color_indent.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
return vm_indent.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color_indent.Run(ctx, b, codeSet)
|
||||
}
|
||||
return vm_indent.Run(ctx, b, codeSet)
|
||||
}
|
||||
39
vendor/github.com/goccy/go-json/error.go
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
package json
|
||||
|
||||
import (
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
// Before Go 1.2, an InvalidUTF8Error was returned by Marshal when
|
||||
// attempting to encode a string value with invalid UTF-8 sequences.
|
||||
// As of Go 1.2, Marshal instead coerces the string to valid UTF-8 by
|
||||
// replacing invalid bytes with the Unicode replacement rune U+FFFD.
|
||||
//
|
||||
// Deprecated: No longer used; kept for compatibility.
|
||||
type InvalidUTF8Error = errors.InvalidUTF8Error
|
||||
|
||||
// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal.
|
||||
// (The argument to Unmarshal must be a non-nil pointer.)
|
||||
type InvalidUnmarshalError = errors.InvalidUnmarshalError
|
||||
|
||||
// A MarshalerError represents an error from calling a MarshalJSON or MarshalText method.
|
||||
type MarshalerError = errors.MarshalerError
|
||||
|
||||
// A SyntaxError is a description of a JSON syntax error.
|
||||
type SyntaxError = errors.SyntaxError
|
||||
|
||||
// An UnmarshalFieldError describes a JSON object key that
|
||||
// led to an unexported (and therefore unwritable) struct field.
|
||||
//
|
||||
// Deprecated: No longer used; kept for compatibility.
|
||||
type UnmarshalFieldError = errors.UnmarshalFieldError
|
||||
|
||||
// An UnmarshalTypeError describes a JSON value that was
|
||||
// not appropriate for a value of a specific Go type.
|
||||
type UnmarshalTypeError = errors.UnmarshalTypeError
|
||||
|
||||
// An UnsupportedTypeError is returned by Marshal when attempting
|
||||
// to encode an unsupported value type.
|
||||
type UnsupportedTypeError = errors.UnsupportedTypeError
|
||||
|
||||
type UnsupportedValueError = errors.UnsupportedValueError
|
||||
3
vendor/github.com/goccy/go-json/go.mod
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module github.com/goccy/go-json
|
||||
|
||||
go 1.12
|
||||
0
vendor/github.com/goccy/go-json/go.sum
generated
vendored
Normal file
37
vendor/github.com/goccy/go-json/internal/decoder/anonymous_field.go
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type anonymousFieldDecoder struct {
|
||||
structType *runtime.Type
|
||||
offset uintptr
|
||||
dec Decoder
|
||||
}
|
||||
|
||||
func newAnonymousFieldDecoder(structType *runtime.Type, offset uintptr, dec Decoder) *anonymousFieldDecoder {
|
||||
return &anonymousFieldDecoder{
|
||||
structType: structType,
|
||||
offset: offset,
|
||||
dec: dec,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *anonymousFieldDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
*(*unsafe.Pointer)(p) = unsafe_New(d.structType)
|
||||
}
|
||||
p = *(*unsafe.Pointer)(p)
|
||||
return d.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+d.offset))
|
||||
}
|
||||
|
||||
func (d *anonymousFieldDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
*(*unsafe.Pointer)(p) = unsafe_New(d.structType)
|
||||
}
|
||||
p = *(*unsafe.Pointer)(p)
|
||||
return d.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+d.offset))
|
||||
}
|
||||
169
vendor/github.com/goccy/go-json/internal/decoder/array.go
generated
vendored
Normal file
@@ -0,0 +1,169 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type arrayDecoder struct {
|
||||
elemType *runtime.Type
|
||||
size uintptr
|
||||
valueDecoder Decoder
|
||||
alen int
|
||||
structName string
|
||||
fieldName string
|
||||
zeroValue unsafe.Pointer
|
||||
}
|
||||
|
||||
func newArrayDecoder(dec Decoder, elemType *runtime.Type, alen int, structName, fieldName string) *arrayDecoder {
|
||||
zeroValue := *(*unsafe.Pointer)(unsafe_New(elemType))
|
||||
return &arrayDecoder{
|
||||
valueDecoder: dec,
|
||||
elemType: elemType,
|
||||
size: elemType.Size(),
|
||||
alen: alen,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
zeroValue: zeroValue,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *arrayDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case '[':
|
||||
idx := 0
|
||||
s.cursor++
|
||||
if s.skipWhiteSpace() == ']' {
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
for {
|
||||
if idx < d.alen {
|
||||
if err := d.valueDecoder.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size)); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
idx++
|
||||
switch s.skipWhiteSpace() {
|
||||
case ']':
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
case ',':
|
||||
s.cursor++
|
||||
continue
|
||||
case nul:
|
||||
if s.read() {
|
||||
s.cursor++
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
s.cursor++
|
||||
}
|
||||
ERROR:
|
||||
return errors.ErrUnexpectedEndOfJSON("array", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *arrayDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
case '[':
|
||||
idx := 0
|
||||
cursor++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == ']' {
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
cursor++
|
||||
return cursor, nil
|
||||
}
|
||||
for {
|
||||
if idx < d.alen {
|
||||
c, err := d.valueDecoder.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
} else {
|
||||
c, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
}
|
||||
idx++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case ']':
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
cursor++
|
||||
return cursor, nil
|
||||
case ',':
|
||||
cursor++
|
||||
continue
|
||||
default:
|
||||
return 0, errors.ErrInvalidCharacter(buf[cursor], "array", cursor)
|
||||
}
|
||||
}
|
||||
default:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("array", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
78
vendor/github.com/goccy/go-json/internal/decoder/bool.go
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type boolDecoder struct {
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newBoolDecoder(structName, fieldName string) *boolDecoder {
|
||||
return &boolDecoder{structName: structName, fieldName: fieldName}
|
||||
}
|
||||
|
||||
func (d *boolDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
c := s.skipWhiteSpace()
|
||||
for {
|
||||
switch c {
|
||||
case 't':
|
||||
if err := trueBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
**(**bool)(unsafe.Pointer(&p)) = true
|
||||
return nil
|
||||
case 'f':
|
||||
if err := falseBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
**(**bool)(unsafe.Pointer(&p)) = false
|
||||
return nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
c = s.char()
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
}
|
||||
break
|
||||
}
|
||||
ERROR:
|
||||
return errors.ErrUnexpectedEndOfJSON("bool", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *boolDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case 't':
|
||||
if err := validateTrue(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**bool)(unsafe.Pointer(&p)) = true
|
||||
return cursor, nil
|
||||
case 'f':
|
||||
if err := validateFalse(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 5
|
||||
**(**bool)(unsafe.Pointer(&p)) = false
|
||||
return cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
}
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("bool", cursor)
|
||||
}
|
||||
177
vendor/github.com/goccy/go-json/internal/decoder/bytes.go
generated
vendored
Normal file
@@ -0,0 +1,177 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type bytesDecoder struct {
|
||||
typ *runtime.Type
|
||||
sliceDecoder Decoder
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func byteUnmarshalerSliceDecoder(typ *runtime.Type, structName string, fieldName string) Decoder {
|
||||
var unmarshalDecoder Decoder
|
||||
switch {
|
||||
case runtime.PtrTo(typ).Implements(unmarshalJSONType):
|
||||
unmarshalDecoder = newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName)
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
unmarshalDecoder = newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName)
|
||||
}
|
||||
if unmarshalDecoder == nil {
|
||||
return nil
|
||||
}
|
||||
return newSliceDecoder(unmarshalDecoder, typ, 1, structName, fieldName)
|
||||
}
|
||||
|
||||
func newBytesDecoder(typ *runtime.Type, structName string, fieldName string) *bytesDecoder {
|
||||
return &bytesDecoder{
|
||||
typ: typ,
|
||||
sliceDecoder: byteUnmarshalerSliceDecoder(typ, structName, fieldName),
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *bytesDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamBinary(s, depth, p)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if bytes == nil {
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
decodedLen := base64.StdEncoding.DecodedLen(len(bytes))
|
||||
buf := make([]byte, decodedLen)
|
||||
n, err := base64.StdEncoding.Decode(buf, bytes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*(*[]byte)(p) = buf[:n]
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *bytesDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
bytes, c, err := d.decodeBinary(ctx, cursor, depth, p)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return c, nil
|
||||
}
|
||||
cursor = c
|
||||
decodedLen := base64.StdEncoding.DecodedLen(len(bytes))
|
||||
b := make([]byte, decodedLen)
|
||||
n, err := base64.StdEncoding.Decode(b, bytes)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
*(*[]byte)(p) = b[:n]
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func binaryBytes(s *Stream) ([]byte, error) {
|
||||
s.cursor++
|
||||
start := s.cursor
|
||||
for {
|
||||
switch s.char() {
|
||||
case '"':
|
||||
literal := s.buf[start:s.cursor]
|
||||
s.cursor++
|
||||
return literal, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
}
|
||||
s.cursor++
|
||||
}
|
||||
ERROR:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("[]byte", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *bytesDecoder) decodeStreamBinary(s *Stream, depth int64, p unsafe.Pointer) ([]byte, error) {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '"':
|
||||
return binaryBytes(s)
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case '[':
|
||||
if d.sliceDecoder == nil {
|
||||
return nil, &errors.UnmarshalTypeError{
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
}
|
||||
if err := d.sliceDecoder.DecodeStream(s, depth, p); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
return nil, errors.ErrNotAtBeginningOfValue(s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *bytesDecoder) decodeBinary(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) ([]byte, int64, error) {
|
||||
buf := ctx.Buf
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case '"':
|
||||
cursor++
|
||||
start := cursor
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case '"':
|
||||
literal := buf[start:cursor]
|
||||
cursor++
|
||||
return literal, cursor, nil
|
||||
case nul:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("[]byte", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
case '[':
|
||||
if d.sliceDecoder == nil {
|
||||
return nil, 0, &errors.UnmarshalTypeError{
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: cursor,
|
||||
}
|
||||
}
|
||||
c, err := d.sliceDecoder.Decode(ctx, cursor, depth, p)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
return nil, c, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
default:
|
||||
return nil, 0, errors.ErrNotAtBeginningOfValue(cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
510
vendor/github.com/goccy/go-json/internal/decoder/compile.go
generated
vendored
Normal file
@@ -0,0 +1,510 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
"unicode"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
var (
|
||||
jsonNumberType = reflect.TypeOf(json.Number(""))
|
||||
typeAddr *runtime.TypeAddr
|
||||
cachedDecoderMap unsafe.Pointer // map[uintptr]decoder
|
||||
cachedDecoder []Decoder
|
||||
)
|
||||
|
||||
func init() {
|
||||
typeAddr = runtime.AnalyzeTypeAddr()
|
||||
if typeAddr == nil {
|
||||
typeAddr = &runtime.TypeAddr{}
|
||||
}
|
||||
cachedDecoder = make([]Decoder, typeAddr.AddrRange>>typeAddr.AddrShift)
|
||||
}
|
||||
|
||||
func loadDecoderMap() map[uintptr]Decoder {
|
||||
p := atomic.LoadPointer(&cachedDecoderMap)
|
||||
return *(*map[uintptr]Decoder)(unsafe.Pointer(&p))
|
||||
}
|
||||
|
||||
func storeDecoder(typ uintptr, dec Decoder, m map[uintptr]Decoder) {
|
||||
newDecoderMap := make(map[uintptr]Decoder, len(m)+1)
|
||||
newDecoderMap[typ] = dec
|
||||
|
||||
for k, v := range m {
|
||||
newDecoderMap[k] = v
|
||||
}
|
||||
|
||||
atomic.StorePointer(&cachedDecoderMap, *(*unsafe.Pointer)(unsafe.Pointer(&newDecoderMap)))
|
||||
}
|
||||
|
||||
func compileToGetDecoderSlowPath(typeptr uintptr, typ *runtime.Type) (Decoder, error) {
|
||||
decoderMap := loadDecoderMap()
|
||||
if dec, exists := decoderMap[typeptr]; exists {
|
||||
return dec, nil
|
||||
}
|
||||
|
||||
dec, err := compileHead(typ, map[uintptr]Decoder{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
storeDecoder(typeptr, dec, decoderMap)
|
||||
return dec, nil
|
||||
}
|
||||
|
||||
func compileHead(typ *runtime.Type, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
switch {
|
||||
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||
return newUnmarshalJSONDecoder(runtime.PtrTo(typ), "", ""), nil
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
return newUnmarshalTextDecoder(runtime.PtrTo(typ), "", ""), nil
|
||||
}
|
||||
return compile(typ.Elem(), "", "", structTypeToDecoder)
|
||||
}
|
||||
|
||||
func compile(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
switch {
|
||||
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||
return newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||
}
|
||||
|
||||
switch typ.Kind() {
|
||||
case reflect.Ptr:
|
||||
return compilePtr(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Struct:
|
||||
return compileStruct(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Slice:
|
||||
elem := typ.Elem()
|
||||
if elem.Kind() == reflect.Uint8 {
|
||||
return compileBytes(elem, structName, fieldName)
|
||||
}
|
||||
return compileSlice(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Array:
|
||||
return compileArray(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Map:
|
||||
return compileMap(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Interface:
|
||||
return compileInterface(typ, structName, fieldName)
|
||||
case reflect.Uintptr:
|
||||
return compileUint(typ, structName, fieldName)
|
||||
case reflect.Int:
|
||||
return compileInt(typ, structName, fieldName)
|
||||
case reflect.Int8:
|
||||
return compileInt8(typ, structName, fieldName)
|
||||
case reflect.Int16:
|
||||
return compileInt16(typ, structName, fieldName)
|
||||
case reflect.Int32:
|
||||
return compileInt32(typ, structName, fieldName)
|
||||
case reflect.Int64:
|
||||
return compileInt64(typ, structName, fieldName)
|
||||
case reflect.Uint:
|
||||
return compileUint(typ, structName, fieldName)
|
||||
case reflect.Uint8:
|
||||
return compileUint8(typ, structName, fieldName)
|
||||
case reflect.Uint16:
|
||||
return compileUint16(typ, structName, fieldName)
|
||||
case reflect.Uint32:
|
||||
return compileUint32(typ, structName, fieldName)
|
||||
case reflect.Uint64:
|
||||
return compileUint64(typ, structName, fieldName)
|
||||
case reflect.String:
|
||||
return compileString(typ, structName, fieldName)
|
||||
case reflect.Bool:
|
||||
return compileBool(structName, fieldName)
|
||||
case reflect.Float32:
|
||||
return compileFloat32(structName, fieldName)
|
||||
case reflect.Float64:
|
||||
return compileFloat64(structName, fieldName)
|
||||
case reflect.Func:
|
||||
return compileFunc(typ, structName, fieldName)
|
||||
}
|
||||
return nil, &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(typ),
|
||||
Offset: 0,
|
||||
Struct: structName,
|
||||
Field: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func isStringTagSupportedType(typ *runtime.Type) bool {
|
||||
switch {
|
||||
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||
return false
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
return false
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Map:
|
||||
return false
|
||||
case reflect.Slice:
|
||||
return false
|
||||
case reflect.Array:
|
||||
return false
|
||||
case reflect.Struct:
|
||||
return false
|
||||
case reflect.Interface:
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func compileMapKey(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
if runtime.PtrTo(typ).Implements(unmarshalTextType) {
|
||||
return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||
}
|
||||
dec, err := compile(typ, structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for {
|
||||
switch t := dec.(type) {
|
||||
case *stringDecoder, *interfaceDecoder:
|
||||
return dec, nil
|
||||
case *boolDecoder, *intDecoder, *uintDecoder, *numberDecoder:
|
||||
return newWrappedStringDecoder(typ, dec, structName, fieldName), nil
|
||||
case *ptrDecoder:
|
||||
dec = t.dec
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
return nil, &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(typ),
|
||||
Offset: 0,
|
||||
Struct: structName,
|
||||
Field: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func compilePtr(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
dec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newPtrDecoder(dec, typ.Elem(), structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileInt(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int)(p) = int(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt8(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int8)(p) = int8(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt16(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int16)(p) = int16(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt32(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int32)(p) = int32(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt64(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int64)(p) = v
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint)(p) = uint(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint8(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint8)(p) = uint8(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint16(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint16)(p) = uint16(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint32(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint32)(p) = uint32(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint64(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint64)(p) = v
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileFloat32(structName, fieldName string) (Decoder, error) {
|
||||
return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*float32)(p) = float32(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileFloat64(structName, fieldName string) (Decoder, error) {
|
||||
return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*float64)(p) = v
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileString(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
if typ == runtime.Type2RType(jsonNumberType) {
|
||||
return newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
|
||||
*(*json.Number)(p) = v
|
||||
}), nil
|
||||
}
|
||||
return newStringDecoder(structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileBool(structName, fieldName string) (Decoder, error) {
|
||||
return newBoolDecoder(structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileBytes(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newBytesDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileSlice(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
elem := typ.Elem()
|
||||
decoder, err := compile(elem, structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newSliceDecoder(decoder, elem, elem.Size(), structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileArray(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
elem := typ.Elem()
|
||||
decoder, err := compile(elem, structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newArrayDecoder(decoder, elem, typ.Len(), structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileMap(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
keyDec, err := compileMapKey(typ.Key(), structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
valueDec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newMapDecoder(typ, typ.Key(), keyDec, typ.Elem(), valueDec, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileInterface(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newInterfaceDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileFunc(typ *runtime.Type, strutName, fieldName string) (Decoder, error) {
|
||||
return newFuncDecoder(typ, strutName, fieldName), nil
|
||||
}
|
||||
|
||||
func removeConflictFields(fieldMap map[string]*structFieldSet, conflictedMap map[string]struct{}, dec *structDecoder, field reflect.StructField) {
|
||||
for k, v := range dec.fieldMap {
|
||||
if _, exists := conflictedMap[k]; exists {
|
||||
// already conflicted key
|
||||
continue
|
||||
}
|
||||
set, exists := fieldMap[k]
|
||||
if !exists {
|
||||
fieldSet := &structFieldSet{
|
||||
dec: v.dec,
|
||||
offset: field.Offset + v.offset,
|
||||
isTaggedKey: v.isTaggedKey,
|
||||
key: k,
|
||||
keyLen: int64(len(k)),
|
||||
}
|
||||
fieldMap[k] = fieldSet
|
||||
lower := strings.ToLower(k)
|
||||
if _, exists := fieldMap[lower]; !exists {
|
||||
fieldMap[lower] = fieldSet
|
||||
}
|
||||
continue
|
||||
}
|
||||
if set.isTaggedKey {
|
||||
if v.isTaggedKey {
|
||||
// conflict tag key
|
||||
delete(fieldMap, k)
|
||||
delete(fieldMap, strings.ToLower(k))
|
||||
conflictedMap[k] = struct{}{}
|
||||
conflictedMap[strings.ToLower(k)] = struct{}{}
|
||||
}
|
||||
} else {
|
||||
if v.isTaggedKey {
|
||||
fieldSet := &structFieldSet{
|
||||
dec: v.dec,
|
||||
offset: field.Offset + v.offset,
|
||||
isTaggedKey: v.isTaggedKey,
|
||||
key: k,
|
||||
keyLen: int64(len(k)),
|
||||
}
|
||||
fieldMap[k] = fieldSet
|
||||
lower := strings.ToLower(k)
|
||||
if _, exists := fieldMap[lower]; !exists {
|
||||
fieldMap[lower] = fieldSet
|
||||
}
|
||||
} else {
|
||||
// conflict tag key
|
||||
delete(fieldMap, k)
|
||||
delete(fieldMap, strings.ToLower(k))
|
||||
conflictedMap[k] = struct{}{}
|
||||
conflictedMap[strings.ToLower(k)] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func compileStruct(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
fieldNum := typ.NumField()
|
||||
conflictedMap := map[string]struct{}{}
|
||||
fieldMap := map[string]*structFieldSet{}
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
if dec, exists := structTypeToDecoder[typeptr]; exists {
|
||||
return dec, nil
|
||||
}
|
||||
structDec := newStructDecoder(structName, fieldName, fieldMap)
|
||||
structTypeToDecoder[typeptr] = structDec
|
||||
structName = typ.Name()
|
||||
for i := 0; i < fieldNum; i++ {
|
||||
field := typ.Field(i)
|
||||
if runtime.IsIgnoredStructField(field) {
|
||||
continue
|
||||
}
|
||||
isUnexportedField := unicode.IsLower([]rune(field.Name)[0])
|
||||
tag := runtime.StructTagFromField(field)
|
||||
dec, err := compile(runtime.Type2RType(field.Type), structName, field.Name, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if field.Anonymous && !tag.IsTaggedKey {
|
||||
if stDec, ok := dec.(*structDecoder); ok {
|
||||
if runtime.Type2RType(field.Type) == typ {
|
||||
// recursive definition
|
||||
continue
|
||||
}
|
||||
removeConflictFields(fieldMap, conflictedMap, stDec, field)
|
||||
} else if pdec, ok := dec.(*ptrDecoder); ok {
|
||||
contentDec := pdec.contentDecoder()
|
||||
if pdec.typ == typ {
|
||||
// recursive definition
|
||||
continue
|
||||
}
|
||||
var fieldSetErr error
|
||||
if isUnexportedField {
|
||||
fieldSetErr = fmt.Errorf(
|
||||
"json: cannot set embedded pointer to unexported struct: %v",
|
||||
field.Type.Elem(),
|
||||
)
|
||||
}
|
||||
if dec, ok := contentDec.(*structDecoder); ok {
|
||||
for k, v := range dec.fieldMap {
|
||||
if _, exists := conflictedMap[k]; exists {
|
||||
// already conflicted key
|
||||
continue
|
||||
}
|
||||
set, exists := fieldMap[k]
|
||||
if !exists {
|
||||
fieldSet := &structFieldSet{
|
||||
dec: newAnonymousFieldDecoder(pdec.typ, v.offset, v.dec),
|
||||
offset: field.Offset,
|
||||
isTaggedKey: v.isTaggedKey,
|
||||
key: k,
|
||||
keyLen: int64(len(k)),
|
||||
err: fieldSetErr,
|
||||
}
|
||||
fieldMap[k] = fieldSet
|
||||
lower := strings.ToLower(k)
|
||||
if _, exists := fieldMap[lower]; !exists {
|
||||
fieldMap[lower] = fieldSet
|
||||
}
|
||||
continue
|
||||
}
|
||||
if set.isTaggedKey {
|
||||
if v.isTaggedKey {
|
||||
// conflict tag key
|
||||
delete(fieldMap, k)
|
||||
delete(fieldMap, strings.ToLower(k))
|
||||
conflictedMap[k] = struct{}{}
|
||||
conflictedMap[strings.ToLower(k)] = struct{}{}
|
||||
}
|
||||
} else {
|
||||
if v.isTaggedKey {
|
||||
fieldSet := &structFieldSet{
|
||||
dec: newAnonymousFieldDecoder(pdec.typ, v.offset, v.dec),
|
||||
offset: field.Offset,
|
||||
isTaggedKey: v.isTaggedKey,
|
||||
key: k,
|
||||
keyLen: int64(len(k)),
|
||||
err: fieldSetErr,
|
||||
}
|
||||
fieldMap[k] = fieldSet
|
||||
lower := strings.ToLower(k)
|
||||
if _, exists := fieldMap[lower]; !exists {
|
||||
fieldMap[lower] = fieldSet
|
||||
}
|
||||
} else {
|
||||
// conflict tag key
|
||||
delete(fieldMap, k)
|
||||
delete(fieldMap, strings.ToLower(k))
|
||||
conflictedMap[k] = struct{}{}
|
||||
conflictedMap[strings.ToLower(k)] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if tag.IsString && isStringTagSupportedType(runtime.Type2RType(field.Type)) {
|
||||
dec = newWrappedStringDecoder(runtime.Type2RType(field.Type), dec, structName, field.Name)
|
||||
}
|
||||
var key string
|
||||
if tag.Key != "" {
|
||||
key = tag.Key
|
||||
} else {
|
||||
key = field.Name
|
||||
}
|
||||
fieldSet := &structFieldSet{
|
||||
dec: dec,
|
||||
offset: field.Offset,
|
||||
isTaggedKey: tag.IsTaggedKey,
|
||||
key: key,
|
||||
keyLen: int64(len(key)),
|
||||
}
|
||||
fieldMap[key] = fieldSet
|
||||
lower := strings.ToLower(key)
|
||||
if _, exists := fieldMap[lower]; !exists {
|
||||
fieldMap[lower] = fieldSet
|
||||
}
|
||||
}
|
||||
}
|
||||
delete(structTypeToDecoder, typeptr)
|
||||
structDec.tryOptimize()
|
||||
return structDec, nil
|
||||
}
|
||||
|
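removeConflictFields and compileStruct implement the usual embedded-field promotion rules: an untagged field that appears at the same depth in two embedded structs is dropped rather than decoded ambiguously. An illustrative sketch (assumed to match encoding/json semantics; not part of the vendored diff):

```go
package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

type A struct{ X, Y string }
type B struct{ X string }

// Outer embeds A and B; X is ambiguous at depth 1 and is discarded,
// while Y is unambiguous and is still decoded.
type Outer struct {
	A
	B
}

func main() {
	var o Outer
	if err := json.Unmarshal([]byte(`{"X":"dropped","Y":"kept"}`), &o); err != nil {
		panic(err)
	}
	fmt.Printf("%q %q %q\n", o.A.X, o.B.X, o.Y) // "" "" "kept"
}
```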
||||
func implementsUnmarshalJSONType(typ *runtime.Type) bool {
|
||||
return typ.Implements(unmarshalJSONType) || typ.Implements(unmarshalJSONContextType)
|
||||
}
|
||||
28 vendor/github.com/goccy/go-json/internal/decoder/compile_norace.go generated vendored Normal file
@@ -0,0 +1,28 @@
// +build !race

package decoder

import (
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
	typeptr := uintptr(unsafe.Pointer(typ))
	if typeptr > typeAddr.MaxTypeAddr {
		return compileToGetDecoderSlowPath(typeptr, typ)
	}

	index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
	if dec := cachedDecoder[index]; dec != nil {
		return dec, nil
	}

	dec, err := compileHead(typ, map[uintptr]Decoder{})
	if err != nil {
		return nil, err
	}
	cachedDecoder[index] = dec
	return dec, nil
}
36 vendor/github.com/goccy/go-json/internal/decoder/compile_race.go generated vendored Normal file
@@ -0,0 +1,36 @@
// +build race

package decoder

import (
	"sync"
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

var decMu sync.RWMutex

func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
	typeptr := uintptr(unsafe.Pointer(typ))
	if typeptr > typeAddr.MaxTypeAddr {
		return compileToGetDecoderSlowPath(typeptr, typ)
	}

	index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
	decMu.RLock()
	if dec := cachedDecoder[index]; dec != nil {
		decMu.RUnlock()
		return dec, nil
	}
	decMu.RUnlock()

	dec, err := compileHead(typ, map[uintptr]Decoder{})
	if err != nil {
		return nil, err
	}
	decMu.Lock()
	cachedDecoder[index] = dec
	decMu.Unlock()
	return dec, nil
}
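Both build variants above implement the same per-type decoder cache: the first decode of a type compiles a Decoder and stores it at an index derived from the type pointer, and later decodes of the same type reuse it (the race-detector build simply guards the cache with decMu). A minimal usage sketch through go-json's public API (assuming the standard Unmarshal signature; not part of the vendored diff):

```go
package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

type user struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}

func main() {
	var u user
	// The first Unmarshal for this type compiles and caches its decoder via
	// CompileToGetDecoder; subsequent calls hit cachedDecoder directly.
	if err := json.Unmarshal([]byte(`{"name":"gopher","age":12}`), &u); err != nil {
		panic(err)
	}
	fmt.Println(u.Name, u.Age) // gopher 12
}
```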
254 vendor/github.com/goccy/go-json/internal/decoder/context.go generated vendored Normal file
@@ -0,0 +1,254 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type RuntimeContext struct {
|
||||
Buf []byte
|
||||
Option *Option
|
||||
}
|
||||
|
||||
var (
|
||||
runtimeContextPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
return &RuntimeContext{
|
||||
Option: &Option{},
|
||||
}
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func TakeRuntimeContext() *RuntimeContext {
|
||||
return runtimeContextPool.Get().(*RuntimeContext)
|
||||
}
|
||||
|
||||
func ReleaseRuntimeContext(ctx *RuntimeContext) {
|
||||
runtimeContextPool.Put(ctx)
|
||||
}
|
||||
|
||||
var (
|
||||
isWhiteSpace = [256]bool{}
|
||||
)
|
||||
|
||||
func init() {
|
||||
isWhiteSpace[' '] = true
|
||||
isWhiteSpace['\n'] = true
|
||||
isWhiteSpace['\t'] = true
|
||||
isWhiteSpace['\r'] = true
|
||||
}
|
||||
|
||||
func char(ptr unsafe.Pointer, offset int64) byte {
|
||||
return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset)))
|
||||
}
|
||||
|
||||
func skipWhiteSpace(buf []byte, cursor int64) int64 {
|
||||
for isWhiteSpace[buf[cursor]] {
|
||||
cursor++
|
||||
}
|
||||
return cursor
|
||||
}
|
||||
|
||||
func skipObject(buf []byte, cursor, depth int64) (int64, error) {
|
||||
braceCount := 1
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case '{':
|
||||
braceCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
braceCount--
|
||||
if braceCount == 0 {
|
||||
return cursor + 1, nil
|
||||
}
|
||||
case '[':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case ']':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch buf[cursor] {
|
||||
case '\\':
|
||||
cursor++
|
||||
if buf[cursor] == nul {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("object of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func skipArray(buf []byte, cursor, depth int64) (int64, error) {
|
||||
bracketCount := 1
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case '[':
|
||||
bracketCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case ']':
|
||||
bracketCount--
|
||||
depth--
|
||||
if bracketCount == 0 {
|
||||
return cursor + 1, nil
|
||||
}
|
||||
case '{':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch buf[cursor] {
|
||||
case '\\':
|
||||
cursor++
|
||||
if buf[cursor] == nul {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("array of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func skipValue(buf []byte, cursor, depth int64) (int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '{':
|
||||
return skipObject(buf, cursor+1, depth+1)
|
||||
case '[':
|
||||
return skipArray(buf, cursor+1, depth+1)
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch buf[cursor] {
|
||||
case '\\':
|
||||
cursor++
|
||||
if buf[cursor] == nul {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
return cursor + 1, nil
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
for {
|
||||
cursor++
|
||||
if floatTable[buf[cursor]] {
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
return cursor, nil
|
||||
case 't':
|
||||
if err := validateTrue(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
case 'f':
|
||||
if err := validateFalse(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 5
|
||||
return cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
default:
|
||||
return cursor, errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func validateTrue(buf []byte, cursor int64) error {
|
||||
if cursor+3 >= int64(len(buf)) {
|
||||
return errors.ErrUnexpectedEndOfJSON("true", cursor)
|
||||
}
|
||||
if buf[cursor+1] != 'r' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+1], "true", cursor)
|
||||
}
|
||||
if buf[cursor+2] != 'u' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+2], "true", cursor)
|
||||
}
|
||||
if buf[cursor+3] != 'e' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+3], "true", cursor)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateFalse(buf []byte, cursor int64) error {
|
||||
if cursor+4 >= int64(len(buf)) {
|
||||
return errors.ErrUnexpectedEndOfJSON("false", cursor)
|
||||
}
|
||||
if buf[cursor+1] != 'a' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+1], "false", cursor)
|
||||
}
|
||||
if buf[cursor+2] != 'l' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+2], "false", cursor)
|
||||
}
|
||||
if buf[cursor+3] != 's' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+3], "false", cursor)
|
||||
}
|
||||
if buf[cursor+4] != 'e' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+4], "false", cursor)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateNull(buf []byte, cursor int64) error {
|
||||
if cursor+3 >= int64(len(buf)) {
|
||||
return errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||
}
|
||||
if buf[cursor+1] != 'u' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+1], "null", cursor)
|
||||
}
|
||||
if buf[cursor+2] != 'l' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+2], "null", cursor)
|
||||
}
|
||||
if buf[cursor+3] != 'l' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+3], "null", cursor)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
158 vendor/github.com/goccy/go-json/internal/decoder/float.go generated vendored Normal file
@@ -0,0 +1,158 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type floatDecoder struct {
|
||||
op func(unsafe.Pointer, float64)
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newFloatDecoder(structName, fieldName string, op func(unsafe.Pointer, float64)) *floatDecoder {
|
||||
return &floatDecoder{op: op, structName: structName, fieldName: fieldName}
|
||||
}
|
||||
|
||||
var (
|
||||
floatTable = [256]bool{
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
'.': true,
|
||||
'e': true,
|
||||
'E': true,
|
||||
'+': true,
|
||||
'-': true,
|
||||
}
|
||||
|
||||
validEndNumberChar = [256]bool{
|
||||
nul: true,
|
||||
' ': true,
|
||||
'\t': true,
|
||||
'\r': true,
|
||||
'\n': true,
|
||||
',': true,
|
||||
':': true,
|
||||
'}': true,
|
||||
']': true,
|
||||
}
|
||||
)
|
||||
|
||||
func floatBytes(s *Stream) []byte {
|
||||
start := s.cursor
|
||||
for {
|
||||
s.cursor++
|
||||
if floatTable[s.char()] {
|
||||
continue
|
||||
} else if s.char() == nul {
|
||||
if s.read() {
|
||||
s.cursor-- // for retry current character
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
return s.buf[start:s.cursor]
|
||||
}
|
||||
|
||||
func (d *floatDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return floatBytes(s), nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("float", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *floatDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
start := cursor
|
||||
cursor++
|
||||
for floatTable[buf[cursor]] {
|
||||
cursor++
|
||||
}
|
||||
num := buf[start:cursor]
|
||||
return num, cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
default:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("float", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (d *floatDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamByte(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if bytes == nil {
|
||||
return nil
|
||||
}
|
||||
str := *(*string)(unsafe.Pointer(&bytes))
|
||||
f64, err := strconv.ParseFloat(str, 64)
|
||||
if err != nil {
|
||||
return errors.ErrSyntax(err.Error(), s.totalOffset())
|
||||
}
|
||||
d.op(p, f64)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *floatDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
bytes, c, err := d.decodeByte(buf, cursor)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return c, nil
|
||||
}
|
||||
cursor = c
|
||||
if !validEndNumberChar[buf[cursor]] {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("float", cursor)
|
||||
}
|
||||
s := *(*string)(unsafe.Pointer(&bytes))
|
||||
f64, err := strconv.ParseFloat(s, 64)
|
||||
if err != nil {
|
||||
return 0, errors.ErrSyntax(err.Error(), cursor)
|
||||
}
|
||||
d.op(p, f64)
|
||||
return cursor, nil
|
||||
}
|
||||
141 vendor/github.com/goccy/go-json/internal/decoder/func.go generated vendored Normal file
@@ -0,0 +1,141 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type funcDecoder struct {
|
||||
typ *runtime.Type
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newFuncDecoder(typ *runtime.Type, structName, fieldName string) *funcDecoder {
|
||||
fnDecoder := &funcDecoder{typ, structName, fieldName}
|
||||
return fnDecoder
|
||||
}
|
||||
|
||||
func (d *funcDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
s.skipWhiteSpace()
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
if len(src) > 0 {
|
||||
switch src[0] {
|
||||
case '"':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "string",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '[':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "array",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '{':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return nil
|
||||
case 't':
|
||||
if err := trueBytes(s); err == nil {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
}
|
||||
case 'f':
|
||||
if err := falseBytes(s); err == nil {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return errors.ErrNotAtBeginningOfValue(start)
|
||||
}
|
||||
|
||||
func (d *funcDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
if len(src) > 0 {
|
||||
switch src[0] {
|
||||
case '"':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "string",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '[':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "array",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '{':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case 'n':
|
||||
if bytes.Equal(src, nullbytes) {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return end, nil
|
||||
}
|
||||
case 't':
|
||||
if err := validateTrue(buf, start); err == nil {
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
}
|
||||
case 'f':
|
||||
if err := validateFalse(buf, start); err == nil {
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0, errors.ErrNotAtBeginningOfValue(start)
|
||||
}
|
||||
242 vendor/github.com/goccy/go-json/internal/decoder/int.go generated vendored Normal file
@@ -0,0 +1,242 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type intDecoder struct {
|
||||
typ *runtime.Type
|
||||
kind reflect.Kind
|
||||
op func(unsafe.Pointer, int64)
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newIntDecoder(typ *runtime.Type, structName, fieldName string, op func(unsafe.Pointer, int64)) *intDecoder {
|
||||
return &intDecoder{
|
||||
typ: typ,
|
||||
kind: typ.Kind(),
|
||||
op: op,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *intDecoder) typeError(buf []byte, offset int64) *errors.UnmarshalTypeError {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: fmt.Sprintf("number %s", string(buf)),
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
Offset: offset,
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
pow10i64 = [...]int64{
|
||||
1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09,
|
||||
1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18,
|
||||
}
|
||||
pow10i64Len = len(pow10i64)
|
||||
)
|
||||
|
||||
func (d *intDecoder) parseInt(b []byte) (int64, error) {
|
||||
isNegative := false
|
||||
if b[0] == '-' {
|
||||
b = b[1:]
|
||||
isNegative = true
|
||||
}
|
||||
maxDigit := len(b)
|
||||
if maxDigit > pow10i64Len {
|
||||
return 0, fmt.Errorf("invalid length of number")
|
||||
}
|
||||
sum := int64(0)
|
||||
for i := 0; i < maxDigit; i++ {
|
||||
c := int64(b[i]) - 48
|
||||
digitValue := pow10i64[maxDigit-i-1]
|
||||
sum += c * digitValue
|
||||
}
|
||||
if isNegative {
|
||||
return -1 * sum, nil
|
||||
}
|
||||
return sum, nil
|
||||
}
|
||||
|
||||
var (
|
||||
numTable = [256]bool{
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
}
|
||||
)
|
||||
|
||||
var (
|
||||
numZeroBuf = []byte{'0'}
|
||||
)
|
||||
|
||||
func (d *intDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '-':
|
||||
start := s.cursor
|
||||
for {
|
||||
s.cursor++
|
||||
if numTable[s.char()] {
|
||||
continue
|
||||
} else if s.char() == nul {
|
||||
if s.read() {
|
||||
s.cursor-- // for retry current character
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
num := s.buf[start:s.cursor]
|
||||
if len(num) < 2 {
|
||||
goto ERROR
|
||||
}
|
||||
return num, nil
|
||||
case '0':
|
||||
s.cursor++
|
||||
return numZeroBuf, nil
|
||||
case '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
start := s.cursor
|
||||
for {
|
||||
s.cursor++
|
||||
if numTable[s.char()] {
|
||||
continue
|
||||
} else if s.char() == nul {
|
||||
if s.read() {
|
||||
s.cursor-- // for retry current character
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
num := s.buf[start:s.cursor]
|
||||
return num, nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
return nil, d.typeError([]byte{s.char()}, s.totalOffset())
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("number(integer)", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *intDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
for {
|
||||
switch char(b, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '0':
|
||||
cursor++
|
||||
return numZeroBuf, cursor, nil
|
||||
case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
start := cursor
|
||||
cursor++
|
||||
for numTable[char(b, cursor)] {
|
||||
cursor++
|
||||
}
|
||||
num := buf[start:cursor]
|
||||
return num, cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
default:
|
||||
return nil, 0, d.typeError([]byte{char(b, cursor)}, cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (d *intDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamByte(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if bytes == nil {
|
||||
return nil
|
||||
}
|
||||
i64, err := d.parseInt(bytes)
|
||||
if err != nil {
|
||||
return d.typeError(bytes, s.totalOffset())
|
||||
}
|
||||
switch d.kind {
|
||||
case reflect.Int8:
|
||||
if i64 <= -1*(1<<7) || (1<<7) <= i64 {
|
||||
return d.typeError(bytes, s.totalOffset())
|
||||
}
|
||||
case reflect.Int16:
|
||||
if i64 <= -1*(1<<15) || (1<<15) <= i64 {
|
||||
return d.typeError(bytes, s.totalOffset())
|
||||
}
|
||||
case reflect.Int32:
|
||||
if i64 <= -1*(1<<31) || (1<<31) <= i64 {
|
||||
return d.typeError(bytes, s.totalOffset())
|
||||
}
|
||||
}
|
||||
d.op(p, i64)
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *intDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return c, nil
|
||||
}
|
||||
cursor = c
|
||||
|
||||
i64, err := d.parseInt(bytes)
|
||||
if err != nil {
|
||||
return 0, d.typeError(bytes, cursor)
|
||||
}
|
||||
switch d.kind {
|
||||
case reflect.Int8:
|
||||
if i64 <= -1*(1<<7) || (1<<7) <= i64 {
|
||||
return 0, d.typeError(bytes, cursor)
|
||||
}
|
||||
case reflect.Int16:
|
||||
if i64 <= -1*(1<<15) || (1<<15) <= i64 {
|
||||
return 0, d.typeError(bytes, cursor)
|
||||
}
|
||||
case reflect.Int32:
|
||||
if i64 <= -1*(1<<31) || (1<<31) <= i64 {
|
||||
return 0, d.typeError(bytes, cursor)
|
||||
}
|
||||
}
|
||||
d.op(p, i64)
|
||||
return cursor, nil
|
||||
}
|
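The bounds checks in intDecoder surface as ordinary unmarshal type errors when a JSON number does not fit the target integer kind. A small sketch (expected behavior assumed to mirror encoding/json; not part of the vendored diff):

```go
package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	var v struct {
		N int8 `json:"n"`
	}
	// 300 overflows int8, so the decoder's range check rejects it with a
	// type error instead of silently truncating.
	err := json.Unmarshal([]byte(`{"n":300}`), &v)
	fmt.Println(err != nil) // true
}
```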
||||
458 vendor/github.com/goccy/go-json/internal/decoder/interface.go generated vendored Normal file
@@ -0,0 +1,458 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding"
|
||||
"encoding/json"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type interfaceDecoder struct {
|
||||
typ *runtime.Type
|
||||
structName string
|
||||
fieldName string
|
||||
sliceDecoder *sliceDecoder
|
||||
mapDecoder *mapDecoder
|
||||
floatDecoder *floatDecoder
|
||||
numberDecoder *numberDecoder
|
||||
stringDecoder *stringDecoder
|
||||
}
|
||||
|
||||
func newEmptyInterfaceDecoder(structName, fieldName string) *interfaceDecoder {
|
||||
ifaceDecoder := &interfaceDecoder{
|
||||
typ: emptyInterfaceType,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
stringDecoder: newStringDecoder(structName, fieldName),
|
||||
}
|
||||
ifaceDecoder.sliceDecoder = newSliceDecoder(
|
||||
ifaceDecoder,
|
||||
emptyInterfaceType,
|
||||
emptyInterfaceType.Size(),
|
||||
structName, fieldName,
|
||||
)
|
||||
ifaceDecoder.mapDecoder = newMapDecoder(
|
||||
interfaceMapType,
|
||||
stringType,
|
||||
ifaceDecoder.stringDecoder,
|
||||
interfaceMapType.Elem(),
|
||||
ifaceDecoder,
|
||||
structName,
|
||||
fieldName,
|
||||
)
|
||||
return ifaceDecoder
|
||||
}
|
||||
|
||||
func newInterfaceDecoder(typ *runtime.Type, structName, fieldName string) *interfaceDecoder {
|
||||
emptyIfaceDecoder := newEmptyInterfaceDecoder(structName, fieldName)
|
||||
stringDecoder := newStringDecoder(structName, fieldName)
|
||||
return &interfaceDecoder{
|
||||
typ: typ,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
sliceDecoder: newSliceDecoder(
|
||||
emptyIfaceDecoder,
|
||||
emptyInterfaceType,
|
||||
emptyInterfaceType.Size(),
|
||||
structName, fieldName,
|
||||
),
|
||||
mapDecoder: newMapDecoder(
|
||||
interfaceMapType,
|
||||
stringType,
|
||||
stringDecoder,
|
||||
interfaceMapType.Elem(),
|
||||
emptyIfaceDecoder,
|
||||
structName,
|
||||
fieldName,
|
||||
),
|
||||
floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
stringDecoder: stringDecoder,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) numDecoder(s *Stream) Decoder {
|
||||
if s.UseNumber {
|
||||
return d.numberDecoder
|
||||
}
|
||||
return d.floatDecoder
|
||||
}
|
||||
|
||||
var (
|
||||
emptyInterfaceType = runtime.Type2RType(reflect.TypeOf((*interface{})(nil)).Elem())
|
||||
interfaceMapType = runtime.Type2RType(
|
||||
reflect.TypeOf((*map[string]interface{})(nil)).Elem(),
|
||||
)
|
||||
stringType = runtime.Type2RType(
|
||||
reflect.TypeOf(""),
|
||||
)
|
||||
)
|
||||
|
||||
func decodeStreamUnmarshaler(s *Stream, depth int64, unmarshaler json.Unmarshaler) error {
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalJSON(dst); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func decodeStreamUnmarshalerContext(s *Stream, depth int64, unmarshaler unmarshalerContext) error {
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalJSON(s.Option.Context, dst); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func decodeUnmarshaler(buf []byte, cursor, depth int64, unmarshaler json.Unmarshaler) (int64, error) {
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalJSON(dst); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return end, nil
|
||||
}
|
||||
|
||||
func decodeUnmarshalerContext(ctx *RuntimeContext, buf []byte, cursor, depth int64, unmarshaler unmarshalerContext) (int64, error) {
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalJSON(ctx.Option.Context, dst); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return end, nil
|
||||
}
|
||||
|
||||
func decodeStreamTextUnmarshaler(s *Stream, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) error {
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
if bytes.Equal(src, nullbytes) {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalText(dst); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func decodeTextUnmarshaler(buf []byte, cursor, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) (int64, error) {
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
if bytes.Equal(src, nullbytes) {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return end, nil
|
||||
}
|
||||
if s, ok := unquoteBytes(src); ok {
|
||||
src = s
|
||||
}
|
||||
if err := unmarshaler.UnmarshalText(src); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return end, nil
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) decodeStreamEmptyInterface(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
c := s.skipWhiteSpace()
|
||||
for {
|
||||
switch c {
|
||||
case '{':
|
||||
var v map[string]interface{}
|
||||
ptr := unsafe.Pointer(&v)
|
||||
if err := d.mapDecoder.DecodeStream(s, depth, ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = v
|
||||
return nil
|
||||
case '[':
|
||||
var v []interface{}
|
||||
ptr := unsafe.Pointer(&v)
|
||||
if err := d.sliceDecoder.DecodeStream(s, depth, ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = v
|
||||
return nil
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return d.numDecoder(s).DecodeStream(s, depth, p)
|
||||
case '"':
|
||||
s.cursor++
|
||||
start := s.cursor
|
||||
for {
|
||||
switch s.char() {
|
||||
case '\\':
|
||||
if _, err := decodeEscapeString(s, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
case '"':
|
||||
literal := s.buf[start:s.cursor]
|
||||
s.cursor++
|
||||
*(*interface{})(p) = string(literal)
|
||||
return nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
s.cursor++
|
||||
}
|
||||
case 't':
|
||||
if err := trueBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = true
|
||||
return nil
|
||||
case 'f':
|
||||
if err := falseBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = false
|
||||
return nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = nil
|
||||
return nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
c = s.char()
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
return errors.ErrNotAtBeginningOfValue(s.totalOffset())
|
||||
}
|
||||
|
||||
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
runtimeInterfaceValue := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: d.typ,
|
||||
ptr: p,
|
||||
}))
|
||||
rv := reflect.ValueOf(runtimeInterfaceValue)
|
||||
if rv.NumMethod() > 0 && rv.CanInterface() {
|
||||
if u, ok := rv.Interface().(unmarshalerContext); ok {
|
||||
return decodeStreamUnmarshalerContext(s, depth, u)
|
||||
}
|
||||
if u, ok := rv.Interface().(json.Unmarshaler); ok {
|
||||
return decodeStreamUnmarshaler(s, depth, u)
|
||||
}
|
||||
if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
|
||||
return decodeStreamTextUnmarshaler(s, depth, u, p)
|
||||
}
|
||||
if s.skipWhiteSpace() == 'n' {
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = nil
|
||||
return nil
|
||||
}
|
||||
return d.errUnmarshalType(rv.Type(), s.totalOffset())
|
||||
}
|
||||
iface := rv.Interface()
|
||||
ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface))
|
||||
typ := ifaceHeader.typ
|
||||
if ifaceHeader.ptr == nil || d.typ == typ || typ == nil {
|
||||
// concrete type is empty interface
|
||||
return d.decodeStreamEmptyInterface(s, depth, p)
|
||||
}
|
||||
if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr {
|
||||
return d.decodeStreamEmptyInterface(s, depth, p)
|
||||
}
|
||||
if s.skipWhiteSpace() == 'n' {
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = nil
|
||||
return nil
|
||||
}
|
||||
decoder, err := CompileToGetDecoder(typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return decoder.DecodeStream(s, depth, ifaceHeader.ptr)
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) errUnmarshalType(typ reflect.Type, offset int64) *errors.UnmarshalTypeError {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: typ.String(),
|
||||
Type: typ,
|
||||
Offset: offset,
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
runtimeInterfaceValue := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: d.typ,
|
||||
ptr: p,
|
||||
}))
|
||||
rv := reflect.ValueOf(runtimeInterfaceValue)
|
||||
if rv.NumMethod() > 0 && rv.CanInterface() {
|
||||
if u, ok := rv.Interface().(unmarshalerContext); ok {
|
||||
return decodeUnmarshalerContext(ctx, buf, cursor, depth, u)
|
||||
}
|
||||
if u, ok := rv.Interface().(json.Unmarshaler); ok {
|
||||
return decodeUnmarshaler(buf, cursor, depth, u)
|
||||
}
|
||||
if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
|
||||
return decodeTextUnmarshaler(buf, cursor, depth, u, p)
|
||||
}
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == 'n' {
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**interface{})(unsafe.Pointer(&p)) = nil
|
||||
return cursor, nil
|
||||
}
|
||||
return 0, d.errUnmarshalType(rv.Type(), cursor)
|
||||
}
|
||||
|
||||
iface := rv.Interface()
|
||||
ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface))
|
||||
typ := ifaceHeader.typ
|
||||
if ifaceHeader.ptr == nil || d.typ == typ || typ == nil {
|
||||
// concrete type is empty interface
|
||||
return d.decodeEmptyInterface(ctx, cursor, depth, p)
|
||||
}
|
||||
if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr {
|
||||
return d.decodeEmptyInterface(ctx, cursor, depth, p)
|
||||
}
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == 'n' {
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**interface{})(unsafe.Pointer(&p)) = nil
|
||||
return cursor, nil
|
||||
}
|
||||
decoder, err := CompileToGetDecoder(typ)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return decoder.Decode(ctx, cursor, depth, ifaceHeader.ptr)
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) decodeEmptyInterface(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case '{':
|
||||
var v map[string]interface{}
|
||||
ptr := unsafe.Pointer(&v)
|
||||
cursor, err := d.mapDecoder.Decode(ctx, cursor, depth, ptr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = v
|
||||
return cursor, nil
|
||||
case '[':
|
||||
var v []interface{}
|
||||
ptr := unsafe.Pointer(&v)
|
||||
cursor, err := d.sliceDecoder.Decode(ctx, cursor, depth, ptr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = v
|
||||
return cursor, nil
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return d.floatDecoder.Decode(ctx, cursor, depth, p)
|
||||
case '"':
|
||||
var v string
|
||||
ptr := unsafe.Pointer(&v)
|
||||
cursor, err := d.stringDecoder.Decode(ctx, cursor, depth, ptr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = v
|
||||
return cursor, nil
|
||||
case 't':
|
||||
if err := validateTrue(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**interface{})(unsafe.Pointer(&p)) = true
|
||||
return cursor, nil
|
||||
case 'f':
|
||||
if err := validateFalse(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 5
|
||||
**(**interface{})(unsafe.Pointer(&p)) = false
|
||||
return cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**interface{})(unsafe.Pointer(&p)) = nil
|
||||
return cursor, nil
|
||||
}
|
||||
return cursor, errors.ErrNotAtBeginningOfValue(cursor)
|
||||
}
|
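decodeStreamEmptyInterface and decodeEmptyInterface choose the dynamic Go type from the first JSON byte: objects become map[string]interface{}, arrays []interface{}, numbers float64 (or json.Number under UseNumber), strings string, and true/false/null become bool/nil. A hedged sketch of that mapping via the public API (not part of the vendored diff):

```go
package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	var v interface{}
	if err := json.Unmarshal([]byte(`{"items":[1,"two",true,null]}`), &v); err != nil {
		panic(err)
	}
	obj := v.(map[string]interface{})     // '{' -> map[string]interface{}
	items := obj["items"].([]interface{}) // '[' -> []interface{}
	for _, it := range items {
		fmt.Printf("%T %v\n", it, it) // float64 1, string two, bool true, <nil> <nil>
	}
}
```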
||||
173 vendor/github.com/goccy/go-json/internal/decoder/map.go generated vendored Normal file
@@ -0,0 +1,173 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type mapDecoder struct {
|
||||
mapType *runtime.Type
|
||||
keyType *runtime.Type
|
||||
valueType *runtime.Type
|
||||
stringKeyType bool
|
||||
keyDecoder Decoder
|
||||
valueDecoder Decoder
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newMapDecoder(mapType *runtime.Type, keyType *runtime.Type, keyDec Decoder, valueType *runtime.Type, valueDec Decoder, structName, fieldName string) *mapDecoder {
|
||||
return &mapDecoder{
|
||||
mapType: mapType,
|
||||
keyDecoder: keyDec,
|
||||
keyType: keyType,
|
||||
stringKeyType: keyType.Kind() == reflect.String,
|
||||
valueType: valueType,
|
||||
valueDecoder: valueDec,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
//go:linkname makemap reflect.makemap
|
||||
func makemap(*runtime.Type, int) unsafe.Pointer
|
||||
|
||||
//nolint:golint
|
||||
//go:linkname mapassign_faststr runtime.mapassign_faststr
|
||||
//go:noescape
|
||||
func mapassign_faststr(t *runtime.Type, m unsafe.Pointer, s string) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign reflect.mapassign
|
||||
//go:noescape
|
||||
func mapassign(t *runtime.Type, m unsafe.Pointer, k, v unsafe.Pointer)
|
||||
|
||||
func (d *mapDecoder) mapassign(t *runtime.Type, m, k, v unsafe.Pointer) {
|
||||
if d.stringKeyType {
|
||||
mapV := mapassign_faststr(d.mapType, m, *(*string)(k))
|
||||
typedmemmove(d.valueType, mapV, v)
|
||||
} else {
|
||||
mapassign(t, m, k, v)
|
||||
}
|
||||
}
|
||||
|
||||
func (d *mapDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
|
||||
switch s.skipWhiteSpace() {
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = nil
|
||||
return nil
|
||||
case '{':
|
||||
default:
|
||||
return errors.ErrExpected("{ character for map value", s.totalOffset())
|
||||
}
|
||||
mapValue := *(*unsafe.Pointer)(p)
|
||||
if mapValue == nil {
|
||||
mapValue = makemap(d.mapType, 0)
|
||||
}
|
||||
if s.buf[s.cursor+1] == '}' {
|
||||
*(*unsafe.Pointer)(p) = mapValue
|
||||
s.cursor += 2
|
||||
return nil
|
||||
}
|
||||
for {
|
||||
s.cursor++
|
||||
k := unsafe_New(d.keyType)
|
||||
if err := d.keyDecoder.DecodeStream(s, depth, k); err != nil {
|
||||
return err
|
||||
}
|
||||
s.skipWhiteSpace()
|
||||
if !s.equalChar(':') {
|
||||
return errors.ErrExpected("colon after object key", s.totalOffset())
|
||||
}
|
||||
s.cursor++
|
||||
v := unsafe_New(d.valueType)
|
||||
if err := d.valueDecoder.DecodeStream(s, depth, v); err != nil {
|
||||
return err
|
||||
}
|
||||
d.mapassign(d.mapType, mapValue, k, v)
|
||||
s.skipWhiteSpace()
|
||||
if s.equalChar('}') {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
if !s.equalChar(',') {
|
||||
return errors.ErrExpected("comma after object value", s.totalOffset())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (d *mapDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
buflen := int64(len(buf))
|
||||
if buflen < 2 {
|
||||
return 0, errors.ErrExpected("{} for map", cursor)
|
||||
}
|
||||
switch buf[cursor] {
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = nil
|
||||
return cursor, nil
|
||||
case '{':
|
||||
default:
|
||||
return 0, errors.ErrExpected("{ character for map value", cursor)
|
||||
}
|
||||
cursor++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
mapValue := *(*unsafe.Pointer)(p)
|
||||
if mapValue == nil {
|
||||
mapValue = makemap(d.mapType, 0)
|
||||
}
|
||||
if buf[cursor] == '}' {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue
|
||||
cursor++
|
||||
return cursor, nil
|
||||
}
|
||||
for {
|
||||
k := unsafe_New(d.keyType)
|
||||
keyCursor, err := d.keyDecoder.Decode(ctx, cursor, depth, k)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = skipWhiteSpace(buf, keyCursor)
|
||||
if buf[cursor] != ':' {
|
||||
return 0, errors.ErrExpected("colon after object key", cursor)
|
||||
}
|
||||
cursor++
|
||||
v := unsafe_New(d.valueType)
|
||||
valueCursor, err := d.valueDecoder.Decode(ctx, cursor, depth, v)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
d.mapassign(d.mapType, mapValue, k, v)
|
||||
cursor = skipWhiteSpace(buf, valueCursor)
|
||||
if buf[cursor] == '}' {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue
|
||||
cursor++
|
||||
return cursor, nil
|
||||
}
|
||||
if buf[cursor] != ',' {
|
||||
return 0, errors.ErrExpected("comma after object value", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
108 vendor/github.com/goccy/go-json/internal/decoder/number.go generated vendored Normal file
@@ -0,0 +1,108 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type numberDecoder struct {
|
||||
stringDecoder *stringDecoder
|
||||
op func(unsafe.Pointer, json.Number)
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newNumberDecoder(structName, fieldName string, op func(unsafe.Pointer, json.Number)) *numberDecoder {
|
||||
return &numberDecoder{
|
||||
stringDecoder: newStringDecoder(structName, fieldName),
|
||||
op: op,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *numberDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamByte(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&bytes)), 64); err != nil {
|
||||
return errors.ErrSyntax(err.Error(), s.totalOffset())
|
||||
}
|
||||
d.op(p, json.Number(string(bytes)))
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *numberDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&bytes)), 64); err != nil {
|
||||
return 0, errors.ErrSyntax(err.Error(), c)
|
||||
}
|
||||
cursor = c
|
||||
s := *(*string)(unsafe.Pointer(&bytes))
|
||||
d.op(p, json.Number(s))
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func (d *numberDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return floatBytes(s), nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case '"':
|
||||
return d.stringDecoder.decodeStreamByte(s)
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("json.Number", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *numberDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
start := cursor
|
||||
cursor++
|
||||
for floatTable[buf[cursor]] {
|
||||
cursor++
|
||||
}
|
||||
num := buf[start:cursor]
|
||||
return num, cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
case '"':
|
||||
return d.stringDecoder.decodeByte(buf, cursor)
|
||||
default:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("json.Number", cursor)
|
||||
}
|
||||
}
|
||||
}
|
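numberDecoder backs the UseNumber mode: the raw digits are validated with strconv.ParseFloat but stored as a json.Number string rather than converted to float64. A sketch of how that is reached through the Decoder API (assumed encoding/json-compatible; not part of the vendored diff):

```go
package main

import (
	"fmt"
	"strings"

	json "github.com/goccy/go-json"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"price": 19.99}`))
	dec.UseNumber() // route numbers through numberDecoder instead of floatDecoder

	var v map[string]interface{}
	if err := dec.Decode(&v); err != nil {
		panic(err)
	}
	// The value keeps its exact textual form as a json.Number.
	fmt.Printf("%T %v\n", v["price"], v["price"]) // json.Number 19.99
}
```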
||||
15 vendor/github.com/goccy/go-json/internal/decoder/option.go generated vendored Normal file
@@ -0,0 +1,15 @@
package decoder

import "context"

type OptionFlags uint8

const (
	FirstWinOption OptionFlags = 1 << iota
	ContextOption
)

type Option struct {
	Flags   OptionFlags
	Context context.Context
}
87
vendor/github.com/goccy/go-json/internal/decoder/ptr.go
generated
vendored
Normal file
87
vendor/github.com/goccy/go-json/internal/decoder/ptr.go
generated
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type ptrDecoder struct {
|
||||
dec Decoder
|
||||
typ *runtime.Type
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newPtrDecoder(dec Decoder, typ *runtime.Type, structName, fieldName string) *ptrDecoder {
|
||||
return &ptrDecoder{
|
||||
dec: dec,
|
||||
typ: typ,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *ptrDecoder) contentDecoder() Decoder {
|
||||
dec, ok := d.dec.(*ptrDecoder)
|
||||
if !ok {
|
||||
return d.dec
|
||||
}
|
||||
return dec.contentDecoder()
|
||||
}
|
||||
|
||||
//nolint:golint
|
||||
//go:linkname unsafe_New reflect.unsafe_New
|
||||
func unsafe_New(*runtime.Type) unsafe.Pointer
|
||||
|
||||
func (d *ptrDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
if s.skipWhiteSpace() == nul {
|
||||
s.read()
|
||||
}
|
||||
if s.char() == 'n' {
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return nil
|
||||
}
|
||||
var newptr unsafe.Pointer
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
newptr = unsafe_New(d.typ)
|
||||
*(*unsafe.Pointer)(p) = newptr
|
||||
} else {
|
||||
newptr = *(*unsafe.Pointer)(p)
|
||||
}
|
||||
if err := d.dec.DecodeStream(s, depth, newptr); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ptrDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == 'n' {
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if p != nil {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
}
|
||||
var newptr unsafe.Pointer
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
newptr = unsafe_New(d.typ)
|
||||
*(*unsafe.Pointer)(p) = newptr
|
||||
} else {
|
||||
newptr = *(*unsafe.Pointer)(p)
|
||||
}
|
||||
c, err := d.dec.Decode(ctx, cursor, depth, newptr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
return cursor, nil
|
||||
}
|
||||
294
vendor/github.com/goccy/go-json/internal/decoder/slice.go
generated
vendored
Normal file
294
vendor/github.com/goccy/go-json/internal/decoder/slice.go
generated
vendored
Normal file
@@ -0,0 +1,294 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type sliceDecoder struct {
|
||||
elemType *runtime.Type
|
||||
isElemPointerType bool
|
||||
valueDecoder Decoder
|
||||
size uintptr
|
||||
arrayPool sync.Pool
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
// If use reflect.SliceHeader, data type is uintptr.
|
||||
// In this case, Go compiler cannot trace reference created by newArray().
|
||||
// So, define using unsafe.Pointer as data type
|
||||
type sliceHeader struct {
|
||||
data unsafe.Pointer
|
||||
len int
|
||||
cap int
|
||||
}
|
||||
|
||||
const (
|
||||
defaultSliceCapacity = 2
|
||||
)
|
||||
|
||||
func newSliceDecoder(dec Decoder, elemType *runtime.Type, size uintptr, structName, fieldName string) *sliceDecoder {
|
||||
return &sliceDecoder{
|
||||
valueDecoder: dec,
|
||||
elemType: elemType,
|
||||
isElemPointerType: elemType.Kind() == reflect.Ptr || elemType.Kind() == reflect.Map,
|
||||
size: size,
|
||||
arrayPool: sync.Pool{
|
||||
New: func() interface{} {
|
||||
return &sliceHeader{
|
||||
data: newArray(elemType, defaultSliceCapacity),
|
||||
len: 0,
|
||||
cap: defaultSliceCapacity,
|
||||
}
|
||||
},
|
||||
},
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *sliceDecoder) newSlice(src *sliceHeader) *sliceHeader {
|
||||
slice := d.arrayPool.Get().(*sliceHeader)
|
||||
if src.len > 0 {
|
||||
// copy original elem
|
||||
if slice.cap < src.cap {
|
||||
data := newArray(d.elemType, src.cap)
|
||||
slice = &sliceHeader{data: data, len: src.len, cap: src.cap}
|
||||
} else {
|
||||
slice.len = src.len
|
||||
}
|
||||
copySlice(d.elemType, *slice, *src)
|
||||
} else {
|
||||
slice.len = 0
|
||||
}
|
||||
return slice
|
||||
}
|
||||
|
||||
func (d *sliceDecoder) releaseSlice(p *sliceHeader) {
|
||||
d.arrayPool.Put(p)
|
||||
}
|
||||
|
||||
//go:linkname copySlice reflect.typedslicecopy
|
||||
func copySlice(elemType *runtime.Type, dst, src sliceHeader) int
|
||||
|
||||
//go:linkname newArray reflect.unsafe_NewArray
|
||||
func newArray(*runtime.Type, int) unsafe.Pointer
|
||||
|
||||
//go:linkname typedmemmove reflect.typedmemmove
|
||||
func typedmemmove(t *runtime.Type, dst, src unsafe.Pointer)
|
||||
|
||||
func (d *sliceDecoder) errNumber(offset int64) *errors.UnmarshalTypeError {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: reflect.SliceOf(runtime.RType2Type(d.elemType)),
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
Offset: offset,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *sliceDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return nil
|
||||
case '[':
|
||||
s.cursor++
|
||||
if s.skipWhiteSpace() == ']' {
|
||||
dst := (*sliceHeader)(p)
|
||||
if dst.data == nil {
|
||||
dst.data = newArray(d.elemType, 0)
|
||||
} else {
|
||||
dst.len = 0
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
idx := 0
|
||||
slice := d.newSlice((*sliceHeader)(p))
|
||||
srcLen := slice.len
|
||||
capacity := slice.cap
|
||||
data := slice.data
|
||||
for {
|
||||
if capacity <= idx {
|
||||
src := sliceHeader{data: data, len: idx, cap: capacity}
|
||||
capacity *= 2
|
||||
data = newArray(d.elemType, capacity)
|
||||
dst := sliceHeader{data: data, len: idx, cap: capacity}
|
||||
copySlice(d.elemType, dst, src)
|
||||
}
|
||||
ep := unsafe.Pointer(uintptr(data) + uintptr(idx)*d.size)
|
||||
|
||||
// if srcLen is greater than idx, keep the original reference
|
||||
if srcLen <= idx {
|
||||
if d.isElemPointerType {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&ep)) = nil // initialize elem pointer
|
||||
} else {
|
||||
// assign new element to the slice
|
||||
typedmemmove(d.elemType, ep, unsafe_New(d.elemType))
|
||||
}
|
||||
}
|
||||
|
||||
if err := d.valueDecoder.DecodeStream(s, depth, ep); err != nil {
|
||||
return err
|
||||
}
|
||||
s.skipWhiteSpace()
|
||||
RETRY:
|
||||
switch s.char() {
|
||||
case ']':
|
||||
slice.cap = capacity
|
||||
slice.len = idx + 1
|
||||
slice.data = data
|
||||
dst := (*sliceHeader)(p)
|
||||
dst.len = idx + 1
|
||||
if dst.len > dst.cap {
|
||||
dst.data = newArray(d.elemType, dst.len)
|
||||
dst.cap = dst.len
|
||||
}
|
||||
copySlice(d.elemType, *dst, *slice)
|
||||
d.releaseSlice(slice)
|
||||
s.cursor++
|
||||
return nil
|
||||
case ',':
|
||||
idx++
|
||||
case nul:
|
||||
if s.read() {
|
||||
goto RETRY
|
||||
}
|
||||
slice.cap = capacity
|
||||
slice.data = data
|
||||
d.releaseSlice(slice)
|
||||
goto ERROR
|
||||
default:
|
||||
slice.cap = capacity
|
||||
slice.data = data
|
||||
d.releaseSlice(slice)
|
||||
goto ERROR
|
||||
}
|
||||
s.cursor++
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return d.errNumber(s.totalOffset())
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
return errors.ErrUnexpectedEndOfJSON("slice", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *sliceDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return cursor, nil
|
||||
case '[':
|
||||
cursor++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == ']' {
|
||||
dst := (*sliceHeader)(p)
|
||||
if dst.data == nil {
|
||||
dst.data = newArray(d.elemType, 0)
|
||||
} else {
|
||||
dst.len = 0
|
||||
}
|
||||
cursor++
|
||||
return cursor, nil
|
||||
}
|
||||
idx := 0
|
||||
slice := d.newSlice((*sliceHeader)(p))
|
||||
srcLen := slice.len
|
||||
capacity := slice.cap
|
||||
data := slice.data
|
||||
for {
|
||||
if capacity <= idx {
|
||||
src := sliceHeader{data: data, len: idx, cap: capacity}
|
||||
capacity *= 2
|
||||
data = newArray(d.elemType, capacity)
|
||||
dst := sliceHeader{data: data, len: idx, cap: capacity}
|
||||
copySlice(d.elemType, dst, src)
|
||||
}
|
||||
ep := unsafe.Pointer(uintptr(data) + uintptr(idx)*d.size)
|
||||
// if srcLen is greater than idx, keep the original reference
|
||||
if srcLen <= idx {
|
||||
if d.isElemPointerType {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&ep)) = nil // initialize elem pointer
|
||||
} else {
|
||||
// assign new element to the slice
|
||||
typedmemmove(d.elemType, ep, unsafe_New(d.elemType))
|
||||
}
|
||||
}
|
||||
c, err := d.valueDecoder.Decode(ctx, cursor, depth, ep)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case ']':
|
||||
slice.cap = capacity
|
||||
slice.len = idx + 1
|
||||
slice.data = data
|
||||
dst := (*sliceHeader)(p)
|
||||
dst.len = idx + 1
|
||||
if dst.len > dst.cap {
|
||||
dst.data = newArray(d.elemType, dst.len)
|
||||
dst.cap = dst.len
|
||||
}
|
||||
copySlice(d.elemType, *dst, *slice)
|
||||
d.releaseSlice(slice)
|
||||
cursor++
|
||||
return cursor, nil
|
||||
case ',':
|
||||
idx++
|
||||
default:
|
||||
slice.cap = capacity
|
||||
slice.data = data
|
||||
d.releaseSlice(slice)
|
||||
return 0, errors.ErrInvalidCharacter(buf[cursor], "slice", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return 0, d.errNumber(cursor)
|
||||
default:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("slice", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
554
vendor/github.com/goccy/go-json/internal/decoder/stream.go
generated
vendored
Normal file
554
vendor/github.com/goccy/go-json/internal/decoder/stream.go
generated
vendored
Normal file
@@ -0,0 +1,554 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
const (
|
||||
initBufSize = 512
|
||||
)
|
||||
|
||||
type Stream struct {
|
||||
buf []byte
|
||||
bufSize int64
|
||||
length int64
|
||||
r io.Reader
|
||||
offset int64
|
||||
cursor int64
|
||||
filledBuffer bool
|
||||
allRead bool
|
||||
UseNumber bool
|
||||
DisallowUnknownFields bool
|
||||
Option *Option
|
||||
}
|
||||
|
||||
func NewStream(r io.Reader) *Stream {
|
||||
return &Stream{
|
||||
r: r,
|
||||
bufSize: initBufSize,
|
||||
buf: make([]byte, initBufSize),
|
||||
Option: &Option{},
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Stream) TotalOffset() int64 {
|
||||
return s.totalOffset()
|
||||
}
|
||||
|
||||
func (s *Stream) Buffered() io.Reader {
|
||||
buflen := int64(len(s.buf))
|
||||
for i := s.cursor; i < buflen; i++ {
|
||||
if s.buf[i] == nul {
|
||||
return bytes.NewReader(s.buf[s.cursor:i])
|
||||
}
|
||||
}
|
||||
return bytes.NewReader(s.buf[s.cursor:])
|
||||
}
|
||||
|
||||
func (s *Stream) PrepareForDecode() error {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\t', '\r', '\n':
|
||||
s.cursor++
|
||||
continue
|
||||
case ',', ':':
|
||||
s.cursor++
|
||||
return nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
return io.EOF
|
||||
}
|
||||
break
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Stream) totalOffset() int64 {
|
||||
return s.offset + s.cursor
|
||||
}
|
||||
|
||||
func (s *Stream) char() byte {
|
||||
return s.buf[s.cursor]
|
||||
}
|
||||
|
||||
func (s *Stream) equalChar(c byte) bool {
|
||||
cur := s.buf[s.cursor]
|
||||
if cur == nul {
|
||||
s.read()
|
||||
cur = s.buf[s.cursor]
|
||||
}
|
||||
return cur == c
|
||||
}
|
||||
|
||||
func (s *Stream) stat() ([]byte, int64, unsafe.Pointer) {
|
||||
return s.buf, s.cursor, (*sliceHeader)(unsafe.Pointer(&s.buf)).data
|
||||
}
|
||||
|
||||
func (s *Stream) bufptr() unsafe.Pointer {
|
||||
return (*sliceHeader)(unsafe.Pointer(&s.buf)).data
|
||||
}
|
||||
|
||||
func (s *Stream) statForRetry() ([]byte, int64, unsafe.Pointer) {
|
||||
s.cursor-- // for retry ( because caller progress cursor position in each loop )
|
||||
return s.buf, s.cursor, (*sliceHeader)(unsafe.Pointer(&s.buf)).data
|
||||
}
|
||||
|
||||
func (s *Stream) Reset() {
|
||||
s.reset()
|
||||
s.bufSize = initBufSize
|
||||
}
|
||||
|
||||
func (s *Stream) More() bool {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\r', '\t':
|
||||
s.cursor++
|
||||
continue
|
||||
case '}', ']':
|
||||
return false
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
return false
|
||||
}
|
||||
break
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (s *Stream) Token() (interface{}, error) {
|
||||
for {
|
||||
c := s.char()
|
||||
switch c {
|
||||
case ' ', '\n', '\r', '\t':
|
||||
s.cursor++
|
||||
case '{', '[', ']', '}':
|
||||
s.cursor++
|
||||
return json.Delim(c), nil
|
||||
case ',', ':':
|
||||
s.cursor++
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
bytes := floatBytes(s)
|
||||
s := *(*string)(unsafe.Pointer(&bytes))
|
||||
f64, err := strconv.ParseFloat(s, 64)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return f64, nil
|
||||
case '"':
|
||||
bytes, err := stringBytes(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return string(bytes), nil
|
||||
case 't':
|
||||
if err := trueBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return true, nil
|
||||
case 'f':
|
||||
if err := falseBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return false, nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto END
|
||||
default:
|
||||
return nil, errors.ErrInvalidCharacter(s.char(), "token", s.totalOffset())
|
||||
}
|
||||
}
|
||||
END:
|
||||
return nil, io.EOF
|
||||
}
|
||||
|
||||
func (s *Stream) reset() {
|
||||
s.offset += s.cursor
|
||||
s.buf = s.buf[s.cursor:]
|
||||
s.length -= s.cursor
|
||||
s.cursor = 0
|
||||
}
|
||||
|
||||
func (s *Stream) readBuf() []byte {
|
||||
if s.filledBuffer {
|
||||
s.bufSize *= 2
|
||||
remainBuf := s.buf
|
||||
s.buf = make([]byte, s.bufSize)
|
||||
copy(s.buf, remainBuf)
|
||||
}
|
||||
remainLen := s.length - s.cursor
|
||||
remainNotNulCharNum := int64(0)
|
||||
for i := int64(0); i < remainLen; i++ {
|
||||
if s.buf[s.cursor+i] == nul {
|
||||
break
|
||||
}
|
||||
remainNotNulCharNum++
|
||||
}
|
||||
s.length = s.cursor + remainNotNulCharNum
|
||||
return s.buf[s.cursor+remainNotNulCharNum:]
|
||||
}
|
||||
|
||||
func (s *Stream) read() bool {
|
||||
if s.allRead {
|
||||
return false
|
||||
}
|
||||
buf := s.readBuf()
|
||||
last := len(buf) - 1
|
||||
buf[last] = nul
|
||||
n, err := s.r.Read(buf[:last])
|
||||
s.length += int64(n)
|
||||
if n == last {
|
||||
s.filledBuffer = true
|
||||
} else {
|
||||
s.filledBuffer = false
|
||||
}
|
||||
if err == io.EOF {
|
||||
s.allRead = true
|
||||
} else if err != nil {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (s *Stream) skipWhiteSpace() byte {
|
||||
p := s.bufptr()
|
||||
LOOP:
|
||||
c := char(p, s.cursor)
|
||||
switch c {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
goto LOOP
|
||||
case nul:
|
||||
if s.read() {
|
||||
p = s.bufptr()
|
||||
goto LOOP
|
||||
}
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
func (s *Stream) skipObject(depth int64) error {
|
||||
braceCount := 1
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case '{':
|
||||
braceCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
case '}':
|
||||
braceCount--
|
||||
depth--
|
||||
if braceCount == 0 {
|
||||
s.cursor = cursor + 1
|
||||
return nil
|
||||
}
|
||||
case '[':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
case ']':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch char(p, cursor) {
|
||||
case '\\':
|
||||
cursor++
|
||||
if char(p, cursor) == nul {
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.statForRetry()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.statForRetry()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("object of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Stream) skipArray(depth int64) error {
|
||||
bracketCount := 1
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case '[':
|
||||
bracketCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
case ']':
|
||||
bracketCount--
|
||||
depth--
|
||||
if bracketCount == 0 {
|
||||
s.cursor = cursor + 1
|
||||
return nil
|
||||
}
|
||||
case '{':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch char(p, cursor) {
|
||||
case '\\':
|
||||
cursor++
|
||||
if char(p, cursor) == nul {
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.statForRetry()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.statForRetry()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("array of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Stream) skipValue(depth int64) error {
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("value of object", s.totalOffset())
|
||||
case '{':
|
||||
s.cursor = cursor + 1
|
||||
return s.skipObject(depth + 1)
|
||||
case '[':
|
||||
s.cursor = cursor + 1
|
||||
return s.skipArray(depth + 1)
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch char(p, cursor) {
|
||||
case '\\':
|
||||
cursor++
|
||||
if char(p, cursor) == nul {
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.statForRetry()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("value of string", s.totalOffset())
|
||||
}
|
||||
case '"':
|
||||
s.cursor = cursor + 1
|
||||
return nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.statForRetry()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("value of string", s.totalOffset())
|
||||
}
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
for {
|
||||
cursor++
|
||||
c := char(p, cursor)
|
||||
if floatTable[c] {
|
||||
continue
|
||||
} else if c == nul {
|
||||
if s.read() {
|
||||
s.cursor-- // for retry current character
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
}
|
||||
s.cursor = cursor
|
||||
return nil
|
||||
}
|
||||
case 't':
|
||||
s.cursor = cursor
|
||||
if err := trueBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case 'f':
|
||||
s.cursor = cursor
|
||||
if err := falseBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case 'n':
|
||||
s.cursor = cursor
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func nullBytes(s *Stream) error {
|
||||
// current cursor's character is 'n'
|
||||
s.cursor++
|
||||
if s.char() != 'u' {
|
||||
if err := retryReadNull(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'l' {
|
||||
if err := retryReadNull(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'l' {
|
||||
if err := retryReadNull(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
|
||||
func retryReadNull(s *Stream) error {
|
||||
if s.char() == nul && s.read() {
|
||||
return nil
|
||||
}
|
||||
return errors.ErrInvalidCharacter(s.char(), "null", s.totalOffset())
|
||||
}
|
||||
|
||||
func trueBytes(s *Stream) error {
|
||||
// current cursor's character is 't'
|
||||
s.cursor++
|
||||
if s.char() != 'r' {
|
||||
if err := retryReadTrue(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'u' {
|
||||
if err := retryReadTrue(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'e' {
|
||||
if err := retryReadTrue(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
|
||||
func retryReadTrue(s *Stream) error {
|
||||
if s.char() == nul && s.read() {
|
||||
return nil
|
||||
}
|
||||
return errors.ErrInvalidCharacter(s.char(), "bool(true)", s.totalOffset())
|
||||
}
|
||||
|
||||
func falseBytes(s *Stream) error {
|
||||
// current cursor's character is 'f'
|
||||
s.cursor++
|
||||
if s.char() != 'a' {
|
||||
if err := retryReadFalse(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'l' {
|
||||
if err := retryReadFalse(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 's' {
|
||||
if err := retryReadFalse(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'e' {
|
||||
if err := retryReadFalse(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
|
||||
func retryReadFalse(s *Stream) error {
|
||||
if s.char() == nul && s.read() {
|
||||
return nil
|
||||
}
|
||||
return errors.ErrInvalidCharacter(s.char(), "bool(false)", s.totalOffset())
|
||||
}
|
||||
361
vendor/github.com/goccy/go-json/internal/decoder/string.go
generated
vendored
Normal file
361
vendor/github.com/goccy/go-json/internal/decoder/string.go
generated
vendored
Normal file
@@ -0,0 +1,361 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unicode"
|
||||
"unicode/utf16"
|
||||
"unicode/utf8"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type stringDecoder struct {
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newStringDecoder(structName, fieldName string) *stringDecoder {
|
||||
return &stringDecoder{
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *stringDecoder) errUnmarshalType(typeName string, offset int64) *errors.UnmarshalTypeError {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: typeName,
|
||||
Type: reflect.TypeOf(""),
|
||||
Offset: offset,
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *stringDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamByte(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if bytes == nil {
|
||||
return nil
|
||||
}
|
||||
**(**string)(unsafe.Pointer(&p)) = *(*string)(unsafe.Pointer(&bytes))
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *stringDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return c, nil
|
||||
}
|
||||
cursor = c
|
||||
**(**string)(unsafe.Pointer(&p)) = *(*string)(unsafe.Pointer(&bytes))
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
var (
|
||||
hexToInt = [256]int{
|
||||
'0': 0,
|
||||
'1': 1,
|
||||
'2': 2,
|
||||
'3': 3,
|
||||
'4': 4,
|
||||
'5': 5,
|
||||
'6': 6,
|
||||
'7': 7,
|
||||
'8': 8,
|
||||
'9': 9,
|
||||
'A': 10,
|
||||
'B': 11,
|
||||
'C': 12,
|
||||
'D': 13,
|
||||
'E': 14,
|
||||
'F': 15,
|
||||
'a': 10,
|
||||
'b': 11,
|
||||
'c': 12,
|
||||
'd': 13,
|
||||
'e': 14,
|
||||
'f': 15,
|
||||
}
|
||||
)
|
||||
|
||||
func unicodeToRune(code []byte) rune {
|
||||
var r rune
|
||||
for i := 0; i < len(code); i++ {
|
||||
r = r*16 + rune(hexToInt[code[i]])
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func decodeUnicodeRune(s *Stream, p unsafe.Pointer) (rune, int64, unsafe.Pointer, error) {
|
||||
const defaultOffset = 5
|
||||
const surrogateOffset = 11
|
||||
|
||||
if s.cursor+defaultOffset >= s.length {
|
||||
if !s.read() {
|
||||
return rune(0), 0, nil, errors.ErrInvalidCharacter(s.char(), "escaped string", s.totalOffset())
|
||||
}
|
||||
p = s.bufptr()
|
||||
}
|
||||
|
||||
r := unicodeToRune(s.buf[s.cursor+1 : s.cursor+defaultOffset])
|
||||
if utf16.IsSurrogate(r) {
|
||||
if s.cursor+surrogateOffset >= s.length {
|
||||
s.read()
|
||||
p = s.bufptr()
|
||||
}
|
||||
if s.cursor+surrogateOffset >= s.length || s.buf[s.cursor+defaultOffset] != '\\' || s.buf[s.cursor+defaultOffset+1] != 'u' {
|
||||
return unicode.ReplacementChar, defaultOffset, p, nil
|
||||
}
|
||||
r2 := unicodeToRune(s.buf[s.cursor+defaultOffset+2 : s.cursor+surrogateOffset])
|
||||
if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar {
|
||||
return r, surrogateOffset, p, nil
|
||||
}
|
||||
}
|
||||
return r, defaultOffset, p, nil
|
||||
}
|
||||
|
||||
func decodeUnicode(s *Stream, p unsafe.Pointer) (unsafe.Pointer, error) {
|
||||
const backSlashAndULen = 2 // length of \u
|
||||
|
||||
r, offset, pp, err := decodeUnicodeRune(s, p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
unicode := []byte(string(r))
|
||||
unicodeLen := int64(len(unicode))
|
||||
s.buf = append(append(s.buf[:s.cursor-1], unicode...), s.buf[s.cursor+offset:]...)
|
||||
unicodeOrgLen := offset - 1
|
||||
s.length = s.length - (backSlashAndULen + (unicodeOrgLen - unicodeLen))
|
||||
s.cursor = s.cursor - backSlashAndULen + unicodeLen
|
||||
return pp, nil
|
||||
}
|
||||
|
||||
func decodeEscapeString(s *Stream, p unsafe.Pointer) (unsafe.Pointer, error) {
|
||||
s.cursor++
|
||||
RETRY:
|
||||
switch s.buf[s.cursor] {
|
||||
case '"':
|
||||
s.buf[s.cursor] = '"'
|
||||
case '\\':
|
||||
s.buf[s.cursor] = '\\'
|
||||
case '/':
|
||||
s.buf[s.cursor] = '/'
|
||||
case 'b':
|
||||
s.buf[s.cursor] = '\b'
|
||||
case 'f':
|
||||
s.buf[s.cursor] = '\f'
|
||||
case 'n':
|
||||
s.buf[s.cursor] = '\n'
|
||||
case 'r':
|
||||
s.buf[s.cursor] = '\r'
|
||||
case 't':
|
||||
s.buf[s.cursor] = '\t'
|
||||
case 'u':
|
||||
return decodeUnicode(s, p)
|
||||
case nul:
|
||||
if !s.read() {
|
||||
return nil, errors.ErrInvalidCharacter(s.char(), "escaped string", s.totalOffset())
|
||||
}
|
||||
goto RETRY
|
||||
default:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
s.buf = append(s.buf[:s.cursor-1], s.buf[s.cursor:]...)
|
||||
s.length--
|
||||
s.cursor--
|
||||
return p, nil
|
||||
}
|
||||
|
||||
var (
|
||||
runeErrBytes = []byte(string(utf8.RuneError))
|
||||
runeErrBytesLen = int64(len(runeErrBytes))
|
||||
)
|
||||
|
||||
func stringBytes(s *Stream) ([]byte, error) {
|
||||
_, cursor, p := s.stat()
|
||||
cursor++ // skip double quote char
|
||||
start := cursor
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case '\\':
|
||||
s.cursor = cursor
|
||||
pp, err := decodeEscapeString(s, p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p = pp
|
||||
cursor = s.cursor
|
||||
case '"':
|
||||
literal := s.buf[start:cursor]
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
return literal, nil
|
||||
case
|
||||
// 0x00 is nul, 0x5c is '\\', 0x22 is '"' .
|
||||
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, // 0x00-0x0F
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, // 0x10-0x1F
|
||||
0x20, 0x21 /*0x22,*/, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, // 0x20-0x2F
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x3B, 0x3C, 0x3D, 0x3E, 0x3F, // 0x30-0x3F
|
||||
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, 0x4B, 0x4C, 0x4D, 0x4E, 0x4F, // 0x40-0x4F
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x5B /*0x5C,*/, 0x5D, 0x5E, 0x5F, // 0x50-0x5F
|
||||
0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, // 0x60-0x6F
|
||||
0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 0x7B, 0x7C, 0x7D, 0x7E, 0x7F: // 0x70-0x7F
|
||||
// character is ASCII. skip to next char
|
||||
case
|
||||
0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x8B, 0x8C, 0x8D, 0x8E, 0x8F, // 0x80-0x8F
|
||||
0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0x9B, 0x9C, 0x9D, 0x9E, 0x9F, // 0x90-0x9F
|
||||
0xA0, 0xA1, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xAB, 0xAC, 0xAD, 0xAE, 0xAF, // 0xA0-0xAF
|
||||
0xB0, 0xB1, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 0xBB, 0xBC, 0xBD, 0xBE, 0xBF, // 0xB0-0xBF
|
||||
0xC0, 0xC1, // 0xC0-0xC1
|
||||
0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xFF: // 0xF5-0xFE
|
||||
// character is invalid
|
||||
s.buf = append(append(append([]byte{}, s.buf[:cursor]...), runeErrBytes...), s.buf[cursor+1:]...)
|
||||
_, _, p = s.stat()
|
||||
cursor += runeErrBytesLen
|
||||
s.length += runeErrBytesLen
|
||||
continue
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
case 0xEF:
|
||||
// RuneError is {0xEF, 0xBF, 0xBD}
|
||||
if s.buf[cursor+1] == 0xBF && s.buf[cursor+2] == 0xBD {
|
||||
// found RuneError: skip
|
||||
cursor += 2
|
||||
break
|
||||
}
|
||||
fallthrough
|
||||
default:
|
||||
// multi bytes character
|
||||
r, _ := utf8.DecodeRune(s.buf[cursor:])
|
||||
b := []byte(string(r))
|
||||
if r == utf8.RuneError {
|
||||
s.buf = append(append(append([]byte{}, s.buf[:cursor]...), b...), s.buf[cursor+1:]...)
|
||||
_, _, p = s.stat()
|
||||
}
|
||||
cursor += int64(len(b))
|
||||
s.length += int64(len(b))
|
||||
continue
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
ERROR:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *stringDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '[':
|
||||
return nil, d.errUnmarshalType("array", s.totalOffset())
|
||||
case '{':
|
||||
return nil, d.errUnmarshalType("object", s.totalOffset())
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return nil, d.errUnmarshalType("number", s.totalOffset())
|
||||
case '"':
|
||||
return stringBytes(s)
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
return nil, errors.ErrNotAtBeginningOfValue(s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *stringDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case '[':
|
||||
return nil, 0, d.errUnmarshalType("array", cursor)
|
||||
case '{':
|
||||
return nil, 0, d.errUnmarshalType("object", cursor)
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return nil, 0, d.errUnmarshalType("number", cursor)
|
||||
case '"':
|
||||
cursor++
|
||||
start := cursor
|
||||
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
for {
|
||||
switch char(b, cursor) {
|
||||
case '\\':
|
||||
cursor++
|
||||
switch char(b, cursor) {
|
||||
case '"':
|
||||
buf[cursor] = '"'
|
||||
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||
case '\\':
|
||||
buf[cursor] = '\\'
|
||||
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||
case '/':
|
||||
buf[cursor] = '/'
|
||||
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||
case 'b':
|
||||
buf[cursor] = '\b'
|
||||
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||
case 'f':
|
||||
buf[cursor] = '\f'
|
||||
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||
case 'n':
|
||||
buf[cursor] = '\n'
|
||||
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||
case 'r':
|
||||
buf[cursor] = '\r'
|
||||
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||
case 't':
|
||||
buf[cursor] = '\t'
|
||||
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||
case 'u':
|
||||
buflen := int64(len(buf))
|
||||
if cursor+5 >= buflen {
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("escaped string", cursor)
|
||||
}
|
||||
code := unicodeToRune(buf[cursor+1 : cursor+5])
|
||||
unicode := []byte(string(code))
|
||||
buf = append(append(buf[:cursor-1], unicode...), buf[cursor+5:]...)
|
||||
default:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("escaped string", cursor)
|
||||
}
|
||||
continue
|
||||
case '"':
|
||||
literal := buf[start:cursor]
|
||||
cursor++
|
||||
return literal, cursor, nil
|
||||
case nul:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
default:
|
||||
return nil, 0, errors.ErrNotAtBeginningOfValue(cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
819
vendor/github.com/goccy/go-json/internal/decoder/struct.go
generated
vendored
Normal file
819
vendor/github.com/goccy/go-json/internal/decoder/struct.go
generated
vendored
Normal file
@@ -0,0 +1,819 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"math/bits"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf16"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type structFieldSet struct {
|
||||
dec Decoder
|
||||
offset uintptr
|
||||
isTaggedKey bool
|
||||
fieldIdx int
|
||||
key string
|
||||
keyLen int64
|
||||
err error
|
||||
}
|
||||
|
||||
type structDecoder struct {
|
||||
fieldMap map[string]*structFieldSet
|
||||
fieldUniqueNameNum int
|
||||
stringDecoder *stringDecoder
|
||||
structName string
|
||||
fieldName string
|
||||
isTriedOptimize bool
|
||||
keyBitmapUint8 [][256]uint8
|
||||
keyBitmapUint16 [][256]uint16
|
||||
sortedFieldSets []*structFieldSet
|
||||
keyDecoder func(*structDecoder, []byte, int64) (int64, *structFieldSet, error)
|
||||
keyStreamDecoder func(*structDecoder, *Stream) (*structFieldSet, string, error)
|
||||
}
|
||||
|
||||
var (
|
||||
largeToSmallTable [256]byte
|
||||
)
|
||||
|
||||
func init() {
|
||||
for i := 0; i < 256; i++ {
|
||||
c := i
|
||||
if 'A' <= c && c <= 'Z' {
|
||||
c += 'a' - 'A'
|
||||
}
|
||||
largeToSmallTable[i] = byte(c)
|
||||
}
|
||||
}
|
||||
|
||||
func newStructDecoder(structName, fieldName string, fieldMap map[string]*structFieldSet) *structDecoder {
|
||||
return &structDecoder{
|
||||
fieldMap: fieldMap,
|
||||
stringDecoder: newStringDecoder(structName, fieldName),
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
keyDecoder: decodeKey,
|
||||
keyStreamDecoder: decodeKeyStream,
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
allowOptimizeMaxKeyLen = 64
|
||||
allowOptimizeMaxFieldLen = 16
|
||||
)
|
||||
|
||||
func (d *structDecoder) tryOptimize() {
|
||||
fieldUniqueNameMap := map[string]int{}
|
||||
fieldIdx := -1
|
||||
for k, v := range d.fieldMap {
|
||||
lower := strings.ToLower(k)
|
||||
idx, exists := fieldUniqueNameMap[lower]
|
||||
if exists {
|
||||
v.fieldIdx = idx
|
||||
} else {
|
||||
fieldIdx++
|
||||
v.fieldIdx = fieldIdx
|
||||
}
|
||||
fieldUniqueNameMap[lower] = fieldIdx
|
||||
}
|
||||
d.fieldUniqueNameNum = len(fieldUniqueNameMap)
|
||||
|
||||
if d.isTriedOptimize {
|
||||
return
|
||||
}
|
||||
fieldMap := map[string]*structFieldSet{}
|
||||
conflicted := map[string]struct{}{}
|
||||
for k, v := range d.fieldMap {
|
||||
key := strings.ToLower(k)
|
||||
if key != k {
|
||||
// already exists same key (e.g. Hello and HELLO has same lower case key
|
||||
if _, exists := conflicted[key]; exists {
|
||||
d.isTriedOptimize = true
|
||||
return
|
||||
}
|
||||
conflicted[key] = struct{}{}
|
||||
}
|
||||
if field, exists := fieldMap[key]; exists {
|
||||
if field != v {
|
||||
d.isTriedOptimize = true
|
||||
return
|
||||
}
|
||||
}
|
||||
fieldMap[key] = v
|
||||
}
|
||||
|
||||
if len(fieldMap) > allowOptimizeMaxFieldLen {
|
||||
d.isTriedOptimize = true
|
||||
return
|
||||
}
|
||||
|
||||
var maxKeyLen int
|
||||
sortedKeys := []string{}
|
||||
for key := range fieldMap {
|
||||
keyLen := len(key)
|
||||
if keyLen > allowOptimizeMaxKeyLen {
|
||||
d.isTriedOptimize = true
|
||||
return
|
||||
}
|
||||
if maxKeyLen < keyLen {
|
||||
maxKeyLen = keyLen
|
||||
}
|
||||
sortedKeys = append(sortedKeys, key)
|
||||
}
|
||||
sort.Strings(sortedKeys)
|
||||
|
||||
// By allocating one extra capacity than `maxKeyLen`,
|
||||
// it is possible to avoid the process of comparing the index of the key with the length of the bitmap each time.
|
||||
bitmapLen := maxKeyLen + 1
|
||||
if len(sortedKeys) <= 8 {
|
||||
keyBitmap := make([][256]uint8, bitmapLen)
|
||||
for i, key := range sortedKeys {
|
||||
for j := 0; j < len(key); j++ {
|
||||
c := key[j]
|
||||
keyBitmap[j][c] |= (1 << uint(i))
|
||||
}
|
||||
d.sortedFieldSets = append(d.sortedFieldSets, fieldMap[key])
|
||||
}
|
||||
d.keyBitmapUint8 = keyBitmap
|
||||
d.keyDecoder = decodeKeyByBitmapUint8
|
||||
d.keyStreamDecoder = decodeKeyByBitmapUint8Stream
|
||||
} else {
|
||||
keyBitmap := make([][256]uint16, bitmapLen)
|
||||
for i, key := range sortedKeys {
|
||||
for j := 0; j < len(key); j++ {
|
||||
c := key[j]
|
||||
keyBitmap[j][c] |= (1 << uint(i))
|
||||
}
|
||||
d.sortedFieldSets = append(d.sortedFieldSets, fieldMap[key])
|
||||
}
|
||||
d.keyBitmapUint16 = keyBitmap
|
||||
d.keyDecoder = decodeKeyByBitmapUint16
|
||||
d.keyStreamDecoder = decodeKeyByBitmapUint16Stream
|
||||
}
|
||||
}
|
||||
|
||||
// decode from '\uXXXX'
|
||||
func decodeKeyCharByUnicodeRune(buf []byte, cursor int64) ([]byte, int64) {
|
||||
const defaultOffset = 4
|
||||
const surrogateOffset = 6
|
||||
|
||||
r := unicodeToRune(buf[cursor : cursor+defaultOffset])
|
||||
if utf16.IsSurrogate(r) {
|
||||
cursor += defaultOffset
|
||||
if cursor+surrogateOffset >= int64(len(buf)) || buf[cursor] != '\\' || buf[cursor+1] != 'u' {
|
||||
return []byte(string(unicode.ReplacementChar)), cursor + defaultOffset - 1
|
||||
}
|
||||
cursor += 2
|
||||
r2 := unicodeToRune(buf[cursor : cursor+defaultOffset])
|
||||
if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar {
|
||||
return []byte(string(r)), cursor + defaultOffset - 1
|
||||
}
|
||||
}
|
||||
return []byte(string(r)), cursor + defaultOffset - 1
|
||||
}
|
||||
|
||||
func decodeKeyCharByEscapedChar(buf []byte, cursor int64) ([]byte, int64) {
|
||||
c := buf[cursor]
|
||||
cursor++
|
||||
switch c {
|
||||
case '"':
|
||||
return []byte{'"'}, cursor
|
||||
case '\\':
|
||||
return []byte{'\\'}, cursor
|
||||
case '/':
|
||||
return []byte{'/'}, cursor
|
||||
case 'b':
|
||||
return []byte{'\b'}, cursor
|
||||
case 'f':
|
||||
return []byte{'\f'}, cursor
|
||||
case 'n':
|
||||
return []byte{'\n'}, cursor
|
||||
case 'r':
|
||||
return []byte{'\r'}, cursor
|
||||
case 't':
|
||||
return []byte{'\t'}, cursor
|
||||
case 'u':
|
||||
return decodeKeyCharByUnicodeRune(buf, cursor)
|
||||
}
|
||||
return nil, cursor
|
||||
}
|
||||
|
||||
func decodeKeyByBitmapUint8(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) {
|
||||
var (
|
||||
curBit uint8 = math.MaxUint8
|
||||
)
|
||||
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
for {
|
||||
switch char(b, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case '"':
|
||||
cursor++
|
||||
c := char(b, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
cursor++
|
||||
return cursor, nil, nil
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
keyIdx := 0
|
||||
bitmap := d.keyBitmapUint8
|
||||
start := cursor
|
||||
for {
|
||||
c := char(b, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
fieldSetIndex := bits.TrailingZeros8(curBit)
|
||||
field := d.sortedFieldSets[fieldSetIndex]
|
||||
keyLen := cursor - start
|
||||
cursor++
|
||||
if keyLen < field.keyLen {
|
||||
// early match
|
||||
return cursor, nil, nil
|
||||
}
|
||||
return cursor, field, nil
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
case '\\':
|
||||
cursor++
|
||||
chars, nextCursor := decodeKeyCharByEscapedChar(buf, cursor)
|
||||
for _, c := range chars {
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
return decodeKeyNotFound(b, cursor)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor = nextCursor
|
||||
default:
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
return decodeKeyNotFound(b, cursor)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
default:
|
||||
return cursor, nil, errors.ErrNotAtBeginningOfValue(cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKeyByBitmapUint16(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) {
|
||||
var (
|
||||
curBit uint16 = math.MaxUint16
|
||||
)
|
||||
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
for {
|
||||
switch char(b, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case '"':
|
||||
cursor++
|
||||
c := char(b, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
cursor++
|
||||
return cursor, nil, nil
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
keyIdx := 0
|
||||
bitmap := d.keyBitmapUint16
|
||||
start := cursor
|
||||
for {
|
||||
c := char(b, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
fieldSetIndex := bits.TrailingZeros16(curBit)
|
||||
field := d.sortedFieldSets[fieldSetIndex]
|
||||
keyLen := cursor - start
|
||||
cursor++
|
||||
if keyLen < field.keyLen {
|
||||
// early match
|
||||
return cursor, nil, nil
|
||||
}
|
||||
return cursor, field, nil
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
case '\\':
|
||||
cursor++
|
||||
chars, nextCursor := decodeKeyCharByEscapedChar(buf, cursor)
|
||||
for _, c := range chars {
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
return decodeKeyNotFound(b, cursor)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor = nextCursor
|
||||
default:
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
return decodeKeyNotFound(b, cursor)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
default:
|
||||
return cursor, nil, errors.ErrNotAtBeginningOfValue(cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKeyNotFound(b unsafe.Pointer, cursor int64) (int64, *structFieldSet, error) {
|
||||
for {
|
||||
cursor++
|
||||
switch char(b, cursor) {
|
||||
case '"':
|
||||
cursor++
|
||||
return cursor, nil, nil
|
||||
case '\\':
|
||||
cursor++
|
||||
if char(b, cursor) == nul {
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKey(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) {
|
||||
key, c, err := d.stringDecoder.decodeByte(buf, cursor)
|
||||
if err != nil {
|
||||
return 0, nil, err
|
||||
}
|
||||
cursor = c
|
||||
k := *(*string)(unsafe.Pointer(&key))
|
||||
field, exists := d.fieldMap[k]
|
||||
if !exists {
|
||||
return cursor, nil, nil
|
||||
}
|
||||
return cursor, field, nil
|
||||
}
|
||||
|
||||
func decodeKeyByBitmapUint8Stream(d *structDecoder, s *Stream) (*structFieldSet, string, error) {
|
||||
var (
|
||||
curBit uint8 = math.MaxUint8
|
||||
)
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return nil, "", errors.ErrNotAtBeginningOfValue(s.totalOffset())
|
||||
case '"':
|
||||
cursor++
|
||||
FIRST_CHAR:
|
||||
start := cursor
|
||||
switch char(p, cursor) {
|
||||
case '"':
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
return nil, "", nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
goto FIRST_CHAR
|
||||
}
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
keyIdx := 0
|
||||
bitmap := d.keyBitmapUint8
|
||||
for {
|
||||
c := char(p, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
fieldSetIndex := bits.TrailingZeros8(curBit)
|
||||
field := d.sortedFieldSets[fieldSetIndex]
|
||||
keyLen := cursor - start
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
if keyLen < field.keyLen {
|
||||
// early match
|
||||
return nil, field.key, nil
|
||||
}
|
||||
return field, field.key, nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
case '\\':
|
||||
s.cursor = cursor + 1 // skip '\' char
|
||||
chars, err := decodeKeyCharByEscapeCharStream(s)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
cursor = s.cursor
|
||||
for _, c := range chars {
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
s.cursor = cursor
|
||||
return decodeKeyNotFoundStream(s, start)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
default:
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
s.cursor = cursor
|
||||
return decodeKeyNotFoundStream(s, start)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
default:
|
||||
return nil, "", errors.ErrNotAtBeginningOfValue(s.totalOffset())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKeyByBitmapUint16Stream(d *structDecoder, s *Stream) (*structFieldSet, string, error) {
|
||||
var (
|
||||
curBit uint16 = math.MaxUint16
|
||||
)
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return nil, "", errors.ErrNotAtBeginningOfValue(s.totalOffset())
|
||||
case '"':
|
||||
cursor++
|
||||
FIRST_CHAR:
|
||||
start := cursor
|
||||
switch char(p, cursor) {
|
||||
case '"':
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
return nil, "", nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
goto FIRST_CHAR
|
||||
}
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
keyIdx := 0
|
||||
bitmap := d.keyBitmapUint16
|
||||
for {
|
||||
c := char(p, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
fieldSetIndex := bits.TrailingZeros16(curBit)
|
||||
field := d.sortedFieldSets[fieldSetIndex]
|
||||
keyLen := cursor - start
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
if keyLen < field.keyLen {
|
||||
// early match
|
||||
return nil, field.key, nil
|
||||
}
|
||||
return field, field.key, nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
case '\\':
|
||||
s.cursor = cursor + 1 // skip '\' char
|
||||
chars, err := decodeKeyCharByEscapeCharStream(s)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
cursor = s.cursor
|
||||
for _, c := range chars {
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
s.cursor = cursor
|
||||
return decodeKeyNotFoundStream(s, start)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
default:
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
s.cursor = cursor
|
||||
return decodeKeyNotFoundStream(s, start)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
default:
|
||||
return nil, "", errors.ErrNotAtBeginningOfValue(s.totalOffset())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// decode from '\uXXXX'
|
||||
func decodeKeyCharByUnicodeRuneStream(s *Stream) ([]byte, error) {
|
||||
const defaultOffset = 4
|
||||
const surrogateOffset = 6
|
||||
|
||||
if s.cursor+defaultOffset >= s.length {
|
||||
if !s.read() {
|
||||
return nil, errors.ErrInvalidCharacter(s.char(), "escaped unicode char", s.totalOffset())
|
||||
}
|
||||
}
|
||||
|
||||
r := unicodeToRune(s.buf[s.cursor : s.cursor+defaultOffset])
|
||||
if utf16.IsSurrogate(r) {
|
||||
s.cursor += defaultOffset
|
||||
if s.cursor+surrogateOffset >= s.length {
|
||||
s.read()
|
||||
}
|
||||
if s.cursor+surrogateOffset >= s.length || s.buf[s.cursor] != '\\' || s.buf[s.cursor+1] != 'u' {
|
||||
s.cursor += defaultOffset - 1
|
||||
return []byte(string(unicode.ReplacementChar)), nil
|
||||
}
|
||||
r2 := unicodeToRune(s.buf[s.cursor+defaultOffset+2 : s.cursor+surrogateOffset])
|
||||
if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar {
|
||||
s.cursor += defaultOffset - 1
|
||||
return []byte(string(r)), nil
|
||||
}
|
||||
}
|
||||
s.cursor += defaultOffset - 1
|
||||
return []byte(string(r)), nil
|
||||
}
|
||||
|
||||
func decodeKeyCharByEscapeCharStream(s *Stream) ([]byte, error) {
|
||||
c := s.buf[s.cursor]
|
||||
s.cursor++
|
||||
RETRY:
|
||||
switch c {
|
||||
case '"':
|
||||
return []byte{'"'}, nil
|
||||
case '\\':
|
||||
return []byte{'\\'}, nil
|
||||
case '/':
|
||||
return []byte{'/'}, nil
|
||||
case 'b':
|
||||
return []byte{'\b'}, nil
|
||||
case 'f':
|
||||
return []byte{'\f'}, nil
|
||||
case 'n':
|
||||
return []byte{'\n'}, nil
|
||||
case 'r':
|
||||
return []byte{'\r'}, nil
|
||||
case 't':
|
||||
return []byte{'\t'}, nil
|
||||
case 'u':
|
||||
return decodeKeyCharByUnicodeRuneStream(s)
|
||||
case nul:
|
||||
if !s.read() {
|
||||
return nil, errors.ErrInvalidCharacter(s.char(), "escaped char", s.totalOffset())
|
||||
}
|
||||
goto RETRY
|
||||
default:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("struct field", s.totalOffset())
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKeyNotFoundStream(s *Stream, start int64) (*structFieldSet, string, error) {
|
||||
buf, cursor, p := s.stat()
|
||||
for {
|
||||
cursor++
|
||||
switch char(p, cursor) {
|
||||
case '"':
|
||||
b := buf[start:cursor]
|
||||
			key := *(*string)(unsafe.Pointer(&b))
			cursor++
			s.cursor = cursor
			return nil, key, nil
		case '\\':
			cursor++
			if char(p, cursor) == nul {
				s.cursor = cursor
				if !s.read() {
					return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
				}
				buf, cursor, p = s.statForRetry()
			}
		case nul:
			s.cursor = cursor
			if !s.read() {
				return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
			}
			buf, cursor, p = s.statForRetry()
		}
	}
}

func decodeKeyStream(d *structDecoder, s *Stream) (*structFieldSet, string, error) {
	key, err := d.stringDecoder.decodeStreamByte(s)
	if err != nil {
		return nil, "", err
	}
	k := *(*string)(unsafe.Pointer(&key))
	return d.fieldMap[k], k, nil
}

func (d *structDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	depth++
	if depth > maxDecodeNestingDepth {
		return errors.ErrExceededMaxDepth(s.char(), s.cursor)
	}

	c := s.skipWhiteSpace()
	switch c {
	case 'n':
		if err := nullBytes(s); err != nil {
			return err
		}
		return nil
	default:
		if s.char() != '{' {
			return errors.ErrNotAtBeginningOfValue(s.totalOffset())
		}
	}
	s.cursor++
	if s.skipWhiteSpace() == '}' {
		s.cursor++
		return nil
	}
	var (
		seenFields   map[int]struct{}
		seenFieldNum int
	)
	firstWin := (s.Option.Flags & FirstWinOption) != 0
	if firstWin {
		seenFields = make(map[int]struct{}, d.fieldUniqueNameNum)
	}
	for {
		s.reset()
		field, key, err := d.keyStreamDecoder(d, s)
		if err != nil {
			return err
		}
		if s.skipWhiteSpace() != ':' {
			return errors.ErrExpected("colon after object key", s.totalOffset())
		}
		s.cursor++
		if field != nil {
			if field.err != nil {
				return field.err
			}
			if firstWin {
				if _, exists := seenFields[field.fieldIdx]; exists {
					if err := s.skipValue(depth); err != nil {
						return err
					}
				} else {
					if err := field.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+field.offset)); err != nil {
						return err
					}
					seenFieldNum++
					if d.fieldUniqueNameNum <= seenFieldNum {
						return s.skipObject(depth)
					}
					seenFields[field.fieldIdx] = struct{}{}
				}
			} else {
				if err := field.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+field.offset)); err != nil {
					return err
				}
			}
		} else if s.DisallowUnknownFields {
			return fmt.Errorf("json: unknown field %q", key)
		} else {
			if err := s.skipValue(depth); err != nil {
				return err
			}
		}
		c := s.skipWhiteSpace()
		if c == '}' {
			s.cursor++
			return nil
		}
		if c != ',' {
			return errors.ErrExpected("comma after object element", s.totalOffset())
		}
		s.cursor++
	}
}

func (d *structDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	depth++
	if depth > maxDecodeNestingDepth {
		return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
	}
	buflen := int64(len(buf))
	cursor = skipWhiteSpace(buf, cursor)
	b := (*sliceHeader)(unsafe.Pointer(&buf)).data
	switch char(b, cursor) {
	case 'n':
		if err := validateNull(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		return cursor, nil
	case '{':
	default:
		return 0, errors.ErrNotAtBeginningOfValue(cursor)
	}
	cursor++
	cursor = skipWhiteSpace(buf, cursor)
	if buf[cursor] == '}' {
		cursor++
		return cursor, nil
	}
	var (
		seenFields   map[int]struct{}
		seenFieldNum int
	)
	firstWin := (ctx.Option.Flags & FirstWinOption) != 0
	if firstWin {
		seenFields = make(map[int]struct{}, d.fieldUniqueNameNum)
	}
	for {
		c, field, err := d.keyDecoder(d, buf, cursor)
		if err != nil {
			return 0, err
		}
		cursor = skipWhiteSpace(buf, c)
		if char(b, cursor) != ':' {
			return 0, errors.ErrExpected("colon after object key", cursor)
		}
		cursor++
		if cursor >= buflen {
			return 0, errors.ErrExpected("object value after colon", cursor)
		}
		if field != nil {
			if field.err != nil {
				return 0, field.err
			}
			if firstWin {
				if _, exists := seenFields[field.fieldIdx]; exists {
					c, err := skipValue(buf, cursor, depth)
					if err != nil {
						return 0, err
					}
					cursor = c
				} else {
					c, err := field.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+field.offset))
					if err != nil {
						return 0, err
					}
					cursor = c
					seenFieldNum++
					if d.fieldUniqueNameNum <= seenFieldNum {
						return skipObject(buf, cursor, depth)
					}
					seenFields[field.fieldIdx] = struct{}{}
				}
			} else {
				c, err := field.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+field.offset))
				if err != nil {
					return 0, err
				}
				cursor = c
			}
		} else {
			c, err := skipValue(buf, cursor, depth)
			if err != nil {
				return 0, err
			}
			cursor = c
		}
		cursor = skipWhiteSpace(buf, cursor)
		if char(b, cursor) == '}' {
			cursor++
			return cursor, nil
		}
		if char(b, cursor) != ',' {
			return 0, errors.ErrExpected("comma after object element", cursor)
		}
		cursor++
	}
}
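Aside (not part of the diff): the FirstWinOption branch above keeps the first occurrence of a duplicate object key and skips later ones. A minimal usage sketch against the public go-json API, assuming the option names UnmarshalWithOption and DecodeFieldPriorityFirstWin documented in the go-json README apply to this vendored version:

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	var v struct {
		A int `json:"a"`
	}
	data := []byte(`{"a": 1, "a": 2}`)
	// With the first-win option the duplicate "a" is skipped and 1 is kept;
	// without it the last value (2) would win, as in encoding/json.
	if err := json.UnmarshalWithOption(data, &v, json.DecodeFieldPriorityFirstWin()); err != nil {
		panic(err)
	}
	fmt.Println(v.A)
}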
29
vendor/github.com/goccy/go-json/internal/decoder/type.go
generated
vendored
Normal file
@@ -0,0 +1,29 @@
package decoder

import (
	"context"
	"encoding"
	"encoding/json"
	"reflect"
	"unsafe"
)

type Decoder interface {
	Decode(*RuntimeContext, int64, int64, unsafe.Pointer) (int64, error)
	DecodeStream(*Stream, int64, unsafe.Pointer) error
}

const (
	nul                   = '\000'
	maxDecodeNestingDepth = 10000
)

type unmarshalerContext interface {
	UnmarshalJSON(context.Context, []byte) error
}

var (
	unmarshalJSONType        = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
	unmarshalJSONContextType = reflect.TypeOf((*unmarshalerContext)(nil)).Elem()
	unmarshalTextType        = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
)
190
vendor/github.com/goccy/go-json/internal/decoder/uint.go
generated
vendored
Normal file
@@ -0,0 +1,190 @@
package decoder

import (
	"fmt"
	"reflect"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type uintDecoder struct {
	typ        *runtime.Type
	kind       reflect.Kind
	op         func(unsafe.Pointer, uint64)
	structName string
	fieldName  string
}

func newUintDecoder(typ *runtime.Type, structName, fieldName string, op func(unsafe.Pointer, uint64)) *uintDecoder {
	return &uintDecoder{
		typ:        typ,
		kind:       typ.Kind(),
		op:         op,
		structName: structName,
		fieldName:  fieldName,
	}
}

func (d *uintDecoder) typeError(buf []byte, offset int64) *errors.UnmarshalTypeError {
	return &errors.UnmarshalTypeError{
		Value:  fmt.Sprintf("number %s", string(buf)),
		Type:   runtime.RType2Type(d.typ),
		Offset: offset,
	}
}

var (
	pow10u64 = [...]uint64{
		1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09,
		1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19,
	}
	pow10u64Len = len(pow10u64)
)

func (d *uintDecoder) parseUint(b []byte) (uint64, error) {
	maxDigit := len(b)
	if maxDigit > pow10u64Len {
		return 0, fmt.Errorf("invalid length of number")
	}
	sum := uint64(0)
	for i := 0; i < maxDigit; i++ {
		c := uint64(b[i]) - 48
		digitValue := pow10u64[maxDigit-i-1]
		sum += c * digitValue
	}
	return sum, nil
}

func (d *uintDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
	for {
		switch s.char() {
		case ' ', '\n', '\t', '\r':
			s.cursor++
			continue
		case '0':
			s.cursor++
			return numZeroBuf, nil
		case '1', '2', '3', '4', '5', '6', '7', '8', '9':
			start := s.cursor
			for {
				s.cursor++
				if numTable[s.char()] {
					continue
				} else if s.char() == nul {
					if s.read() {
						s.cursor-- // for retry current character
						continue
					}
				}
				break
			}
			num := s.buf[start:s.cursor]
			return num, nil
		case 'n':
			if err := nullBytes(s); err != nil {
				return nil, err
			}
			return nil, nil
		case nul:
			if s.read() {
				continue
			}
		default:
			return nil, d.typeError([]byte{s.char()}, s.totalOffset())
		}
		break
	}
	return nil, errors.ErrUnexpectedEndOfJSON("number(unsigned integer)", s.totalOffset())
}

func (d *uintDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
	for {
		switch buf[cursor] {
		case ' ', '\n', '\t', '\r':
			cursor++
			continue
		case '0':
			cursor++
			return numZeroBuf, cursor, nil
		case '1', '2', '3', '4', '5', '6', '7', '8', '9':
			start := cursor
			cursor++
			for numTable[buf[cursor]] {
				cursor++
			}
			num := buf[start:cursor]
			return num, cursor, nil
		case 'n':
			if err := validateNull(buf, cursor); err != nil {
				return nil, 0, err
			}
			cursor += 4
			return nil, cursor, nil
		default:
			return nil, 0, d.typeError([]byte{buf[cursor]}, cursor)
		}
	}
}

func (d *uintDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	bytes, err := d.decodeStreamByte(s)
	if err != nil {
		return err
	}
	if bytes == nil {
		return nil
	}
	u64, err := d.parseUint(bytes)
	if err != nil {
		return d.typeError(bytes, s.totalOffset())
	}
	switch d.kind {
	case reflect.Uint8:
		if (1 << 8) <= u64 {
			return d.typeError(bytes, s.totalOffset())
		}
	case reflect.Uint16:
		if (1 << 16) <= u64 {
			return d.typeError(bytes, s.totalOffset())
		}
	case reflect.Uint32:
		if (1 << 32) <= u64 {
			return d.typeError(bytes, s.totalOffset())
		}
	}
	d.op(p, u64)
	return nil
}

func (d *uintDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	bytes, c, err := d.decodeByte(ctx.Buf, cursor)
	if err != nil {
		return 0, err
	}
	if bytes == nil {
		return c, nil
	}
	cursor = c
	u64, err := d.parseUint(bytes)
	if err != nil {
		return 0, d.typeError(bytes, cursor)
	}
	switch d.kind {
	case reflect.Uint8:
		if (1 << 8) <= u64 {
			return 0, d.typeError(bytes, cursor)
		}
	case reflect.Uint16:
		if (1 << 16) <= u64 {
			return 0, d.typeError(bytes, cursor)
		}
	case reflect.Uint32:
		if (1 << 32) <= u64 {
			return 0, d.typeError(bytes, cursor)
		}
	}
	d.op(p, u64)
	return cursor, nil
}
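Aside (not part of the diff): parseUint above avoids strconv by multiplying each ASCII digit by a power of ten from pow10u64, indexed by the digit's position. A standalone sketch of the same idea, with hypothetical names and a shorter table, just to illustrate the arithmetic:

package main

import "fmt"

// pow10 holds 10^0 .. 10^4; the decoder's table goes up to 10^19.
var pow10 = [...]uint64{1, 10, 100, 1000, 10000}

// parseDigits assumes the input is ASCII digits only, as the decoder guarantees.
func parseDigits(b []byte) uint64 {
	var sum uint64
	for i := 0; i < len(b); i++ {
		sum += uint64(b[i]-'0') * pow10[len(b)-i-1]
	}
	return sum
}

func main() {
	fmt.Println(parseDigits([]byte("9021"))) // 9021
}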
91
vendor/github.com/goccy/go-json/internal/decoder/unmarshal_json.go
generated
vendored
Normal file
@@ -0,0 +1,91 @@
package decoder

import (
	"encoding/json"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type unmarshalJSONDecoder struct {
	typ        *runtime.Type
	structName string
	fieldName  string
}

func newUnmarshalJSONDecoder(typ *runtime.Type, structName, fieldName string) *unmarshalJSONDecoder {
	return &unmarshalJSONDecoder{
		typ:        typ,
		structName: structName,
		fieldName:  fieldName,
	}
}

func (d *unmarshalJSONDecoder) annotateError(cursor int64, err error) {
	switch e := err.(type) {
	case *errors.UnmarshalTypeError:
		e.Struct = d.structName
		e.Field = d.fieldName
	case *errors.SyntaxError:
		e.Offset = cursor
	}
}

func (d *unmarshalJSONDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	s.skipWhiteSpace()
	start := s.cursor
	if err := s.skipValue(depth); err != nil {
		return err
	}
	src := s.buf[start:s.cursor]
	dst := make([]byte, len(src))
	copy(dst, src)

	v := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: p,
	}))
	if (s.Option.Flags & ContextOption) != 0 {
		if err := v.(unmarshalerContext).UnmarshalJSON(s.Option.Context, dst); err != nil {
			d.annotateError(s.cursor, err)
			return err
		}
	} else {
		if err := v.(json.Unmarshaler).UnmarshalJSON(dst); err != nil {
			d.annotateError(s.cursor, err)
			return err
		}
	}
	return nil
}

func (d *unmarshalJSONDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	cursor = skipWhiteSpace(buf, cursor)
	start := cursor
	end, err := skipValue(buf, cursor, depth)
	if err != nil {
		return 0, err
	}
	src := buf[start:end]
	dst := make([]byte, len(src))
	copy(dst, src)

	v := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: p,
	}))
	if (ctx.Option.Flags & ContextOption) != 0 {
		if err := v.(unmarshalerContext).UnmarshalJSON(ctx.Option.Context, dst); err != nil {
			d.annotateError(cursor, err)
			return 0, err
		}
	} else {
		if err := v.(json.Unmarshaler).UnmarshalJSON(dst); err != nil {
			d.annotateError(cursor, err)
			return 0, err
		}
	}
	return end, nil
}
280
vendor/github.com/goccy/go-json/internal/decoder/unmarshal_text.go
generated
vendored
Normal file
@@ -0,0 +1,280 @@
package decoder

import (
	"bytes"
	"encoding"
	"unicode"
	"unicode/utf16"
	"unicode/utf8"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type unmarshalTextDecoder struct {
	typ        *runtime.Type
	structName string
	fieldName  string
}

func newUnmarshalTextDecoder(typ *runtime.Type, structName, fieldName string) *unmarshalTextDecoder {
	return &unmarshalTextDecoder{
		typ:        typ,
		structName: structName,
		fieldName:  fieldName,
	}
}

func (d *unmarshalTextDecoder) annotateError(cursor int64, err error) {
	switch e := err.(type) {
	case *errors.UnmarshalTypeError:
		e.Struct = d.structName
		e.Field = d.fieldName
	case *errors.SyntaxError:
		e.Offset = cursor
	}
}

var (
	nullbytes = []byte(`null`)
)

func (d *unmarshalTextDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	s.skipWhiteSpace()
	start := s.cursor
	if err := s.skipValue(depth); err != nil {
		return err
	}
	src := s.buf[start:s.cursor]
	if len(src) > 0 {
		switch src[0] {
		case '[':
			return &errors.UnmarshalTypeError{
				Value:  "array",
				Type:   runtime.RType2Type(d.typ),
				Offset: s.totalOffset(),
			}
		case '{':
			return &errors.UnmarshalTypeError{
				Value:  "object",
				Type:   runtime.RType2Type(d.typ),
				Offset: s.totalOffset(),
			}
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			return &errors.UnmarshalTypeError{
				Value:  "number",
				Type:   runtime.RType2Type(d.typ),
				Offset: s.totalOffset(),
			}
		case 'n':
			if bytes.Equal(src, nullbytes) {
				*(*unsafe.Pointer)(p) = nil
				return nil
			}
		}
	}
	dst := make([]byte, len(src))
	copy(dst, src)

	if b, ok := unquoteBytes(dst); ok {
		dst = b
	}
	v := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: p,
	}))
	if err := v.(encoding.TextUnmarshaler).UnmarshalText(dst); err != nil {
		d.annotateError(s.cursor, err)
		return err
	}
	return nil
}

func (d *unmarshalTextDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	cursor = skipWhiteSpace(buf, cursor)
	start := cursor
	end, err := skipValue(buf, cursor, depth)
	if err != nil {
		return 0, err
	}
	src := buf[start:end]
	if len(src) > 0 {
		switch src[0] {
		case '[':
			return 0, &errors.UnmarshalTypeError{
				Value:  "array",
				Type:   runtime.RType2Type(d.typ),
				Offset: start,
			}
		case '{':
			return 0, &errors.UnmarshalTypeError{
				Value:  "object",
				Type:   runtime.RType2Type(d.typ),
				Offset: start,
			}
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			return 0, &errors.UnmarshalTypeError{
				Value:  "number",
				Type:   runtime.RType2Type(d.typ),
				Offset: start,
			}
		case 'n':
			if bytes.Equal(src, nullbytes) {
				*(*unsafe.Pointer)(p) = nil
				return end, nil
			}
		}
	}

	if s, ok := unquoteBytes(src); ok {
		src = s
	}
	v := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
	}))
	if err := v.(encoding.TextUnmarshaler).UnmarshalText(src); err != nil {
		d.annotateError(cursor, err)
		return 0, err
	}
	return end, nil
}

func unquoteBytes(s []byte) (t []byte, ok bool) {
	length := len(s)
	if length < 2 || s[0] != '"' || s[length-1] != '"' {
		return
	}
	s = s[1 : length-1]
	length -= 2

	// Check for unusual characters. If there are none,
	// then no unquoting is needed, so return a slice of the
	// original bytes.
	r := 0
	for r < length {
		c := s[r]
		if c == '\\' || c == '"' || c < ' ' {
			break
		}
		if c < utf8.RuneSelf {
			r++
			continue
		}
		rr, size := utf8.DecodeRune(s[r:])
		if rr == utf8.RuneError && size == 1 {
			break
		}
		r += size
	}
	if r == length {
		return s, true
	}

	b := make([]byte, length+2*utf8.UTFMax)
	w := copy(b, s[0:r])
	for r < length {
		// Out of room? Can only happen if s is full of
		// malformed UTF-8 and we're replacing each
		// byte with RuneError.
		if w >= len(b)-2*utf8.UTFMax {
			nb := make([]byte, (len(b)+utf8.UTFMax)*2)
			copy(nb, b[0:w])
			b = nb
		}
		switch c := s[r]; {
		case c == '\\':
			r++
			if r >= length {
				return
			}
			switch s[r] {
			default:
				return
			case '"', '\\', '/', '\'':
				b[w] = s[r]
				r++
				w++
			case 'b':
				b[w] = '\b'
				r++
				w++
			case 'f':
				b[w] = '\f'
				r++
				w++
			case 'n':
				b[w] = '\n'
				r++
				w++
			case 'r':
				b[w] = '\r'
				r++
				w++
			case 't':
				b[w] = '\t'
				r++
				w++
			case 'u':
				r--
				rr := getu4(s[r:])
				if rr < 0 {
					return
				}
				r += 6
				if utf16.IsSurrogate(rr) {
					rr1 := getu4(s[r:])
					if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar {
						// A valid pair; consume.
						r += 6
						w += utf8.EncodeRune(b[w:], dec)
						break
					}
					// Invalid surrogate; fall back to replacement rune.
					rr = unicode.ReplacementChar
				}
				w += utf8.EncodeRune(b[w:], rr)
			}

		// Quote, control characters are invalid.
		case c == '"', c < ' ':
			return

		// ASCII
		case c < utf8.RuneSelf:
			b[w] = c
			r++
			w++

		// Coerce to well-formed UTF-8.
		default:
			rr, size := utf8.DecodeRune(s[r:])
			r += size
			w += utf8.EncodeRune(b[w:], rr)
		}
	}
	return b[0:w], true
}

func getu4(s []byte) rune {
	if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
		return -1
	}
	var r rune
	for _, c := range s[2:6] {
		switch {
		case '0' <= c && c <= '9':
			c = c - '0'
		case 'a' <= c && c <= 'f':
			c = c - 'a' + 10
		case 'A' <= c && c <= 'F':
			c = c - 'A' + 10
		default:
			return -1
		}
		r = r*16 + rune(c)
	}
	return r
}
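Aside (not part of the diff): the \uXXXX branch of unquoteBytes above relies on getu4 turning four hex digits into a rune before it is re-encoded as UTF-8. A standalone sketch of that hex-to-rune step, with names of my own rather than the vendored package's:

package main

import (
	"fmt"
	"unicode/utf8"
)

// hexRune decodes a leading `\uXXXX` escape into a rune, or -1 on bad input.
func hexRune(s []byte) rune {
	if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
		return -1
	}
	var r rune
	for _, c := range s[2:6] {
		switch {
		case '0' <= c && c <= '9':
			r = r*16 + rune(c-'0')
		case 'a' <= c && c <= 'f':
			r = r*16 + rune(c-'a'+10)
		case 'A' <= c && c <= 'F':
			r = r*16 + rune(c-'A'+10)
		default:
			return -1
		}
	}
	return r
}

func main() {
	r := hexRune([]byte(`\u00e9`))
	buf := make([]byte, utf8.UTFMax)
	n := utf8.EncodeRune(buf, r) // re-encode as UTF-8, as the decoder does
	fmt.Printf("%c %q\n", r, buf[:n])
}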
68
vendor/github.com/goccy/go-json/internal/decoder/wrapped_string.go
generated
vendored
Normal file
@@ -0,0 +1,68 @@
package decoder

import (
	"reflect"
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

type wrappedStringDecoder struct {
	typ           *runtime.Type
	dec           Decoder
	stringDecoder *stringDecoder
	structName    string
	fieldName     string
	isPtrType     bool
}

func newWrappedStringDecoder(typ *runtime.Type, dec Decoder, structName, fieldName string) *wrappedStringDecoder {
	return &wrappedStringDecoder{
		typ:           typ,
		dec:           dec,
		stringDecoder: newStringDecoder(structName, fieldName),
		structName:    structName,
		fieldName:     fieldName,
		isPtrType:     typ.Kind() == reflect.Ptr,
	}
}

func (d *wrappedStringDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	bytes, err := d.stringDecoder.decodeStreamByte(s)
	if err != nil {
		return err
	}
	if bytes == nil {
		if d.isPtrType {
			*(*unsafe.Pointer)(p) = nil
		}
		return nil
	}
	b := make([]byte, len(bytes)+1)
	copy(b, bytes)
	if _, err := d.dec.Decode(&RuntimeContext{Buf: b}, 0, depth, p); err != nil {
		return err
	}
	return nil
}

func (d *wrappedStringDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	bytes, c, err := d.stringDecoder.decodeByte(ctx.Buf, cursor)
	if err != nil {
		return 0, err
	}
	if bytes == nil {
		if d.isPtrType {
			*(*unsafe.Pointer)(p) = nil
		}
		return c, nil
	}
	bytes = append(bytes, nul)
	oldBuf := ctx.Buf
	ctx.Buf = bytes
	if _, err := d.dec.Decode(ctx, 0, depth, p); err != nil {
		return 0, err
	}
	ctx.Buf = oldBuf
	return c, nil
}
286
vendor/github.com/goccy/go-json/internal/encoder/compact.go
generated
vendored
Normal file
@@ -0,0 +1,286 @@
package encoder

import (
	"bytes"
	"fmt"
	"strconv"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
)

var (
	isWhiteSpace = [256]bool{
		' ':  true,
		'\n': true,
		'\t': true,
		'\r': true,
	}
	isHTMLEscapeChar = [256]bool{
		'<': true,
		'>': true,
		'&': true,
	}
	nul = byte('\000')
)

func Compact(buf *bytes.Buffer, src []byte, escape bool) error {
	if len(src) == 0 {
		return errors.ErrUnexpectedEndOfJSON("", 0)
	}
	buf.Grow(len(src))
	dst := buf.Bytes()

	ctx := TakeRuntimeContext()
	ctxBuf := ctx.Buf[:0]
	ctxBuf = append(append(ctxBuf, src...), nul)
	ctx.Buf = ctxBuf

	if err := compactAndWrite(buf, dst, ctxBuf, escape); err != nil {
		ReleaseRuntimeContext(ctx)
		return err
	}
	ReleaseRuntimeContext(ctx)
	return nil
}

func compactAndWrite(buf *bytes.Buffer, dst []byte, src []byte, escape bool) error {
	dst, err := compact(dst, src, escape)
	if err != nil {
		return err
	}
	if _, err := buf.Write(dst); err != nil {
		return err
	}
	return nil
}

func compact(dst, src []byte, escape bool) ([]byte, error) {
	buf, cursor, err := compactValue(dst, src, 0, escape)
	if err != nil {
		return nil, err
	}
	if err := validateEndBuf(src, cursor); err != nil {
		return nil, err
	}
	return buf, nil
}

func validateEndBuf(src []byte, cursor int64) error {
	for {
		switch src[cursor] {
		case ' ', '\t', '\n', '\r':
			cursor++
			continue
		case nul:
			return nil
		}
		return errors.ErrSyntax(
			fmt.Sprintf("invalid character '%c' after top-level value", src[cursor]),
			cursor+1,
		)
	}
}

func skipWhiteSpace(buf []byte, cursor int64) int64 {
LOOP:
	if isWhiteSpace[buf[cursor]] {
		cursor++
		goto LOOP
	}
	return cursor
}

func compactValue(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
	for {
		switch src[cursor] {
		case ' ', '\t', '\n', '\r':
			cursor++
			continue
		case '{':
			return compactObject(dst, src, cursor, escape)
		case '}':
			return nil, 0, errors.ErrSyntax("unexpected character '}'", cursor)
		case '[':
			return compactArray(dst, src, cursor, escape)
		case ']':
			return nil, 0, errors.ErrSyntax("unexpected character ']'", cursor)
		case '"':
			return compactString(dst, src, cursor, escape)
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			return compactNumber(dst, src, cursor)
		case 't':
			return compactTrue(dst, src, cursor)
		case 'f':
			return compactFalse(dst, src, cursor)
		case 'n':
			return compactNull(dst, src, cursor)
		default:
			return nil, 0, errors.ErrSyntax(fmt.Sprintf("unexpected character '%c'", src[cursor]), cursor)
		}
	}
}

func compactObject(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
	if src[cursor] == '{' {
		dst = append(dst, '{')
	} else {
		return nil, 0, errors.ErrExpected("expected { character for object value", cursor)
	}
	cursor = skipWhiteSpace(src, cursor+1)
	if src[cursor] == '}' {
		dst = append(dst, '}')
		return dst, cursor + 1, nil
	}
	var err error
	for {
		cursor = skipWhiteSpace(src, cursor)
		dst, cursor, err = compactString(dst, src, cursor, escape)
		if err != nil {
			return nil, 0, err
		}
		cursor = skipWhiteSpace(src, cursor)
		if src[cursor] != ':' {
			return nil, 0, errors.ErrExpected("colon after object key", cursor)
		}
		dst = append(dst, ':')
		dst, cursor, err = compactValue(dst, src, cursor+1, escape)
		if err != nil {
			return nil, 0, err
		}
		cursor = skipWhiteSpace(src, cursor)
		switch src[cursor] {
		case '}':
			dst = append(dst, '}')
			cursor++
			return dst, cursor, nil
		case ',':
			dst = append(dst, ',')
		default:
			return nil, 0, errors.ErrExpected("comma after object value", cursor)
		}
		cursor++
	}
}

func compactArray(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
	if src[cursor] == '[' {
		dst = append(dst, '[')
	} else {
		return nil, 0, errors.ErrExpected("expected [ character for array value", cursor)
	}
	cursor = skipWhiteSpace(src, cursor+1)
	if src[cursor] == ']' {
		dst = append(dst, ']')
		return dst, cursor + 1, nil
	}
	var err error
	for {
		dst, cursor, err = compactValue(dst, src, cursor, escape)
		if err != nil {
			return nil, 0, err
		}
		cursor = skipWhiteSpace(src, cursor)
		switch src[cursor] {
		case ']':
			dst = append(dst, ']')
			cursor++
			return dst, cursor, nil
		case ',':
			dst = append(dst, ',')
		default:
			return nil, 0, errors.ErrExpected("comma after array value", cursor)
		}
		cursor++
	}
}

func compactString(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
	if src[cursor] != '"' {
		return nil, 0, errors.ErrInvalidCharacter(src[cursor], "string", cursor)
	}
	start := cursor
	for {
		cursor++
		c := src[cursor]
		if escape {
			if isHTMLEscapeChar[c] {
				dst = append(dst, src[start:cursor]...)
				dst = append(dst, `\u00`...)
				dst = append(dst, hex[c>>4], hex[c&0xF])
				start = cursor + 1
			} else if c == 0xE2 && cursor+2 < int64(len(src)) && src[cursor+1] == 0x80 && src[cursor+2]&^1 == 0xA8 {
				dst = append(dst, src[start:cursor]...)
				dst = append(dst, `\u202`...)
				dst = append(dst, hex[src[cursor+2]&0xF])
				cursor += 2
				start = cursor + 3
			}
		}
		switch c {
		case '\\':
			cursor++
			if src[cursor] == nul {
				return nil, 0, errors.ErrUnexpectedEndOfJSON("string", int64(len(src)))
			}
		case '"':
			cursor++
			return append(dst, src[start:cursor]...), cursor, nil
		case nul:
			return nil, 0, errors.ErrUnexpectedEndOfJSON("string", int64(len(src)))
		}
	}
}

func compactNumber(dst, src []byte, cursor int64) ([]byte, int64, error) {
	start := cursor
	for {
		cursor++
		if floatTable[src[cursor]] {
			continue
		}
		break
	}
	num := src[start:cursor]
	if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&num)), 64); err != nil {
		return nil, 0, err
	}
	dst = append(dst, num...)
	return dst, cursor, nil
}

func compactTrue(dst, src []byte, cursor int64) ([]byte, int64, error) {
	if cursor+3 >= int64(len(src)) {
		return nil, 0, errors.ErrUnexpectedEndOfJSON("true", cursor)
	}
	if !bytes.Equal(src[cursor:cursor+4], []byte(`true`)) {
		return nil, 0, errors.ErrInvalidCharacter(src[cursor], "true", cursor)
	}
	dst = append(dst, "true"...)
	cursor += 4
	return dst, cursor, nil
}

func compactFalse(dst, src []byte, cursor int64) ([]byte, int64, error) {
	if cursor+4 >= int64(len(src)) {
		return nil, 0, errors.ErrUnexpectedEndOfJSON("false", cursor)
	}
	if !bytes.Equal(src[cursor:cursor+5], []byte(`false`)) {
		return nil, 0, errors.ErrInvalidCharacter(src[cursor], "false", cursor)
	}
	dst = append(dst, "false"...)
	cursor += 5
	return dst, cursor, nil
}

func compactNull(dst, src []byte, cursor int64) ([]byte, int64, error) {
	if cursor+3 >= int64(len(src)) {
		return nil, 0, errors.ErrUnexpectedEndOfJSON("null", cursor)
	}
	if !bytes.Equal(src[cursor:cursor+4], []byte(`null`)) {
		return nil, 0, errors.ErrInvalidCharacter(src[cursor], "null", cursor)
	}
	dst = append(dst, "null"...)
	cursor += 4
	return dst, cursor, nil
}
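Aside (not part of the diff): the compactValue/compactObject/compactArray walker above strips insignificant whitespace, the same contract as the standard library's json.Compact. A quick reference for the expected behaviour, written against encoding/json so it stays independent of the vendored internals:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	src := []byte("{\n  \"a\": [1, 2, 3],\n  \"b\": \"x\"\n}")
	var dst bytes.Buffer
	// Compact rewrites src into dst with all insignificant whitespace removed.
	if err := json.Compact(&dst, src); err != nil {
		panic(err)
	}
	fmt.Println(dst.String()) // {"a":[1,2,3],"b":"x"}
}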
1569
vendor/github.com/goccy/go-json/internal/encoder/compiler.go
generated
vendored
Normal file
File diff suppressed because it is too large
56
vendor/github.com/goccy/go-json/internal/encoder/compiler_norace.go
generated
vendored
Normal file
@@ -0,0 +1,56 @@
// +build !race

package encoder

import (
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

func CompileToGetCodeSet(typeptr uintptr) (*OpcodeSet, error) {
	if typeptr > typeAddr.MaxTypeAddr {
		return compileToGetCodeSetSlowPath(typeptr)
	}
	index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
	if codeSet := cachedOpcodeSets[index]; codeSet != nil {
		return codeSet, nil
	}

	// noescape trick for header.typ ( reflect.*rtype )
	copiedType := *(**runtime.Type)(unsafe.Pointer(&typeptr))

	noescapeKeyCode, err := compileHead(&compileContext{
		typ:                      copiedType,
		structTypeToCompiledCode: map[uintptr]*CompiledCode{},
	})
	if err != nil {
		return nil, err
	}
	escapeKeyCode, err := compileHead(&compileContext{
		typ:                      copiedType,
		structTypeToCompiledCode: map[uintptr]*CompiledCode{},
		escapeKey:                true,
	})
	if err != nil {
		return nil, err
	}
	noescapeKeyCode = copyOpcode(noescapeKeyCode)
	escapeKeyCode = copyOpcode(escapeKeyCode)
	setTotalLengthToInterfaceOp(noescapeKeyCode)
	setTotalLengthToInterfaceOp(escapeKeyCode)
	interfaceNoescapeKeyCode := copyToInterfaceOpcode(noescapeKeyCode)
	interfaceEscapeKeyCode := copyToInterfaceOpcode(escapeKeyCode)
	codeLength := noescapeKeyCode.TotalLength()
	codeSet := &OpcodeSet{
		Type:                     copiedType,
		NoescapeKeyCode:          noescapeKeyCode,
		EscapeKeyCode:            escapeKeyCode,
		InterfaceNoescapeKeyCode: interfaceNoescapeKeyCode,
		InterfaceEscapeKeyCode:   interfaceEscapeKeyCode,
		CodeLength:               codeLength,
		EndCode:                  ToEndCode(interfaceNoescapeKeyCode),
	}
	cachedOpcodeSets[index] = codeSet
	return codeSet, nil
}
65
vendor/github.com/goccy/go-json/internal/encoder/compiler_race.go
generated
vendored
Normal file
@@ -0,0 +1,65 @@
// +build race

package encoder

import (
	"sync"
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

var setsMu sync.RWMutex

func CompileToGetCodeSet(typeptr uintptr) (*OpcodeSet, error) {
	if typeptr > typeAddr.MaxTypeAddr {
		return compileToGetCodeSetSlowPath(typeptr)
	}
	index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
	setsMu.RLock()
	if codeSet := cachedOpcodeSets[index]; codeSet != nil {
		setsMu.RUnlock()
		return codeSet, nil
	}
	setsMu.RUnlock()

	// noescape trick for header.typ ( reflect.*rtype )
	copiedType := *(**runtime.Type)(unsafe.Pointer(&typeptr))

	noescapeKeyCode, err := compileHead(&compileContext{
		typ:                      copiedType,
		structTypeToCompiledCode: map[uintptr]*CompiledCode{},
	})
	if err != nil {
		return nil, err
	}
	escapeKeyCode, err := compileHead(&compileContext{
		typ:                      copiedType,
		structTypeToCompiledCode: map[uintptr]*CompiledCode{},
		escapeKey:                true,
	})
	if err != nil {
		return nil, err
	}

	noescapeKeyCode = copyOpcode(noescapeKeyCode)
	escapeKeyCode = copyOpcode(escapeKeyCode)
	setTotalLengthToInterfaceOp(noescapeKeyCode)
	setTotalLengthToInterfaceOp(escapeKeyCode)
	interfaceNoescapeKeyCode := copyToInterfaceOpcode(noescapeKeyCode)
	interfaceEscapeKeyCode := copyToInterfaceOpcode(escapeKeyCode)
	codeLength := noescapeKeyCode.TotalLength()
	codeSet := &OpcodeSet{
		Type:                     copiedType,
		NoescapeKeyCode:          noescapeKeyCode,
		EscapeKeyCode:            escapeKeyCode,
		InterfaceNoescapeKeyCode: interfaceNoescapeKeyCode,
		InterfaceEscapeKeyCode:   interfaceEscapeKeyCode,
		CodeLength:               codeLength,
		EndCode:                  ToEndCode(interfaceNoescapeKeyCode),
	}
	setsMu.Lock()
	cachedOpcodeSets[index] = codeSet
	setsMu.Unlock()
	return codeSet, nil
}
141
vendor/github.com/goccy/go-json/internal/encoder/context.go
generated
vendored
Normal file
@@ -0,0 +1,141 @@
package encoder

import (
	"context"
	"sync"
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

type compileContext struct {
	typ                      *runtime.Type
	opcodeIndex              uint32
	ptrIndex                 int
	indent                   uint32
	escapeKey                bool
	structTypeToCompiledCode map[uintptr]*CompiledCode

	parent *compileContext
}

func (c *compileContext) context() *compileContext {
	return &compileContext{
		typ:                      c.typ,
		opcodeIndex:              c.opcodeIndex,
		ptrIndex:                 c.ptrIndex,
		indent:                   c.indent,
		escapeKey:                c.escapeKey,
		structTypeToCompiledCode: c.structTypeToCompiledCode,
		parent:                   c,
	}
}

func (c *compileContext) withType(typ *runtime.Type) *compileContext {
	ctx := c.context()
	ctx.typ = typ
	return ctx
}

func (c *compileContext) incIndent() *compileContext {
	ctx := c.context()
	ctx.indent++
	return ctx
}

func (c *compileContext) decIndent() *compileContext {
	ctx := c.context()
	ctx.indent--
	return ctx
}

func (c *compileContext) incIndex() {
	c.incOpcodeIndex()
	c.incPtrIndex()
}

func (c *compileContext) decIndex() {
	c.decOpcodeIndex()
	c.decPtrIndex()
}

func (c *compileContext) incOpcodeIndex() {
	c.opcodeIndex++
	if c.parent != nil {
		c.parent.incOpcodeIndex()
	}
}

func (c *compileContext) decOpcodeIndex() {
	c.opcodeIndex--
	if c.parent != nil {
		c.parent.decOpcodeIndex()
	}
}

func (c *compileContext) incPtrIndex() {
	c.ptrIndex++
	if c.parent != nil {
		c.parent.incPtrIndex()
	}
}

func (c *compileContext) decPtrIndex() {
	c.ptrIndex--
	if c.parent != nil {
		c.parent.decPtrIndex()
	}
}

const (
	bufSize = 1024
)

var (
	runtimeContextPool = sync.Pool{
		New: func() interface{} {
			return &RuntimeContext{
				Buf:      make([]byte, 0, bufSize),
				Ptrs:     make([]uintptr, 128),
				KeepRefs: make([]unsafe.Pointer, 0, 8),
				Option:   &Option{},
			}
		},
	}
)

type RuntimeContext struct {
	Context    context.Context
	Buf        []byte
	MarshalBuf []byte
	Ptrs       []uintptr
	KeepRefs   []unsafe.Pointer
	SeenPtr    []uintptr
	BaseIndent uint32
	Prefix     []byte
	IndentStr  []byte
	Option     *Option
}

func (c *RuntimeContext) Init(p uintptr, codelen int) {
	if len(c.Ptrs) < codelen {
		c.Ptrs = make([]uintptr, codelen)
	}
	c.Ptrs[0] = p
	c.KeepRefs = c.KeepRefs[:0]
	c.SeenPtr = c.SeenPtr[:0]
	c.BaseIndent = 0
}

func (c *RuntimeContext) Ptr() uintptr {
	header := (*runtime.SliceHeader)(unsafe.Pointer(&c.Ptrs))
	return uintptr(header.Data)
}

func TakeRuntimeContext() *RuntimeContext {
	return runtimeContextPool.Get().(*RuntimeContext)
}

func ReleaseRuntimeContext(ctx *RuntimeContext) {
	runtimeContextPool.Put(ctx)
}
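Aside (not part of the diff): TakeRuntimeContext and ReleaseRuntimeContext above are a thin take/release wrapper over sync.Pool so encode buffers get reused across calls. The general pattern they follow, sketched with a plain bytes.Buffer rather than the package's RuntimeContext:

package main

import (
	"bytes"
	"fmt"
	"sync"
)

var bufPool = sync.Pool{
	New: func() interface{} { return new(bytes.Buffer) },
}

func render(name string) string {
	buf := bufPool.Get().(*bytes.Buffer) // "take"
	buf.Reset()
	fmt.Fprintf(buf, "hello, %s", name)
	out := buf.String()
	bufPool.Put(buf) // "release" so the allocation is reused
	return out
}

func main() {
	fmt.Println(render("gitea"))
}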
Some files were not shown because too many files have changed in this diff