diff --git a/config/config.go b/config/config.go
index 7d75092..d24e0aa 100644
--- a/config/config.go
+++ b/config/config.go
@@ -72,4 +72,11 @@ func GetMysqlConfig() *MysqlConfig{
}else {
return nil
}
+}
+func GetLogConfig() *LogConfig {
+ if gConf.init{
+ return &gConf.Logs
+ }else{
+ return nil
+ }
}
\ No newline at end of file
diff --git a/controller/hanlder.go b/controller/hanlder.go
index a2d42f1..5d3734a 100644
--- a/controller/hanlder.go
+++ b/controller/hanlder.go
@@ -76,27 +76,9 @@ func Auth(c *gin.Context) {
// @Success 200 {array} util.RespBase
// @Router /accounts [get]
func SetUser(c *gin.Context){
- var req model.Users
- var resp RespBase
- resp.Msg = "操作失败"
- resp.Status = 20
- defer func() {
- c.JSON(200,resp)
- }()
- e := c.BindJSON(&req)
- if nil != e{
- logs.Error(e.Error())
- return
- }
-
+
}
func DelUser(c *gin.Context){
- var resp RespBase
- resp.Msg = "操作失败"
- resp.Status = 20
- defer func() {
- c.JSON(200,resp)
- }()
}
@@ -118,11 +100,11 @@ func GetUser(c *gin.Context) {
if nil != e {
logs.Error(e.Error())
}
- delete(users,"socketToken")
+ delete(users,"socketToken" +
+ "")
resp.Status = 0
resp.Msg = "操作成功"
resp.Data = users
-
}
// GetUsers godoc
// @Summary GetUsers
@@ -134,10 +116,6 @@ func GetUser(c *gin.Context) {
// @Param displayname query string false "name search by q"
// @Param department_id query string false "name search by q"
// @Param permission_type query string false "name search by q"
-// @Success 200 {array} util.RespBase
-// @Failure 400 {object} util.RespBase
-// @Failure 404 {object} util.RespBase
-// @Failure 500 {object} util.RespBase
// @Router /api/users [get]
func GetUsers(c *gin.Context) {
var statuscode int
diff --git a/controller/mail/mail.go b/controller/mail/mail.go
new file mode 100644
index 0000000..3b2306e
--- /dev/null
+++ b/controller/mail/mail.go
@@ -0,0 +1,197 @@
+package mail
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "github.com/gin-gonic/gin"
+ "io/ioutil"
+ "log"
+ "net/smtp"
+ "os"
+ "regexp"
+ "strings"
+ "text/template"
+)
+
+type RespJson struct {
+ Msg string `json:"msg"`
+ Data interface{} `json:"data,omitempty"`
+ Affected int64 `json:"affected,omitempty"`
+}
+
+// PathExists reports whether the given path exists on disk.
+func PathExists(path string) (bool, error) {
+ _, err := os.Stat(path)
+ if err == nil {
+ return true, nil
+ }
+ if os.IsNotExist(err) {
+ return false, nil
+ }
+ return false, err
+}
+
+// GetTemplateFile reads the named template file from the template directory.
+func GetTemplateFile(template string) ([]byte, error) {
+ path := "G:\\GoPath\\Email\\" + template + ".tpl"
+ ok, _ := PathExists(path)
+ if !ok {
+ return nil, errors.New("could not find file " + path)
+ }
+ file, err := os.Open(path)
+ if err != nil {
+ log.Println(err.Error())
+ return nil, err
+ }
+ defer file.Close()
+ ret, err := ioutil.ReadAll(file)
+ if err != nil {
+ return nil, err
+ }
+ return ret, nil
+}
+
+// PATH_STATIC is the directory where generated static HTML files are written
+const PATH_STATIC = "G://GoPath//Email//static//"
+
+type ReqSendEmail struct {
+ From string `json:"from"`
+ To string `json:"to"`
+ Tittle string `json:"tittle"`
+ TempData interface{} `json:"temp_data"`
+ Template string `json:"template"`
+ Generate bool `json:"generate"`
+}
+
+func SendToMail(title, user, password, host, to, tplname string, content interface{}, mailtype string, ifgenerate bool) error {
+ var content_type string
+ var paseresult bytes.Buffer
+ writer := bufio.NewWriter(&paseresult)
+ hp := strings.Split(host, ":")
+ auth := smtp.PlainAuth("", user, password, hp[0])
+
+ if mailtype == "html" {
+ content_type = "Content-Type: text/" + mailtype + "; charset=UTF-8"
+ } else {
+ content_type = "Content-Type: text/plain" + "; charset=UTF-8"
+ }
+ // load the template file contents
+ tpl, err := GetTemplateFile(tplname)
+ if err != nil {
+ log.Println(err.Error())
+ return err
+ }
+ // render the template into the buffer
+ tparse := template.New(tplname)
+ tparse, err = tparse.Parse(string(tpl))
+ if err != nil {
+ log.Println(err.Error())
+ return err
+ }
+ if err = tparse.Execute(writer, content); err != nil {
+ log.Println(err.Error())
+ return err
+ }
+ writer.Flush()
+
+ msg := []byte("To: " + to + "\r\nFrom: " + user + "\r\nSubject: " + title + "\r\n" +
+ content_type + "\r\n\r\n" + paseresult.String() + "\r\n")
+ send_to := strings.Split(to, ";")
+
+ // optionally write the rendered result out as a static HTML file
+ if ifgenerate {
+ file, err := os.Create(PATH_STATIC + tplname + ".html")
+ if err != nil {
+ log.Println(err.Error())
+ } else {
+ file.WriteString(paseresult.String())
+ file.Close()
+ }
+ }
+ // validate that every recipient is a well-formed email address
+ for k := range send_to {
+ match, _ := regexp.MatchString("[\\w!#$%&'*+/=?^_`{|}~-]+(?:\\.[\\w!#$%&'*+/=?^_`{|}~-]+)*@(?:[\\w](?:[\\w-]*[\\w])?\\.)+[\\w](?:[\\w-]*[\\w])?", send_to[k])
+ if !match {
+ return errors.New("Format Error")
+ }
+ }
+
+ err = smtp.SendMail(host, auth, user, send_to, msg)
+ if err != nil {
+ return err
+ }
+ return err
+}
+
+/*
+func OnSendEmailSendCloud(c *gin.Context){
+ var req ReqSendEmail
+ var resp mysqlcurd.RespJson
+ defer func() {
+ c.JSON(200, resp)
+ }()
+
+ e := c.Bind(&req)
+ if e!= nil{
+ log.Println(e.Error())
+ resp.Msg = "ParaErr"
+ return
+ }
+
+ sendcloud.UpdateApiInfo("a7458969_test_KIIqjl", "ovErXj6M8UJeiPJt")
+ var to = make([]map[string]string, 1)
+ to[0] = map[string]string{"to":"290198252@qq.com", "%url%": "http://www.baidu.com"}
+ var ok, err, result = sendcloud.SendTemplateMail("test_template_active", "290198252@sendcloud.org", "测试", "", "测试", to, nil)
+ if err != nil{
+ log.Print(err.Error())
+ resp.Msg = "Fail"
+ }
+ if !ok{
+ resp.Msg = "Fail"
+ }
+ resp.Msg = result
+}*/
+
+/*
+{
+ "tittle":"运维通知",
+ "from":"c7458969@163.com",
+ "to":"290198252@qq.com",
+ "template":"test",
+ "generate":true,
+ "temp_data":{
+ "content":"f发撒旦法时代阿达是否",
+ "title":"运维通知2",
+ "topimg":"http://img2.imgtn.bdimg.com/it/u=387283908,3372540416&fm=27&gp=0.jpg",
+ "button1text":"客服热线",
+ "button1url":"http://www.baidu.com",
+ "button2text":"查看详情",
+ "button2url":"http://www.baidu.com",
+ "logoimg":"http://news.tom.com/dimg/2016/1027/img-1479784632271.jpg"
+ }
+}
+*/
+// from: sender email address
+// to: recipient addresses; multiple recipients are separated by semicolons,
+//     e.g. "290198252@qq.com;29019822@qq.com;2901982@qq.com"
+// template: name of the template file to render
+// content: key-value parameters for the HTML template
+// temp_data: key-value map of the variables to substitute inside the template
+// generate: whether to also write the rendered result out as a static HTML file
+// (a usage sketch for this handler follows after OnSendEmail below)
+func OnSendEmail(c *gin.Context) {
+ var req ReqSendEmail
+ var resp RespJson
+ defer func() {
+ c.JSON(200, resp)
+ }()
+
+ e := c.Bind(&req)
+ if e != nil {
+ log.Println(e.Error())
+ resp.Msg = "ParaErr"
+ return
+ }
+ user := "c7458969@163.com"
+ password := "caiyu123"
+ host := "smtp.163.com:25"
+ // cc a copy to the sender (kept for reference):
+ //e = SendToMail(user,password,host,req.From,req.Template,req.Content,"html")
+ // send the mail
+ e = SendToMail(req.Tittle, user, password, host, req.To, req.Template, req.TempData, "html", req.Generate)
+ if nil != e {
+ log.Println(e.Error())
+ resp.Msg = "Error"
+ return
+ }
+ resp.Msg = "OK"
+}
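+
+// Usage sketch (illustrative only, not wired up in this change): OnSendEmail is a
+// plain gin handler, so it could be registered on the router "r" built in main.go,
+// e.g.
+//
+//	r.POST("/api/mail", mail.OnSendEmail)
+//
+// and then called with a JSON body like the sample shown above. The "/api/mail"
+// path is an assumption; this diff does not register the route anywhere.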
diff --git a/controller/mail/upload.go b/controller/mail/upload.go
new file mode 100644
index 0000000..c8528e4
--- /dev/null
+++ b/controller/mail/upload.go
@@ -0,0 +1,39 @@
+package mail
+
+import (
+ "fmt"
+ "github.com/gin-gonic/gin"
+ "io"
+ "log"
+ "os"
+)
+
+func OnUpload(c *gin.Context) {
+ file, header, err := c.Request.FormFile("upload")
+ if err != nil {
+ log.Println(err)
+ return
+ }
+ defer file.Close()
+ filename := header.Filename
+ fmt.Println(header.Filename)
+ out, err := os.Create("G://GoPath//image//" + filename + ".png")
+ if err != nil {
+ log.Println(err)
+ return
+ }
+ defer out.Close()
+ _, err = io.Copy(out, file)
+ if err != nil {
+ log.Println(err)
+ return
+ }
+}
+
+// OnDownLoad currently mirrors OnUpload: it stores the posted "upload" file.
+func OnDownLoad(c *gin.Context) {
+ file, header, err := c.Request.FormFile("upload")
+ if err != nil {
+ log.Println(err)
+ return
+ }
+ defer file.Close()
+ filename := header.Filename
+ fmt.Println(header.Filename)
+ out, err := os.Create("G://GoPath//image//" + filename + ".png")
+ if err != nil {
+ log.Println(err)
+ return
+ }
+ defer out.Close()
+ _, err = io.Copy(out, file)
+ if err != nil {
+ log.Println(err)
+ return
+ }
+}
diff --git a/db/db.go b/db/db.go
index 6311506..037a3f7 100644
--- a/db/db.go
+++ b/db/db.go
@@ -3,13 +3,13 @@ package db
import (
"database/sql"
- "document/logs"
"errors"
"fmt"
"reflect"
"strconv"
"sync"
"time"
+ "user/logs"
)
// 数据容器抽象对象定义
diff --git a/db/sqlManager.go b/db/sqlManager.go
index 0b04394..844dd51 100644
--- a/db/sqlManager.go
+++ b/db/sqlManager.go
@@ -2,17 +2,15 @@ package db
import (
"database/sql"
- "user/config"
"fmt"
_ "github.com/go-sql-driver/mysql"
- "log"
+ "user/config"
)
var gDb Database
func Init() {
mysqlconf := config.GetMysqlConfig()
- log.Println(mysqlconf)
cnn := fmt.Sprintf("%s:%s@tcp(%s)/%s?charset=utf8",mysqlconf.UserName,mysqlconf.Password,
mysqlconf.Addr,mysqlconf.Db)
_db,err := sql.Open("mysql",cnn)
diff --git a/main.go b/main.go
index f5b1b18..4c7e316 100644
--- a/main.go
+++ b/main.go
@@ -2,6 +2,7 @@ package main
import (
"github.com/gin-gonic/gin"
+ "github.com/tommy351/gin-sessions"
"log"
"strconv"
"user/controller"
@@ -24,8 +25,12 @@ func main() {
if nil != e{
log.Println(e.Error())
}
+ logConf := config.GetLogConfig()
+ if logConf != nil {
+ logs.Init(logConf.Dir, logConf.File, logConf.Level, logConf.SaveFile)
+ }
db.Init()
r := gin.Default()
+ store := sessions.NewCookieStore([]byte("secret123"))
+ r.Use(sessions.Middleware("my_session", store))
+
{
/** 添加或修改用户 **/
r.POST("/api/user", controller.SetUser)
diff --git a/user.yaml b/user.yaml
index 2fe7c41..ed29134 100644
--- a/user.yaml
+++ b/user.yaml
@@ -5,7 +5,7 @@ logs:
dir: "/var/log/user"
file: "user.log"
level: 1
- savefile: true
+ savefile: false
redis:
addr: 118.24.238.198
password: 6379
diff --git a/vendor/cloud.google.com/go/LICENSE b/vendor/cloud.google.com/go/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/vendor/cloud.google.com/go/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/cloud.google.com/go/civil/civil.go b/vendor/cloud.google.com/go/civil/civil.go
index 1cb2675..29272ef 100644
--- a/vendor/cloud.google.com/go/civil/civil.go
+++ b/vendor/cloud.google.com/go/civil/civil.go
@@ -1,4 +1,4 @@
-// Copyright 2016 Google Inc. All Rights Reserved.
+// Copyright 2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/vendor/git.jiaxianghudong.com/go/utils/bytebuffer.go b/vendor/git.jiaxianghudong.com/go/utils/bytebuffer.go
index 73b1216..60f4fea 100644
--- a/vendor/git.jiaxianghudong.com/go/utils/bytebuffer.go
+++ b/vendor/git.jiaxianghudong.com/go/utils/bytebuffer.go
@@ -1,11 +1,20 @@
package utils
-// 加入8字节
+import (
+ "bytes"
+ "encoding/binary"
+ // "errors"
+ // "fmt"
+ // "reflect"
+ // "unsafe"
+)
+
+// Put8bit appends a single byte to the buffer
func Put8bit(buf []byte, n byte) []byte {
return append(buf, n)
}
-// 加入16字节
+// Put16bit appends a uint16 (2 bytes) to the buffer
func Put16bit(buf []byte, n uint16) []byte {
var by [2]byte
@@ -15,7 +24,7 @@ func Put16bit(buf []byte, n uint16) []byte {
return append(buf, by[:]...)
}
-// 加入32字节
+// Put32bit appends a uint32 (4 bytes) to the buffer
func Put32bit(buf []byte, n uint32) []byte {
var by [4]byte
@@ -27,7 +36,7 @@ func Put32bit(buf []byte, n uint32) []byte {
return append(buf, by[:]...)
}
-// 加入64字节
+// Put64bit appends a uint64 (8 bytes) to the buffer
func Put64bit(buf []byte, n uint64) []byte {
var by [8]byte
@@ -43,12 +52,12 @@ func Put64bit(buf []byte, n uint64) []byte {
return append(buf, by[:]...)
}
-// 获取8bit
+// Get8bit reads a single byte at the given offset
func Get8bit(buf []byte, start int) byte {
return buf[start]
}
-// 获取16bit
+// Get16bit reads a uint16 starting at the given offset
func Get16bit(buf []byte, start int) uint16 {
var ret uint16
@@ -58,7 +67,7 @@ func Get16bit(buf []byte, start int) uint16 {
return ret
}
-// 获取32big
+// Get32bit reads a uint32 starting at the given offset
func Get32bit(buf []byte, start int) uint32 {
var ret uint32
@@ -70,7 +79,7 @@ func Get32bit(buf []byte, start int) uint32 {
return ret
}
-// 获取64bit
+// Get64bit reads a uint64 starting at the given offset
func Get64bit(buf []byte, start int) uint64 {
var ret uint64
@@ -85,3 +94,114 @@ func Get64bit(buf []byte, start int) uint64 {
return ret
}
+
+// BytesToInt converts an 8-byte big-endian slice (as produced by IntToBytes) to int
+func BytesToInt(b []byte) int {
+ buf := bytes.NewBuffer(b)
+ var x int64
+ binary.Read(buf, binary.BigEndian, &x)
+
+ return int(x)
+}
+
+// IntToBytes converts an int to an 8-byte big-endian slice
+// (binary.Write needs a fixed-size type, so the value goes through int64)
+func IntToBytes(n int) []byte {
+ buf := bytes.NewBuffer([]byte{})
+ binary.Write(buf, binary.BigEndian, int64(n))
+
+ return buf.Bytes()
+}
+
+// BytesToInt16 converts a big-endian byte slice to int16
+func BytesToInt16(b []byte) int16 {
+ buf := bytes.NewBuffer(b)
+ var x int16
+ binary.Read(buf, binary.BigEndian, &x)
+
+ return int16(x)
+}
+
+// Int16ToBytes converts an int16 to a big-endian byte slice
+func Int16ToBytes(n int16) []byte {
+ x := int16(n)
+ buf := bytes.NewBuffer([]byte{})
+ binary.Write(buf, binary.BigEndian, x)
+
+ return buf.Bytes()
+}
+
+// BytesToInt32 converts a big-endian byte slice to int32
+func BytesToInt32(b []byte) int32 {
+ buf := bytes.NewBuffer(b)
+ var x int32
+ binary.Read(buf, binary.BigEndian, &x)
+
+ return int32(x)
+}
+
+// Int32ToBytes converts an int32 to a big-endian byte slice
+func Int32ToBytes(n int32) []byte {
+ x := int32(n)
+ buf := bytes.NewBuffer([]byte{})
+ binary.Write(buf, binary.BigEndian, x)
+
+ return buf.Bytes()
+}
+
+// BytesToInt64 converts a big-endian byte slice to int64
+func BytesToInt64(b []byte) int64 {
+ buf := bytes.NewBuffer(b)
+ var x int64
+ binary.Read(buf, binary.BigEndian, &x)
+
+ return int64(x)
+}
+
+// Int64ToBytes converts an int64 to a big-endian byte slice
+func Int64ToBytes(n int64) []byte {
+ buf := bytes.NewBuffer([]byte{})
+ binary.Write(buf, binary.BigEndian, n)
+
+ return buf.Bytes()
+}
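+
+// Example: Int64ToBytes(7) returns the 8-byte big-endian slice
+// []byte{0, 0, 0, 0, 0, 0, 0, 7}, and BytesToInt64 of that slice returns 7,
+// so the two functions round-trip int64 values.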
+
+// BytesToUInt64 converts a big-endian byte slice to uint64
+func BytesToUInt64(b []byte) uint64 {
+ buf := bytes.NewBuffer(b)
+ var x uint64
+ binary.Read(buf, binary.BigEndian, &x)
+
+ return uint64(x)
+}
+
+// UInt64ToBytes converts a uint64 to a big-endian byte slice
+func UInt64ToBytes(n uint64) []byte {
+ buf := bytes.NewBuffer([]byte{})
+ binary.Write(buf, binary.BigEndian, n)
+
+ return buf.Bytes()
+}
+
+// UInt32ToBytes converts a uint32 to a big-endian byte slice
+func UInt32ToBytes(n uint32) []byte {
+ buf := bytes.NewBuffer([]byte{})
+ binary.Write(buf, binary.BigEndian, n)
+
+ return buf.Bytes()
+}
+
+// BytesToBool converts a byte slice to bool
+func BytesToBool(b []byte) bool {
+ buf := bytes.NewBuffer(b)
+ var x bool
+ binary.Read(buf, binary.BigEndian, &x)
+ return x
+}
+
+// BoolToBytes converts a bool to a byte slice
+func BoolToBytes(x bool) []byte {
+ buf := bytes.NewBuffer([]byte{})
+ binary.Write(buf, binary.BigEndian, x)
+
+ return buf.Bytes()
+}
diff --git a/vendor/git.jiaxianghudong.com/go/utils/coding.go b/vendor/git.jiaxianghudong.com/go/utils/coding.go
index 18180b2..7023eb4 100644
--- a/vendor/git.jiaxianghudong.com/go/utils/coding.go
+++ b/vendor/git.jiaxianghudong.com/go/utils/coding.go
@@ -52,6 +52,9 @@ func Authcode(text string, params ...interface{}) string {
if l > 1 {
key = params[1].(string)
+ if key == "" {
+ key = "DH-Framework"
+ }
}
if l > 2 {
@@ -169,18 +172,18 @@ func AuthcodeUrl(text string, params ...interface{}) string {
}
}
-// JsonEncode 编码JSON
-func JsonEncode(m interface{}) string {
+// JsonEncodeWithError encodes a value as JSON and returns any marshalling error
+func JsonEncodeWithError(m interface{}) (string, error) {
b, err := json.Marshal(m)
if err != nil {
- log.Printf("Json Encode[%#v] Error:%s", m, err.Error())
- return ""
+ return "", err
}
- return string(b)
+ return string(b), nil
}
-// JsonDecode 解码JSON
-func JsonDecode(str string, v ...interface{}) (interface{}, error) {
+
+// JsonDecodeWithError decodes a JSON string, optionally into the provided value
+func JsonDecodeWithError(str string, v ...interface{}) (interface{}, error) {
var m interface{}
if len(v) > 0 {
m = v[0]
@@ -196,6 +199,26 @@ func JsonDecode(str string, v ...interface{}) (interface{}, error) {
return m, nil
}
+// JsonEncode encodes a value as JSON, logging and returning "" on error
+func JsonEncode(m interface{}) string {
+ s, err := JsonEncodeWithError(m)
+ if err != nil {
+ log.Printf("Json Encode[%#v] Error:%s", m, err.Error())
+ return ""
+ }
+ return s
+}
+
+// JsonDecode decodes a JSON string, logging and returning nil on error
+func JsonDecode(str string, v ...interface{}) interface{} {
+ i, err := JsonDecodeWithError(str, v...)
+ if err != nil {
+ log.Printf("Json Decode[%s] Error:%s", str, err.Error())
+ return nil
+ }
+ return i
+}
+
func Crc32(text string) string {
h := crc32.NewIEEE()
io.WriteString(h, text)
diff --git a/vendor/git.jiaxianghudong.com/go/utils/conf.go b/vendor/git.jiaxianghudong.com/go/utils/conf.go
index af8814f..d160fa1 100644
--- a/vendor/git.jiaxianghudong.com/go/utils/conf.go
+++ b/vendor/git.jiaxianghudong.com/go/utils/conf.go
@@ -6,6 +6,7 @@ import (
"os"
"os/exec"
"path/filepath"
+ "fmt"
)
const (
@@ -16,6 +17,7 @@ const (
// ReadTextFile 读取文件
func ReadTextFile(path string) string {
+ fmt.Println("path:",path)
fp, err := os.Open(path)
if nil != err {
return ""
diff --git a/vendor/git.jiaxianghudong.com/go/utils/convert.go b/vendor/git.jiaxianghudong.com/go/utils/convert.go
index c159409..1b19ac9 100644
--- a/vendor/git.jiaxianghudong.com/go/utils/convert.go
+++ b/vendor/git.jiaxianghudong.com/go/utils/convert.go
@@ -4,8 +4,10 @@ import (
"bytes"
"fmt"
"io/ioutil"
+ "net/url"
"reflect"
"regexp"
+ "sort"
"strconv"
"strings"
"time"
@@ -308,6 +310,33 @@ func Long2ip(ip uint32) string {
return fmt.Sprintf("%d.%d.%d.%d", ip>>24, ip<<8>>24, ip<<16>>24, ip<<24>>24)
}
+// MapToRawStr builds a sorted "key=value&..." string from a map, skipping the
+// sign, encode and v keys as well as empty values
+func MapToRawStr(data map[string]string) string {
+
+ str := ""
+ if len(data) > 0 {
+ keys := make([]string, 0)
+ for k := range data {
+ if k != "sign" && k != "encode" && k != "v" {
+ keys = append(keys, k)
+ }
+ }
+ sort.Strings(keys)
+ //
+ for _, k := range keys {
+ if data[k] != "" {
+ if str == "" {
+ str = fmt.Sprintf("%s=%s", k, url.QueryEscape(strings.Replace(data[k], " ", "+", -1)))
+ } else {
+ str = fmt.Sprintf("%s&%s=%s", str, k, url.QueryEscape(strings.Replace(data[k], " ", "+", -1)))
+ }
+ }
+ }
+ }
+
+ return str
+}
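+
+// Example: MapToRawStr(map[string]string{"b": "2", "a": "1", "sign": "x"})
+// returns "a=1&b=2": keys are sorted, empty values and the sign/encode/v keys
+// are skipped, and each value is URL-encoded.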
+
// GbkToUtf8 GBK转UTF-8
func GbkToUtf8(s []byte) ([]byte, error) {
reader := transform.NewReader(bytes.NewReader(s), simplifiedchinese.GBK.NewDecoder())
diff --git a/vendor/git.jiaxianghudong.com/go/utils/http_helper.go b/vendor/git.jiaxianghudong.com/go/utils/http_helper.go
index 04ed988..d8ca26f 100644
--- a/vendor/git.jiaxianghudong.com/go/utils/http_helper.go
+++ b/vendor/git.jiaxianghudong.com/go/utils/http_helper.go
@@ -27,7 +27,6 @@ func Get(apiUrl string, parm map[string]string, header map[string]string, isHttp
}
apiUrl = fmt.Sprintf("%s%s", apiUrl, p)
}
-
client := &http.Client{}
if isHttps {
@@ -35,8 +34,10 @@ func Get(apiUrl string, parm map[string]string, header map[string]string, isHttp
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
}
- reqest, _ := http.NewRequest("GET", apiUrl, nil)
-
+ reqest, err := http.NewRequest("GET", apiUrl, nil)
+ if err != nil {
+ return nil, err
+ }
for k, v := range header {
reqest.Header.Set(k, v)
}
@@ -142,3 +143,43 @@ func GetRemoteIP(r *http.Request) string {
return strings.Split(addr, ":")[0]
}
+
+// ParseQuery parses a query string while tolerating values that are only partially URL-encoded
+func ParseQuery(query string) (m url.Values, err error) {
+ m = make(url.Values)
+ for query != "" {
+ key := query
+ if i := strings.IndexAny(key, "&"); i >= 0 {
+ key, query = key[:i], key[i+1:]
+ } else {
+ query = ""
+ }
+ if key == "" {
+ continue
+ }
+ value := ""
+ if i := strings.Index(key, "="); i >= 0 {
+ key, value = key[:i], key[i+1:]
+ }
+ key, err1 := url.QueryUnescape(key)
+ if err1 != nil {
+ if err == nil {
+ err = err1
+ }
+ continue
+ }
+ value1, err1 := url.QueryUnescape(value)
+ if err1 != nil {
+ if !strings.HasPrefix(err1.Error(), "invalid URL escape") {
+ if err == nil {
+ err = err1
+ }
+ continue
+ }
+ m[key] = append(m[key], value)
+ } else {
+ m[key] = append(m[key], value1)
+ }
+ }
+ return m, err
+}
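+
+// Example: ParseQuery("a=100%zz&b=1") does not fail on the badly encoded value;
+// it keeps the raw string, so m["a"] is []string{"100%zz"}, m["b"] is
+// []string{"1"}, and the returned error is nil.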
diff --git a/vendor/git.jiaxianghudong.com/go/utils/lua.go b/vendor/git.jiaxianghudong.com/go/utils/lua.go
index dee2540..3ac1b77 100644
--- a/vendor/git.jiaxianghudong.com/go/utils/lua.go
+++ b/vendor/git.jiaxianghudong.com/go/utils/lua.go
@@ -4,6 +4,7 @@ import (
"strconv"
"strings"
"net/http"
+ "fmt"
)
// buildLuaResponse构造Lua响应结果
@@ -52,6 +53,11 @@ func BuildLuaResponse(m interface{}) string {
vStr = "{" + Substr(vStr, 1) + "}"
case string:
vStr = `"` + strings.Replace(v, `"`, `\"`, -1) + `"`
+ case map[string]interface{}:
+ for ks, vs := range v {
+ vStr += `,` + ks + `=` + `"`+strings.Replace(fmt.Sprint(vs), `"`, `\"`, -1)+`"`
+ }
+ vStr = "{" + Substr(vStr, 1) + "}"
default:
if vv, ok := v.(map[interface{}]interface{}); ok {
vStr = BuildLuaResponse(vv)
diff --git a/vendor/git.jiaxianghudong.com/go/utils/regexp.go b/vendor/git.jiaxianghudong.com/go/utils/regexp.go
index 170776f..bf24cdb 100644
--- a/vendor/git.jiaxianghudong.com/go/utils/regexp.go
+++ b/vendor/git.jiaxianghudong.com/go/utils/regexp.go
@@ -12,12 +12,28 @@ const (
regEmail = `^[a-z0-9]+([._\\-]*[a-z0-9])*@([a-z0-9]+[-a-z0-9]*[a-z0-9]+.){1,63}[a-z0-9]+$`
regPhone = `^((\d3)|(\d{3}\-))?13[0-9]\d{8}|14[0-9]\d{8}|15[0-9]\d{8}|17[0-9]\d{8}|18[0-9]\d{8}`
regUrl = `^((https?|ftp|news|http):\/\/)?([a-z]([a-z0-9\-]*[\.。])+([a-z]{2}|aero|arpa|biz|com|coop|edu|gov|info|int|jobs|mil|museum|name|nato|net|org|pro|travel)|(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))(\/[a-z0-9_\-\.~]+)*(\/([a-z0-9_\-\.]*)(\?[a-z0-9+_\-\.%=&]*)?)?(#[a-z][a-z0-9_]*)?$`
- regGuid = `[a-zA-Z0-9-_]{0,40}`
+ regGuid = `[a-zA-Z0-9-_]{1,40}`
regDescription = `^{0,64}$`
regOutTypeDescription = `^{0,20}$`
regMac = `^{0,40}$`
regTradeNo = `^[a-zA-Z0-9_-]{1,40}$`
regAttach = `^{0,127}$`
+
+ // LT
+ regID = `^[0-9]{0,11}$`
+ regTitle = `^[\s\S]{0,40}$`
+ regIntro = `^[\s\S]{0,120}$`
+ regHash = `^[\S]{0,160}$`
+ reqAuthorName = `^{0,60}$`
+ reqAtType = `^[1-7]{1}$`
+ reqActionListAtType = `^[1,3]{1}$`
+ regContent = `^[\s\S]{0,255}$`
+ reqSrcType = `^[1,2,3]{1}$`
+ reqCommentActionAtType = `^[1,2,6]{1}$`
+ reqUserActionAtType = `^[1,2,3,4]{1}$`
+ reqKeyword = `^.{0,40}$`
+ regReason = `^[\s\S]{0,140}$`
+ regUserSuggestTitle = `^[\s\S]{0,120}$`
)
func CheckString(data string, pat string) bool {
@@ -87,3 +103,73 @@ func CheckTradeNo(tradeNo string) bool {
func CheckAttach(attach string) bool {
return CheckString(attach, regAttach)
}
+
+// CheckID validates a resource id
+func CheckID(anchorId string) bool {
+ return CheckString(anchorId, regID)
+}
+
+// CheckTitle validates a resource title
+func CheckTitle(title string) bool {
+ return CheckString(title, regTitle)
+}
+
+// CheckIntro validates a resource intro
+func CheckIntro(intro string) bool {
+ return CheckString(intro, regIntro)
+}
+
+// CheckHash validates a resource hash
+func CheckHash(hash string) bool {
+ return CheckString(hash, regHash)
+}
+
+// CheckAuthorName validates an author name
+func CheckAuthorName(authorName string) bool {
+ return CheckString(authorName, reqAuthorName)
+}
+
+// CheckAtType validates an atType value
+func CheckAtType(atType string) bool {
+ return CheckString(atType, reqAtType)
+}
+
+// CheckActionListAtType validates an action-list atType value
+func CheckActionListAtType(atType string) bool {
+ return CheckString(atType, reqActionListAtType)
+}
+
+// CheckContent validates resource content
+func CheckContent(content string) bool {
+ return CheckString(content, regContent)
+}
+
+// CheckSrcType validates a srcType value
+func CheckSrcType(srcType string) bool {
+ return CheckString(srcType, reqSrcType)
+}
+
+// CheckCommentActionAtType validates a comment-action atType value
+func CheckCommentActionAtType(atType string) bool {
+ return CheckString(atType, reqCommentActionAtType)
+}
+
+// CheckUserActionAtType validates a user-action atType value
+func CheckUserActionAtType(atType string) bool {
+ return CheckString(atType, reqUserActionAtType)
+}
+
+// CheckKeyword validates a search keyword
+func CheckKeyword(keyword string) bool {
+ return CheckString(keyword, reqKeyword)
+}
+
+// CheckReason validates a reason string
+func CheckReason(reason string) bool {
+ return CheckString(reason, regReason)
+}
+
+// CheckSuggestTitle validates a user-suggestion title
+func CheckSuggestTitle(title string) bool {
+ return CheckString(title, regUserSuggestTitle)
+}
diff --git a/vendor/git.jiaxianghudong.com/go/utils/utils.go b/vendor/git.jiaxianghudong.com/go/utils/utils.go
index 26df7df..c025279 100644
--- a/vendor/git.jiaxianghudong.com/go/utils/utils.go
+++ b/vendor/git.jiaxianghudong.com/go/utils/utils.go
@@ -4,6 +4,7 @@ import (
"fmt"
"log"
"math/rand"
+ "net"
"net/url"
"reflect"
"strconv"
@@ -355,10 +356,22 @@ func LenSyncMap(m *sync.Map) int {
return length
}
-// 模拟三元操作符
+// Ternary emulates the ternary (conditional) operator
func Ternary(b bool, trueVal, falseVal interface{}) interface{} {
if b {
return trueVal
}
return falseVal
}
+
+// GetLocalMacAddr returns the hardware (MAC) address of the first local network interface
+func GetLocalMacAddr() ([]byte, error) {
+
+ // enumerate the local network interfaces
+ interfaces, err := net.Interfaces()
+ if err != nil || len(interfaces) == 0 {
+ return nil, err
+ }
+ return interfaces[0].HardwareAddr, nil
+
+}
diff --git a/vendor/github.com/denisenkom/go-mssqldb/mssql.go b/vendor/github.com/denisenkom/go-mssqldb/mssql.go
index aa173b3..9065da5 100644
--- a/vendor/github.com/denisenkom/go-mssqldb/mssql.go
+++ b/vendor/github.com/denisenkom/go-mssqldb/mssql.go
@@ -29,24 +29,19 @@ var driverInstanceNoProcess = &Driver{processQueryText: false}
func init() {
sql.Register("mssql", driverInstance)
sql.Register("sqlserver", driverInstanceNoProcess)
- createDialer = func(p *connectParams) dialer {
- return tcpDialer{&net.Dialer{KeepAlive: p.keepAlive}}
+ createDialer = func(p *connectParams) Dialer {
+ return netDialer{&net.Dialer{KeepAlive: p.keepAlive}}
}
}
-// Abstract the dialer for testing and for non-TCP based connections.
-type dialer interface {
- Dial(ctx context.Context, addr string) (net.Conn, error)
-}
+var createDialer func(p *connectParams) Dialer
-var createDialer func(p *connectParams) dialer
-
-type tcpDialer struct {
+type netDialer struct {
nd *net.Dialer
}
-func (d tcpDialer) Dial(ctx context.Context, addr string) (net.Conn, error) {
- return d.nd.DialContext(ctx, "tcp", addr)
+func (d netDialer) DialContext(ctx context.Context, network string, addr string) (net.Conn, error) {
+ return d.nd.DialContext(ctx, network, addr)
}
type Driver struct {
@@ -125,6 +120,21 @@ type Connector struct {
// SessionInitSQL is optional. The session will be reset even if
// SessionInitSQL is empty.
SessionInitSQL string
+
+ // Dialer sets a custom dialer for all network operations.
+ // If Dialer is not set, normal net dialers are used.
+ Dialer Dialer
+}
+
+type Dialer interface {
+ DialContext(ctx context.Context, network string, addr string) (net.Conn, error)
+}
+
+func (c *Connector) getDialer(p *connectParams) Dialer {
+ if c != nil && c.Dialer != nil {
+ return c.Dialer
+ }
+ return createDialer(p)
}
type Conn struct {
@@ -310,12 +320,12 @@ func (d *Driver) open(ctx context.Context, dsn string) (*Conn, error) {
if err != nil {
return nil, err
}
- return d.connect(ctx, params)
+ return d.connect(ctx, nil, params)
}
// connect to the server, using the provided context for dialing only.
-func (d *Driver) connect(ctx context.Context, params connectParams) (*Conn, error) {
- sess, err := connect(ctx, d.log, params)
+func (d *Driver) connect(ctx context.Context, c *Connector, params connectParams) (*Conn, error) {
+ sess, err := connect(ctx, c, d.log, params)
if err != nil {
// main server failed, try fail-over partner
if params.failOverPartner == "" {
@@ -327,7 +337,7 @@ func (d *Driver) connect(ctx context.Context, params connectParams) (*Conn, erro
params.port = params.failOverPort
}
- sess, err = connect(ctx, d.log, params)
+ sess, err = connect(ctx, c, d.log, params)
if err != nil {
// fail-over partner also failed, now fail
return nil, err
@@ -335,6 +345,7 @@ func (d *Driver) connect(ctx context.Context, params connectParams) (*Conn, erro
}
conn := &Conn{
+ connector: c,
sess: sess,
transactionCtx: context.Background(),
processQueryText: d.processQueryText,
diff --git a/vendor/github.com/denisenkom/go-mssqldb/mssql_go110.go b/vendor/github.com/denisenkom/go-mssqldb/mssql_go110.go
index 3d5ab57..833f047 100644
--- a/vendor/github.com/denisenkom/go-mssqldb/mssql_go110.go
+++ b/vendor/github.com/denisenkom/go-mssqldb/mssql_go110.go
@@ -34,10 +34,7 @@ func (c *Conn) ResetSession(ctx context.Context) error {
// Connect to the server and return a TDS connection.
func (c *Connector) Connect(ctx context.Context) (driver.Conn, error) {
- conn, err := c.driver.connect(ctx, c.params)
- if conn != nil {
- conn.connector = c
- }
+ conn, err := c.driver.connect(ctx, c, c.params)
if err == nil {
err = conn.ResetSession(ctx)
}
diff --git a/vendor/github.com/denisenkom/go-mssqldb/tds.go b/vendor/github.com/denisenkom/go-mssqldb/tds.go
index a45711d..16d9ca8 100644
--- a/vendor/github.com/denisenkom/go-mssqldb/tds.go
+++ b/vendor/github.com/denisenkom/go-mssqldb/tds.go
@@ -50,12 +50,11 @@ func parseInstances(msg []byte) map[string]map[string]string {
return results
}
-func getInstances(ctx context.Context, address string) (map[string]map[string]string, error) {
+func getInstances(ctx context.Context, d Dialer, address string) (map[string]map[string]string, error) {
maxTime := 5 * time.Second
- dialer := &net.Dialer{
- Timeout: maxTime,
- }
- conn, err := dialer.DialContext(ctx, "udp", address+":1434")
+ ctx, cancel := context.WithTimeout(ctx, maxTime)
+ defer cancel()
+ conn, err := d.DialContext(ctx, "udp", address+":1434")
if err != nil {
return nil, err
}
@@ -1112,7 +1111,7 @@ type auth interface {
// SQL Server AlwaysOn Availability Group Listeners are bound by DNS to a
// list of IP addresses. So if there is more than one, try them all and
// use the first one that allows a connection.
-func dialConnection(ctx context.Context, p connectParams) (conn net.Conn, err error) {
+func dialConnection(ctx context.Context, c *Connector, p connectParams) (conn net.Conn, err error) {
var ips []net.IP
ips, err = net.LookupIP(p.host)
if err != nil {
@@ -1123,9 +1122,9 @@ func dialConnection(ctx context.Context, p connectParams) (conn net.Conn, err er
ips = []net.IP{ip}
}
if len(ips) == 1 {
- d := createDialer(&p)
+ d := c.getDialer(&p)
addr := net.JoinHostPort(ips[0].String(), strconv.Itoa(int(p.port)))
- conn, err = d.Dial(ctx, addr)
+ conn, err = d.DialContext(ctx, "tcp", addr)
} else {
//Try Dials in parallel to avoid waiting for timeouts.
@@ -1134,9 +1133,9 @@ func dialConnection(ctx context.Context, p connectParams) (conn net.Conn, err er
portStr := strconv.Itoa(int(p.port))
for _, ip := range ips {
go func(ip net.IP) {
- d := createDialer(&p)
+ d := c.getDialer(&p)
addr := net.JoinHostPort(ip.String(), portStr)
- conn, err := d.Dial(ctx, addr)
+ conn, err := d.DialContext(ctx, "tcp", addr)
if err == nil {
connChan <- conn
} else {
@@ -1174,7 +1173,7 @@ func dialConnection(ctx context.Context, p connectParams) (conn net.Conn, err er
return conn, err
}
-func connect(ctx context.Context, log optionalLogger, p connectParams) (res *tdsSession, err error) {
+func connect(ctx context.Context, c *Connector, log optionalLogger, p connectParams) (res *tdsSession, err error) {
dialCtx := ctx
if p.dial_timeout > 0 {
var cancel func()
@@ -1184,7 +1183,8 @@ func connect(ctx context.Context, log optionalLogger, p connectParams) (res *tds
// if instance is specified use instance resolution service
if p.instance != "" {
p.instance = strings.ToUpper(p.instance)
- instances, err := getInstances(dialCtx, p.host)
+ d := c.getDialer(&p)
+ instances, err := getInstances(dialCtx, d, p.host)
if err != nil {
f := "Unable to get instances from Sql Server Browser on host %v: %v"
return nil, fmt.Errorf(f, p.host, err.Error())
@@ -1202,7 +1202,7 @@ func connect(ctx context.Context, log optionalLogger, p connectParams) (res *tds
}
initiate_connection:
- conn, err := dialConnection(dialCtx, p)
+ conn, err := dialConnection(dialCtx, c, p)
if err != nil {
return nil, err
}
diff --git a/vendor/github.com/disintegration/imaging/resize.go b/vendor/github.com/disintegration/imaging/resize.go
index 0c78e47..97f498a 100644
--- a/vendor/github.com/disintegration/imaging/resize.go
+++ b/vendor/github.com/disintegration/imaging/resize.go
@@ -116,25 +116,23 @@ func resizeHorizontal(img image.Image, width int, filter ResampleFilter) *image.
for y := range ys {
src.scan(0, y, src.w, y+1, scanLine)
j0 := y * dst.Stride
- for x := range weights {
+ for x := 0; x < width; x++ {
var r, g, b, a float64
for _, w := range weights[x] {
i := w.index * 4
- s := scanLine[i : i+4 : i+4]
- aw := float64(s[3]) * w.weight
- r += float64(s[0]) * aw
- g += float64(s[1]) * aw
- b += float64(s[2]) * aw
+ aw := float64(scanLine[i+3]) * w.weight
+ r += float64(scanLine[i+0]) * aw
+ g += float64(scanLine[i+1]) * aw
+ b += float64(scanLine[i+2]) * aw
a += aw
}
if a != 0 {
aInv := 1 / a
j := j0 + x*4
- d := dst.Pix[j : j+4 : j+4]
- d[0] = clamp(r * aInv)
- d[1] = clamp(g * aInv)
- d[2] = clamp(b * aInv)
- d[3] = clamp(a)
+ dst.Pix[j+0] = clamp(r * aInv)
+ dst.Pix[j+1] = clamp(g * aInv)
+ dst.Pix[j+2] = clamp(b * aInv)
+ dst.Pix[j+3] = clamp(a)
}
}
}
@@ -150,25 +148,23 @@ func resizeVertical(img image.Image, height int, filter ResampleFilter) *image.N
scanLine := make([]uint8, src.h*4)
for x := range xs {
src.scan(x, 0, x+1, src.h, scanLine)
- for y := range weights {
+ for y := 0; y < height; y++ {
var r, g, b, a float64
for _, w := range weights[y] {
i := w.index * 4
- s := scanLine[i : i+4 : i+4]
- aw := float64(s[3]) * w.weight
- r += float64(s[0]) * aw
- g += float64(s[1]) * aw
- b += float64(s[2]) * aw
+ aw := float64(scanLine[i+3]) * w.weight
+ r += float64(scanLine[i+0]) * aw
+ g += float64(scanLine[i+1]) * aw
+ b += float64(scanLine[i+2]) * aw
a += aw
}
if a != 0 {
aInv := 1 / a
j := y*dst.Stride + x*4
- d := dst.Pix[j : j+4 : j+4]
- d[0] = clamp(r * aInv)
- d[1] = clamp(g * aInv)
- d[2] = clamp(b * aInv)
- d[3] = clamp(a)
+ dst.Pix[j+0] = clamp(r * aInv)
+ dst.Pix[j+1] = clamp(g * aInv)
+ dst.Pix[j+2] = clamp(b * aInv)
+ dst.Pix[j+3] = clamp(a)
}
}
}
diff --git a/vendor/github.com/disintegration/imaging/scanner.go b/vendor/github.com/disintegration/imaging/scanner.go
index 37d92ce..c4dbfe1 100644
--- a/vendor/github.com/disintegration/imaging/scanner.go
+++ b/vendor/github.com/disintegration/imaging/scanner.go
@@ -33,23 +33,10 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
size := (x2 - x1) * 4
j := 0
i := y1*img.Stride + x1*4
- if size == 4 {
- for y := y1; y < y2; y++ {
- d := dst[j : j+4 : j+4]
- s := img.Pix[i : i+4 : i+4]
- d[0] = s[0]
- d[1] = s[1]
- d[2] = s[2]
- d[3] = s[3]
- j += size
- i += img.Stride
- }
- } else {
- for y := y1; y < y2; y++ {
- copy(dst[j:j+size], img.Pix[i:i+size])
- j += size
- i += img.Stride
- }
+ for y := y1; y < y2; y++ {
+ copy(dst[j:j+size], img.Pix[i:i+size])
+ j += size
+ i += img.Stride
}
case *image.NRGBA64:
@@ -57,12 +44,10 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
for y := y1; y < y2; y++ {
i := y*img.Stride + x1*8
for x := x1; x < x2; x++ {
- s := img.Pix[i : i+8 : i+8]
- d := dst[j : j+4 : j+4]
- d[0] = s[0]
- d[1] = s[2]
- d[2] = s[4]
- d[3] = s[6]
+ dst[j+0] = img.Pix[i+0]
+ dst[j+1] = img.Pix[i+2]
+ dst[j+2] = img.Pix[i+4]
+ dst[j+3] = img.Pix[i+6]
j += 4
i += 8
}
@@ -73,31 +58,26 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
for y := y1; y < y2; y++ {
i := y*img.Stride + x1*4
for x := x1; x < x2; x++ {
- d := dst[j : j+4 : j+4]
a := img.Pix[i+3]
switch a {
case 0:
- d[0] = 0
- d[1] = 0
- d[2] = 0
- d[3] = a
+ dst[j+0] = 0
+ dst[j+1] = 0
+ dst[j+2] = 0
case 0xff:
- s := img.Pix[i : i+4 : i+4]
- d[0] = s[0]
- d[1] = s[1]
- d[2] = s[2]
- d[3] = a
+ dst[j+0] = img.Pix[i+0]
+ dst[j+1] = img.Pix[i+1]
+ dst[j+2] = img.Pix[i+2]
default:
- s := img.Pix[i : i+4 : i+4]
- r16 := uint16(s[0])
- g16 := uint16(s[1])
- b16 := uint16(s[2])
+ r16 := uint16(img.Pix[i+0])
+ g16 := uint16(img.Pix[i+1])
+ b16 := uint16(img.Pix[i+2])
a16 := uint16(a)
- d[0] = uint8(r16 * 0xff / a16)
- d[1] = uint8(g16 * 0xff / a16)
- d[2] = uint8(b16 * 0xff / a16)
- d[3] = a
+ dst[j+0] = uint8(r16 * 0xff / a16)
+ dst[j+1] = uint8(g16 * 0xff / a16)
+ dst[j+2] = uint8(b16 * 0xff / a16)
}
+ dst[j+3] = a
j += 4
i += 4
}
@@ -108,28 +88,26 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
for y := y1; y < y2; y++ {
i := y*img.Stride + x1*8
for x := x1; x < x2; x++ {
- s := img.Pix[i : i+8 : i+8]
- d := dst[j : j+4 : j+4]
- a := s[6]
+ a := img.Pix[i+6]
switch a {
case 0:
- d[0] = 0
- d[1] = 0
- d[2] = 0
+ dst[j+0] = 0
+ dst[j+1] = 0
+ dst[j+2] = 0
case 0xff:
- d[0] = s[0]
- d[1] = s[2]
- d[2] = s[4]
+ dst[j+0] = img.Pix[i+0]
+ dst[j+1] = img.Pix[i+2]
+ dst[j+2] = img.Pix[i+4]
default:
- r32 := uint32(s[0])<<8 | uint32(s[1])
- g32 := uint32(s[2])<<8 | uint32(s[3])
- b32 := uint32(s[4])<<8 | uint32(s[5])
- a32 := uint32(s[6])<<8 | uint32(s[7])
- d[0] = uint8((r32 * 0xffff / a32) >> 8)
- d[1] = uint8((g32 * 0xffff / a32) >> 8)
- d[2] = uint8((b32 * 0xffff / a32) >> 8)
+ r32 := uint32(img.Pix[i+0])<<8 | uint32(img.Pix[i+1])
+ g32 := uint32(img.Pix[i+2])<<8 | uint32(img.Pix[i+3])
+ b32 := uint32(img.Pix[i+4])<<8 | uint32(img.Pix[i+5])
+ a32 := uint32(img.Pix[i+6])<<8 | uint32(img.Pix[i+7])
+ dst[j+0] = uint8((r32 * 0xffff / a32) >> 8)
+ dst[j+1] = uint8((g32 * 0xffff / a32) >> 8)
+ dst[j+2] = uint8((b32 * 0xffff / a32) >> 8)
}
- d[3] = a
+ dst[j+3] = a
j += 4
i += 8
}
@@ -141,11 +119,10 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
i := y*img.Stride + x1
for x := x1; x < x2; x++ {
c := img.Pix[i]
- d := dst[j : j+4 : j+4]
- d[0] = c
- d[1] = c
- d[2] = c
- d[3] = 0xff
+ dst[j+0] = c
+ dst[j+1] = c
+ dst[j+2] = c
+ dst[j+3] = 0xff
j += 4
i++
}
@@ -157,11 +134,10 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
i := y*img.Stride + x1*2
for x := x1; x < x2; x++ {
c := img.Pix[i]
- d := dst[j : j+4 : j+4]
- d[0] = c
- d[1] = c
- d[2] = c
- d[3] = 0xff
+ dst[j+0] = c
+ dst[j+1] = c
+ dst[j+2] = c
+ dst[j+3] = 0xff
j += 4
i += 2
}
@@ -173,61 +149,52 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
x2 += img.Rect.Min.X
y1 += img.Rect.Min.Y
y2 += img.Rect.Min.Y
-
- hy := img.Rect.Min.Y / 2
- hx := img.Rect.Min.X / 2
for y := y1; y < y2; y++ {
iy := (y-img.Rect.Min.Y)*img.YStride + (x1 - img.Rect.Min.X)
-
- var yBase int
- switch img.SubsampleRatio {
- case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio422:
- yBase = (y - img.Rect.Min.Y) * img.CStride
- case image.YCbCrSubsampleRatio420, image.YCbCrSubsampleRatio440:
- yBase = (y/2 - hy) * img.CStride
- }
-
for x := x1; x < x2; x++ {
var ic int
switch img.SubsampleRatio {
- case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio440:
- ic = yBase + (x - img.Rect.Min.X)
- case image.YCbCrSubsampleRatio422, image.YCbCrSubsampleRatio420:
- ic = yBase + (x/2 - hx)
+ case image.YCbCrSubsampleRatio444:
+ ic = (y-img.Rect.Min.Y)*img.CStride + (x - img.Rect.Min.X)
+ case image.YCbCrSubsampleRatio422:
+ ic = (y-img.Rect.Min.Y)*img.CStride + (x/2 - img.Rect.Min.X/2)
+ case image.YCbCrSubsampleRatio420:
+ ic = (y/2-img.Rect.Min.Y/2)*img.CStride + (x/2 - img.Rect.Min.X/2)
+ case image.YCbCrSubsampleRatio440:
+ ic = (y/2-img.Rect.Min.Y/2)*img.CStride + (x - img.Rect.Min.X)
default:
ic = img.COffset(x, y)
}
- yy1 := int32(img.Y[iy]) * 0x10101
- cb1 := int32(img.Cb[ic]) - 128
- cr1 := int32(img.Cr[ic]) - 128
+ yy := int(img.Y[iy])
+ cb := int(img.Cb[ic]) - 128
+ cr := int(img.Cr[ic]) - 128
- r := yy1 + 91881*cr1
- if uint32(r)&0xff000000 == 0 {
- r >>= 16
- } else {
- r = ^(r >> 31)
+ r := (yy<<16 + 91881*cr + 1<<15) >> 16
+ if r > 0xff {
+ r = 0xff
+ } else if r < 0 {
+ r = 0
}
- g := yy1 - 22554*cb1 - 46802*cr1
- if uint32(g)&0xff000000 == 0 {
- g >>= 16
- } else {
- g = ^(g >> 31)
+ g := (yy<<16 - 22554*cb - 46802*cr + 1<<15) >> 16
+ if g > 0xff {
+ g = 0xff
+ } else if g < 0 {
+ g = 0
}
- b := yy1 + 116130*cb1
- if uint32(b)&0xff000000 == 0 {
- b >>= 16
- } else {
- b = ^(b >> 31)
+ b := (yy<<16 + 116130*cb + 1<<15) >> 16
+ if b > 0xff {
+ b = 0xff
+ } else if b < 0 {
+ b = 0
}
- d := dst[j : j+4 : j+4]
- d[0] = uint8(r)
- d[1] = uint8(g)
- d[2] = uint8(b)
- d[3] = 0xff
+ dst[j+0] = uint8(r)
+ dst[j+1] = uint8(g)
+ dst[j+2] = uint8(b)
+ dst[j+3] = 0xff
iy++
j += 4
@@ -240,11 +207,10 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
i := y*img.Stride + x1
for x := x1; x < x2; x++ {
c := s.palette[img.Pix[i]]
- d := dst[j : j+4 : j+4]
- d[0] = c.R
- d[1] = c.G
- d[2] = c.B
- d[3] = c.A
+ dst[j+0] = c.R
+ dst[j+1] = c.G
+ dst[j+2] = c.B
+ dst[j+3] = c.A
j += 4
i++
}
@@ -260,23 +226,22 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
for y := y1; y < y2; y++ {
for x := x1; x < x2; x++ {
r16, g16, b16, a16 := s.image.At(x, y).RGBA()
- d := dst[j : j+4 : j+4]
switch a16 {
case 0xffff:
- d[0] = uint8(r16 >> 8)
- d[1] = uint8(g16 >> 8)
- d[2] = uint8(b16 >> 8)
- d[3] = 0xff
+ dst[j+0] = uint8(r16 >> 8)
+ dst[j+1] = uint8(g16 >> 8)
+ dst[j+2] = uint8(b16 >> 8)
+ dst[j+3] = 0xff
case 0:
- d[0] = 0
- d[1] = 0
- d[2] = 0
- d[3] = 0
+ dst[j+0] = 0
+ dst[j+1] = 0
+ dst[j+2] = 0
+ dst[j+3] = 0
default:
- d[0] = uint8(((r16 * 0xffff) / a16) >> 8)
- d[1] = uint8(((g16 * 0xffff) / a16) >> 8)
- d[2] = uint8(((b16 * 0xffff) / a16) >> 8)
- d[3] = uint8(a16 >> 8)
+ dst[j+0] = uint8(((r16 * 0xffff) / a16) >> 8)
+ dst[j+1] = uint8(((g16 * 0xffff) / a16) >> 8)
+ dst[j+2] = uint8(((b16 * 0xffff) / a16) >> 8)
+ dst[j+3] = uint8(a16 >> 8)
}
j += 4
}
diff --git a/vendor/github.com/gin-gonic/gin/Makefile b/vendor/github.com/gin-gonic/gin/Makefile
index b0d2e24..51b9969 100644
--- a/vendor/github.com/gin-gonic/gin/Makefile
+++ b/vendor/github.com/gin-gonic/gin/Makefile
@@ -1,9 +1,7 @@
-GO ?= go
GOFMT ?= gofmt "-s"
-PACKAGES ?= $(shell $(GO) list ./... | grep -v /vendor/)
-VETPACKAGES ?= $(shell $(GO) list ./... | grep -v /vendor/ | grep -v /examples/)
+PACKAGES ?= $(shell go list ./... | grep -v /vendor/)
+VETPACKAGES ?= $(shell go list ./... | grep -v /vendor/ | grep -v /examples/)
GOFILES := $(shell find . -name "*.go" -type f -not -path "./vendor/*")
-TESTFOLDER := $(shell $(GO) list ./... | grep -E 'gin$$|binding$$|render$$' | grep -v examples)
all: install
@@ -12,19 +10,7 @@ install: deps
.PHONY: test
test:
- echo "mode: count" > coverage.out
- for d in $(TESTFOLDER); do \
- $(GO) test -v -covermode=count -coverprofile=profile.out $$d > tmp.out; \
- cat tmp.out; \
- if grep -q "^--- FAIL" tmp.out; then \
- rm tmp.out; \
- exit 1;\
- fi; \
- if [ -f profile.out ]; then \
- cat profile.out | grep -v "mode:" >> coverage.out; \
- rm profile.out; \
- fi; \
- done
+ sh coverage.sh
.PHONY: fmt
fmt:
@@ -32,6 +18,7 @@ fmt:
.PHONY: fmt-check
fmt-check:
+ # get all go files and run go fmt on them
@diff=$$($(GOFMT) -d $(GOFILES)); \
if [ -n "$$diff" ]; then \
echo "Please run 'make fmt' and commit the result:"; \
@@ -40,14 +27,14 @@ fmt-check:
fi;
vet:
- $(GO) vet $(VETPACKAGES)
+ go vet $(VETPACKAGES)
deps:
@hash govendor > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
- $(GO) get -u github.com/kardianos/govendor; \
+ go get -u github.com/kardianos/govendor; \
fi
@hash embedmd > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
- $(GO) get -u github.com/campoy/embedmd; \
+ go get -u github.com/campoy/embedmd; \
fi
embedmd:
@@ -56,26 +43,20 @@ embedmd:
.PHONY: lint
lint:
@hash golint > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
- $(GO) get -u golang.org/x/lint/golint; \
+ go get -u github.com/golang/lint/golint; \
fi
for PKG in $(PACKAGES); do golint -set_exit_status $$PKG || exit 1; done;
.PHONY: misspell-check
misspell-check:
@hash misspell > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
- $(GO) get -u github.com/client9/misspell/cmd/misspell; \
+ go get -u github.com/client9/misspell/cmd/misspell; \
fi
misspell -error $(GOFILES)
.PHONY: misspell
misspell:
@hash misspell > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
- $(GO) get -u github.com/client9/misspell/cmd/misspell; \
+ go get -u github.com/client9/misspell/cmd/misspell; \
fi
misspell -w $(GOFILES)
-
-.PHONY: tools
-tools:
- go install golang.org/x/lint/golint; \
- go install github.com/client9/misspell/cmd/misspell; \
- go install github.com/campoy/embedmd;
diff --git a/vendor/github.com/gin-gonic/gin/README.md b/vendor/github.com/gin-gonic/gin/README.md
index e7b92b2..28598ba 100644
--- a/vendor/github.com/gin-gonic/gin/README.md
+++ b/vendor/github.com/gin-gonic/gin/README.md
@@ -9,7 +9,6 @@
[](https://gitter.im/gin-gonic/gin?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://sourcegraph.com/github.com/gin-gonic/gin?badge)
[](https://www.codetriage.com/gin-gonic/gin)
-[](https://github.com/gin-gonic/gin/releases)
Gin is a web framework written in Go (Golang). It features a martini-like API with much better performance, up to 40 times faster thanks to [httprouter](https://github.com/julienschmidt/httprouter). If you need performance and good productivity, you will love Gin.
@@ -39,10 +38,9 @@ Gin is a web framework written in Go (Golang). It features a martini-like API wi
- [Custom Validators](#custom-validators)
- [Only Bind Query String](#only-bind-query-string)
- [Bind Query String or Post Data](#bind-query-string-or-post-data)
- - [Bind Uri](#bind-uri)
- [Bind HTML checkboxes](#bind-html-checkboxes)
- [Multipart/Urlencoded binding](#multiparturlencoded-binding)
- - [XML, JSON, YAML and ProtoBuf rendering](#xml-json-yaml-and-protobuf-rendering)
+ - [XML, JSON and YAML rendering](#xml-json-and-yaml-rendering)
- [JSONP rendering](#jsonp)
- [Serving static files](#serving-static-files)
- [Serving data from reader](#serving-data-from-reader)
@@ -60,10 +58,8 @@ Gin is a web framework written in Go (Golang). It features a martini-like API wi
- [Bind form-data request with custom struct](#bind-form-data-request-with-custom-struct)
- [Try to bind body into different structs](#try-to-bind-body-into-different-structs)
- [http2 server push](#http2-server-push)
- - [Define format for the log of routes](#define-format-for-the-log-of-routes)
- - [Set and get a cookie](#set-and-get-a-cookie)
- [Testing](#testing)
-- [Users](#users)
+- [Users](#users--)
## Installation
@@ -104,7 +100,7 @@ $ mkdir -p $GOPATH/src/github.com/myusername/project && cd "$_"
```sh
$ govendor init
-$ govendor fetch github.com/gin-gonic/gin@v1.3
+$ govendor fetch github.com/gin-gonic/gin@v1.2
```
4. Copy a starting template inside your project
@@ -202,7 +198,7 @@ BenchmarkVulcan_GithubAll | 5000 | 394253 | 19894
## Build with [jsoniter](https://github.com/json-iterator/go)
-Gin uses `encoding/json` as default json package but you can change to [jsoniter](https://github.com/json-iterator/go) by build from other tags.
+Gin use `encoding/json` as default json package but you can change to [jsoniter](https://github.com/json-iterator/go) by build from other tags.
```sh
$ go build -tags=jsoniter .
@@ -530,7 +526,7 @@ func main() {
### Model binding and validation
-To bind a request body into a type, use model binding. We currently support binding of JSON, XML, YAML and standard form values (foo=bar&boo=baz).
+To bind a request body into a type, use model binding. We currently support binding of JSON, XML and standard form values (foo=bar&boo=baz).
Gin uses [**go-playground/validator.v8**](https://github.com/go-playground/validator) for validation. Check the full docs on tags usage [here](http://godoc.org/gopkg.in/go-playground/validator.v8#hdr-Baked_In_Validators_and_Tags).
@@ -538,10 +534,10 @@ Note that you need to set the corresponding binding tag on all fields you want t
Also, Gin provides two sets of methods for binding:
- **Type** - Must bind
- - **Methods** - `Bind`, `BindJSON`, `BindXML`, `BindQuery`, `BindYAML`
+ - **Methods** - `Bind`, `BindJSON`, `BindQuery`
- **Behavior** - These methods use `MustBindWith` under the hood. If there is a binding error, the request is aborted with `c.AbortWithError(400, err).SetType(ErrorTypeBind)`. This sets the response status code to 400 and the `Content-Type` header is set to `text/plain; charset=utf-8`. Note that if you try to set the response code after this, it will result in a warning `[GIN-debug] [WARNING] Headers were already written. Wanted to override status code 400 with 422`. If you wish to have greater control over the behavior, consider using the `ShouldBind` equivalent method.
- **Type** - Should bind
- - **Methods** - `ShouldBind`, `ShouldBindJSON`, `ShouldBindXML`, `ShouldBindQuery`, `ShouldBindYAML`
+ - **Methods** - `ShouldBind`, `ShouldBindJSON`, `ShouldBindQuery`
- **Behavior** - These methods use `ShouldBindWith` under the hood. If there is a binding error, the error is returned and it is the developer's responsibility to handle the request and error appropriately.
When using the Bind-method, Gin tries to infer the binder depending on the Content-Type header. If you are sure what you are binding, you can use `MustBindWith` or `ShouldBindWith`.
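A minimal side-by-side sketch of the two method families described above — `BindJSON` aborting the request with 400 on its own versus `ShouldBindJSON` handing the error back to the handler. The `Probe` struct, the routes and the port are illustrative only, not taken from the patch:

```go
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

// Probe is a throwaway struct used only for this sketch.
type Probe struct {
	Name string `json:"name" binding:"required"`
}

func main() {
	r := gin.Default()

	// Must-bind family: on error gin itself aborts the request with 400.
	r.POST("/must", func(c *gin.Context) {
		var p Probe
		if c.BindJSON(&p) != nil {
			return // response already written by MustBindWith
		}
		c.JSON(http.StatusOK, gin.H{"name": p.Name})
	})

	// Should-bind family: the error comes back and the handler decides.
	r.POST("/should", func(c *gin.Context) {
		var p Probe
		if err := c.ShouldBindJSON(&p); err != nil {
			c.JSON(http.StatusUnprocessableEntity, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusOK, gin.H{"name": p.Name})
	})

	r.Run(":8080")
}
```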
@@ -551,8 +547,8 @@ You can also specify that specific fields are required. If a field is decorated
```go
// Binding from JSON
type Login struct {
- User string `form:"user" json:"user" xml:"user" binding:"required"`
- Password string `form:"password" json:"password" xml:"password" binding:"required"`
+ User string `form:"user" json:"user" binding:"required"`
+ Password string `form:"password" json:"password" binding:"required"`
}
func main() {
@@ -561,55 +557,30 @@ func main() {
// Example for binding JSON ({"user": "manu", "password": "123"})
router.POST("/loginJSON", func(c *gin.Context) {
var json Login
- if err := c.ShouldBindJSON(&json); err != nil {
+ if err := c.ShouldBindJSON(&json); err == nil {
+ if json.User == "manu" && json.Password == "123" {
+ c.JSON(http.StatusOK, gin.H{"status": "you are logged in"})
+ } else {
+ c.JSON(http.StatusUnauthorized, gin.H{"status": "unauthorized"})
+ }
+ } else {
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
}
-
- if json.User != "manu" || json.Password != "123" {
- c.JSON(http.StatusUnauthorized, gin.H{"status": "unauthorized"})
- return
- }
-
- c.JSON(http.StatusOK, gin.H{"status": "you are logged in"})
- })
-
- // Example for binding XML (
- // <?xml version="1.0" encoding="UTF-8"?>
- // <root>
- //   <user>user</user>
- //   <password>123</password>
- // </root>)
- router.POST("/loginXML", func(c *gin.Context) {
- var xml Login
- if err := c.ShouldBindXML(&xml); err != nil {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
-
- if xml.User != "manu" || xml.Password != "123" {
- c.JSON(http.StatusUnauthorized, gin.H{"status": "unauthorized"})
- return
- }
-
- c.JSON(http.StatusOK, gin.H{"status": "you are logged in"})
})
// Example for binding a HTML form (user=manu&password=123)
router.POST("/loginForm", func(c *gin.Context) {
var form Login
// This will infer what binder to use depending on the content-type header.
- if err := c.ShouldBind(&form); err != nil {
+ if err := c.ShouldBind(&form); err == nil {
+ if form.User == "manu" && form.Password == "123" {
+ c.JSON(http.StatusOK, gin.H{"status": "you are logged in"})
+ } else {
+ c.JSON(http.StatusUnauthorized, gin.H{"status": "unauthorized"})
+ }
+ } else {
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
}
-
- if form.User != "manu" || form.Password != "123" {
- c.JSON(http.StatusUnauthorized, gin.H{"status": "unauthorized"})
- return
- }
-
- c.JSON(http.StatusOK, gin.H{"status": "you are logged in"})
})
// Listen and serve on 0.0.0.0:8080
@@ -661,7 +632,6 @@ import (
"gopkg.in/go-playground/validator.v8"
)
-// Booking contains binded and validated data.
type Booking struct {
CheckIn time.Time `form:"check_in" binding:"required,bookabledate" time_format:"2006-01-02"`
CheckOut time.Time `form:"check_out" binding:"required,gtfield=CheckIn" time_format:"2006-01-02"`
@@ -709,7 +679,7 @@ $ curl "localhost:8085/bookable?check_in=2018-03-08&check_out=2018-03-09"
{"error":"Key: 'Booking.CheckIn' Error:Field validation for 'CheckIn' failed on the 'bookabledate' tag"}
```
-[Struct level validations](https://github.com/go-playground/validator/releases/tag/v8.7) can also be registered this way.
+[Struct level validations](https://github.com/go-playground/validator/releases/tag/v8.7) can also be registed this way.
See the [struct-lvl-validation example](examples/struct-lvl-validations) to learn more.
### Only Bind Query String
@@ -755,12 +725,9 @@ See the [detail information](https://github.com/gin-gonic/gin/issues/742#issueco
```go
package main
-import (
- "log"
- "time"
-
- "github.com/gin-gonic/gin"
-)
+import "log"
+import "github.com/gin-gonic/gin"
+import "time"
type Person struct {
Name string `form:"name"`
@@ -794,40 +761,6 @@ Test it with:
$ curl -X GET "localhost:8085/testing?name=appleboy&address=xyz&birthday=1992-03-15"
```
-### Bind Uri
-
-See the [detail information](https://github.com/gin-gonic/gin/issues/846).
-
-```go
-package main
-
-import "github.com/gin-gonic/gin"
-
-type Person struct {
- ID string `uri:"id" binding:"required,uuid"`
- Name string `uri:"name" binding:"required"`
-}
-
-func main() {
- route := gin.Default()
- route.GET("/:name/:id", func(c *gin.Context) {
- var person Person
- if err := c.ShouldBindUri(&person); err != nil {
- c.JSON(400, gin.H{"msg": err})
- return
- }
- c.JSON(200, gin.H{"name": person.Name, "uuid": person.ID})
- })
- route.Run(":8088")
-}
-```
-
-Test it with:
-```sh
-$ curl -v localhost:8088/thinkerou/987fbc97-4bed-5078-9f07-9141ba07c9f3
-$ curl -v localhost:8088/thinkerou/not-uuid
-```
-
### Bind HTML checkboxes
See the [detail information](https://github.com/gin-gonic/gin/issues/129#issuecomment-124260092)
@@ -859,12 +792,12 @@ form.html
```
@@ -913,7 +846,7 @@ Test it with:
$ curl -v --form user=user --form password=password http://localhost:8080/login
```
-### XML, JSON, YAML and ProtoBuf rendering
+### XML, JSON and YAML rendering
```go
func main() {
@@ -947,19 +880,6 @@ func main() {
c.YAML(http.StatusOK, gin.H{"message": "hey", "status": http.StatusOK})
})
- r.GET("/someProtoBuf", func(c *gin.Context) {
- reps := []int64{int64(1), int64(2)}
- label := "test"
- // The specific definition of protobuf is written in the testdata/protoexample file.
- data := &protoexample.Test{
- Label: &label,
- Reps: reps,
- }
- // Note that data becomes binary data in the response
- // Will output protoexample.Test protobuf serialized data
- c.ProtoBuf(http.StatusOK, data)
- })
-
// Listen and serve on 0.0.0.0:8080
r.Run(":8080")
}
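For the renderers kept by this hunk, a compact sketch serving the same `gin.H` payload through `c.JSON`, `c.XML` and `c.YAML`; the route names and port are placeholders, not part of the patch:

```go
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.Default()
	payload := gin.H{"message": "hey", "status": http.StatusOK}

	// Same data, three encoders; gin sets a matching Content-Type for each.
	r.GET("/someJSON", func(c *gin.Context) { c.JSON(http.StatusOK, payload) })
	r.GET("/someXML", func(c *gin.Context) { c.XML(http.StatusOK, payload) })
	r.GET("/someYAML", func(c *gin.Context) { c.YAML(http.StatusOK, payload) })

	r.Run(":8080")
}
```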
@@ -1033,34 +953,6 @@ func main() {
}
```
-#### PureJSON
-
-Normally, JSON replaces special HTML characters with their unicode entities, e.g. `<` becomes `\u003c`. If you want to encode such characters literally, you can use PureJSON instead.
-This feature is unavailable in Go 1.6 and lower.
-
-```go
-func main() {
- r := gin.Default()
-
- // Serves unicode entities
- r.GET("/json", func(c *gin.Context) {
- c.JSON(200, gin.H{
- "html": "Hello, world!",
- })
- })
-
- // Serves literal characters
- r.GET("/purejson", func(c *gin.Context) {
- c.PureJSON(200, gin.H{
- "html": "Hello, world!",
- })
- })
-
- // listen and serve on 0.0.0.0:8080
- r.Run(":8080")
-}
-```
-
### Serving static files
```go
@@ -1195,7 +1087,7 @@ You may use custom delims
```go
r := gin.Default()
r.Delims("{[{", "}]}")
- r.LoadHTMLGlob("/path/to/templates")
+ r.LoadHTMLGlob("/path/to/templates"))
```
#### Custom Template Funcs
@@ -1757,11 +1649,11 @@ type StructX struct {
}
type StructY struct {
- Y StructX `form:"name_y"` // HERE have form
+ Y StructX `form:"name_y"` // HERE hava form
}
type StructZ struct {
- Z *StructZ `form:"name_z"` // HERE have form
+ Z *StructZ `form:"name_z"` // HERE hava form
}
```
@@ -1874,78 +1766,6 @@ func main() {
}
```
-### Define format for the log of routes
-
-The default log of routes is:
-```
-[GIN-debug] POST /foo --> main.main.func1 (3 handlers)
-[GIN-debug] GET /bar --> main.main.func2 (3 handlers)
-[GIN-debug] GET /status --> main.main.func3 (3 handlers)
-```
-
-If you want to log this information in given format (e.g. JSON, key values or something else), then you can define this format with `gin.DebugPrintRouteFunc`.
-In the example below, we log all routes with standard log package but you can use another log tools that suits of your needs.
-```go
-import (
- "log"
- "net/http"
-
- "github.com/gin-gonic/gin"
-)
-
-func main() {
- r := gin.Default()
- gin.DebugPrintRouteFunc = func(httpMethod, absolutePath, handlerName string, nuHandlers int) {
- log.Printf("endpoint %v %v %v %v\n", httpMethod, absolutePath, handlerName, nuHandlers)
- }
-
- r.POST("/foo", func(c *gin.Context) {
- c.JSON(http.StatusOK, "foo")
- })
-
- r.GET("/bar", func(c *gin.Context) {
- c.JSON(http.StatusOK, "bar")
- })
-
- r.GET("/status", func(c *gin.Context) {
- c.JSON(http.StatusOK, "ok")
- })
-
- // Listen and Server in http://0.0.0.0:8080
- r.Run()
-}
-```
-
-### Set and get a cookie
-
-```go
-import (
- "fmt"
-
- "github.com/gin-gonic/gin"
-)
-
-func main() {
-
- router := gin.Default()
-
- router.GET("/cookie", func(c *gin.Context) {
-
- cookie, err := c.Cookie("gin_cookie")
-
- if err != nil {
- cookie = "NotSet"
- c.SetCookie("gin_cookie", "test", 3600, "/", "localhost", false, true)
- }
-
- fmt.Printf("Cookie value: %s \n", cookie)
- })
-
- router.Run()
-}
-```
-
-
## Testing
The `net/http/httptest` package is preferable way for HTTP testing.
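The next hunk only touches the tail of the README's `httptest` example, so here is a self-contained sketch of that testing pattern as a `_test.go` file, using the standard `testing` package instead of an assertion library; `setupRouter` and the `/ping` route are assumptions for illustration:

```go
package main

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/gin-gonic/gin"
)

func setupRouter() *gin.Engine {
	r := gin.Default()
	r.GET("/ping", func(c *gin.Context) {
		c.String(http.StatusOK, "pong")
	})
	return r
}

func TestPingRoute(t *testing.T) {
	router := setupRouter()

	// Record the response without opening a real socket.
	w := httptest.NewRecorder()
	req, _ := http.NewRequest("GET", "/ping", nil)
	router.ServeHTTP(w, req)

	if w.Code != http.StatusOK {
		t.Fatalf("expected status 200, got %d", w.Code)
	}
	if w.Body.String() != "pong" {
		t.Fatalf("expected body %q, got %q", "pong", w.Body.String())
	}
}
```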
@@ -1996,9 +1816,5 @@ func TestPingRoute(t *testing.T) {
Awesome project lists using [Gin](https://github.com/gin-gonic/gin) web framework.
-* [drone](https://github.com/drone/drone): Drone is a Continuous Delivery platform built on Docker, written in Go.
+* [drone](https://github.com/drone/drone): Drone is a Continuous Delivery platform built on Docker, written in Go
* [gorush](https://github.com/appleboy/gorush): A push notification server written in Go.
-* [fnproject](https://github.com/fnproject/fn): The container native, cloud agnostic serverless platform.
-* [photoprism](https://github.com/photoprism/photoprism): Personal photo management powered by Go and Google TensorFlow.
-* [krakend](https://github.com/devopsfaith/krakend): Ultra performant API Gateway with middlewares.
-* [picfit](https://github.com/thoas/picfit): An image resizing server written in Go.
diff --git a/vendor/github.com/gin-gonic/gin/binding/binding.go b/vendor/github.com/gin-gonic/gin/binding/binding.go
index 26d71c9..3a2aad9 100644
--- a/vendor/github.com/gin-gonic/gin/binding/binding.go
+++ b/vendor/github.com/gin-gonic/gin/binding/binding.go
@@ -18,7 +18,6 @@ const (
MIMEPROTOBUF = "application/x-protobuf"
MIMEMSGPACK = "application/x-msgpack"
MIMEMSGPACK2 = "application/msgpack"
- MIMEYAML = "application/x-yaml"
)
// Binding describes the interface which needs to be implemented for binding the
@@ -36,16 +35,9 @@ type BindingBody interface {
BindBody([]byte, interface{}) error
}
-// BindingUri adds BindUri method to Binding. BindUri is similar with Bind,
-// but it read the Params.
-type BindingUri interface {
- Name() string
- BindUri(map[string][]string, interface{}) error
-}
-
// StructValidator is the minimal interface which needs to be implemented in
// order for it to be used as the validator engine for ensuring the correctness
-// of the request. Gin provides a default implementation for this using
+// of the reqest. Gin provides a default implementation for this using
// https://github.com/go-playground/validator/tree/v8.18.2.
type StructValidator interface {
// ValidateStruct can receive any kind of type and it should never panic, even if the configuration is not right.
@@ -76,8 +68,6 @@ var (
FormMultipart = formMultipartBinding{}
ProtoBuf = protobufBinding{}
MsgPack = msgpackBinding{}
- YAML = yamlBinding{}
- Uri = uriBinding{}
)
// Default returns the appropriate Binding instance based on the HTTP method
@@ -96,8 +86,6 @@ func Default(method, contentType string) Binding {
return ProtoBuf
case MIMEMSGPACK, MIMEMSGPACK2:
return MsgPack
- case MIMEYAML:
- return YAML
default: //case MIMEPOSTForm, MIMEMultipartPOSTForm:
return Form
}
diff --git a/vendor/github.com/gin-gonic/gin/binding/form_mapping.go b/vendor/github.com/gin-gonic/gin/binding/form_mapping.go
index d893c21..3f6b9bf 100644
--- a/vendor/github.com/gin-gonic/gin/binding/form_mapping.go
+++ b/vendor/github.com/gin-gonic/gin/binding/form_mapping.go
@@ -12,15 +12,7 @@ import (
"time"
)
-func mapUri(ptr interface{}, m map[string][]string) error {
- return mapFormByTag(ptr, m, "uri")
-}
-
func mapForm(ptr interface{}, form map[string][]string) error {
- return mapFormByTag(ptr, form, "form")
-}
-
-func mapFormByTag(ptr interface{}, form map[string][]string, tag string) error {
typ := reflect.TypeOf(ptr).Elem()
val := reflect.ValueOf(ptr).Elem()
for i := 0; i < typ.NumField(); i++ {
@@ -31,7 +23,7 @@ func mapFormByTag(ptr interface{}, form map[string][]string, tag string) error {
}
structFieldKind := structField.Kind()
- inputFieldName := typeField.Tag.Get(tag)
+ inputFieldName := typeField.Tag.Get("form")
inputFieldNameList := strings.Split(inputFieldName, ",")
inputFieldName = inputFieldNameList[0]
var defaultValue string
@@ -82,16 +74,16 @@ func mapFormByTag(ptr interface{}, form map[string][]string, tag string) error {
}
}
val.Field(i).Set(slice)
- continue
- }
- if _, isTime := structField.Interface().(time.Time); isTime {
- if err := setTimeField(inputValue[0], typeField, structField); err != nil {
+ } else {
+ if _, isTime := structField.Interface().(time.Time); isTime {
+ if err := setTimeField(inputValue[0], typeField, structField); err != nil {
+ return err
+ }
+ continue
+ }
+ if err := setWithProperType(typeField.Type.Kind(), inputValue[0], structField); err != nil {
return err
}
- continue
- }
- if err := setWithProperType(typeField.Type.Kind(), inputValue[0], structField); err != nil {
- return err
}
}
return nil
@@ -186,7 +178,7 @@ func setFloatField(val string, bitSize int, field reflect.Value) error {
func setTimeField(val string, structField reflect.StructField, value reflect.Value) error {
timeFormat := structField.Tag.Get("time_format")
if timeFormat == "" {
- timeFormat = time.RFC3339
+ return errors.New("Blank time format")
}
if val == "" {
diff --git a/vendor/github.com/gin-gonic/gin/binding/json.go b/vendor/github.com/gin-gonic/gin/binding/json.go
index f968161..fea17bb 100644
--- a/vendor/github.com/gin-gonic/gin/binding/json.go
+++ b/vendor/github.com/gin-gonic/gin/binding/json.go
@@ -6,11 +6,10 @@ package binding
import (
"bytes"
- "fmt"
"io"
"net/http"
- "github.com/gin-gonic/gin/internal/json"
+ "github.com/gin-gonic/gin/json"
)
// EnableDecoderUseNumber is used to call the UseNumber method on the JSON
@@ -25,9 +24,6 @@ func (jsonBinding) Name() string {
}
func (jsonBinding) Bind(req *http.Request, obj interface{}) error {
- if req == nil || req.Body == nil {
- return fmt.Errorf("invalid request")
- }
return decodeJSON(req.Body, obj)
}
diff --git a/vendor/github.com/gin-gonic/gin/binding/protobuf.go b/vendor/github.com/gin-gonic/gin/binding/protobuf.go
index f9ece92..540e9c1 100644
--- a/vendor/github.com/gin-gonic/gin/binding/protobuf.go
+++ b/vendor/github.com/gin-gonic/gin/binding/protobuf.go
@@ -29,7 +29,7 @@ func (protobufBinding) BindBody(body []byte, obj interface{}) error {
if err := proto.Unmarshal(body, obj.(proto.Message)); err != nil {
return err
}
- // Here it's same to return validate(obj), but util now we can't add
+ // Here it's same to return validate(obj), but util now we cann't add
// `binding:""` to the struct which automatically generate by gen-proto
return nil
// return validate(obj)
diff --git a/vendor/github.com/gin-gonic/gin/context.go b/vendor/github.com/gin-gonic/gin/context.go
index 478e8c0..724ded7 100644
--- a/vendor/github.com/gin-gonic/gin/context.go
+++ b/vendor/github.com/gin-gonic/gin/context.go
@@ -31,7 +31,6 @@ const (
MIMEPlain = binding.MIMEPlain
MIMEPOSTForm = binding.MIMEPOSTForm
MIMEMultipartPOSTForm = binding.MIMEMultipartPOSTForm
- MIMEYAML = binding.MIMEYAML
BodyBytesKey = "_gin-gonic/gin/bodybyteskey"
)
@@ -415,6 +414,7 @@ func (c *Context) PostFormArray(key string) []string {
// a boolean value whether at least one value exists for the given key.
func (c *Context) GetPostFormArray(key string) ([]string, bool) {
req := c.Request
+ req.ParseForm()
req.ParseMultipartForm(c.engine.MaxMultipartMemory)
if values := req.PostForm[key]; len(values) > 0 {
return values, true
@@ -437,6 +437,7 @@ func (c *Context) PostFormMap(key string) map[string]string {
// whether at least one value exists for the given key.
func (c *Context) GetPostFormMap(key string) (map[string]string, bool) {
req := c.Request
+ req.ParseForm()
req.ParseMultipartForm(c.engine.MaxMultipartMemory)
dicts, exist := c.get(req.PostForm, key)
@@ -464,11 +465,6 @@ func (c *Context) get(m map[string][]string, key string) (map[string]string, boo
// FormFile returns the first file for the provided form key.
func (c *Context) FormFile(name string) (*multipart.FileHeader, error) {
- if c.Request.MultipartForm == nil {
- if err := c.Request.ParseMultipartForm(c.engine.MaxMultipartMemory); err != nil {
- return nil, err
- }
- }
_, fh, err := c.Request.FormFile(name)
return fh, err
}
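Since the hunk above changes how `FormFile` behaves (the explicit `ParseMultipartForm` guard is dropped), a small caller-side sketch of the multipart API it serves may help; the route, memory limit and field name are illustrative assumptions:

```go
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.Default()
	// 8 MB in-memory limit for multipart forms; larger parts spill to temp files.
	r.MaxMultipartMemory = 8 << 20

	r.POST("/upload", func(c *gin.Context) {
		fh, err := c.FormFile("file")
		if err != nil {
			c.String(http.StatusBadRequest, "form file error: %s", err.Error())
			return
		}
		// fh is a *multipart.FileHeader; open it to read the uploaded bytes.
		f, err := fh.Open()
		if err != nil {
			c.String(http.StatusInternalServerError, "open error: %s", err.Error())
			return
		}
		defer f.Close()
		c.String(http.StatusOK, "received %q (%d bytes)", fh.Filename, fh.Size)
	})

	r.Run(":8080")
}
```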
@@ -515,23 +511,13 @@ func (c *Context) BindJSON(obj interface{}) error {
return c.MustBindWith(obj, binding.JSON)
}
-// BindXML is a shortcut for c.MustBindWith(obj, binding.BindXML).
-func (c *Context) BindXML(obj interface{}) error {
- return c.MustBindWith(obj, binding.XML)
-}
-
// BindQuery is a shortcut for c.MustBindWith(obj, binding.Query).
func (c *Context) BindQuery(obj interface{}) error {
return c.MustBindWith(obj, binding.Query)
}
-// BindYAML is a shortcut for c.MustBindWith(obj, binding.YAML).
-func (c *Context) BindYAML(obj interface{}) error {
- return c.MustBindWith(obj, binding.YAML)
-}
-
// MustBindWith binds the passed struct pointer using the specified binding engine.
-// It will abort the request with HTTP 400 if any error occurs.
+// It will abort the request with HTTP 400 if any error ocurrs.
// See the binding package.
func (c *Context) MustBindWith(obj interface{}, b binding.Binding) (err error) {
if err = c.ShouldBindWith(obj, b); err != nil {
@@ -559,30 +545,11 @@ func (c *Context) ShouldBindJSON(obj interface{}) error {
return c.ShouldBindWith(obj, binding.JSON)
}
-// ShouldBindXML is a shortcut for c.ShouldBindWith(obj, binding.XML).
-func (c *Context) ShouldBindXML(obj interface{}) error {
- return c.ShouldBindWith(obj, binding.XML)
-}
-
// ShouldBindQuery is a shortcut for c.ShouldBindWith(obj, binding.Query).
func (c *Context) ShouldBindQuery(obj interface{}) error {
return c.ShouldBindWith(obj, binding.Query)
}
-// ShouldBindYAML is a shortcut for c.ShouldBindWith(obj, binding.YAML).
-func (c *Context) ShouldBindYAML(obj interface{}) error {
- return c.ShouldBindWith(obj, binding.YAML)
-}
-
-// ShouldBindUri binds the passed struct pointer using the specified binding engine.
-func (c *Context) ShouldBindUri(obj interface{}) error {
- m := make(map[string][]string)
- for _, v := range c.Params {
- m[v.Key] = []string{v.Value}
- }
- return binding.Uri.BindUri(m, obj)
-}
-
// ShouldBindWith binds the passed struct pointer using the specified binding engine.
// See the binding package.
func (c *Context) ShouldBindWith(obj interface{}, b binding.Binding) error {
@@ -594,7 +561,9 @@ func (c *Context) ShouldBindWith(obj interface{}, b binding.Binding) error {
//
// NOTE: This method reads the body before binding. So you should use
// ShouldBindWith for better performance if you need to call only once.
-func (c *Context) ShouldBindBodyWith(obj interface{}, bb binding.BindingBody) (err error) {
+func (c *Context) ShouldBindBodyWith(
+ obj interface{}, bb binding.BindingBody,
+) (err error) {
var body []byte
if cb, ok := c.Get(BodyBytesKey); ok {
if cbb, ok := cb.([]byte); ok {
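`ShouldBindBodyWith` caches the request body on the context, which is what makes the "try to bind body into different structs" pattern work; a sketch of that pattern, with `formA`/`formB` as placeholder types:

```go
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"github.com/gin-gonic/gin/binding"
)

type formA struct {
	Foo string `json:"foo" binding:"required"`
}

type formB struct {
	Bar string `json:"bar" binding:"required"`
}

func main() {
	r := gin.Default()
	r.POST("/", func(c *gin.Context) {
		objA := formA{}
		objB := formB{}
		// ShouldBindBodyWith stores the body in the context, so the second
		// attempt can reuse it; a plain ShouldBind would find it drained.
		if err := c.ShouldBindBodyWith(&objA, binding.JSON); err == nil {
			c.String(http.StatusOK, "the body should be formA")
		} else if err := c.ShouldBindBodyWith(&objB, binding.JSON); err == nil {
			c.String(http.StatusOK, "the body should be formB")
		} else {
			c.String(http.StatusBadRequest, "unknown body")
		}
	})
	r.Run(":8080")
}
```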
@@ -686,9 +655,9 @@ func (c *Context) Status(code int) {
func (c *Context) Header(key, value string) {
if value == "" {
c.Writer.Header().Del(key)
- return
+ } else {
+ c.Writer.Header().Set(key, value)
}
- c.Writer.Header().Set(key, value)
}
// GetHeader returns value from request headers.
@@ -732,7 +701,6 @@ func (c *Context) Cookie(name string) (string, error) {
return val, nil
}
-// Render writes the response headers and calls render.Render to render data.
func (c *Context) Render(code int, r render.Render) {
c.Status(code)
@@ -777,9 +745,9 @@ func (c *Context) JSONP(code int, obj interface{}) {
callback := c.DefaultQuery("callback", "")
if callback == "" {
c.Render(code, render.JSON{Data: obj})
- return
+ } else {
+ c.Render(code, render.JsonpJSON{Callback: callback, Data: obj})
}
- c.Render(code, render.JsonpJSON{Callback: callback, Data: obj})
}
// JSON serializes the given struct as JSON into the response body.
@@ -805,11 +773,6 @@ func (c *Context) YAML(code int, obj interface{}) {
c.Render(code, render.YAML{Data: obj})
}
-// ProtoBuf serializes the given struct as ProtoBuf into the response body.
-func (c *Context) ProtoBuf(code int, obj interface{}) {
- c.Render(code, render.ProtoBuf{Data: obj})
-}
-
// String writes the given string into the response body.
func (c *Context) String(code int, format string, values ...interface{}) {
c.Render(code, render.String{Format: format, Data: values})
@@ -855,7 +818,6 @@ func (c *Context) SSEvent(name string, message interface{}) {
})
}
-// Stream sends a streaming response.
func (c *Context) Stream(step func(w io.Writer) bool) {
w := c.Writer
clientGone := w.CloseNotify()
@@ -877,7 +839,6 @@ func (c *Context) Stream(step func(w io.Writer) bool) {
/******** CONTENT NEGOTIATION *******/
/************************************/
-// Negotiate contains all negotiations data.
type Negotiate struct {
Offered []string
HTMLName string
@@ -887,7 +848,6 @@ type Negotiate struct {
Data interface{}
}
-// Negotiate calls different Render according acceptable Accept format.
func (c *Context) Negotiate(code int, config Negotiate) {
switch c.NegotiateFormat(config.Offered...) {
case binding.MIMEJSON:
@@ -907,7 +867,6 @@ func (c *Context) Negotiate(code int, config Negotiate) {
}
}
-// NegotiateFormat returns an acceptable Accept format.
func (c *Context) NegotiateFormat(offered ...string) string {
assert1(len(offered) > 0, "you must provide at least one offer")
@@ -927,7 +886,6 @@ func (c *Context) NegotiateFormat(offered ...string) string {
return ""
}
-// SetAccepted sets Accept header data.
func (c *Context) SetAccepted(formats ...string) {
c.Accepted = formats
}
diff --git a/vendor/github.com/gin-gonic/gin/coverage.sh b/vendor/github.com/gin-gonic/gin/coverage.sh
new file mode 100644
index 0000000..4d1ee03
--- /dev/null
+++ b/vendor/github.com/gin-gonic/gin/coverage.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+set -e
+
+echo "mode: count" > coverage.out
+
+for d in $(go list ./... | grep -E 'gin$|binding$|render$' | grep -v 'examples'); do
+ go test -v -covermode=count -coverprofile=profile.out $d
+ if [ -f profile.out ]; then
+ cat profile.out | grep -v "mode:" >> coverage.out
+ rm profile.out
+ fi
+done
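As a usage note for the new script: it merges the per-package cover profiles into a single `coverage.out`, so the combined report can then be inspected with the standard tooling, e.g. `bash coverage.sh && go tool cover -html=coverage.out` (command line assumed here, not part of the patch).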
diff --git a/vendor/github.com/gin-gonic/gin/debug.go b/vendor/github.com/gin-gonic/gin/debug.go
index 98c67cf..f11156b 100644
--- a/vendor/github.com/gin-gonic/gin/debug.go
+++ b/vendor/github.com/gin-gonic/gin/debug.go
@@ -6,15 +6,13 @@ package gin
import (
"bytes"
- "fmt"
"html/template"
- "os"
- "runtime"
- "strconv"
- "strings"
+ "log"
)
-const ginSupportMinGoVer = 6
+func init() {
+ log.SetFlags(0)
+}
// IsDebugging returns true if the framework is running in debug mode.
// Use SetMode(gin.ReleaseMode) to disable debug mode.
@@ -22,18 +20,11 @@ func IsDebugging() bool {
return ginMode == debugCode
}
-// DebugPrintRouteFunc indicates debug log output format.
-var DebugPrintRouteFunc func(httpMethod, absolutePath, handlerName string, nuHandlers int)
-
func debugPrintRoute(httpMethod, absolutePath string, handlers HandlersChain) {
if IsDebugging() {
nuHandlers := len(handlers)
handlerName := nameOfFunction(handlers.Last())
- if DebugPrintRouteFunc == nil {
- debugPrint("%-6s %-25s --> %s (%d handlers)\n", httpMethod, absolutePath, handlerName, nuHandlers)
- } else {
- DebugPrintRouteFunc(httpMethod, absolutePath, handlerName, nuHandlers)
- }
+ debugPrint("%-6s %-25s --> %s (%d handlers)\n", httpMethod, absolutePath, handlerName, nuHandlers)
}
}
@@ -51,28 +42,14 @@ func debugPrintLoadTemplate(tmpl *template.Template) {
func debugPrint(format string, values ...interface{}) {
if IsDebugging() {
- if !strings.HasSuffix(format, "\n") {
- format += "\n"
- }
- fmt.Fprintf(os.Stderr, "[GIN-debug] "+format, values...)
+ log.Printf("[GIN-debug] "+format, values...)
}
}
-func getMinVer(v string) (uint64, error) {
- first := strings.IndexByte(v, '.')
- last := strings.LastIndexByte(v, '.')
- if first == last {
- return strconv.ParseUint(v[first+1:], 10, 64)
- }
- return strconv.ParseUint(v[first+1:last], 10, 64)
-}
-
func debugPrintWARNINGDefault() {
- if v, e := getMinVer(runtime.Version()); e == nil && v <= ginSupportMinGoVer {
- debugPrint(`[WARNING] Now Gin requires Go 1.6 or later and Go 1.7 will be required soon.
+ debugPrint(`[WARNING] Now Gin requires Go 1.6 or later and Go 1.7 will be required soon.
`)
- }
debugPrint(`[WARNING] Creating an Engine instance with the Logger and Recovery middleware already attached.
`)
diff --git a/vendor/github.com/gin-gonic/gin/errors.go b/vendor/github.com/gin-gonic/gin/errors.go
index ab13ca6..dbfccd8 100644
--- a/vendor/github.com/gin-gonic/gin/errors.go
+++ b/vendor/github.com/gin-gonic/gin/errors.go
@@ -9,28 +9,21 @@ import (
"fmt"
"reflect"
- "github.com/gin-gonic/gin/internal/json"
+ "github.com/gin-gonic/gin/json"
)
-// ErrorType is an unsigned 64-bit error code as defined in the gin spec.
type ErrorType uint64
const (
- // ErrorTypeBind is used when Context.Bind() fails.
- ErrorTypeBind ErrorType = 1 << 63
- // ErrorTypeRender is used when Context.Render() fails.
- ErrorTypeRender ErrorType = 1 << 62
- // ErrorTypePrivate indicates a private error.
+ ErrorTypeBind ErrorType = 1 << 63 // used when c.Bind() fails
+ ErrorTypeRender ErrorType = 1 << 62 // used when c.Render() fails
ErrorTypePrivate ErrorType = 1 << 0
- // ErrorTypePublic indicates a public error.
- ErrorTypePublic ErrorType = 1 << 1
- // ErrorTypeAny indicates any other error.
+ ErrorTypePublic ErrorType = 1 << 1
+
ErrorTypeAny ErrorType = 1<<64 - 1
- // ErrorTypeNu indicates any other error.
- ErrorTypeNu = 2
+ ErrorTypeNu = 2
)
-// Error represents a error's specification.
type Error struct {
Err error
Type ErrorType
@@ -41,19 +34,16 @@ type errorMsgs []*Error
var _ error = &Error{}
-// SetType sets the error's type.
func (msg *Error) SetType(flags ErrorType) *Error {
msg.Type = flags
return msg
}
-// SetMeta sets the error's meta data.
func (msg *Error) SetMeta(data interface{}) *Error {
msg.Meta = data
return msg
}
-// JSON creates a properly formated JSON
func (msg *Error) JSON() interface{} {
json := H{}
if msg.Meta != nil {
@@ -80,12 +70,11 @@ func (msg *Error) MarshalJSON() ([]byte, error) {
return json.Marshal(msg.JSON())
}
-// Error implements the error interface.
+// Error implements the error interface
func (msg Error) Error() string {
return msg.Err.Error()
}
-// IsType judges one error.
func (msg *Error) IsType(flags ErrorType) bool {
return (msg.Type & flags) > 0
}
@@ -149,7 +138,6 @@ func (a errorMsgs) JSON() interface{} {
}
}
-// MarshalJSON implements the json.Marshaller interface.
func (a errorMsgs) MarshalJSON() ([]byte, error) {
return json.Marshal(a.JSON())
}
diff --git a/vendor/github.com/gin-gonic/gin/gin.go b/vendor/github.com/gin-gonic/gin/gin.go
index b7c77e1..aa62e01 100644
--- a/vendor/github.com/gin-gonic/gin/gin.go
+++ b/vendor/github.com/gin-gonic/gin/gin.go
@@ -5,7 +5,6 @@
package gin
import (
- "fmt"
"html/template"
"net"
"net/http"
@@ -15,7 +14,11 @@ import (
"github.com/gin-gonic/gin/render"
)
-const defaultMultipartMemory = 32 << 20 // 32 MB
+const (
+ // Version is Framework's version.
+ Version = "v1.3.0"
+ defaultMultipartMemory = 32 << 20 // 32 MB
+)
var (
default404Body = []byte("404 page not found")
@@ -23,10 +26,7 @@ var (
defaultAppEngine bool
)
-// HandlerFunc defines the handler used by gin middleware as return value.
type HandlerFunc func(*Context)
-
-// HandlersChain defines a HandlerFunc array.
type HandlersChain []HandlerFunc
// Last returns the last handler in the chain. ie. the last handler is the main own.
@@ -37,15 +37,12 @@ func (c HandlersChain) Last() HandlerFunc {
return nil
}
-// RouteInfo represents a request route's specification which contains method and path and its handler.
type RouteInfo struct {
- Method string
- Path string
- Handler string
- HandlerFunc HandlerFunc
+ Method string
+ Path string
+ Handler string
}
-// RoutesInfo defines a RouteInfo array.
type RoutesInfo []RouteInfo
// Engine is the framework's instance, it contains the muxer, middleware and configuration settings.
@@ -158,7 +155,6 @@ func (engine *Engine) allocateContext() *Context {
return &Context{engine: engine}
}
-// Delims sets template left and right delims and returns a Engine instance.
func (engine *Engine) Delims(left, right string) *Engine {
engine.delims = render.Delims{Left: left, Right: right}
return engine
@@ -268,12 +264,10 @@ func (engine *Engine) Routes() (routes RoutesInfo) {
func iterate(path, method string, routes RoutesInfo, root *node) RoutesInfo {
path += root.path
if len(root.handlers) > 0 {
- handlerFunc := root.handlers.Last()
routes = append(routes, RouteInfo{
- Method: method,
- Path: path,
- Handler: nameOfFunction(handlerFunc),
- HandlerFunc: handlerFunc,
+ Method: method,
+ Path: path,
+ Handler: nameOfFunction(root.handlers.Last()),
})
}
for _, child := range root.children {
@@ -322,23 +316,6 @@ func (engine *Engine) RunUnix(file string) (err error) {
return
}
-// RunFd attaches the router to a http.Server and starts listening and serving HTTP requests
-// through the specified file descriptor.
-// Note: this method will block the calling goroutine indefinitely unless an error happens.
-func (engine *Engine) RunFd(fd int) (err error) {
- debugPrint("Listening and serving HTTP on fd@%d", fd)
- defer func() { debugPrintError(err) }()
-
- f := os.NewFile(uintptr(fd), fmt.Sprintf("fd@%d", fd))
- listener, err := net.FileListener(f)
- if err != nil {
- return
- }
- defer listener.Close()
- err = http.Serve(listener, engine)
- return
-}
-
// ServeHTTP conforms to the http.Handler interface.
func (engine *Engine) ServeHTTP(w http.ResponseWriter, req *http.Request) {
c := engine.pool.Get().(*Context)
@@ -357,6 +334,7 @@ func (engine *Engine) ServeHTTP(w http.ResponseWriter, req *http.Request) {
func (engine *Engine) HandleContext(c *Context) {
c.reset()
engine.handleHTTPRequest(c)
+ engine.pool.Put(c)
}
func (engine *Engine) handleHTTPRequest(c *Context) {
diff --git a/vendor/github.com/gin-gonic/gin/json/json.go b/vendor/github.com/gin-gonic/gin/json/json.go
new file mode 100644
index 0000000..aa76aa3
--- /dev/null
+++ b/vendor/github.com/gin-gonic/gin/json/json.go
@@ -0,0 +1,15 @@
+// Copyright 2017 Bo-Yi Wu. All rights reserved.
+// Use of this source code is governed by a MIT style
+// license that can be found in the LICENSE file.
+
+// +build !jsoniter
+
+package json
+
+import "encoding/json"
+
+var (
+ Marshal = json.Marshal
+ MarshalIndent = json.MarshalIndent
+ NewDecoder = json.NewDecoder
+)
diff --git a/vendor/github.com/gin-gonic/gin/json/jsoniter.go b/vendor/github.com/gin-gonic/gin/json/jsoniter.go
new file mode 100644
index 0000000..ffe1424
--- /dev/null
+++ b/vendor/github.com/gin-gonic/gin/json/jsoniter.go
@@ -0,0 +1,16 @@
+// Copyright 2017 Bo-Yi Wu. All rights reserved.
+// Use of this source code is governed by a MIT style
+// license that can be found in the LICENSE file.
+
+// +build jsoniter
+
+package json
+
+import "github.com/json-iterator/go"
+
+var (
+ json = jsoniter.ConfigCompatibleWithStandardLibrary
+ Marshal = json.Marshal
+ MarshalIndent = json.MarshalIndent
+ NewDecoder = json.NewDecoder
+)
diff --git a/vendor/github.com/gin-gonic/gin/logger.go b/vendor/github.com/gin-gonic/gin/logger.go
index 74dd2e6..1a8df60 100644
--- a/vendor/github.com/gin-gonic/gin/logger.go
+++ b/vendor/github.com/gin-gonic/gin/logger.go
@@ -17,7 +17,7 @@ import (
var (
green = string([]byte{27, 91, 57, 55, 59, 52, 50, 109})
white = string([]byte{27, 91, 57, 48, 59, 52, 55, 109})
- yellow = string([]byte{27, 91, 57, 48, 59, 52, 51, 109})
+ yellow = string([]byte{27, 91, 57, 55, 59, 52, 51, 109})
red = string([]byte{27, 91, 57, 55, 59, 52, 49, 109})
blue = string([]byte{27, 91, 57, 55, 59, 52, 52, 109})
magenta = string([]byte{27, 91, 57, 55, 59, 52, 53, 109})
@@ -53,7 +53,7 @@ func Logger() HandlerFunc {
return LoggerWithWriter(DefaultWriter)
}
-// LoggerWithWriter instance a Logger middleware with the specified writer buffer.
+// LoggerWithWriter instance a Logger middleware with the specified writter buffer.
// Example: os.Stdout, a file opened in write mode, a socket...
func LoggerWithWriter(out io.Writer, notlogged ...string) HandlerFunc {
isTerm := true
diff --git a/vendor/github.com/gin-gonic/gin/mode.go b/vendor/github.com/gin-gonic/gin/mode.go
index f787b5c..9df4e45 100644
--- a/vendor/github.com/gin-gonic/gin/mode.go
+++ b/vendor/github.com/gin-gonic/gin/mode.go
@@ -11,16 +11,12 @@ import (
"github.com/gin-gonic/gin/binding"
)
-// ENV_GIN_MODE indicates environment name for gin mode.
const ENV_GIN_MODE = "GIN_MODE"
const (
- // DebugMode indicates gin mode is debug.
- DebugMode = "debug"
- // ReleaseMode indicates gin mode is release.
+ DebugMode = "debug"
ReleaseMode = "release"
- // TestMode indicates gin mode is test.
- TestMode = "test"
+ TestMode = "test"
)
const (
debugCode = iota
@@ -28,7 +24,7 @@ const (
testCode
)
-// DefaultWriter is the default io.Writer used by Gin for debug output and
+// DefaultWriter is the default io.Writer used the Gin for debug output and
// middleware output like Logger() or Recovery().
// Note that both Logger and Recovery provides custom ways to configure their
// output io.Writer.
@@ -36,8 +32,6 @@ const (
// import "github.com/mattn/go-colorable"
// gin.DefaultWriter = colorable.NewColorableStdout()
var DefaultWriter io.Writer = os.Stdout
-
-// DefaultErrorWriter is the default io.Writer used by Gin to debug errors
var DefaultErrorWriter io.Writer = os.Stderr
var ginMode = debugCode
@@ -48,7 +42,6 @@ func init() {
SetMode(mode)
}
-// SetMode sets gin mode according to input string.
func SetMode(value string) {
switch value {
case DebugMode, "":
@@ -66,18 +59,14 @@ func SetMode(value string) {
modeName = value
}
-// DisableBindValidation closes the default validator.
func DisableBindValidation() {
binding.Validator = nil
}
-// EnableJsonDecoderUseNumber sets true for binding.EnableDecoderUseNumberto to
-// call the UseNumber method on the JSON Decoder instance.
func EnableJsonDecoderUseNumber() {
binding.EnableDecoderUseNumber = true
}
-// Mode returns currently gin mode.
func Mode() string {
return modeName
}
diff --git a/vendor/github.com/gin-gonic/gin/recovery.go b/vendor/github.com/gin-gonic/gin/recovery.go
index f06ad56..61c5bd5 100644
--- a/vendor/github.com/gin-gonic/gin/recovery.go
+++ b/vendor/github.com/gin-gonic/gin/recovery.go
@@ -10,12 +10,9 @@ import (
"io"
"io/ioutil"
"log"
- "net"
"net/http"
"net/http/httputil"
- "os"
"runtime"
- "strings"
"time"
)
@@ -40,37 +37,12 @@ func RecoveryWithWriter(out io.Writer) HandlerFunc {
return func(c *Context) {
defer func() {
if err := recover(); err != nil {
- // Check for a broken connection, as it is not really a
- // condition that warrants a panic stack trace.
- var brokenPipe bool
- if ne, ok := err.(*net.OpError); ok {
- if se, ok := ne.Err.(*os.SyscallError); ok {
- if strings.Contains(strings.ToLower(se.Error()), "broken pipe") || strings.Contains(strings.ToLower(se.Error()), "connection reset by peer") {
- brokenPipe = true
- }
- }
- }
if logger != nil {
stack := stack(3)
httprequest, _ := httputil.DumpRequest(c.Request, false)
- if brokenPipe {
- logger.Printf("%s\n%s%s", err, string(httprequest), reset)
- } else if IsDebugging() {
- logger.Printf("[Recovery] %s panic recovered:\n%s\n%s\n%s%s",
- timeFormat(time.Now()), string(httprequest), err, stack, reset)
- } else {
- logger.Printf("[Recovery] %s panic recovered:\n%s\n%s%s",
- timeFormat(time.Now()), err, stack, reset)
- }
- }
-
- // If the connection is dead, we can't write a status to it.
- if brokenPipe {
- c.Error(err.(error))
- c.Abort()
- } else {
- c.AbortWithStatus(http.StatusInternalServerError)
+ logger.Printf("[Recovery] %s panic recovered:\n%s\n%s\n%s%s", timeFormat(time.Now()), string(httprequest), err, stack, reset)
}
+ c.AbortWithStatus(http.StatusInternalServerError)
}
}()
c.Next()
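The simplified recovery path above is easiest to see from the caller's side; a sketch wiring `gin.Recovery()` by hand so the middleware being patched is visible (handlers and port are placeholders):

```go
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

func main() {
	// gin.Default() already attaches Logger and Recovery; build the chain
	// explicitly here to show where RecoveryWithWriter sits.
	r := gin.New()
	r.Use(gin.Logger(), gin.Recovery())

	r.GET("/panic", func(c *gin.Context) {
		panic("something went wrong")
	})
	r.GET("/ok", func(c *gin.Context) {
		c.String(http.StatusOK, "still serving")
	})

	// A panic in /panic is caught by Recovery and answered with 500,
	// so /ok keeps working afterwards.
	r.Run(":8080")
}
```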
diff --git a/vendor/github.com/gin-gonic/gin/render/data.go b/vendor/github.com/gin-gonic/gin/render/data.go
index 6ba657b..3319491 100644
--- a/vendor/github.com/gin-gonic/gin/render/data.go
+++ b/vendor/github.com/gin-gonic/gin/render/data.go
@@ -6,7 +6,6 @@ package render
import "net/http"
-// Data contains ContentType and bytes data.
type Data struct {
ContentType string
Data []byte
@@ -19,7 +18,6 @@ func (r Data) Render(w http.ResponseWriter) (err error) {
return
}
-// WriteContentType (Data) writes custom ContentType.
func (r Data) WriteContentType(w http.ResponseWriter) {
writeContentType(w, []string{r.ContentType})
}
diff --git a/vendor/github.com/gin-gonic/gin/render/html.go b/vendor/github.com/gin-gonic/gin/render/html.go
index 6696ece..1e3be65 100644
--- a/vendor/github.com/gin-gonic/gin/render/html.go
+++ b/vendor/github.com/gin-gonic/gin/render/html.go
@@ -9,27 +9,20 @@ import (
"net/http"
)
-// Delims represents a set of Left and Right delimiters for HTML template rendering.
type Delims struct {
- // Left delimiter, defaults to {{.
- Left string
- // Right delimiter, defaults to }}.
+ Left string
Right string
}
-// HTMLRender interface is to be implemented by HTMLProduction and HTMLDebug.
type HTMLRender interface {
- // Instance returns an HTML instance.
Instance(string, interface{}) Render
}
-// HTMLProduction contains template reference and its delims.
type HTMLProduction struct {
Template *template.Template
Delims Delims
}
-// HTMLDebug contains template delims and pattern and function with file list.
type HTMLDebug struct {
Files []string
Glob string
@@ -37,7 +30,6 @@ type HTMLDebug struct {
FuncMap template.FuncMap
}
-// HTML contains template reference and its name with given interface object.
type HTML struct {
Template *template.Template
Name string
@@ -46,7 +38,6 @@ type HTML struct {
var htmlContentType = []string{"text/html; charset=utf-8"}
-// Instance (HTMLProduction) returns an HTML instance which it realizes Render interface.
func (r HTMLProduction) Instance(name string, data interface{}) Render {
return HTML{
Template: r.Template,
@@ -55,7 +46,6 @@ func (r HTMLProduction) Instance(name string, data interface{}) Render {
}
}
-// Instance (HTMLDebug) returns an HTML instance which it realizes Render interface.
func (r HTMLDebug) Instance(name string, data interface{}) Render {
return HTML{
Template: r.loadTemplate(),
@@ -76,7 +66,6 @@ func (r HTMLDebug) loadTemplate() *template.Template {
panic("the HTML debug render was created without files or glob pattern")
}
-// Render (HTML) executes template and writes its result with custom ContentType for response.
func (r HTML) Render(w http.ResponseWriter) error {
r.WriteContentType(w)
@@ -86,7 +75,6 @@ func (r HTML) Render(w http.ResponseWriter) error {
return r.Template.ExecuteTemplate(w, r.Name, r.Data)
}
-// WriteContentType (HTML) writes HTML ContentType.
func (r HTML) WriteContentType(w http.ResponseWriter) {
writeContentType(w, htmlContentType)
}
diff --git a/vendor/github.com/gin-gonic/gin/render/json.go b/vendor/github.com/gin-gonic/gin/render/json.go
index 32d0fc4..6e5089a 100644
--- a/vendor/github.com/gin-gonic/gin/render/json.go
+++ b/vendor/github.com/gin-gonic/gin/render/json.go
@@ -10,44 +10,37 @@ import (
"html/template"
"net/http"
- "github.com/gin-gonic/gin/internal/json"
+ "github.com/gin-gonic/gin/json"
)
-// JSON contains the given interface object.
type JSON struct {
Data interface{}
}
-// IndentedJSON contains the given interface object.
type IndentedJSON struct {
Data interface{}
}
-// SecureJSON contains the given interface object and its prefix.
type SecureJSON struct {
Prefix string
Data interface{}
}
-// JsonpJSON contains the given interface object its callback.
type JsonpJSON struct {
Callback string
Data interface{}
}
-// AsciiJSON contains the given interface object.
type AsciiJSON struct {
Data interface{}
}
-// SecureJSONPrefix is a string which represents SecureJSON prefix.
type SecureJSONPrefix string
var jsonContentType = []string{"application/json; charset=utf-8"}
var jsonpContentType = []string{"application/javascript; charset=utf-8"}
var jsonAsciiContentType = []string{"application/json"}
-// Render (JSON) writes data with custom ContentType.
func (r JSON) Render(w http.ResponseWriter) (err error) {
if err = WriteJSON(w, r.Data); err != nil {
panic(err)
@@ -55,12 +48,10 @@ func (r JSON) Render(w http.ResponseWriter) (err error) {
return
}
-// WriteContentType (JSON) writes JSON ContentType.
func (r JSON) WriteContentType(w http.ResponseWriter) {
writeContentType(w, jsonContentType)
}
-// WriteJSON marshals the given interface object and writes it with custom ContentType.
func WriteJSON(w http.ResponseWriter, obj interface{}) error {
writeContentType(w, jsonContentType)
jsonBytes, err := json.Marshal(obj)
@@ -71,7 +62,6 @@ func WriteJSON(w http.ResponseWriter, obj interface{}) error {
return nil
}
-// Render (IndentedJSON) marshals the given interface object and writes it with custom ContentType.
func (r IndentedJSON) Render(w http.ResponseWriter) error {
r.WriteContentType(w)
jsonBytes, err := json.MarshalIndent(r.Data, "", " ")
@@ -82,12 +72,10 @@ func (r IndentedJSON) Render(w http.ResponseWriter) error {
return nil
}
-// WriteContentType (IndentedJSON) writes JSON ContentType.
func (r IndentedJSON) WriteContentType(w http.ResponseWriter) {
writeContentType(w, jsonContentType)
}
-// Render (SecureJSON) marshals the given interface object and writes it with custom ContentType.
func (r SecureJSON) Render(w http.ResponseWriter) error {
r.WriteContentType(w)
jsonBytes, err := json.Marshal(r.Data)
@@ -102,12 +90,10 @@ func (r SecureJSON) Render(w http.ResponseWriter) error {
return nil
}
-// WriteContentType (SecureJSON) writes JSON ContentType.
func (r SecureJSON) WriteContentType(w http.ResponseWriter) {
writeContentType(w, jsonContentType)
}
-// Render (JsonpJSON) marshals the given interface object and writes it and its callback with custom ContentType.
func (r JsonpJSON) Render(w http.ResponseWriter) (err error) {
r.WriteContentType(w)
ret, err := json.Marshal(r.Data)
@@ -129,12 +115,10 @@ func (r JsonpJSON) Render(w http.ResponseWriter) (err error) {
return nil
}
-// WriteContentType (JsonpJSON) writes Javascript ContentType.
func (r JsonpJSON) WriteContentType(w http.ResponseWriter) {
writeContentType(w, jsonpContentType)
}
-// Render (AsciiJSON) marshals the given interface object and writes it with custom ContentType.
func (r AsciiJSON) Render(w http.ResponseWriter) (err error) {
r.WriteContentType(w)
ret, err := json.Marshal(r.Data)
@@ -144,8 +128,10 @@ func (r AsciiJSON) Render(w http.ResponseWriter) (err error) {
var buffer bytes.Buffer
for _, r := range string(ret) {
- cvt := string(r)
- if r >= 128 {
+ cvt := ""
+ if r < 128 {
+ cvt = string(r)
+ } else {
cvt = fmt.Sprintf("\\u%04x", int64(r))
}
buffer.WriteString(cvt)
@@ -155,7 +141,6 @@ func (r AsciiJSON) Render(w http.ResponseWriter) (err error) {
return nil
}
-// WriteContentType (AsciiJSON) writes JSON ContentType.
func (r AsciiJSON) WriteContentType(w http.ResponseWriter) {
writeContentType(w, jsonAsciiContentType)
}
diff --git a/vendor/github.com/gin-gonic/gin/render/msgpack.go b/vendor/github.com/gin-gonic/gin/render/msgpack.go
index dc681fc..e6c13e5 100644
--- a/vendor/github.com/gin-gonic/gin/render/msgpack.go
+++ b/vendor/github.com/gin-gonic/gin/render/msgpack.go
@@ -10,26 +10,22 @@ import (
"github.com/ugorji/go/codec"
)
-// MsgPack contains the given interface object.
type MsgPack struct {
Data interface{}
}
var msgpackContentType = []string{"application/msgpack; charset=utf-8"}
-// WriteContentType (MsgPack) writes MsgPack ContentType.
func (r MsgPack) WriteContentType(w http.ResponseWriter) {
writeContentType(w, msgpackContentType)
}
-// Render (MsgPack) encodes the given interface object and writes data with custom ContentType.
func (r MsgPack) Render(w http.ResponseWriter) error {
return WriteMsgPack(w, r.Data)
}
-// WriteMsgPack writes MsgPack ContentType and encodes the given interface object.
func WriteMsgPack(w http.ResponseWriter, obj interface{}) error {
writeContentType(w, msgpackContentType)
- var mh codec.MsgpackHandle
- return codec.NewEncoder(w, &mh).Encode(obj)
+ var h codec.Handle = new(codec.MsgpackHandle)
+ return codec.NewEncoder(w, h).Encode(obj)
}
diff --git a/vendor/github.com/gin-gonic/gin/render/reader.go b/vendor/github.com/gin-gonic/gin/render/reader.go
index ab60e53..be2132c 100644
--- a/vendor/github.com/gin-gonic/gin/render/reader.go
+++ b/vendor/github.com/gin-gonic/gin/render/reader.go
@@ -1,7 +1,3 @@
-// Copyright 2018 Gin Core Team. All rights reserved.
-// Use of this source code is governed by a MIT style
-// license that can be found in the LICENSE file.
-
package render
import (
@@ -10,7 +6,6 @@ import (
"strconv"
)
-// Reader contains the IO reader and its length, and custom ContentType and other headers.
type Reader struct {
ContentType string
ContentLength int64
@@ -27,12 +22,10 @@ func (r Reader) Render(w http.ResponseWriter) (err error) {
return
}
-// WriteContentType (Reader) writes custom ContentType.
func (r Reader) WriteContentType(w http.ResponseWriter) {
writeContentType(w, []string{r.ContentType})
}
-// writeHeaders writes custom Header.
func (r Reader) writeHeaders(w http.ResponseWriter, headers map[string]string) {
header := w.Header()
for k, v := range headers {
diff --git a/vendor/github.com/gin-gonic/gin/render/redirect.go b/vendor/github.com/gin-gonic/gin/render/redirect.go
index 9c145fe..a0634f5 100644
--- a/vendor/github.com/gin-gonic/gin/render/redirect.go
+++ b/vendor/github.com/gin-gonic/gin/render/redirect.go
@@ -9,14 +9,12 @@ import (
"net/http"
)
-// Redirect contains the http request reference and redirects status code and location.
type Redirect struct {
Code int
Request *http.Request
Location string
}
-// Render (Redirect) redirects the http request to new location and writes redirect response.
func (r Redirect) Render(w http.ResponseWriter) error {
// todo(thinkerou): go1.6 not support StatusPermanentRedirect(308)
// when we upgrade go version we can use http.StatusPermanentRedirect
@@ -27,5 +25,4 @@ func (r Redirect) Render(w http.ResponseWriter) error {
return nil
}
-// WriteContentType (Redirect) don't write any ContentType.
func (r Redirect) WriteContentType(http.ResponseWriter) {}
diff --git a/vendor/github.com/gin-gonic/gin/render/render.go b/vendor/github.com/gin-gonic/gin/render/render.go
index abfc79f..4ff1c7b 100644
--- a/vendor/github.com/gin-gonic/gin/render/render.go
+++ b/vendor/github.com/gin-gonic/gin/render/render.go
@@ -6,11 +6,8 @@ package render
import "net/http"
-// Render interface is to be implemented by JSON, XML, HTML, YAML and so on.
type Render interface {
- // Render writes data with custom ContentType.
Render(http.ResponseWriter) error
- // WriteContentType writes custom ContentType.
WriteContentType(w http.ResponseWriter)
}
@@ -30,7 +27,6 @@ var (
_ Render = MsgPack{}
_ Render = Reader{}
_ Render = AsciiJSON{}
- _ Render = ProtoBuf{}
)
func writeContentType(w http.ResponseWriter, value []string) {
diff --git a/vendor/github.com/gin-gonic/gin/render/text.go b/vendor/github.com/gin-gonic/gin/render/text.go
index 2ea7343..74cd26b 100644
--- a/vendor/github.com/gin-gonic/gin/render/text.go
+++ b/vendor/github.com/gin-gonic/gin/render/text.go
@@ -10,7 +10,6 @@ import (
"net/http"
)
-// String contains the given interface object slice and its format.
type String struct {
Format string
Data []interface{}
@@ -18,23 +17,20 @@ type String struct {
var plainContentType = []string{"text/plain; charset=utf-8"}
-// Render (String) writes data with custom ContentType.
func (r String) Render(w http.ResponseWriter) error {
WriteString(w, r.Format, r.Data)
return nil
}
-// WriteContentType (String) writes Plain ContentType.
func (r String) WriteContentType(w http.ResponseWriter) {
writeContentType(w, plainContentType)
}
-// WriteString writes data according to its format and write custom ContentType.
func WriteString(w http.ResponseWriter, format string, data []interface{}) {
writeContentType(w, plainContentType)
if len(data) > 0 {
fmt.Fprintf(w, format, data...)
- return
+ } else {
+ io.WriteString(w, format)
}
- io.WriteString(w, format)
}
diff --git a/vendor/github.com/gin-gonic/gin/render/xml.go b/vendor/github.com/gin-gonic/gin/render/xml.go
index cc5390a..cff1ac3 100644
--- a/vendor/github.com/gin-gonic/gin/render/xml.go
+++ b/vendor/github.com/gin-gonic/gin/render/xml.go
@@ -9,20 +9,17 @@ import (
"net/http"
)
-// XML contains the given interface object.
type XML struct {
Data interface{}
}
var xmlContentType = []string{"application/xml; charset=utf-8"}
-// Render (XML) encodes the given interface object and writes data with custom ContentType.
func (r XML) Render(w http.ResponseWriter) error {
r.WriteContentType(w)
return xml.NewEncoder(w).Encode(r.Data)
}
-// WriteContentType (XML) writes XML ContentType for response.
func (r XML) WriteContentType(w http.ResponseWriter) {
writeContentType(w, xmlContentType)
}
diff --git a/vendor/github.com/gin-gonic/gin/render/yaml.go b/vendor/github.com/gin-gonic/gin/render/yaml.go
index 33bc325..25d0ebd 100644
--- a/vendor/github.com/gin-gonic/gin/render/yaml.go
+++ b/vendor/github.com/gin-gonic/gin/render/yaml.go
@@ -10,14 +10,12 @@ import (
"gopkg.in/yaml.v2"
)
-// YAML contains the given interface object.
type YAML struct {
Data interface{}
}
var yamlContentType = []string{"application/x-yaml; charset=utf-8"}
-// Render (YAML) marshals the given interface object and writes data with custom ContentType.
func (r YAML) Render(w http.ResponseWriter) error {
r.WriteContentType(w)
@@ -30,7 +28,6 @@ func (r YAML) Render(w http.ResponseWriter) error {
return nil
}
-// WriteContentType (YAML) writes YAML ContentType for response.
func (r YAML) WriteContentType(w http.ResponseWriter) {
writeContentType(w, yamlContentType)
}
diff --git a/vendor/github.com/gin-gonic/gin/routergroup.go b/vendor/github.com/gin-gonic/gin/routergroup.go
index 2b41dfd..876a61b 100644
--- a/vendor/github.com/gin-gonic/gin/routergroup.go
+++ b/vendor/github.com/gin-gonic/gin/routergroup.go
@@ -11,13 +11,11 @@ import (
"strings"
)
-// IRouter defines all router handle interface includes single and group router.
type IRouter interface {
IRoutes
Group(string, ...HandlerFunc) *RouterGroup
}
-// IRoutes defines all router handle interface.
type IRoutes interface {
Use(...HandlerFunc) IRoutes
@@ -36,8 +34,8 @@ type IRoutes interface {
StaticFS(string, http.FileSystem) IRoutes
}
-// RouterGroup is used internally to configure router, a RouterGroup is associated with
-// a prefix and an array of handlers (middleware).
+// RouterGroup is used internally to configure router, a RouterGroup is associated with a prefix
+// and an array of handlers (middleware).
type RouterGroup struct {
Handlers HandlersChain
basePath string
@@ -53,8 +51,8 @@ func (group *RouterGroup) Use(middleware ...HandlerFunc) IRoutes {
return group.returnObj()
}
-// Group creates a new router group. You should add all the routes that have common middlewares or the same path prefix.
-// For example, all the routes that use a common middleware for authorization could be grouped.
+// Group creates a new router group. You should add all the routes that have common middlwares or the same path prefix.
+// For example, all the routes that use a common middlware for authorization could be grouped.
func (group *RouterGroup) Group(relativePath string, handlers ...HandlerFunc) *RouterGroup {
return &RouterGroup{
Handlers: group.combineHandlers(handlers),
@@ -63,8 +61,6 @@ func (group *RouterGroup) Group(relativePath string, handlers ...HandlerFunc) *R
}
}
-// BasePath returns the base path of router group.
-// For example, if v := router.Group("/rest/n/v1/api"), v.BasePath() is "/rest/n/v1/api".
func (group *RouterGroup) BasePath() string {
return group.basePath
}
@@ -185,22 +181,11 @@ func (group *RouterGroup) StaticFS(relativePath string, fs http.FileSystem) IRou
func (group *RouterGroup) createStaticHandler(relativePath string, fs http.FileSystem) HandlerFunc {
absolutePath := group.calculateAbsolutePath(relativePath)
fileServer := http.StripPrefix(absolutePath, http.FileServer(fs))
-
+ _, nolisting := fs.(*onlyfilesFS)
return func(c *Context) {
- if _, nolisting := fs.(*onlyfilesFS); nolisting {
+ if nolisting {
c.Writer.WriteHeader(http.StatusNotFound)
}
-
- file := c.Param("filepath")
- // Check if file exists and/or if we have permission to access it
- if _, err := fs.Open(file); err != nil {
- c.Writer.WriteHeader(http.StatusNotFound)
- c.handlers = group.engine.allNoRoute
- // Reset index
- c.index = -1
- return
- }
-
fileServer.ServeHTTP(c.Writer, c.Request)
}
}
diff --git a/vendor/github.com/gin-gonic/gin/tree.go b/vendor/github.com/gin-gonic/gin/tree.go
index ada62ce..b653066 100644
--- a/vendor/github.com/gin-gonic/gin/tree.go
+++ b/vendor/github.com/gin-gonic/gin/tree.go
@@ -193,16 +193,9 @@ func (n *node) addRoute(path string, handlers HandlersChain) {
}
}
- pathSeg := path
- if n.nType != catchAll {
- pathSeg = strings.SplitN(path, "/", 2)[0]
- }
- prefix := fullPath[:strings.Index(fullPath, pathSeg)] + n.path
- panic("'" + pathSeg +
- "' in new path '" + fullPath +
+ panic("path segment '" + path +
"' conflicts with existing wildcard '" + n.path +
- "' in existing prefix '" + prefix +
- "'")
+ "' in path '" + fullPath + "'")
}
c := path[0]
diff --git a/vendor/github.com/gin-gonic/gin/utils.go b/vendor/github.com/gin-gonic/gin/utils.go
index f4532d5..bf32c77 100644
--- a/vendor/github.com/gin-gonic/gin/utils.go
+++ b/vendor/github.com/gin-gonic/gin/utils.go
@@ -14,10 +14,8 @@ import (
"strings"
)
-// BindKey indicates a default bind key.
const BindKey = "_gin-gonic/gin/bindkey"
-// Bind is a helper function for given interface object and returns a Gin middleware.
func Bind(val interface{}) HandlerFunc {
value := reflect.ValueOf(val)
if value.Kind() == reflect.Ptr {
@@ -35,14 +33,16 @@ func Bind(val interface{}) HandlerFunc {
}
}
-// WrapF is a helper function for wrapping http.HandlerFunc and returns a Gin middleware.
+// WrapF is a helper function for wrapping http.HandlerFunc
+// Returns a Gin middleware
func WrapF(f http.HandlerFunc) HandlerFunc {
return func(c *Context) {
f(c.Writer, c.Request)
}
}
-// WrapH is a helper function for wrapping http.Handler and returns a Gin middleware.
+// WrapH is a helper function for wrapping http.Handler
+// Returns a Gin middleware
func WrapH(h http.Handler) HandlerFunc {
return func(c *Context) {
h.ServeHTTP(c.Writer, c.Request)
diff --git a/vendor/github.com/gin-gonic/gin/wercker.yml b/vendor/github.com/gin-gonic/gin/wercker.yml
new file mode 100644
index 0000000..3ab8084
--- /dev/null
+++ b/vendor/github.com/gin-gonic/gin/wercker.yml
@@ -0,0 +1 @@
+box: wercker/default
\ No newline at end of file
diff --git a/vendor/github.com/go-sql-driver/mysql/AUTHORS b/vendor/github.com/go-sql-driver/mysql/AUTHORS
index 5ce4f7e..73ff68f 100644
--- a/vendor/github.com/go-sql-driver/mysql/AUTHORS
+++ b/vendor/github.com/go-sql-driver/mysql/AUTHORS
@@ -35,7 +35,6 @@ Hanno Braun
Henri Yandell
Hirotaka Yamamoto
ICHINOSE Shogo
-Ilia Cimpoes
INADA Naoki
Jacek Szwec
James Harr
@@ -73,9 +72,6 @@ Shuode Li
Soroush Pour
Stan Putrya
Stanley Gunawan
-Steven Hartland
-Thomas Wodarek
-Tom Jenkinson
Xiangyu Hu
Xiaobing Jiang
Xiuming Chen
@@ -91,4 +87,3 @@ Keybase Inc.
Percona LLC
Pivotal Inc.
Stripe Inc.
-Multiplay Ltd.
diff --git a/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md b/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md
index 2d87d74..ce1b533 100644
--- a/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md
+++ b/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md
@@ -1,3 +1,14 @@
+## Version 1.4.1 (2018-11-14)
+
+Bugfixes:
+
+ - Fix TIME format for binary columns (#818)
+ - Fix handling of empty auth plugin names (#835)
+ - Fix caching_sha2_password with empty password (#826)
+ - Fix canceled context broke mysqlConn (#862)
+ - Fix OldAuthSwitchRequest support (#870)
+ - Fix Auth Response packet for cleartext password (#887)
+
## Version 1.4 (2018-06-03)
Changes:
diff --git a/vendor/github.com/go-sql-driver/mysql/README.md b/vendor/github.com/go-sql-driver/mysql/README.md
index 7e7df1a..2e9b07e 100644
--- a/vendor/github.com/go-sql-driver/mysql/README.md
+++ b/vendor/github.com/go-sql-driver/mysql/README.md
@@ -40,7 +40,7 @@ A MySQL-Driver for Go's [database/sql](https://golang.org/pkg/database/sql/) pac
* Optional placeholder interpolation
## Requirements
- * Go 1.8 or higher. We aim to support the 3 latest versions of Go.
+ * Go 1.7 or higher. We aim to support the 3 latest versions of Go.
* MySQL (4.1+), MariaDB, Percona Server, Google CloudSQL or Sphinx (2.2.3+)
---------------------------------------
diff --git a/vendor/github.com/go-sql-driver/mysql/auth.go b/vendor/github.com/go-sql-driver/mysql/auth.go
index fec7040..14f678a 100644
--- a/vendor/github.com/go-sql-driver/mysql/auth.go
+++ b/vendor/github.com/go-sql-driver/mysql/auth.go
@@ -360,15 +360,13 @@ func (mc *mysqlConn) handleAuthResult(oldAuthData []byte, plugin string) error {
pubKey := mc.cfg.pubKey
if pubKey == nil {
// request public key from server
- data, err := mc.buf.takeSmallBuffer(4 + 1)
- if err != nil {
- return err
- }
+ data := mc.buf.takeSmallBuffer(4 + 1)
data[4] = cachingSha2PasswordRequestPublicKey
mc.writePacket(data)
// parse public key
- if data, err = mc.readPacket(); err != nil {
+ data, err := mc.readPacket()
+ if err != nil {
return err
}
diff --git a/vendor/github.com/go-sql-driver/mysql/buffer.go b/vendor/github.com/go-sql-driver/mysql/buffer.go
index 19486bd..eb4748b 100644
--- a/vendor/github.com/go-sql-driver/mysql/buffer.go
+++ b/vendor/github.com/go-sql-driver/mysql/buffer.go
@@ -22,17 +22,17 @@ const defaultBufSize = 4096
// The buffer is similar to bufio.Reader / Writer but zero-copy-ish
// Also highly optimized for this particular use case.
type buffer struct {
- buf []byte // buf is a byte buffer who's length and capacity are equal.
+ buf []byte
nc net.Conn
idx int
length int
timeout time.Duration
}
-// newBuffer allocates and returns a new buffer.
func newBuffer(nc net.Conn) buffer {
+ var b [defaultBufSize]byte
return buffer{
- buf: make([]byte, defaultBufSize),
+ buf: b[:],
nc: nc,
}
}
@@ -105,56 +105,43 @@ func (b *buffer) readNext(need int) ([]byte, error) {
return b.buf[offset:b.idx], nil
}
-// takeBuffer returns a buffer with the requested size.
+// returns a buffer with the requested size.
// If possible, a slice from the existing buffer is returned.
// Otherwise a bigger buffer is made.
// Only one buffer (total) can be used at a time.
-func (b *buffer) takeBuffer(length int) ([]byte, error) {
+func (b *buffer) takeBuffer(length int) []byte {
if b.length > 0 {
- return nil, ErrBusyBuffer
+ return nil
}
// test (cheap) general case first
- if length <= cap(b.buf) {
- return b.buf[:length], nil
+ if length <= defaultBufSize || length <= cap(b.buf) {
+ return b.buf[:length]
}
if length < maxPacketSize {
b.buf = make([]byte, length)
- return b.buf, nil
+ return b.buf
}
-
- // buffer is larger than we want to store.
- return make([]byte, length), nil
+ return make([]byte, length)
}
-// takeSmallBuffer is shortcut which can be used if length is
-// known to be smaller than defaultBufSize.
+// shortcut which can be used if the requested buffer is guaranteed to be
+// smaller than defaultBufSize
// Only one buffer (total) can be used at a time.
-func (b *buffer) takeSmallBuffer(length int) ([]byte, error) {
+func (b *buffer) takeSmallBuffer(length int) []byte {
if b.length > 0 {
- return nil, ErrBusyBuffer
+ return nil
}
- return b.buf[:length], nil
+ return b.buf[:length]
}
// takeCompleteBuffer returns the complete existing buffer.
// This can be used if the necessary buffer size is unknown.
-// cap and len of the returned buffer will be equal.
// Only one buffer (total) can be used at a time.
-func (b *buffer) takeCompleteBuffer() ([]byte, error) {
+func (b *buffer) takeCompleteBuffer() []byte {
if b.length > 0 {
- return nil, ErrBusyBuffer
+ return nil
}
- return b.buf, nil
-}
-
-// store stores buf, an updated buffer, if its suitable to do so.
-func (b *buffer) store(buf []byte) error {
- if b.length > 0 {
- return ErrBusyBuffer
- } else if cap(buf) <= maxPacketSize && cap(buf) > cap(b.buf) {
- b.buf = buf[:cap(buf)]
- }
- return nil
+ return b.buf
}
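
Note: this hunk reverts buffer.go to the pre-1.4.1 API, where `takeBuffer`/`takeSmallBuffer`/`takeCompleteBuffer` signal a busy buffer by returning `nil` instead of `ErrBusyBuffer`, and the `store` helper is dropped. A minimal, self-contained sketch of the caller contract this implies (the `buffer` type here only mimics the driver's internal one; it is not the driver code itself):

```go
package main

import (
	"errors"
	"fmt"
)

var errBusyBuffer = errors.New("busy buffer")

// buffer mimics the pre-1.4.1 driver API: take* returns nil while the
// previous buffer is still in use, instead of returning an error value.
type buffer struct {
	buf    []byte
	length int // bytes of the current buffer still unread
}

func (b *buffer) takeSmallBuffer(length int) []byte {
	if b.length > 0 {
		return nil // busy: caller must handle this itself
	}
	return b.buf[:length]
}

func main() {
	b := &buffer{buf: make([]byte, 4096)}
	if data := b.takeSmallBuffer(5); data == nil {
		// callers translate nil into their own error, as packets.go does below
		fmt.Println(errBusyBuffer)
	} else {
		fmt.Println("got buffer of len", len(data))
	}
}
```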
diff --git a/vendor/github.com/go-sql-driver/mysql/connection.go b/vendor/github.com/go-sql-driver/mysql/connection.go
index fc4ec75..e570614 100644
--- a/vendor/github.com/go-sql-driver/mysql/connection.go
+++ b/vendor/github.com/go-sql-driver/mysql/connection.go
@@ -9,8 +9,6 @@
package mysql
import (
- "context"
- "database/sql"
"database/sql/driver"
"io"
"net"
@@ -19,6 +17,16 @@ import (
"time"
)
+// a copy of context.Context for Go 1.7 and earlier
+type mysqlContext interface {
+ Done() <-chan struct{}
+ Err() error
+
+ // defined in context.Context, but not used in this driver:
+ // Deadline() (deadline time.Time, ok bool)
+ // Value(key interface{}) interface{}
+}
+
type mysqlConn struct {
buf buffer
netConn net.Conn
@@ -35,7 +43,7 @@ type mysqlConn struct {
// for context support (Go 1.8+)
watching bool
- watcher chan<- context.Context
+ watcher chan<- mysqlContext
closech chan struct{}
finished chan<- struct{}
canceled atomicError // set non-nil if conn is canceled
@@ -182,10 +190,10 @@ func (mc *mysqlConn) interpolateParams(query string, args []driver.Value) (strin
return "", driver.ErrSkip
}
- buf, err := mc.buf.takeCompleteBuffer()
- if err != nil {
+ buf := mc.buf.takeCompleteBuffer()
+ if buf == nil {
// can not take the buffer. Something must be wrong with the connection
- errLog.Print(err)
+ errLog.Print(ErrBusyBuffer)
return "", ErrInvalidConn
}
buf = buf[:0]
@@ -451,193 +459,3 @@ func (mc *mysqlConn) finish() {
case <-mc.closech:
}
}
-
-// Ping implements driver.Pinger interface
-func (mc *mysqlConn) Ping(ctx context.Context) (err error) {
- if mc.closed.IsSet() {
- errLog.Print(ErrInvalidConn)
- return driver.ErrBadConn
- }
-
- if err = mc.watchCancel(ctx); err != nil {
- return
- }
- defer mc.finish()
-
- if err = mc.writeCommandPacket(comPing); err != nil {
- return mc.markBadConn(err)
- }
-
- return mc.readResultOK()
-}
-
-// BeginTx implements driver.ConnBeginTx interface
-func (mc *mysqlConn) BeginTx(ctx context.Context, opts driver.TxOptions) (driver.Tx, error) {
- if err := mc.watchCancel(ctx); err != nil {
- return nil, err
- }
- defer mc.finish()
-
- if sql.IsolationLevel(opts.Isolation) != sql.LevelDefault {
- level, err := mapIsolationLevel(opts.Isolation)
- if err != nil {
- return nil, err
- }
- err = mc.exec("SET TRANSACTION ISOLATION LEVEL " + level)
- if err != nil {
- return nil, err
- }
- }
-
- return mc.begin(opts.ReadOnly)
-}
-
-func (mc *mysqlConn) QueryContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Rows, error) {
- dargs, err := namedValueToValue(args)
- if err != nil {
- return nil, err
- }
-
- if err := mc.watchCancel(ctx); err != nil {
- return nil, err
- }
-
- rows, err := mc.query(query, dargs)
- if err != nil {
- mc.finish()
- return nil, err
- }
- rows.finish = mc.finish
- return rows, err
-}
-
-func (mc *mysqlConn) ExecContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Result, error) {
- dargs, err := namedValueToValue(args)
- if err != nil {
- return nil, err
- }
-
- if err := mc.watchCancel(ctx); err != nil {
- return nil, err
- }
- defer mc.finish()
-
- return mc.Exec(query, dargs)
-}
-
-func (mc *mysqlConn) PrepareContext(ctx context.Context, query string) (driver.Stmt, error) {
- if err := mc.watchCancel(ctx); err != nil {
- return nil, err
- }
-
- stmt, err := mc.Prepare(query)
- mc.finish()
- if err != nil {
- return nil, err
- }
-
- select {
- default:
- case <-ctx.Done():
- stmt.Close()
- return nil, ctx.Err()
- }
- return stmt, nil
-}
-
-func (stmt *mysqlStmt) QueryContext(ctx context.Context, args []driver.NamedValue) (driver.Rows, error) {
- dargs, err := namedValueToValue(args)
- if err != nil {
- return nil, err
- }
-
- if err := stmt.mc.watchCancel(ctx); err != nil {
- return nil, err
- }
-
- rows, err := stmt.query(dargs)
- if err != nil {
- stmt.mc.finish()
- return nil, err
- }
- rows.finish = stmt.mc.finish
- return rows, err
-}
-
-func (stmt *mysqlStmt) ExecContext(ctx context.Context, args []driver.NamedValue) (driver.Result, error) {
- dargs, err := namedValueToValue(args)
- if err != nil {
- return nil, err
- }
-
- if err := stmt.mc.watchCancel(ctx); err != nil {
- return nil, err
- }
- defer stmt.mc.finish()
-
- return stmt.Exec(dargs)
-}
-
-func (mc *mysqlConn) watchCancel(ctx context.Context) error {
- if mc.watching {
- // Reach here if canceled,
- // so the connection is already invalid
- mc.cleanup()
- return nil
- }
- // When ctx is already cancelled, don't watch it.
- if err := ctx.Err(); err != nil {
- return err
- }
- // When ctx is not cancellable, don't watch it.
- if ctx.Done() == nil {
- return nil
- }
- // When watcher is not alive, can't watch it.
- if mc.watcher == nil {
- return nil
- }
-
- mc.watching = true
- mc.watcher <- ctx
- return nil
-}
-
-func (mc *mysqlConn) startWatcher() {
- watcher := make(chan context.Context, 1)
- mc.watcher = watcher
- finished := make(chan struct{})
- mc.finished = finished
- go func() {
- for {
- var ctx context.Context
- select {
- case ctx = <-watcher:
- case <-mc.closech:
- return
- }
-
- select {
- case <-ctx.Done():
- mc.cancel(ctx.Err())
- case <-finished:
- case <-mc.closech:
- return
- }
- }
- }()
-}
-
-func (mc *mysqlConn) CheckNamedValue(nv *driver.NamedValue) (err error) {
- nv.Value, err = converter{}.ConvertValue(nv.Value)
- return
-}
-
-// ResetSession implements driver.SessionResetter.
-// (From Go 1.10)
-func (mc *mysqlConn) ResetSession(ctx context.Context) error {
- if mc.closed.IsSet() {
- return driver.ErrBadConn
- }
- return nil
-}
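
Note: connection.go swaps `context.Context` for the local `mysqlContext` interface so the file also builds on Go 1.7; any real `context.Context` still satisfies it implicitly, so the Go 1.8 code path keeps working. A small runnable sketch of that property (names copied from the hunk above, the rest is illustrative):

```go
package main

import (
	"context"
	"fmt"
)

// mysqlContext mirrors the interface added in this hunk: the subset of
// context.Context the driver actually needs.
type mysqlContext interface {
	Done() <-chan struct{}
	Err() error
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	ch := make(chan mysqlContext, 1)
	ch <- ctx // a context.Context satisfies mysqlContext implicitly
	cancel()
	got := <-ch
	<-got.Done()
	fmt.Println(got.Err()) // context.Canceled
}
```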
diff --git a/vendor/github.com/go-sql-driver/mysql/connection_go18.go b/vendor/github.com/go-sql-driver/mysql/connection_go18.go
new file mode 100644
index 0000000..ce52c7d
--- /dev/null
+++ b/vendor/github.com/go-sql-driver/mysql/connection_go18.go
@@ -0,0 +1,207 @@
+// Go MySQL Driver - A MySQL-Driver for Go's database/sql package
+//
+// Copyright 2012 The Go-MySQL-Driver Authors. All rights reserved.
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this file,
+// You can obtain one at http://mozilla.org/MPL/2.0/.
+
+// +build go1.8
+
+package mysql
+
+import (
+ "context"
+ "database/sql"
+ "database/sql/driver"
+)
+
+// Ping implements driver.Pinger interface
+func (mc *mysqlConn) Ping(ctx context.Context) (err error) {
+ if mc.closed.IsSet() {
+ errLog.Print(ErrInvalidConn)
+ return driver.ErrBadConn
+ }
+
+ if err = mc.watchCancel(ctx); err != nil {
+ return
+ }
+ defer mc.finish()
+
+ if err = mc.writeCommandPacket(comPing); err != nil {
+ return
+ }
+
+ return mc.readResultOK()
+}
+
+// BeginTx implements driver.ConnBeginTx interface
+func (mc *mysqlConn) BeginTx(ctx context.Context, opts driver.TxOptions) (driver.Tx, error) {
+ if err := mc.watchCancel(ctx); err != nil {
+ return nil, err
+ }
+ defer mc.finish()
+
+ if sql.IsolationLevel(opts.Isolation) != sql.LevelDefault {
+ level, err := mapIsolationLevel(opts.Isolation)
+ if err != nil {
+ return nil, err
+ }
+ err = mc.exec("SET TRANSACTION ISOLATION LEVEL " + level)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return mc.begin(opts.ReadOnly)
+}
+
+func (mc *mysqlConn) QueryContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Rows, error) {
+ dargs, err := namedValueToValue(args)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := mc.watchCancel(ctx); err != nil {
+ return nil, err
+ }
+
+ rows, err := mc.query(query, dargs)
+ if err != nil {
+ mc.finish()
+ return nil, err
+ }
+ rows.finish = mc.finish
+ return rows, err
+}
+
+func (mc *mysqlConn) ExecContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Result, error) {
+ dargs, err := namedValueToValue(args)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := mc.watchCancel(ctx); err != nil {
+ return nil, err
+ }
+ defer mc.finish()
+
+ return mc.Exec(query, dargs)
+}
+
+func (mc *mysqlConn) PrepareContext(ctx context.Context, query string) (driver.Stmt, error) {
+ if err := mc.watchCancel(ctx); err != nil {
+ return nil, err
+ }
+
+ stmt, err := mc.Prepare(query)
+ mc.finish()
+ if err != nil {
+ return nil, err
+ }
+
+ select {
+ default:
+ case <-ctx.Done():
+ stmt.Close()
+ return nil, ctx.Err()
+ }
+ return stmt, nil
+}
+
+func (stmt *mysqlStmt) QueryContext(ctx context.Context, args []driver.NamedValue) (driver.Rows, error) {
+ dargs, err := namedValueToValue(args)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := stmt.mc.watchCancel(ctx); err != nil {
+ return nil, err
+ }
+
+ rows, err := stmt.query(dargs)
+ if err != nil {
+ stmt.mc.finish()
+ return nil, err
+ }
+ rows.finish = stmt.mc.finish
+ return rows, err
+}
+
+func (stmt *mysqlStmt) ExecContext(ctx context.Context, args []driver.NamedValue) (driver.Result, error) {
+ dargs, err := namedValueToValue(args)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := stmt.mc.watchCancel(ctx); err != nil {
+ return nil, err
+ }
+ defer stmt.mc.finish()
+
+ return stmt.Exec(dargs)
+}
+
+func (mc *mysqlConn) watchCancel(ctx context.Context) error {
+ if mc.watching {
+ // Reach here if canceled,
+ // so the connection is already invalid
+ mc.cleanup()
+ return nil
+ }
+ // When ctx is already cancelled, don't watch it.
+ if err := ctx.Err(); err != nil {
+ return err
+ }
+ // When ctx is not cancellable, don't watch it.
+ if ctx.Done() == nil {
+ return nil
+ }
+ // When watcher is not alive, can't watch it.
+ if mc.watcher == nil {
+ return nil
+ }
+
+ mc.watching = true
+ mc.watcher <- ctx
+ return nil
+}
+
+func (mc *mysqlConn) startWatcher() {
+ watcher := make(chan mysqlContext, 1)
+ mc.watcher = watcher
+ finished := make(chan struct{})
+ mc.finished = finished
+ go func() {
+ for {
+ var ctx mysqlContext
+ select {
+ case ctx = <-watcher:
+ case <-mc.closech:
+ return
+ }
+
+ select {
+ case <-ctx.Done():
+ mc.cancel(ctx.Err())
+ case <-finished:
+ case <-mc.closech:
+ return
+ }
+ }
+ }()
+}
+
+func (mc *mysqlConn) CheckNamedValue(nv *driver.NamedValue) (err error) {
+ nv.Value, err = converter{}.ConvertValue(nv.Value)
+ return
+}
+
+// ResetSession implements driver.SessionResetter.
+// (From Go 1.10)
+func (mc *mysqlConn) ResetSession(ctx context.Context) error {
+ if mc.closed.IsSet() {
+ return driver.ErrBadConn
+ }
+ return nil
+}
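
Note: connection_go18.go carries a `// +build go1.8` constraint, and utils_go17.go (further down) pairs `// +build go1.7` with `// +build !go1.8`, so exactly one implementation is compiled per toolchain. A minimal sketch of that build-tag split under assumed file and package names (not part of the driver):

```go
// feature_go18.go - compiled only on Go 1.8 and newer.
// The blank line between the build constraint and the package clause is required.

// +build go1.8

package feature

func contextSupported() bool { return true }
```

```go
// feature_go17.go - compiled on Go 1.7 but excluded on 1.8+.

// +build go1.7,!go1.8

package feature

func contextSupported() bool { return false }
```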
diff --git a/vendor/github.com/go-sql-driver/mysql/driver.go b/vendor/github.com/go-sql-driver/mysql/driver.go
index 9f49670..e9ede2c 100644
--- a/vendor/github.com/go-sql-driver/mysql/driver.go
+++ b/vendor/github.com/go-sql-driver/mysql/driver.go
@@ -23,6 +23,11 @@ import (
"sync"
)
+// watcher interface is used for context support (From Go 1.8)
+type watcher interface {
+ startWatcher()
+}
+
// MySQLDriver is exported to make the driver directly accessible.
// In general the driver is used via the database/sql package.
type MySQLDriver struct{}
@@ -50,7 +55,7 @@ func RegisterDial(net string, dial DialFunc) {
// Open new Connection.
// See https://github.com/go-sql-driver/mysql#dsn-data-source-name for how
-// the DSN string is formatted
+// the DSN string is formated
func (d MySQLDriver) Open(dsn string) (driver.Conn, error) {
var err error
@@ -77,10 +82,6 @@ func (d MySQLDriver) Open(dsn string) (driver.Conn, error) {
mc.netConn, err = nd.Dial(mc.cfg.Net, mc.cfg.Addr)
}
if err != nil {
- if nerr, ok := err.(net.Error); ok && nerr.Temporary() {
- errLog.Print("net.Error from Dial()': ", nerr.Error())
- return nil, driver.ErrBadConn
- }
return nil, err
}
@@ -95,7 +96,9 @@ func (d MySQLDriver) Open(dsn string) (driver.Conn, error) {
}
// Call startWatcher for context support (From Go 1.8)
- mc.startWatcher()
+ if s, ok := interface{}(mc).(watcher); ok {
+ s.startWatcher()
+ }
mc.buf = newBuffer(mc.netConn)
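
Note: because `startWatcher` now lives only in the go1.8 file, driver.go reaches it through an optional-interface assertion; on Go 1.7 the assertion simply fails and the call is skipped. A self-contained sketch of that pattern (types here are illustrative, not the driver's):

```go
package main

import "fmt"

// watcher mirrors the optional interface declared in driver.go: a capability
// that only exists when the build includes the go1.8 file.
type watcher interface {
	startWatcher()
}

type connWithWatcher struct{}

func (connWithWatcher) startWatcher() { fmt.Println("watcher started") }

type plainConn struct{}

// maybeStartWatcher follows the same shape as Open: call the method only if
// the concrete type happens to implement it.
func maybeStartWatcher(mc interface{}) {
	if s, ok := mc.(watcher); ok {
		s.startWatcher()
	}
}

func main() {
	maybeStartWatcher(connWithWatcher{}) // prints "watcher started"
	maybeStartWatcher(plainConn{})       // silently skipped
}
```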
diff --git a/vendor/github.com/go-sql-driver/mysql/packets.go b/vendor/github.com/go-sql-driver/mysql/packets.go
index cfcfff3..9ed6408 100644
--- a/vendor/github.com/go-sql-driver/mysql/packets.go
+++ b/vendor/github.com/go-sql-driver/mysql/packets.go
@@ -51,7 +51,7 @@ func (mc *mysqlConn) readPacket() ([]byte, error) {
mc.sequence++
// packets with length 0 terminate a previous packet which is a
- // multiple of (2^24)-1 bytes long
+ // multiple of (2^24)−1 bytes long
if pktLen == 0 {
// there was no previous packet
if prevData == nil {
@@ -286,10 +286,10 @@ func (mc *mysqlConn) writeHandshakeResponsePacket(authResp []byte, plugin string
}
// Calculate packet length and get buffer with that size
- data, err := mc.buf.takeSmallBuffer(pktLen + 4)
- if err != nil {
+ data := mc.buf.takeSmallBuffer(pktLen + 4)
+ if data == nil {
// cannot take the buffer. Something must be wrong with the connection
- errLog.Print(err)
+ errLog.Print(ErrBusyBuffer)
return errBadConnNoWrite
}
@@ -367,10 +367,10 @@ func (mc *mysqlConn) writeHandshakeResponsePacket(authResp []byte, plugin string
// http://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchResponse
func (mc *mysqlConn) writeAuthSwitchPacket(authData []byte) error {
pktLen := 4 + len(authData)
- data, err := mc.buf.takeSmallBuffer(pktLen)
- if err != nil {
+ data := mc.buf.takeSmallBuffer(pktLen)
+ if data == nil {
// cannot take the buffer. Something must be wrong with the connection
- errLog.Print(err)
+ errLog.Print(ErrBusyBuffer)
return errBadConnNoWrite
}
@@ -387,10 +387,10 @@ func (mc *mysqlConn) writeCommandPacket(command byte) error {
// Reset Packet Sequence
mc.sequence = 0
- data, err := mc.buf.takeSmallBuffer(4 + 1)
- if err != nil {
+ data := mc.buf.takeSmallBuffer(4 + 1)
+ if data == nil {
// cannot take the buffer. Something must be wrong with the connection
- errLog.Print(err)
+ errLog.Print(ErrBusyBuffer)
return errBadConnNoWrite
}
@@ -406,10 +406,10 @@ func (mc *mysqlConn) writeCommandPacketStr(command byte, arg string) error {
mc.sequence = 0
pktLen := 1 + len(arg)
- data, err := mc.buf.takeBuffer(pktLen + 4)
- if err != nil {
+ data := mc.buf.takeBuffer(pktLen + 4)
+ if data == nil {
// cannot take the buffer. Something must be wrong with the connection
- errLog.Print(err)
+ errLog.Print(ErrBusyBuffer)
return errBadConnNoWrite
}
@@ -427,10 +427,10 @@ func (mc *mysqlConn) writeCommandPacketUint32(command byte, arg uint32) error {
// Reset Packet Sequence
mc.sequence = 0
- data, err := mc.buf.takeSmallBuffer(4 + 1 + 4)
- if err != nil {
+ data := mc.buf.takeSmallBuffer(4 + 1 + 4)
+ if data == nil {
// cannot take the buffer. Something must be wrong with the connection
- errLog.Print(err)
+ errLog.Print(ErrBusyBuffer)
return errBadConnNoWrite
}
@@ -883,7 +883,7 @@ func (stmt *mysqlStmt) writeExecutePacket(args []driver.Value) error {
const minPktLen = 4 + 1 + 4 + 1 + 4
mc := stmt.mc
- // Determine threshold dynamically to avoid packet size shortage.
+ // Determine threshould dynamically to avoid packet size shortage.
longDataSize := mc.maxAllowedPacket / (stmt.paramCount + 1)
if longDataSize < 64 {
longDataSize = 64
@@ -893,17 +893,15 @@ func (stmt *mysqlStmt) writeExecutePacket(args []driver.Value) error {
mc.sequence = 0
var data []byte
- var err error
if len(args) == 0 {
- data, err = mc.buf.takeBuffer(minPktLen)
+ data = mc.buf.takeBuffer(minPktLen)
} else {
- data, err = mc.buf.takeCompleteBuffer()
- // In this case the len(data) == cap(data) which is used to optimise the flow below.
+ data = mc.buf.takeCompleteBuffer()
}
- if err != nil {
+ if data == nil {
// cannot take the buffer. Something must be wrong with the connection
- errLog.Print(err)
+ errLog.Print(ErrBusyBuffer)
return errBadConnNoWrite
}
@@ -929,7 +927,7 @@ func (stmt *mysqlStmt) writeExecutePacket(args []driver.Value) error {
pos := minPktLen
var nullMask []byte
- if maskLen, typesLen := (len(args)+7)/8, 1+2*len(args); pos+maskLen+typesLen >= cap(data) {
+ if maskLen, typesLen := (len(args)+7)/8, 1+2*len(args); pos+maskLen+typesLen >= len(data) {
// buffer has to be extended but we don't know by how much so
// we depend on append after all data with known sizes fit.
// We stop at that because we deal with a lot of columns here
@@ -938,11 +936,10 @@ func (stmt *mysqlStmt) writeExecutePacket(args []driver.Value) error {
copy(tmp[:pos], data[:pos])
data = tmp
nullMask = data[pos : pos+maskLen]
- // No need to clean nullMask as make ensures that.
pos += maskLen
} else {
nullMask = data[pos : pos+maskLen]
- for i := range nullMask {
+ for i := 0; i < maskLen; i++ {
nullMask[i] = 0
}
pos += maskLen
@@ -1079,10 +1076,7 @@ func (stmt *mysqlStmt) writeExecutePacket(args []driver.Value) error {
// In that case we must build the data packet with the new values buffer
if valuesCap != cap(paramValues) {
data = append(data[:pos], paramValues...)
- if err = mc.buf.store(data); err != nil {
- errLog.Print(err)
- return errBadConnNoWrite
- }
+ mc.buf.buf = data
}
pos += len(paramValues)
diff --git a/vendor/github.com/go-sql-driver/mysql/utils.go b/vendor/github.com/go-sql-driver/mysql/utils.go
index cb3650b..ca5d47d 100644
--- a/vendor/github.com/go-sql-driver/mysql/utils.go
+++ b/vendor/github.com/go-sql-driver/mysql/utils.go
@@ -10,10 +10,8 @@ package mysql
import (
"crypto/tls"
- "database/sql"
"database/sql/driver"
"encoding/binary"
- "errors"
"fmt"
"io"
"strconv"
@@ -82,7 +80,7 @@ func DeregisterTLSConfig(key string) {
func getTLSConfigClone(key string) (config *tls.Config) {
tlsConfigLock.RLock()
if v, ok := tlsConfigRegistry[key]; ok {
- config = v.Clone()
+ config = cloneTLSConfig(v)
}
tlsConfigLock.RUnlock()
return
@@ -726,30 +724,3 @@ func (ae *atomicError) Value() error {
}
return nil
}
-
-func namedValueToValue(named []driver.NamedValue) ([]driver.Value, error) {
- dargs := make([]driver.Value, len(named))
- for n, param := range named {
- if len(param.Name) > 0 {
- // TODO: support the use of Named Parameters #561
- return nil, errors.New("mysql: driver does not support the use of Named Parameters")
- }
- dargs[n] = param.Value
- }
- return dargs, nil
-}
-
-func mapIsolationLevel(level driver.IsolationLevel) (string, error) {
- switch sql.IsolationLevel(level) {
- case sql.LevelRepeatableRead:
- return "REPEATABLE READ", nil
- case sql.LevelReadCommitted:
- return "READ COMMITTED", nil
- case sql.LevelReadUncommitted:
- return "READ UNCOMMITTED", nil
- case sql.LevelSerializable:
- return "SERIALIZABLE", nil
- default:
- return "", fmt.Errorf("mysql: unsupported isolation level: %v", level)
- }
-}
diff --git a/vendor/github.com/go-sql-driver/mysql/utils_go17.go b/vendor/github.com/go-sql-driver/mysql/utils_go17.go
new file mode 100644
index 0000000..f595634
--- /dev/null
+++ b/vendor/github.com/go-sql-driver/mysql/utils_go17.go
@@ -0,0 +1,40 @@
+// Go MySQL Driver - A MySQL-Driver for Go's database/sql package
+//
+// Copyright 2017 The Go-MySQL-Driver Authors. All rights reserved.
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this file,
+// You can obtain one at http://mozilla.org/MPL/2.0/.
+
+// +build go1.7
+// +build !go1.8
+
+package mysql
+
+import "crypto/tls"
+
+func cloneTLSConfig(c *tls.Config) *tls.Config {
+ return &tls.Config{
+ Rand: c.Rand,
+ Time: c.Time,
+ Certificates: c.Certificates,
+ NameToCertificate: c.NameToCertificate,
+ GetCertificate: c.GetCertificate,
+ RootCAs: c.RootCAs,
+ NextProtos: c.NextProtos,
+ ServerName: c.ServerName,
+ ClientAuth: c.ClientAuth,
+ ClientCAs: c.ClientCAs,
+ InsecureSkipVerify: c.InsecureSkipVerify,
+ CipherSuites: c.CipherSuites,
+ PreferServerCipherSuites: c.PreferServerCipherSuites,
+ SessionTicketsDisabled: c.SessionTicketsDisabled,
+ SessionTicketKey: c.SessionTicketKey,
+ ClientSessionCache: c.ClientSessionCache,
+ MinVersion: c.MinVersion,
+ MaxVersion: c.MaxVersion,
+ CurvePreferences: c.CurvePreferences,
+ DynamicRecordSizingDisabled: c.DynamicRecordSizingDisabled,
+ Renegotiation: c.Renegotiation,
+ }
+}
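
Note: utils_go17.go restores the field-by-field copy of `tls.Config` for Go 1.7, where the exported `(*tls.Config).Clone` does not exist yet; the go1.8 sibling below just delegates to `Clone`. The clone is what `getTLSConfigClone` hands to each connection that references a registered config. A hedged usage sketch (the CA path, key name and DSN are illustrative):

```go
package main

import (
	"crypto/tls"
	"crypto/x509"
	"database/sql"
	"io/ioutil"
	"log"

	"github.com/go-sql-driver/mysql"
)

func main() {
	pem, err := ioutil.ReadFile("/etc/ssl/ca.pem")
	if err != nil {
		log.Fatal(err)
	}
	pool := x509.NewCertPool()
	if !pool.AppendCertsFromPEM(pem) {
		log.Fatal("failed to parse CA certificate")
	}

	// The driver clones this config (Clone on Go 1.8+, the manual copy
	// above on Go 1.7) for connections whose DSN selects it via ?tls=custom.
	if err := mysql.RegisterTLSConfig("custom", &tls.Config{RootCAs: pool}); err != nil {
		log.Fatal(err)
	}

	db, err := sql.Open("mysql", "user:pass@tcp(db.example.com:3306)/app?tls=custom")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
}
```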
diff --git a/vendor/github.com/go-sql-driver/mysql/utils_go18.go b/vendor/github.com/go-sql-driver/mysql/utils_go18.go
new file mode 100644
index 0000000..c35c2a6
--- /dev/null
+++ b/vendor/github.com/go-sql-driver/mysql/utils_go18.go
@@ -0,0 +1,50 @@
+// Go MySQL Driver - A MySQL-Driver for Go's database/sql package
+//
+// Copyright 2017 The Go-MySQL-Driver Authors. All rights reserved.
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this file,
+// You can obtain one at http://mozilla.org/MPL/2.0/.
+
+// +build go1.8
+
+package mysql
+
+import (
+ "crypto/tls"
+ "database/sql"
+ "database/sql/driver"
+ "errors"
+ "fmt"
+)
+
+func cloneTLSConfig(c *tls.Config) *tls.Config {
+ return c.Clone()
+}
+
+func namedValueToValue(named []driver.NamedValue) ([]driver.Value, error) {
+ dargs := make([]driver.Value, len(named))
+ for n, param := range named {
+ if len(param.Name) > 0 {
+ // TODO: support the use of Named Parameters #561
+ return nil, errors.New("mysql: driver does not support the use of Named Parameters")
+ }
+ dargs[n] = param.Value
+ }
+ return dargs, nil
+}
+
+func mapIsolationLevel(level driver.IsolationLevel) (string, error) {
+ switch sql.IsolationLevel(level) {
+ case sql.LevelRepeatableRead:
+ return "REPEATABLE READ", nil
+ case sql.LevelReadCommitted:
+ return "READ COMMITTED", nil
+ case sql.LevelReadUncommitted:
+ return "READ UNCOMMITTED", nil
+ case sql.LevelSerializable:
+ return "SERIALIZABLE", nil
+ default:
+ return "", fmt.Errorf("mysql: unsupported isolation level: %v", level)
+ }
+}
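
Note: `mapIsolationLevel` is what turns a `database/sql` isolation level into the `SET TRANSACTION ISOLATION LEVEL ...` statement sent by `BeginTx` in connection_go18.go above. A sketch of how user code exercises it on Go 1.8+ (DSN and credentials are placeholders):

```go
package main

import (
	"context"
	"database/sql"
	"log"

	_ "github.com/go-sql-driver/mysql"
)

func main() {
	db, err := sql.Open("mysql", "user:pass@tcp(127.0.0.1:3306)/app")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// This reaches mysqlConn.BeginTx, which calls mapIsolationLevel and
	// emits "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE" before BEGIN.
	tx, err := db.BeginTx(context.Background(), &sql.TxOptions{
		Isolation: sql.LevelSerializable,
		ReadOnly:  true,
	})
	if err != nil {
		log.Fatal(err)
	}
	defer tx.Rollback()
}
```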
diff --git a/vendor/github.com/golang/protobuf/proto/decode.go b/vendor/github.com/golang/protobuf/proto/decode.go
index 63b0f08..d9aa3c4 100644
--- a/vendor/github.com/golang/protobuf/proto/decode.go
+++ b/vendor/github.com/golang/protobuf/proto/decode.go
@@ -186,6 +186,7 @@ func (p *Buffer) DecodeVarint() (x uint64, err error) {
if b&0x80 == 0 {
goto done
}
+ // x -= 0x80 << 63 // Always zero.
return 0, errOverflow
diff --git a/vendor/github.com/golang/protobuf/proto/equal.go b/vendor/github.com/golang/protobuf/proto/equal.go
index f9b6e41..d4db5a1 100644
--- a/vendor/github.com/golang/protobuf/proto/equal.go
+++ b/vendor/github.com/golang/protobuf/proto/equal.go
@@ -246,8 +246,7 @@ func equalExtMap(base reflect.Type, em1, em2 map[int32]Extension) bool {
return false
}
- m1 := extensionAsLegacyType(e1.value)
- m2 := extensionAsLegacyType(e2.value)
+ m1, m2 := e1.value, e2.value
if m1 == nil && m2 == nil {
// Both have only encoded form.
diff --git a/vendor/github.com/golang/protobuf/proto/extensions.go b/vendor/github.com/golang/protobuf/proto/extensions.go
index fa88add..816a3b9 100644
--- a/vendor/github.com/golang/protobuf/proto/extensions.go
+++ b/vendor/github.com/golang/protobuf/proto/extensions.go
@@ -185,25 +185,9 @@ type Extension struct {
// extension will have only enc set. When such an extension is
// accessed using GetExtension (or GetExtensions) desc and value
// will be set.
- desc *ExtensionDesc
-
- // value is a concrete value for the extension field. Let the type of
- // desc.ExtensionType be the "API type" and the type of Extension.value
- // be the "storage type". The API type and storage type are the same except:
- // * For scalars (except []byte), the API type uses *T,
- // while the storage type uses T.
- // * For repeated fields, the API type uses []T, while the storage type
- // uses *[]T.
- //
- // The reason for the divergence is so that the storage type more naturally
- // matches what is expected of when retrieving the values through the
- // protobuf reflection APIs.
- //
- // The value may only be populated if desc is also populated.
+ desc *ExtensionDesc
value interface{}
-
- // enc is the raw bytes for the extension field.
- enc []byte
+ enc []byte
}
// SetRawExtension is for testing only.
@@ -350,7 +334,7 @@ func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) {
// descriptors with the same field number.
return nil, errors.New("proto: descriptor conflict")
}
- return extensionAsLegacyType(e.value), nil
+ return e.value, nil
}
if extension.ExtensionType == nil {
@@ -365,11 +349,11 @@ func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) {
// Remember the decoded version and drop the encoded version.
// That way it is safe to mutate what we return.
- e.value = extensionAsStorageType(v)
+ e.value = v
e.desc = extension
e.enc = nil
emap[extension.Field] = e
- return extensionAsLegacyType(e.value), nil
+ return e.value, nil
}
// defaultExtensionValue returns the default value for extension.
@@ -504,7 +488,7 @@ func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error
}
typ := reflect.TypeOf(extension.ExtensionType)
if typ != reflect.TypeOf(value) {
- return fmt.Errorf("proto: bad extension value type. got: %T, want: %T", value, extension.ExtensionType)
+ return errors.New("proto: bad extension value type")
}
// nil extension values need to be caught early, because the
// encoder can't distinguish an ErrNil due to a nil extension
@@ -516,7 +500,7 @@ func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error
}
extmap := epb.extensionsWrite()
- extmap[extension.Field] = Extension{desc: extension, value: extensionAsStorageType(value)}
+ extmap[extension.Field] = Extension{desc: extension, value: value}
return nil
}
@@ -557,51 +541,3 @@ func RegisterExtension(desc *ExtensionDesc) {
func RegisteredExtensions(pb Message) map[int32]*ExtensionDesc {
return extensionMaps[reflect.TypeOf(pb).Elem()]
}
-
-// extensionAsLegacyType converts an value in the storage type as the API type.
-// See Extension.value.
-func extensionAsLegacyType(v interface{}) interface{} {
- switch rv := reflect.ValueOf(v); rv.Kind() {
- case reflect.Bool, reflect.Int32, reflect.Int64, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String:
- // Represent primitive types as a pointer to the value.
- rv2 := reflect.New(rv.Type())
- rv2.Elem().Set(rv)
- v = rv2.Interface()
- case reflect.Ptr:
- // Represent slice types as the value itself.
- switch rv.Type().Elem().Kind() {
- case reflect.Slice:
- if rv.IsNil() {
- v = reflect.Zero(rv.Type().Elem()).Interface()
- } else {
- v = rv.Elem().Interface()
- }
- }
- }
- return v
-}
-
-// extensionAsStorageType converts an value in the API type as the storage type.
-// See Extension.value.
-func extensionAsStorageType(v interface{}) interface{} {
- switch rv := reflect.ValueOf(v); rv.Kind() {
- case reflect.Ptr:
- // Represent slice types as the value itself.
- switch rv.Type().Elem().Kind() {
- case reflect.Bool, reflect.Int32, reflect.Int64, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String:
- if rv.IsNil() {
- v = reflect.Zero(rv.Type().Elem()).Interface()
- } else {
- v = rv.Elem().Interface()
- }
- }
- case reflect.Slice:
- // Represent slice types as a pointer to the value.
- if rv.Type().Elem().Kind() != reflect.Uint8 {
- rv2 := reflect.New(rv.Type())
- rv2.Elem().Set(rv)
- v = rv2.Interface()
- }
- }
- return v
-}
diff --git a/vendor/github.com/golang/protobuf/proto/lib.go b/vendor/github.com/golang/protobuf/proto/lib.go
index fdd328b..75565cc 100644
--- a/vendor/github.com/golang/protobuf/proto/lib.go
+++ b/vendor/github.com/golang/protobuf/proto/lib.go
@@ -341,6 +341,26 @@ type Message interface {
ProtoMessage()
}
+// Stats records allocation details about the protocol buffer encoders
+// and decoders. Useful for tuning the library itself.
+type Stats struct {
+ Emalloc uint64 // mallocs in encode
+ Dmalloc uint64 // mallocs in decode
+ Encode uint64 // number of encodes
+ Decode uint64 // number of decodes
+ Chit uint64 // number of cache hits
+ Cmiss uint64 // number of cache misses
+ Size uint64 // number of sizes
+}
+
+// Set to true to enable stats collection.
+const collectStats = false
+
+var stats Stats
+
+// GetStats returns a copy of the global Stats structure.
+func GetStats() Stats { return stats }
+
// A Buffer is a buffer manager for marshaling and unmarshaling
// protocol buffers. It may be reused between invocations to
// reduce memory usage. It is not necessary to use a Buffer;
@@ -940,19 +960,13 @@ func isProto3Zero(v reflect.Value) bool {
return false
}
-const (
- // ProtoPackageIsVersion3 is referenced from generated protocol buffer files
- // to assert that that code is compatible with this version of the proto package.
- ProtoPackageIsVersion3 = true
+// ProtoPackageIsVersion2 is referenced from generated protocol buffer files
+// to assert that that code is compatible with this version of the proto package.
+const ProtoPackageIsVersion2 = true
- // ProtoPackageIsVersion2 is referenced from generated protocol buffer files
- // to assert that that code is compatible with this version of the proto package.
- ProtoPackageIsVersion2 = true
-
- // ProtoPackageIsVersion1 is referenced from generated protocol buffer files
- // to assert that that code is compatible with this version of the proto package.
- ProtoPackageIsVersion1 = true
-)
+// ProtoPackageIsVersion1 is referenced from generated protocol buffer files
+// to assert that that code is compatible with this version of the proto package.
+const ProtoPackageIsVersion1 = true
// InternalMessageInfo is a type used internally by generated .pb.go files.
// This type is not intended to be used by non-generated code.
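
Note: this lib.go hunk restores the `Stats`/`GetStats` counters (gated behind the `collectStats` constant, which ships as `false`) and rolls the version-assertion constants back from `ProtoPackageIsVersion3` to the v2/v1 pair. A small sketch of reading the counters, assuming this older proto package (values stay zero unless `collectStats` is flipped in the library source):

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	// GetStats returns a copy of the global Stats struct; collection is
	// compiled out by default, so these normally read zero.
	s := proto.GetStats()
	fmt.Printf("cache hits=%d misses=%d encodes=%d\n", s.Chit, s.Cmiss, s.Encode)
}
```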
diff --git a/vendor/github.com/golang/protobuf/proto/message_set.go b/vendor/github.com/golang/protobuf/proto/message_set.go
index f48a756..3b6ca41 100644
--- a/vendor/github.com/golang/protobuf/proto/message_set.go
+++ b/vendor/github.com/golang/protobuf/proto/message_set.go
@@ -36,7 +36,13 @@ package proto
*/
import (
+ "bytes"
+ "encoding/json"
"errors"
+ "fmt"
+ "reflect"
+ "sort"
+ "sync"
)
// errNoMessageTypeID occurs when a protocol buffer does not have a message type ID.
@@ -139,9 +145,46 @@ func skipVarint(buf []byte) []byte {
return buf[i+1:]
}
-// unmarshalMessageSet decodes the extension map encoded in buf in the message set wire format.
+// MarshalMessageSet encodes the extension map represented by m in the message set wire format.
+// It is called by generated Marshal methods on protocol buffer messages with the message_set_wire_format option.
+func MarshalMessageSet(exts interface{}) ([]byte, error) {
+ return marshalMessageSet(exts, false)
+}
+
+// marshaMessageSet implements above function, with the opt to turn on / off deterministic during Marshal.
+func marshalMessageSet(exts interface{}, deterministic bool) ([]byte, error) {
+ switch exts := exts.(type) {
+ case *XXX_InternalExtensions:
+ var u marshalInfo
+ siz := u.sizeMessageSet(exts)
+ b := make([]byte, 0, siz)
+ return u.appendMessageSet(b, exts, deterministic)
+
+ case map[int32]Extension:
+ // This is an old-style extension map.
+ // Wrap it in a new-style XXX_InternalExtensions.
+ ie := XXX_InternalExtensions{
+ p: &struct {
+ mu sync.Mutex
+ extensionMap map[int32]Extension
+ }{
+ extensionMap: exts,
+ },
+ }
+
+ var u marshalInfo
+ siz := u.sizeMessageSet(&ie)
+ b := make([]byte, 0, siz)
+ return u.appendMessageSet(b, &ie, deterministic)
+
+ default:
+ return nil, errors.New("proto: not an extension map")
+ }
+}
+
+// UnmarshalMessageSet decodes the extension map encoded in buf in the message set wire format.
// It is called by Unmarshal methods on protocol buffer messages with the message_set_wire_format option.
-func unmarshalMessageSet(buf []byte, exts interface{}) error {
+func UnmarshalMessageSet(buf []byte, exts interface{}) error {
var m map[int32]Extension
switch exts := exts.(type) {
case *XXX_InternalExtensions:
@@ -179,3 +222,93 @@ func unmarshalMessageSet(buf []byte, exts interface{}) error {
}
return nil
}
+
+// MarshalMessageSetJSON encodes the extension map represented by m in JSON format.
+// It is called by generated MarshalJSON methods on protocol buffer messages with the message_set_wire_format option.
+func MarshalMessageSetJSON(exts interface{}) ([]byte, error) {
+ var m map[int32]Extension
+ switch exts := exts.(type) {
+ case *XXX_InternalExtensions:
+ var mu sync.Locker
+ m, mu = exts.extensionsRead()
+ if m != nil {
+ // Keep the extensions map locked until we're done marshaling to prevent
+ // races between marshaling and unmarshaling the lazily-{en,de}coded
+ // values.
+ mu.Lock()
+ defer mu.Unlock()
+ }
+ case map[int32]Extension:
+ m = exts
+ default:
+ return nil, errors.New("proto: not an extension map")
+ }
+ var b bytes.Buffer
+ b.WriteByte('{')
+
+ // Process the map in key order for deterministic output.
+ ids := make([]int32, 0, len(m))
+ for id := range m {
+ ids = append(ids, id)
+ }
+ sort.Sort(int32Slice(ids)) // int32Slice defined in text.go
+
+ for i, id := range ids {
+ ext := m[id]
+ msd, ok := messageSetMap[id]
+ if !ok {
+ // Unknown type; we can't render it, so skip it.
+ continue
+ }
+
+ if i > 0 && b.Len() > 1 {
+ b.WriteByte(',')
+ }
+
+ fmt.Fprintf(&b, `"[%s]":`, msd.name)
+
+ x := ext.value
+ if x == nil {
+ x = reflect.New(msd.t.Elem()).Interface()
+ if err := Unmarshal(ext.enc, x.(Message)); err != nil {
+ return nil, err
+ }
+ }
+ d, err := json.Marshal(x)
+ if err != nil {
+ return nil, err
+ }
+ b.Write(d)
+ }
+ b.WriteByte('}')
+ return b.Bytes(), nil
+}
+
+// UnmarshalMessageSetJSON decodes the extension map encoded in buf in JSON format.
+// It is called by generated UnmarshalJSON methods on protocol buffer messages with the message_set_wire_format option.
+func UnmarshalMessageSetJSON(buf []byte, exts interface{}) error {
+ // Common-case fast path.
+ if len(buf) == 0 || bytes.Equal(buf, []byte("{}")) {
+ return nil
+ }
+
+ // This is fairly tricky, and it's not clear that it is needed.
+ return errors.New("TODO: UnmarshalMessageSetJSON not yet implemented")
+}
+
+// A global registry of types that can be used in a MessageSet.
+
+var messageSetMap = make(map[int32]messageSetDesc)
+
+type messageSetDesc struct {
+ t reflect.Type // pointer to struct
+ name string
+}
+
+// RegisterMessageSetType is called from the generated code.
+func RegisterMessageSetType(m Message, fieldNum int32, name string) {
+ messageSetMap[fieldNum] = messageSetDesc{
+ t: reflect.TypeOf(m),
+ name: name,
+ }
+}
diff --git a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go
index 94fa919..b6cad90 100644
--- a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go
+++ b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go
@@ -79,13 +79,10 @@ func toPointer(i *Message) pointer {
// toAddrPointer converts an interface to a pointer that points to
// the interface data.
-func toAddrPointer(i *interface{}, isptr, deref bool) pointer {
+func toAddrPointer(i *interface{}, isptr bool) pointer {
v := reflect.ValueOf(*i)
u := reflect.New(v.Type())
u.Elem().Set(v)
- if deref {
- u = u.Elem()
- }
return pointer{v: u}
}
diff --git a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go
index dbfffe0..d55a335 100644
--- a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go
+++ b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go
@@ -85,21 +85,16 @@ func toPointer(i *Message) pointer {
// toAddrPointer converts an interface to a pointer that points to
// the interface data.
-func toAddrPointer(i *interface{}, isptr, deref bool) (p pointer) {
+func toAddrPointer(i *interface{}, isptr bool) pointer {
// Super-tricky - read or get the address of data word of interface value.
if isptr {
// The interface is of pointer type, thus it is a direct interface.
// The data word is the pointer data itself. We take its address.
- p = pointer{p: unsafe.Pointer(uintptr(unsafe.Pointer(i)) + ptrSize)}
- } else {
- // The interface is not of pointer type. The data word is the pointer
- // to the data.
- p = pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]}
+ return pointer{p: unsafe.Pointer(uintptr(unsafe.Pointer(i)) + ptrSize)}
}
- if deref {
- p.p = *(*unsafe.Pointer)(p.p)
- }
- return p
+ // The interface is not of pointer type. The data word is the pointer
+ // to the data.
+ return pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]}
}
// valToPointer converts v to a pointer. v must be of pointer type.
diff --git a/vendor/github.com/golang/protobuf/proto/properties.go b/vendor/github.com/golang/protobuf/proto/properties.go
index 79668ff..50b99b8 100644
--- a/vendor/github.com/golang/protobuf/proto/properties.go
+++ b/vendor/github.com/golang/protobuf/proto/properties.go
@@ -334,6 +334,9 @@ func GetProperties(t reflect.Type) *StructProperties {
sprop, ok := propertiesMap[t]
propertiesMu.RUnlock()
if ok {
+ if collectStats {
+ stats.Chit++
+ }
return sprop
}
@@ -343,20 +346,17 @@ func GetProperties(t reflect.Type) *StructProperties {
return sprop
}
-type (
- oneofFuncsIface interface {
- XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{})
- }
- oneofWrappersIface interface {
- XXX_OneofWrappers() []interface{}
- }
-)
-
// getPropertiesLocked requires that propertiesMu is held.
func getPropertiesLocked(t reflect.Type) *StructProperties {
if prop, ok := propertiesMap[t]; ok {
+ if collectStats {
+ stats.Chit++
+ }
return prop
}
+ if collectStats {
+ stats.Cmiss++
+ }
prop := new(StructProperties)
// in case of recursive protos, fill this in now.
@@ -391,14 +391,13 @@ func getPropertiesLocked(t reflect.Type) *StructProperties {
// Re-order prop.order.
sort.Sort(prop)
- var oots []interface{}
- switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) {
- case oneofFuncsIface:
- _, _, _, oots = m.XXX_OneofFuncs()
- case oneofWrappersIface:
- oots = m.XXX_OneofWrappers()
+ type oneofMessage interface {
+ XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{})
}
- if len(oots) > 0 {
+ if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok {
+ var oots []interface{}
+ _, _, _, oots = om.XXX_OneofFuncs()
+
// Interpret oneof metadata.
prop.OneofTypes = make(map[string]*OneofProperties)
for _, oot := range oots {
diff --git a/vendor/github.com/golang/protobuf/proto/table_marshal.go b/vendor/github.com/golang/protobuf/proto/table_marshal.go
index 5cb11fa..b167944 100644
--- a/vendor/github.com/golang/protobuf/proto/table_marshal.go
+++ b/vendor/github.com/golang/protobuf/proto/table_marshal.go
@@ -87,7 +87,6 @@ type marshalElemInfo struct {
sizer sizer
marshaler marshaler
isptr bool // elem is pointer typed, thus interface of this type is a direct interface (extension only)
- deref bool // dereference the pointer before operating on it; implies isptr
}
var (
@@ -321,11 +320,8 @@ func (u *marshalInfo) computeMarshalInfo() {
// get oneof implementers
var oneofImplementers []interface{}
- switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) {
- case oneofFuncsIface:
+ if m, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok {
_, _, _, oneofImplementers = m.XXX_OneofFuncs()
- case oneofWrappersIface:
- oneofImplementers = m.XXX_OneofWrappers()
}
n := t.NumField()
@@ -411,22 +407,13 @@ func (u *marshalInfo) getExtElemInfo(desc *ExtensionDesc) *marshalElemInfo {
panic("tag is not an integer")
}
wt := wiretype(tags[0])
- if t.Kind() == reflect.Ptr && t.Elem().Kind() != reflect.Struct {
- t = t.Elem()
- }
sizer, marshaler := typeMarshaler(t, tags, false, false)
- var deref bool
- if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 {
- t = reflect.PtrTo(t)
- deref = true
- }
e = &marshalElemInfo{
wiretag: uint64(tag)<<3 | wt,
tagsize: SizeVarint(uint64(tag) << 3),
sizer: sizer,
marshaler: marshaler,
isptr: t.Kind() == reflect.Ptr,
- deref: deref,
}
// update cache
@@ -461,7 +448,7 @@ func (fi *marshalFieldInfo) computeMarshalFieldInfo(f *reflect.StructField) {
func (fi *marshalFieldInfo) computeOneofFieldInfo(f *reflect.StructField, oneofImplementers []interface{}) {
fi.field = toField(f)
- fi.wiretag = math.MaxInt32 // Use a large tag number, make oneofs sorted at the end. This tag will not appear on the wire.
+ fi.wiretag = 1<<31 - 1 // Use a large tag number, make oneofs sorted at the end. This tag will not appear on the wire.
fi.isPointer = true
fi.sizer, fi.marshaler = makeOneOfMarshaler(fi, f)
fi.oneofElems = make(map[reflect.Type]*marshalElemInfo)
@@ -489,6 +476,10 @@ func (fi *marshalFieldInfo) computeOneofFieldInfo(f *reflect.StructField, oneofI
}
}
+type oneofMessage interface {
+ XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{})
+}
+
// wiretype returns the wire encoding of the type.
func wiretype(encoding string) uint64 {
switch encoding {
@@ -2319,8 +2310,8 @@ func makeMapMarshaler(f *reflect.StructField) (sizer, marshaler) {
for _, k := range m.MapKeys() {
ki := k.Interface()
vi := m.MapIndex(k).Interface()
- kaddr := toAddrPointer(&ki, false, false) // pointer to key
- vaddr := toAddrPointer(&vi, valIsPtr, false) // pointer to value
+ kaddr := toAddrPointer(&ki, false) // pointer to key
+ vaddr := toAddrPointer(&vi, valIsPtr) // pointer to value
siz := keySizer(kaddr, 1) + valSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1)
n += siz + SizeVarint(uint64(siz)) + tagsize
}
@@ -2338,8 +2329,8 @@ func makeMapMarshaler(f *reflect.StructField) (sizer, marshaler) {
for _, k := range keys {
ki := k.Interface()
vi := m.MapIndex(k).Interface()
- kaddr := toAddrPointer(&ki, false, false) // pointer to key
- vaddr := toAddrPointer(&vi, valIsPtr, false) // pointer to value
+ kaddr := toAddrPointer(&ki, false) // pointer to key
+ vaddr := toAddrPointer(&vi, valIsPtr) // pointer to value
b = appendVarint(b, tag)
siz := keySizer(kaddr, 1) + valCachedSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1)
b = appendVarint(b, uint64(siz))
@@ -2408,7 +2399,7 @@ func (u *marshalInfo) sizeExtensions(ext *XXX_InternalExtensions) int {
// the last time this function was called.
ei := u.getExtElemInfo(e.desc)
v := e.value
- p := toAddrPointer(&v, ei.isptr, ei.deref)
+ p := toAddrPointer(&v, ei.isptr)
n += ei.sizer(p, ei.tagsize)
}
mu.Unlock()
@@ -2443,7 +2434,7 @@ func (u *marshalInfo) appendExtensions(b []byte, ext *XXX_InternalExtensions, de
ei := u.getExtElemInfo(e.desc)
v := e.value
- p := toAddrPointer(&v, ei.isptr, ei.deref)
+ p := toAddrPointer(&v, ei.isptr)
b, err = ei.marshaler(b, p, ei.wiretag, deterministic)
if !nerr.Merge(err) {
return b, err
@@ -2474,7 +2465,7 @@ func (u *marshalInfo) appendExtensions(b []byte, ext *XXX_InternalExtensions, de
ei := u.getExtElemInfo(e.desc)
v := e.value
- p := toAddrPointer(&v, ei.isptr, ei.deref)
+ p := toAddrPointer(&v, ei.isptr)
b, err = ei.marshaler(b, p, ei.wiretag, deterministic)
if !nerr.Merge(err) {
return b, err
@@ -2519,7 +2510,7 @@ func (u *marshalInfo) sizeMessageSet(ext *XXX_InternalExtensions) int {
ei := u.getExtElemInfo(e.desc)
v := e.value
- p := toAddrPointer(&v, ei.isptr, ei.deref)
+ p := toAddrPointer(&v, ei.isptr)
n += ei.sizer(p, 1) // message, tag = 3 (size=1)
}
mu.Unlock()
@@ -2562,7 +2553,7 @@ func (u *marshalInfo) appendMessageSet(b []byte, ext *XXX_InternalExtensions, de
ei := u.getExtElemInfo(e.desc)
v := e.value
- p := toAddrPointer(&v, ei.isptr, ei.deref)
+ p := toAddrPointer(&v, ei.isptr)
b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic)
if !nerr.Merge(err) {
return b, err
@@ -2600,7 +2591,7 @@ func (u *marshalInfo) appendMessageSet(b []byte, ext *XXX_InternalExtensions, de
ei := u.getExtElemInfo(e.desc)
v := e.value
- p := toAddrPointer(&v, ei.isptr, ei.deref)
+ p := toAddrPointer(&v, ei.isptr)
b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic)
b = append(b, 1<<3|WireEndGroup)
if !nerr.Merge(err) {
@@ -2630,7 +2621,7 @@ func (u *marshalInfo) sizeV1Extensions(m map[int32]Extension) int {
ei := u.getExtElemInfo(e.desc)
v := e.value
- p := toAddrPointer(&v, ei.isptr, ei.deref)
+ p := toAddrPointer(&v, ei.isptr)
n += ei.sizer(p, ei.tagsize)
}
return n
@@ -2665,7 +2656,7 @@ func (u *marshalInfo) appendV1Extensions(b []byte, m map[int32]Extension, determ
ei := u.getExtElemInfo(e.desc)
v := e.value
- p := toAddrPointer(&v, ei.isptr, ei.deref)
+ p := toAddrPointer(&v, ei.isptr)
b, err = ei.marshaler(b, p, ei.wiretag, deterministic)
if !nerr.Merge(err) {
return b, err
diff --git a/vendor/github.com/golang/protobuf/proto/table_unmarshal.go b/vendor/github.com/golang/protobuf/proto/table_unmarshal.go
index acee2fc..ebf1caa 100644
--- a/vendor/github.com/golang/protobuf/proto/table_unmarshal.go
+++ b/vendor/github.com/golang/protobuf/proto/table_unmarshal.go
@@ -136,7 +136,7 @@ func (u *unmarshalInfo) unmarshal(m pointer, b []byte) error {
u.computeUnmarshalInfo()
}
if u.isMessageSet {
- return unmarshalMessageSet(b, m.offset(u.extensions).toExtensions())
+ return UnmarshalMessageSet(b, m.offset(u.extensions).toExtensions())
}
var reqMask uint64 // bitmask of required fields we've seen.
var errLater error
@@ -362,48 +362,46 @@ func (u *unmarshalInfo) computeUnmarshalInfo() {
}
// Find any types associated with oneof fields.
- var oneofImplementers []interface{}
- switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) {
- case oneofFuncsIface:
- _, _, _, oneofImplementers = m.XXX_OneofFuncs()
- case oneofWrappersIface:
- oneofImplementers = m.XXX_OneofWrappers()
- }
- for _, v := range oneofImplementers {
- tptr := reflect.TypeOf(v) // *Msg_X
- typ := tptr.Elem() // Msg_X
+ // TODO: XXX_OneofFuncs returns more info than we need. Get rid of some of it?
+ fn := reflect.Zero(reflect.PtrTo(t)).MethodByName("XXX_OneofFuncs")
+ if fn.IsValid() {
+ res := fn.Call(nil)[3] // last return value from XXX_OneofFuncs: []interface{}
+ for i := res.Len() - 1; i >= 0; i-- {
+ v := res.Index(i) // interface{}
+ tptr := reflect.ValueOf(v.Interface()).Type() // *Msg_X
+ typ := tptr.Elem() // Msg_X
- f := typ.Field(0) // oneof implementers have one field
- baseUnmarshal := fieldUnmarshaler(&f)
- tags := strings.Split(f.Tag.Get("protobuf"), ",")
- fieldNum, err := strconv.Atoi(tags[1])
- if err != nil {
- panic("protobuf tag field not an integer: " + tags[1])
- }
- var name string
- for _, tag := range tags {
- if strings.HasPrefix(tag, "name=") {
- name = strings.TrimPrefix(tag, "name=")
- break
+ f := typ.Field(0) // oneof implementers have one field
+ baseUnmarshal := fieldUnmarshaler(&f)
+ tags := strings.Split(f.Tag.Get("protobuf"), ",")
+ fieldNum, err := strconv.Atoi(tags[1])
+ if err != nil {
+ panic("protobuf tag field not an integer: " + tags[1])
+ }
+ var name string
+ for _, tag := range tags {
+ if strings.HasPrefix(tag, "name=") {
+ name = strings.TrimPrefix(tag, "name=")
+ break
+ }
+ }
+
+ // Find the oneof field that this struct implements.
+ // Might take O(n^2) to process all of the oneofs, but who cares.
+ for _, of := range oneofFields {
+ if tptr.Implements(of.ityp) {
+ // We have found the corresponding interface for this struct.
+ // That lets us know where this struct should be stored
+ // when we encounter it during unmarshaling.
+ unmarshal := makeUnmarshalOneof(typ, of.ityp, baseUnmarshal)
+ u.setTag(fieldNum, of.field, unmarshal, 0, name)
+ }
}
}
-
- // Find the oneof field that this struct implements.
- // Might take O(n^2) to process all of the oneofs, but who cares.
- for _, of := range oneofFields {
- if tptr.Implements(of.ityp) {
- // We have found the corresponding interface for this struct.
- // That lets us know where this struct should be stored
- // when we encounter it during unmarshaling.
- unmarshal := makeUnmarshalOneof(typ, of.ityp, baseUnmarshal)
- u.setTag(fieldNum, of.field, unmarshal, 0, name)
- }
- }
-
}
// Get extension ranges, if any.
- fn := reflect.Zero(reflect.PtrTo(t)).MethodByName("ExtensionRangeArray")
+ fn = reflect.Zero(reflect.PtrTo(t)).MethodByName("ExtensionRangeArray")
if fn.IsValid() {
if !u.extensions.IsValid() && !u.oldExtensions.IsValid() {
panic("a message with extensions, but no extensions field in " + t.Name())
@@ -1950,7 +1948,7 @@ func encodeVarint(b []byte, x uint64) []byte {
// If there is an error, it returns 0,0.
func decodeVarint(b []byte) (uint64, int) {
var x, y uint64
- if len(b) == 0 {
+ if len(b) <= 0 {
goto bad
}
x = uint64(b[0])
diff --git a/vendor/github.com/gorilla/sessions/README.md b/vendor/github.com/gorilla/sessions/README.md
index 22b13d9..98c993d 100644
--- a/vendor/github.com/gorilla/sessions/README.md
+++ b/vendor/github.com/gorilla/sessions/README.md
@@ -31,7 +31,7 @@ Let's start with an example that shows the sessions API in a nutshell:
// environmental variable, or flag (or both), and don't accidentally commit it
// alongside your code. Ensure your key is sufficiently random - i.e. use Go's
// crypto/rand or securecookie.GenerateRandomKey(32) and persist the result.
- var store = sessions.NewCookieStore(os.Getenv("SESSION_KEY"))
+ var store = sessions.NewCookieStore([]byte(os.Getenv("SESSION_KEY")))
func MyHandler(w http.ResponseWriter, r *http.Request) {
// Get a session. We're ignoring the error resulted from decoding an
@@ -51,18 +51,6 @@ secret key used to authenticate the session. Inside the handler, we call
some session values in session.Values, which is a `map[interface{}]interface{}`.
And finally we call `session.Save()` to save the session in the response.
-Important Note: If you aren't using gorilla/mux, you need to wrap your handlers
-with
-[`context.ClearHandler`](https://www.gorillatoolkit.org/pkg/context#ClearHandler)
-or else you will leak memory! An easy way to do this is to wrap the top-level
-mux when calling http.ListenAndServe:
-
-```go
- http.ListenAndServe(":8080", context.ClearHandler(http.DefaultServeMux))
-```
-
-The ClearHandler function is provided by the gorilla/context package.
-
More examples are available [on the Gorilla
website](https://www.gorillatoolkit.org/pkg/sessions).
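
Note: the README fix reflects that `NewCookieStore` takes one or more `[]byte` key pairs, not a string, and the `context.ClearHandler` warning is dropped because the package now stores per-request state in the request context (see sessions.go below). A minimal handler sketch consistent with the corrected example (the `SESSION_KEY` variable and session name are illustrative):

```go
package main

import (
	"log"
	"net/http"
	"os"

	"github.com/gorilla/sessions"
)

// NewCookieStore takes []byte key pairs, which is what the README fix reflects.
var store = sessions.NewCookieStore([]byte(os.Getenv("SESSION_KEY")))

func myHandler(w http.ResponseWriter, r *http.Request) {
	session, _ := store.Get(r, "session-name")
	session.Values["authenticated"] = true
	// Save must be called before anything is written to the response.
	if err := session.Save(r, w); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Write([]byte("ok"))
}

func main() {
	http.HandleFunc("/", myHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```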
diff --git a/vendor/github.com/gorilla/sessions/doc.go b/vendor/github.com/gorilla/sessions/doc.go
index 7db6729..64f858c 100644
--- a/vendor/github.com/gorilla/sessions/doc.go
+++ b/vendor/github.com/gorilla/sessions/doc.go
@@ -59,14 +59,6 @@ session.Save(r, w), and either display an error message or otherwise handle it.
Save must be called before writing to the response, otherwise the session
cookie will not be sent to the client.
-Important Note: If you aren't using gorilla/mux, you need to wrap your handlers
-with context.ClearHandler as or else you will leak memory! An easy way to do this
-is to wrap the top-level mux when calling http.ListenAndServe:
-
- http.ListenAndServe(":8080", context.ClearHandler(http.DefaultServeMux))
-
-The ClearHandler function is provided by the gorilla/context package.
-
That's all you need to know for the basic usage. Let's take a look at other
options, starting with flash messages.
diff --git a/vendor/github.com/gorilla/sessions/sessions.go b/vendor/github.com/gorilla/sessions/sessions.go
index a821d31..c052b28 100644
--- a/vendor/github.com/gorilla/sessions/sessions.go
+++ b/vendor/github.com/gorilla/sessions/sessions.go
@@ -5,12 +5,11 @@
package sessions
import (
+ "context"
"encoding/gob"
"fmt"
"net/http"
"time"
-
- "github.com/gorilla/context"
)
// Default flashes key.
@@ -108,7 +107,8 @@ const registryKey contextKey = 0
// GetRegistry returns a registry instance for the current request.
func GetRegistry(r *http.Request) *Registry {
- registry := context.Get(r, registryKey)
+ var ctx = r.Context()
+ registry := ctx.Value(registryKey)
if registry != nil {
return registry.(*Registry)
}
@@ -116,7 +116,7 @@ func GetRegistry(r *http.Request) *Registry {
request: r,
sessions: make(map[string]sessionInfo),
}
- context.Set(r, registryKey, newRegistry)
+ *r = *r.WithContext(context.WithValue(ctx, registryKey, newRegistry))
return newRegistry
}
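
Note: `GetRegistry` now keeps its registry in the request's own context instead of the gorilla/context global map, which is why the ClearHandler advice disappears from the docs above: there is no global state left to leak. A self-contained sketch of the same store-and-copy-back pattern (names here are illustrative, not the library's):

```go
package main

import (
	"context"
	"fmt"
	"net/http"
	"net/http/httptest"
)

type contextKey int

const registryKey contextKey = 0

// attach mirrors the pattern in GetRegistry: derive a request bound to a
// context carrying the value, then copy it back over *r so later handlers
// in the chain can read it.
func attach(r *http.Request, v string) {
	*r = *r.WithContext(context.WithValue(r.Context(), registryKey, v))
}

func main() {
	r := httptest.NewRequest("GET", "/", nil)
	attach(r, "per-request registry")
	fmt.Println(r.Context().Value(registryKey)) // per-request registry
}
```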
diff --git a/vendor/github.com/json-iterator/go/Gopkg.lock b/vendor/github.com/json-iterator/go/Gopkg.lock
new file mode 100644
index 0000000..c8a9fbb
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/Gopkg.lock
@@ -0,0 +1,21 @@
+# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
+
+
+[[projects]]
+ name = "github.com/modern-go/concurrent"
+ packages = ["."]
+ revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
+ version = "1.0.0"
+
+[[projects]]
+ name = "github.com/modern-go/reflect2"
+ packages = ["."]
+ revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
+ version = "1.0.1"
+
+[solve-meta]
+ analyzer-name = "dep"
+ analyzer-version = 1
+ inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
+ solver-name = "gps-cdcl"
+ solver-version = 1
diff --git a/vendor/github.com/json-iterator/go/Gopkg.toml b/vendor/github.com/json-iterator/go/Gopkg.toml
new file mode 100644
index 0000000..313a0f8
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/Gopkg.toml
@@ -0,0 +1,26 @@
+# Gopkg.toml example
+#
+# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
+# for detailed Gopkg.toml documentation.
+#
+# required = ["github.com/user/thing/cmd/thing"]
+# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
+#
+# [[constraint]]
+# name = "github.com/user/project"
+# version = "1.0.0"
+#
+# [[constraint]]
+# name = "github.com/user/project2"
+# branch = "dev"
+# source = "github.com/myfork/project2"
+#
+# [[override]]
+# name = "github.com/x/y"
+# version = "2.4.0"
+
+ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]
+
+[[constraint]]
+ name = "github.com/modern-go/reflect2"
+ version = "1.0.1"
diff --git a/vendor/github.com/json-iterator/go/LICENSE b/vendor/github.com/json-iterator/go/LICENSE
new file mode 100644
index 0000000..2cf4f5a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2016 json-iterator
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/json-iterator/go/README.md b/vendor/github.com/json-iterator/go/README.md
new file mode 100644
index 0000000..54d5afe
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/README.md
@@ -0,0 +1,91 @@
+[Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go?badge)
+[GoDoc](http://godoc.org/github.com/json-iterator/go)
+[Build Status](https://travis-ci.org/json-iterator/go)
+[Coverage](https://codecov.io/gh/json-iterator/go)
+[Go Report Card](https://goreportcard.com/report/github.com/json-iterator/go)
+[License](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE)
+[Gitter](https://gitter.im/json-iterator/Lobby)
+
+A high-performance, 100% compatible drop-in replacement for "encoding/json"
+
+You can also use Thrift the same way you use JSON, via [thrift-iterator](https://github.com/thrift-iterator/go)
+
+```
+Go developers, come join us: DiDi Chuxing Platform Technology Department, taowen@didichuxing.com
+```
+
+# Benchmark
+
+Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
+
+Raw Result (easyjson requires static code generation)
+
+| | ns/op | allocation bytes | allocation times |
+| --- | --- | --- | --- |
+| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
+| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
+| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
+| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
+| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
+| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
+
+Always benchmark with your own workload.
+The result depends heavily on the data input.
+
+# Usage
+
+100% compatibility with standard lib
+
+Replace
+
+```go
+import "encoding/json"
+json.Marshal(&data)
+```
+
+with
+
+```go
+import "github.com/json-iterator/go"
+
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Marshal(&data)
+```
+
+Replace
+
+```go
+import "encoding/json"
+json.Unmarshal(input, &data)
+```
+
+with
+
+```go
+import "github.com/json-iterator/go"
+
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Unmarshal(input, &data)
+```
+
+[More documentation](http://jsoniter.com/migrate-from-go-std.html)
+
+# How to get
+
+```
+go get github.com/json-iterator/go
+```
+
+# Contributions Welcome!
+
+Contributors
+
+* [thockin](https://github.com/thockin)
+* [mattn](https://github.com/mattn)
+* [cch123](https://github.com/cch123)
+* [Oleg Shaldybin](https://github.com/olegshaldybin)
+* [Jason Toffaletti](https://github.com/toffaletti)
+
+Open an issue or pull request, email taowen@gmail.com, or [chat on Gitter](https://gitter.im/json-iterator/Lobby)
diff --git a/vendor/github.com/json-iterator/go/adapter.go b/vendor/github.com/json-iterator/go/adapter.go
new file mode 100644
index 0000000..e674d0f
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/adapter.go
@@ -0,0 +1,150 @@
+package jsoniter
+
+import (
+ "bytes"
+ "io"
+)
+
+// RawMessage mirrors encoding/json's RawMessage so jsoniter can remain a drop-in replacement
+type RawMessage []byte
+
+// Unmarshal adapts to the encoding/json Unmarshal API
+//
+// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
+// Refer to https://godoc.org/encoding/json#Unmarshal for more information
+func Unmarshal(data []byte, v interface{}) error {
+ return ConfigDefault.Unmarshal(data, v)
+}
+
+// UnmarshalFromString convenient method to read from string instead of []byte
+func UnmarshalFromString(str string, v interface{}) error {
+ return ConfigDefault.UnmarshalFromString(str, v)
+}
+
+// Get quick method to get value from deeply nested JSON structure
+func Get(data []byte, path ...interface{}) Any {
+ return ConfigDefault.Get(data, path...)
+}
+
+// Marshal adapts to the encoding/json Marshal API
+//
+// Marshal returns the JSON encoding of v, adapting to the encoding/json Marshal API
+// Refer to https://godoc.org/encoding/json#Marshal for more information
+func Marshal(v interface{}) ([]byte, error) {
+ return ConfigDefault.Marshal(v)
+}
+
+// MarshalIndent same as json.MarshalIndent. Prefix is not supported.
+func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ return ConfigDefault.MarshalIndent(v, prefix, indent)
+}
+
+// MarshalToString convenient method to write as string instead of []byte
+func MarshalToString(v interface{}) (string, error) {
+ return ConfigDefault.MarshalToString(v)
+}
+
+// NewDecoder adapts to the encoding/json NewDecoder API.
+//
+// NewDecoder returns a new decoder that reads from r.
+//
+// Instead of an encoding/json Decoder, a jsoniter Decoder is returned
+// Refer to https://godoc.org/encoding/json#NewDecoder for more information
+func NewDecoder(reader io.Reader) *Decoder {
+ return ConfigDefault.NewDecoder(reader)
+}
+
+// Decoder reads and decodes JSON values from an input stream.
+// Decoder provides APIs identical to the encoding/json Decoder (Token() and UseNumber() are in progress)
+type Decoder struct {
+ iter *Iterator
+}
+
+// Decode decodes the next JSON value from the input into obj
+func (adapter *Decoder) Decode(obj interface{}) error {
+ if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
+ if !adapter.iter.loadMore() {
+ return io.EOF
+ }
+ }
+ adapter.iter.ReadVal(obj)
+ err := adapter.iter.Error
+ if err == io.EOF {
+ return nil
+ }
+ return adapter.iter.Error
+}
+
+// More reports whether there is another element in the current array or object being parsed
+func (adapter *Decoder) More() bool {
+ iter := adapter.iter
+ if iter.Error != nil {
+ return false
+ }
+ c := iter.nextToken()
+ if c == 0 {
+ return false
+ }
+ iter.unreadByte()
+ return c != ']' && c != '}'
+}
+
+// Buffered returns a reader over the data remaining in the Decoder's buffer
+func (adapter *Decoder) Buffered() io.Reader {
+ remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
+ return bytes.NewReader(remaining)
+}
+
+// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
+// Number instead of as a float64.
+func (adapter *Decoder) UseNumber() {
+ cfg := adapter.iter.cfg.configBeforeFrozen
+ cfg.UseNumber = true
+ adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
+}
+
+// DisallowUnknownFields causes the Decoder to return an error when the destination
+// is a struct and the input contains object keys which do not match any
+// non-ignored, exported fields in the destination.
+func (adapter *Decoder) DisallowUnknownFields() {
+ cfg := adapter.iter.cfg.configBeforeFrozen
+ cfg.DisallowUnknownFields = true
+ adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
+}
+
+// NewEncoder same as json.NewEncoder
+func NewEncoder(writer io.Writer) *Encoder {
+ return ConfigDefault.NewEncoder(writer)
+}
+
+// Encoder same as json.Encoder
+type Encoder struct {
+ stream *Stream
+}
+
+// Encode writes the JSON encoding of val to the underlying io.Writer
+func (adapter *Encoder) Encode(val interface{}) error {
+ adapter.stream.WriteVal(val)
+ adapter.stream.WriteRaw("\n")
+ adapter.stream.Flush()
+ return adapter.stream.Error
+}
+
+// SetIndent sets the indentation step. Prefix is not supported
+func (adapter *Encoder) SetIndent(prefix, indent string) {
+ config := adapter.stream.cfg.configBeforeFrozen
+ config.IndentionStep = len(indent)
+ adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
+}
+
+// SetEscapeHTML controls HTML escaping (on by default); pass false to disable it
+func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
+ config := adapter.stream.cfg.configBeforeFrozen
+ config.EscapeHTML = escapeHTML
+ adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
+}
+
+// Valid reports whether data is a valid JSON encoding.
+func Valid(data []byte) bool {
+ return ConfigDefault.Valid(data)
+}
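
adapter.go above mirrors the encoding/json streaming API on top of jsoniter's Iterator and Stream. A short usage sketch of the Decoder and Encoder adapters, assuming the package is imported under its usual jsoniter name (the sample document is made up):

```go
package main

import (
	"bytes"
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Stream-decode with the json.Decoder-compatible adapter.
	dec := jsoniter.NewDecoder(strings.NewReader(`{"name":"alice","age":30}`))
	dec.UseNumber() // numbers decode into json.Number, as with encoding/json

	var doc map[string]interface{}
	if err := dec.Decode(&doc); err != nil {
		panic(err)
	}

	// Stream-encode with the json.Encoder-compatible adapter.
	var buf bytes.Buffer
	enc := jsoniter.NewEncoder(&buf)
	enc.SetIndent("", "  ") // two-space indentation
	if err := enc.Encode(doc); err != nil {
		panic(err)
	}
	fmt.Print(buf.String())
}
```
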
diff --git a/vendor/github.com/json-iterator/go/any.go b/vendor/github.com/json-iterator/go/any.go
new file mode 100644
index 0000000..daecfed
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any.go
@@ -0,0 +1,321 @@
+package jsoniter
+
+import (
+ "errors"
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "reflect"
+ "strconv"
+ "unsafe"
+)
+
+// Any is a generic object representation.
+// The lazy JSON implementation holds []byte and parses it lazily.
+type Any interface {
+ LastError() error
+ ValueType() ValueType
+ MustBeValid() Any
+ ToBool() bool
+ ToInt() int
+ ToInt32() int32
+ ToInt64() int64
+ ToUint() uint
+ ToUint32() uint32
+ ToUint64() uint64
+ ToFloat32() float32
+ ToFloat64() float64
+ ToString() string
+ ToVal(val interface{})
+ Get(path ...interface{}) Any
+ Size() int
+ Keys() []string
+ GetInterface() interface{}
+ WriteTo(stream *Stream)
+}
+
+type baseAny struct{}
+
+func (any *baseAny) Get(path ...interface{}) Any {
+ return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
+}
+
+func (any *baseAny) Size() int {
+ return 0
+}
+
+func (any *baseAny) Keys() []string {
+ return []string{}
+}
+
+func (any *baseAny) ToVal(obj interface{}) {
+ panic("not implemented")
+}
+
+// WrapInt32 turns an int32 into the Any interface
+func WrapInt32(val int32) Any {
+ return &int32Any{baseAny{}, val}
+}
+
+// WrapInt64 turns an int64 into the Any interface
+func WrapInt64(val int64) Any {
+ return &int64Any{baseAny{}, val}
+}
+
+// WrapUint32 turns a uint32 into the Any interface
+func WrapUint32(val uint32) Any {
+ return &uint32Any{baseAny{}, val}
+}
+
+// WrapUint64 turns a uint64 into the Any interface
+func WrapUint64(val uint64) Any {
+ return &uint64Any{baseAny{}, val}
+}
+
+// WrapFloat64 turns a float64 into the Any interface
+func WrapFloat64(val float64) Any {
+ return &floatAny{baseAny{}, val}
+}
+
+// WrapString turns a string into the Any interface
+func WrapString(val string) Any {
+ return &stringAny{baseAny{}, val}
+}
+
+// Wrap turns a Go object into the Any interface
+func Wrap(val interface{}) Any {
+ if val == nil {
+ return &nilAny{}
+ }
+ asAny, isAny := val.(Any)
+ if isAny {
+ return asAny
+ }
+ typ := reflect2.TypeOf(val)
+ switch typ.Kind() {
+ case reflect.Slice:
+ return wrapArray(val)
+ case reflect.Struct:
+ return wrapStruct(val)
+ case reflect.Map:
+ return wrapMap(val)
+ case reflect.String:
+ return WrapString(val.(string))
+ case reflect.Int:
+ if strconv.IntSize == 32 {
+ return WrapInt32(int32(val.(int)))
+ }
+ return WrapInt64(int64(val.(int)))
+ case reflect.Int8:
+ return WrapInt32(int32(val.(int8)))
+ case reflect.Int16:
+ return WrapInt32(int32(val.(int16)))
+ case reflect.Int32:
+ return WrapInt32(val.(int32))
+ case reflect.Int64:
+ return WrapInt64(val.(int64))
+ case reflect.Uint:
+ if strconv.IntSize == 32 {
+ return WrapUint32(uint32(val.(uint)))
+ }
+ return WrapUint64(uint64(val.(uint)))
+ case reflect.Uintptr:
+ if ptrSize == 32 {
+ return WrapUint32(uint32(val.(uintptr)))
+ }
+ return WrapUint64(uint64(val.(uintptr)))
+ case reflect.Uint8:
+ return WrapUint32(uint32(val.(uint8)))
+ case reflect.Uint16:
+ return WrapUint32(uint32(val.(uint16)))
+ case reflect.Uint32:
+ return WrapUint32(uint32(val.(uint32)))
+ case reflect.Uint64:
+ return WrapUint64(val.(uint64))
+ case reflect.Float32:
+ return WrapFloat64(float64(val.(float32)))
+ case reflect.Float64:
+ return WrapFloat64(val.(float64))
+ case reflect.Bool:
+ if val.(bool) == true {
+ return &trueAny{}
+ }
+ return &falseAny{}
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)}
+}
+
+// ReadAny reads the next JSON element as an Any object. It is a better json.RawMessage.
+func (iter *Iterator) ReadAny() Any {
+ return iter.readAny()
+}
+
+func (iter *Iterator) readAny() Any {
+ c := iter.nextToken()
+ switch c {
+ case '"':
+ iter.unreadByte()
+ return &stringAny{baseAny{}, iter.ReadString()}
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ return &nilAny{}
+ case 't':
+ iter.skipThreeBytes('r', 'u', 'e') // true
+ return &trueAny{}
+ case 'f':
+ iter.skipFourBytes('a', 'l', 's', 'e') // false
+ return &falseAny{}
+ case '{':
+ return iter.readObjectAny()
+ case '[':
+ return iter.readArrayAny()
+ case '-':
+ return iter.readNumberAny(false)
+ case 0:
+ return &invalidAny{baseAny{}, errors.New("input is empty")}
+ default:
+ return iter.readNumberAny(true)
+ }
+}
+
+func (iter *Iterator) readNumberAny(positive bool) Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipNumber()
+ lazyBuf := iter.stopCapture()
+ return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func (iter *Iterator) readObjectAny() Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipObject()
+ lazyBuf := iter.stopCapture()
+ return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func (iter *Iterator) readArrayAny() Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipArray()
+ lazyBuf := iter.stopCapture()
+ return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func locateObjectField(iter *Iterator, target string) []byte {
+ var found []byte
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ if field == target {
+ found = iter.SkipAndReturnBytes()
+ return false
+ }
+ iter.Skip()
+ return true
+ })
+ return found
+}
+
+func locateArrayElement(iter *Iterator, target int) []byte {
+ var found []byte
+ n := 0
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ if n == target {
+ found = iter.SkipAndReturnBytes()
+ return false
+ }
+ iter.Skip()
+ n++
+ return true
+ })
+ return found
+}
+
+func locatePath(iter *Iterator, path []interface{}) Any {
+ for i, pathKeyObj := range path {
+ switch pathKey := pathKeyObj.(type) {
+ case string:
+ valueBytes := locateObjectField(iter, pathKey)
+ if valueBytes == nil {
+ return newInvalidAny(path[i:])
+ }
+ iter.ResetBytes(valueBytes)
+ case int:
+ valueBytes := locateArrayElement(iter, pathKey)
+ if valueBytes == nil {
+ return newInvalidAny(path[i:])
+ }
+ iter.ResetBytes(valueBytes)
+ case int32:
+ if '*' == pathKey {
+ return iter.readAny().Get(path[i:]...)
+ }
+ return newInvalidAny(path[i:])
+ default:
+ return newInvalidAny(path[i:])
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ return &invalidAny{baseAny{}, iter.Error}
+ }
+ return iter.readAny()
+}
+
+var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem()
+
+func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ == anyType {
+ return &directAnyCodec{}
+ }
+ if typ.Implements(anyType) {
+ return &anyCodec{
+ valType: typ,
+ }
+ }
+ return nil
+}
+
+func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ == anyType {
+ return &directAnyCodec{}
+ }
+ if typ.Implements(anyType) {
+ return &anyCodec{
+ valType: typ,
+ }
+ }
+ return nil
+}
+
+type anyCodec struct {
+ valType reflect2.Type
+}
+
+func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ panic("not implemented")
+}
+
+func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := codec.valType.UnsafeIndirect(ptr)
+ any := obj.(Any)
+ any.WriteTo(stream)
+}
+
+func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ obj := codec.valType.UnsafeIndirect(ptr)
+ any := obj.(Any)
+ return any.Size() == 0
+}
+
+type directAnyCodec struct {
+}
+
+func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *(*Any)(ptr) = iter.readAny()
+}
+
+func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ any := *(*Any)(ptr)
+ any.WriteTo(stream)
+}
+
+func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ any := *(*Any)(ptr)
+ return any.Size() == 0
+}
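
any.go defines the lazy Any representation that backs jsoniter.Get: values are kept as raw bytes and only parsed when a conversion is requested. A brief sketch of path-based access (the sample JSON is invented for illustration):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"users":[{"name":"alice"},{"name":"bob"}]}`)

	// Each path step is located lazily over the raw bytes; parsing only
	// happens when a conversion such as ToString is requested.
	name := jsoniter.Get(data, "users", 1, "name")
	fmt.Println(name.ToString()) // bob

	// A '*' (an int32 rune) maps the rest of the path over every element.
	names := jsoniter.Get(data, "users", '*', "name")
	fmt.Println(names.ToString()) // expected: ["alice","bob"]
}
```
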
diff --git a/vendor/github.com/json-iterator/go/any_array.go b/vendor/github.com/json-iterator/go/any_array.go
new file mode 100644
index 0000000..0449e9a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_array.go
@@ -0,0 +1,278 @@
+package jsoniter
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+type arrayLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *arrayLazyAny) ValueType() ValueType {
+ return ArrayValue
+}
+
+func (any *arrayLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *arrayLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *arrayLazyAny) ToBool() bool {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.ReadArray()
+}
+
+func (any *arrayLazyAny) ToInt() int {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToInt32() int32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToInt64() int64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint() uint {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint32() uint32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint64() uint64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToFloat32() float32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToFloat64() float64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *arrayLazyAny) ToVal(val interface{}) {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadVal(val)
+}
+
+func (any *arrayLazyAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int:
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ valueBytes := locateArrayElement(iter, firstPath)
+ if valueBytes == nil {
+ return newInvalidAny(path)
+ }
+ iter.ResetBytes(valueBytes)
+ return locatePath(iter, path[1:])
+ case int32:
+ if '*' == firstPath {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ arr := make([]Any, 0)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ found := iter.readAny().Get(path[1:]...)
+ if found.ValueType() != InvalidValue {
+ arr = append(arr, found)
+ }
+ return true
+ })
+ return wrapArray(arr)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *arrayLazyAny) Size() int {
+ size := 0
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ size++
+ iter.Skip()
+ return true
+ })
+ return size
+}
+
+func (any *arrayLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *arrayLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
+
+type arrayAny struct {
+ baseAny
+ val reflect.Value
+}
+
+func wrapArray(val interface{}) *arrayAny {
+ return &arrayAny{baseAny{}, reflect.ValueOf(val)}
+}
+
+func (any *arrayAny) ValueType() ValueType {
+ return ArrayValue
+}
+
+func (any *arrayAny) MustBeValid() Any {
+ return any
+}
+
+func (any *arrayAny) LastError() error {
+ return nil
+}
+
+func (any *arrayAny) ToBool() bool {
+ return any.val.Len() != 0
+}
+
+func (any *arrayAny) ToInt() int {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToInt32() int32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToInt64() int64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint() uint {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint32() uint32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint64() uint64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToFloat32() float32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToFloat64() float64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToString() string {
+ str, _ := MarshalToString(any.val.Interface())
+ return str
+}
+
+func (any *arrayAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int:
+ if firstPath < 0 || firstPath >= any.val.Len() {
+ return newInvalidAny(path)
+ }
+ return Wrap(any.val.Index(firstPath).Interface())
+ case int32:
+ if '*' == firstPath {
+ mappedAll := make([]Any, 0)
+ for i := 0; i < any.val.Len(); i++ {
+ mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll = append(mappedAll, mapped)
+ }
+ }
+ return wrapArray(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *arrayAny) Size() int {
+ return any.val.Len()
+}
+
+func (any *arrayAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *arrayAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
diff --git a/vendor/github.com/json-iterator/go/any_bool.go b/vendor/github.com/json-iterator/go/any_bool.go
new file mode 100644
index 0000000..9452324
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_bool.go
@@ -0,0 +1,137 @@
+package jsoniter
+
+type trueAny struct {
+ baseAny
+}
+
+func (any *trueAny) LastError() error {
+ return nil
+}
+
+func (any *trueAny) ToBool() bool {
+ return true
+}
+
+func (any *trueAny) ToInt() int {
+ return 1
+}
+
+func (any *trueAny) ToInt32() int32 {
+ return 1
+}
+
+func (any *trueAny) ToInt64() int64 {
+ return 1
+}
+
+func (any *trueAny) ToUint() uint {
+ return 1
+}
+
+func (any *trueAny) ToUint32() uint32 {
+ return 1
+}
+
+func (any *trueAny) ToUint64() uint64 {
+ return 1
+}
+
+func (any *trueAny) ToFloat32() float32 {
+ return 1
+}
+
+func (any *trueAny) ToFloat64() float64 {
+ return 1
+}
+
+func (any *trueAny) ToString() string {
+ return "true"
+}
+
+func (any *trueAny) WriteTo(stream *Stream) {
+ stream.WriteTrue()
+}
+
+func (any *trueAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *trueAny) GetInterface() interface{} {
+ return true
+}
+
+func (any *trueAny) ValueType() ValueType {
+ return BoolValue
+}
+
+func (any *trueAny) MustBeValid() Any {
+ return any
+}
+
+type falseAny struct {
+ baseAny
+}
+
+func (any *falseAny) LastError() error {
+ return nil
+}
+
+func (any *falseAny) ToBool() bool {
+ return false
+}
+
+func (any *falseAny) ToInt() int {
+ return 0
+}
+
+func (any *falseAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *falseAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *falseAny) ToUint() uint {
+ return 0
+}
+
+func (any *falseAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *falseAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *falseAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *falseAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *falseAny) ToString() string {
+ return "false"
+}
+
+func (any *falseAny) WriteTo(stream *Stream) {
+ stream.WriteFalse()
+}
+
+func (any *falseAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *falseAny) GetInterface() interface{} {
+ return false
+}
+
+func (any *falseAny) ValueType() ValueType {
+ return BoolValue
+}
+
+func (any *falseAny) MustBeValid() Any {
+ return any
+}
diff --git a/vendor/github.com/json-iterator/go/any_float.go b/vendor/github.com/json-iterator/go/any_float.go
new file mode 100644
index 0000000..35fdb09
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_float.go
@@ -0,0 +1,83 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type floatAny struct {
+ baseAny
+ val float64
+}
+
+func (any *floatAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *floatAny) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *floatAny) MustBeValid() Any {
+ return any
+}
+
+func (any *floatAny) LastError() error {
+ return nil
+}
+
+func (any *floatAny) ToBool() bool {
+ return any.ToFloat64() != 0
+}
+
+func (any *floatAny) ToInt() int {
+ return int(any.val)
+}
+
+func (any *floatAny) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *floatAny) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *floatAny) ToUint() uint {
+ if any.val > 0 {
+ return uint(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToUint32() uint32 {
+ if any.val > 0 {
+ return uint32(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToUint64() uint64 {
+ if any.val > 0 {
+ return uint64(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *floatAny) ToFloat64() float64 {
+ return any.val
+}
+
+func (any *floatAny) ToString() string {
+ return strconv.FormatFloat(any.val, 'E', -1, 64)
+}
+
+func (any *floatAny) WriteTo(stream *Stream) {
+ stream.WriteFloat64(any.val)
+}
+
+func (any *floatAny) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_int32.go b/vendor/github.com/json-iterator/go/any_int32.go
new file mode 100644
index 0000000..1b56f39
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_int32.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type int32Any struct {
+ baseAny
+ val int32
+}
+
+func (any *int32Any) LastError() error {
+ return nil
+}
+
+func (any *int32Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *int32Any) MustBeValid() Any {
+ return any
+}
+
+func (any *int32Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *int32Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *int32Any) ToInt32() int32 {
+ return any.val
+}
+
+func (any *int32Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *int32Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *int32Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *int32Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *int32Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *int32Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *int32Any) ToString() string {
+ return strconv.FormatInt(int64(any.val), 10)
+}
+
+func (any *int32Any) WriteTo(stream *Stream) {
+ stream.WriteInt32(any.val)
+}
+
+func (any *int32Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *int32Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_int64.go b/vendor/github.com/json-iterator/go/any_int64.go
new file mode 100644
index 0000000..c440d72
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_int64.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type int64Any struct {
+ baseAny
+ val int64
+}
+
+func (any *int64Any) LastError() error {
+ return nil
+}
+
+func (any *int64Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *int64Any) MustBeValid() Any {
+ return any
+}
+
+func (any *int64Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *int64Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *int64Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *int64Any) ToInt64() int64 {
+ return any.val
+}
+
+func (any *int64Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *int64Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *int64Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *int64Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *int64Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *int64Any) ToString() string {
+ return strconv.FormatInt(any.val, 10)
+}
+
+func (any *int64Any) WriteTo(stream *Stream) {
+ stream.WriteInt64(any.val)
+}
+
+func (any *int64Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *int64Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_invalid.go b/vendor/github.com/json-iterator/go/any_invalid.go
new file mode 100644
index 0000000..1d859ea
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_invalid.go
@@ -0,0 +1,82 @@
+package jsoniter
+
+import "fmt"
+
+type invalidAny struct {
+ baseAny
+ err error
+}
+
+func newInvalidAny(path []interface{}) *invalidAny {
+ return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
+}
+
+func (any *invalidAny) LastError() error {
+ return any.err
+}
+
+func (any *invalidAny) ValueType() ValueType {
+ return InvalidValue
+}
+
+func (any *invalidAny) MustBeValid() Any {
+ panic(any.err)
+}
+
+func (any *invalidAny) ToBool() bool {
+ return false
+}
+
+func (any *invalidAny) ToInt() int {
+ return 0
+}
+
+func (any *invalidAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *invalidAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *invalidAny) ToUint() uint {
+ return 0
+}
+
+func (any *invalidAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *invalidAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *invalidAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *invalidAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *invalidAny) ToString() string {
+ return ""
+}
+
+func (any *invalidAny) WriteTo(stream *Stream) {
+}
+
+func (any *invalidAny) Get(path ...interface{}) Any {
+ if any.err == nil {
+ return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)}
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
+}
+
+func (any *invalidAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *invalidAny) GetInterface() interface{} {
+ return nil
+}
diff --git a/vendor/github.com/json-iterator/go/any_nil.go b/vendor/github.com/json-iterator/go/any_nil.go
new file mode 100644
index 0000000..d04cb54
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_nil.go
@@ -0,0 +1,69 @@
+package jsoniter
+
+type nilAny struct {
+ baseAny
+}
+
+func (any *nilAny) LastError() error {
+ return nil
+}
+
+func (any *nilAny) ValueType() ValueType {
+ return NilValue
+}
+
+func (any *nilAny) MustBeValid() Any {
+ return any
+}
+
+func (any *nilAny) ToBool() bool {
+ return false
+}
+
+func (any *nilAny) ToInt() int {
+ return 0
+}
+
+func (any *nilAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *nilAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *nilAny) ToUint() uint {
+ return 0
+}
+
+func (any *nilAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *nilAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *nilAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *nilAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *nilAny) ToString() string {
+ return ""
+}
+
+func (any *nilAny) WriteTo(stream *Stream) {
+ stream.WriteNil()
+}
+
+func (any *nilAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *nilAny) GetInterface() interface{} {
+ return nil
+}
diff --git a/vendor/github.com/json-iterator/go/any_number.go b/vendor/github.com/json-iterator/go/any_number.go
new file mode 100644
index 0000000..9d1e901
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_number.go
@@ -0,0 +1,123 @@
+package jsoniter
+
+import (
+ "io"
+ "unsafe"
+)
+
+type numberLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *numberLazyAny) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *numberLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *numberLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *numberLazyAny) ToBool() bool {
+ return any.ToFloat64() != 0
+}
+
+func (any *numberLazyAny) ToInt() int {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToInt32() int32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt32()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToInt64() int64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToUint() uint {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToUint32() uint32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint32()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToUint64() uint64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToFloat32() float32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadFloat32()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToFloat64() float64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadFloat64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *numberLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *numberLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
diff --git a/vendor/github.com/json-iterator/go/any_object.go b/vendor/github.com/json-iterator/go/any_object.go
new file mode 100644
index 0000000..c44ef5c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_object.go
@@ -0,0 +1,374 @@
+package jsoniter
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+type objectLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *objectLazyAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *objectLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *objectLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *objectLazyAny) ToBool() bool {
+ return true
+}
+
+func (any *objectLazyAny) ToInt() int {
+ return 0
+}
+
+func (any *objectLazyAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint() uint {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *objectLazyAny) ToVal(obj interface{}) {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadVal(obj)
+}
+
+func (any *objectLazyAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case string:
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ valueBytes := locateObjectField(iter, firstPath)
+ if valueBytes == nil {
+ return newInvalidAny(path)
+ }
+ iter.ResetBytes(valueBytes)
+ return locatePath(iter, path[1:])
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadMapCB(func(iter *Iterator, field string) bool {
+ mapped := locatePath(iter, path[1:])
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[field] = mapped
+ }
+ return true
+ })
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *objectLazyAny) Keys() []string {
+ keys := []string{}
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadMapCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ keys = append(keys, field)
+ return true
+ })
+ return keys
+}
+
+func (any *objectLazyAny) Size() int {
+ size := 0
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ size++
+ return true
+ })
+ return size
+}
+
+func (any *objectLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *objectLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
+
+type objectAny struct {
+ baseAny
+ err error
+ val reflect.Value
+}
+
+func wrapStruct(val interface{}) *objectAny {
+ return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
+}
+
+func (any *objectAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *objectAny) MustBeValid() Any {
+ return any
+}
+
+func (any *objectAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *objectAny) LastError() error {
+ return any.err
+}
+
+func (any *objectAny) ToBool() bool {
+ return any.val.NumField() != 0
+}
+
+func (any *objectAny) ToInt() int {
+ return 0
+}
+
+func (any *objectAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *objectAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *objectAny) ToUint() uint {
+ return 0
+}
+
+func (any *objectAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *objectAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *objectAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *objectAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *objectAny) ToString() string {
+ str, err := MarshalToString(any.val.Interface())
+ any.err = err
+ return str
+}
+
+func (any *objectAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case string:
+ field := any.val.FieldByName(firstPath)
+ if !field.IsValid() {
+ return newInvalidAny(path)
+ }
+ return Wrap(field.Interface())
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ for i := 0; i < any.val.NumField(); i++ {
+ field := any.val.Field(i)
+ if field.CanInterface() {
+ mapped := Wrap(field.Interface()).Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[any.val.Type().Field(i).Name] = mapped
+ }
+ }
+ }
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *objectAny) Keys() []string {
+ keys := make([]string, 0, any.val.NumField())
+ for i := 0; i < any.val.NumField(); i++ {
+ keys = append(keys, any.val.Type().Field(i).Name)
+ }
+ return keys
+}
+
+func (any *objectAny) Size() int {
+ return any.val.NumField()
+}
+
+func (any *objectAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *objectAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
+
+type mapAny struct {
+ baseAny
+ err error
+ val reflect.Value
+}
+
+func wrapMap(val interface{}) *mapAny {
+ return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
+}
+
+func (any *mapAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *mapAny) MustBeValid() Any {
+ return any
+}
+
+func (any *mapAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *mapAny) LastError() error {
+ return any.err
+}
+
+func (any *mapAny) ToBool() bool {
+ return true
+}
+
+func (any *mapAny) ToInt() int {
+ return 0
+}
+
+func (any *mapAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *mapAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *mapAny) ToUint() uint {
+ return 0
+}
+
+func (any *mapAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *mapAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *mapAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *mapAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *mapAny) ToString() string {
+ str, err := MarshalToString(any.val.Interface())
+ any.err = err
+ return str
+}
+
+func (any *mapAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ for _, key := range any.val.MapKeys() {
+ keyAsStr := key.String()
+ element := Wrap(any.val.MapIndex(key).Interface())
+ mapped := element.Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[keyAsStr] = mapped
+ }
+ }
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ value := any.val.MapIndex(reflect.ValueOf(firstPath))
+ if !value.IsValid() {
+ return newInvalidAny(path)
+ }
+ return Wrap(value.Interface())
+ }
+}
+
+func (any *mapAny) Keys() []string {
+ keys := make([]string, 0, any.val.Len())
+ for _, key := range any.val.MapKeys() {
+ keys = append(keys, key.String())
+ }
+ return keys
+}
+
+func (any *mapAny) Size() int {
+ return any.val.Len()
+}
+
+func (any *mapAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *mapAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
diff --git a/vendor/github.com/json-iterator/go/any_str.go b/vendor/github.com/json-iterator/go/any_str.go
new file mode 100644
index 0000000..a4b93c7
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_str.go
@@ -0,0 +1,166 @@
+package jsoniter
+
+import (
+ "fmt"
+ "strconv"
+)
+
+type stringAny struct {
+ baseAny
+ val string
+}
+
+func (any *stringAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
+}
+
+func (any *stringAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *stringAny) ValueType() ValueType {
+ return StringValue
+}
+
+func (any *stringAny) MustBeValid() Any {
+ return any
+}
+
+func (any *stringAny) LastError() error {
+ return nil
+}
+
+func (any *stringAny) ToBool() bool {
+ str := any.ToString()
+ if str == "0" {
+ return false
+ }
+ for _, c := range str {
+ switch c {
+ case ' ', '\n', '\r', '\t':
+ default:
+ return true
+ }
+ }
+ return false
+}
+
+func (any *stringAny) ToInt() int {
+ return int(any.ToInt64())
+
+}
+
+func (any *stringAny) ToInt32() int32 {
+ return int32(any.ToInt64())
+}
+
+func (any *stringAny) ToInt64() int64 {
+ if any.val == "" {
+ return 0
+ }
+
+ flag := 1
+ startPos := 0
+ endPos := 0
+ if any.val[0] == '+' || any.val[0] == '-' {
+ startPos = 1
+ }
+
+ if any.val[0] == '-' {
+ flag = -1
+ }
+
+ for i := startPos; i < len(any.val); i++ {
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ break
+ }
+ }
+ parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64)
+ return int64(flag) * parsed
+}
+
+func (any *stringAny) ToUint() uint {
+ return uint(any.ToUint64())
+}
+
+func (any *stringAny) ToUint32() uint32 {
+ return uint32(any.ToUint64())
+}
+
+func (any *stringAny) ToUint64() uint64 {
+ if any.val == "" {
+ return 0
+ }
+
+ startPos := 0
+ endPos := 0
+
+ if any.val[0] == '-' {
+ return 0
+ }
+ if any.val[0] == '+' {
+ startPos = 1
+ }
+
+ for i := startPos; i < len(any.val); i++ {
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ break
+ }
+ }
+ parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64)
+ return parsed
+}
+
+func (any *stringAny) ToFloat32() float32 {
+ return float32(any.ToFloat64())
+}
+
+func (any *stringAny) ToFloat64() float64 {
+ if len(any.val) == 0 {
+ return 0
+ }
+
+ // first char invalid
+ if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') {
+ return 0
+ }
+
+ // extract valid num expression from string
+ // eg 123true => 123, -12.12xxa => -12.12
+ endPos := 1
+ for i := 1; i < len(any.val); i++ {
+ if any.val[i] == '.' || any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' {
+ endPos = i + 1
+ continue
+ }
+
+ // end position is the first char which is not digit
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ endPos = i
+ break
+ }
+ }
+ parsed, _ := strconv.ParseFloat(any.val[:endPos], 64)
+ return parsed
+}
+
+func (any *stringAny) ToString() string {
+ return any.val
+}
+
+func (any *stringAny) WriteTo(stream *Stream) {
+ stream.WriteString(any.val)
+}
+
+func (any *stringAny) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_uint32.go b/vendor/github.com/json-iterator/go/any_uint32.go
new file mode 100644
index 0000000..656bbd3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_uint32.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type uint32Any struct {
+ baseAny
+ val uint32
+}
+
+func (any *uint32Any) LastError() error {
+ return nil
+}
+
+func (any *uint32Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *uint32Any) MustBeValid() Any {
+ return any
+}
+
+func (any *uint32Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *uint32Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *uint32Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *uint32Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *uint32Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *uint32Any) ToUint32() uint32 {
+ return any.val
+}
+
+func (any *uint32Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *uint32Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *uint32Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *uint32Any) ToString() string {
+ return strconv.FormatInt(int64(any.val), 10)
+}
+
+func (any *uint32Any) WriteTo(stream *Stream) {
+ stream.WriteUint32(any.val)
+}
+
+func (any *uint32Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *uint32Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_uint64.go b/vendor/github.com/json-iterator/go/any_uint64.go
new file mode 100644
index 0000000..7df2fce
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_uint64.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type uint64Any struct {
+ baseAny
+ val uint64
+}
+
+func (any *uint64Any) LastError() error {
+ return nil
+}
+
+func (any *uint64Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *uint64Any) MustBeValid() Any {
+ return any
+}
+
+func (any *uint64Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *uint64Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *uint64Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *uint64Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *uint64Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *uint64Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *uint64Any) ToUint64() uint64 {
+ return any.val
+}
+
+func (any *uint64Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *uint64Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *uint64Any) ToString() string {
+ return strconv.FormatUint(any.val, 10)
+}
+
+func (any *uint64Any) WriteTo(stream *Stream) {
+ stream.WriteUint64(any.val)
+}
+
+func (any *uint64Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *uint64Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/build.sh b/vendor/github.com/json-iterator/go/build.sh
new file mode 100644
index 0000000..b45ef68
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/build.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -e
+set -x
+
+if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
+ mkdir -p /tmp/build-golang/src/github.com/json-iterator
+ ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
+fi
+export GOPATH=/tmp/build-golang
+go get -u github.com/golang/dep/cmd/dep
+cd /tmp/build-golang/src/github.com/json-iterator/go
+exec $GOPATH/bin/dep ensure -update
diff --git a/vendor/github.com/json-iterator/go/config.go b/vendor/github.com/json-iterator/go/config.go
new file mode 100644
index 0000000..8c58fcb
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/config.go
@@ -0,0 +1,375 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "io"
+ "reflect"
+ "sync"
+ "unsafe"
+
+ "github.com/modern-go/concurrent"
+ "github.com/modern-go/reflect2"
+)
+
+// Config customizes how the API should behave.
+// The API is created from Config by Froze.
+type Config struct {
+ IndentionStep int
+ MarshalFloatWith6Digits bool
+ EscapeHTML bool
+ SortMapKeys bool
+ UseNumber bool
+ DisallowUnknownFields bool
+ TagKey string
+ OnlyTaggedField bool
+ ValidateJsonRawMessage bool
+ ObjectFieldMustBeSimpleString bool
+ CaseSensitive bool
+}
+
+// API is the public interface of this package.
+// It consists primarily of Marshal and Unmarshal.
+type API interface {
+ IteratorPool
+ StreamPool
+ MarshalToString(v interface{}) (string, error)
+ Marshal(v interface{}) ([]byte, error)
+ MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
+ UnmarshalFromString(str string, v interface{}) error
+ Unmarshal(data []byte, v interface{}) error
+ Get(data []byte, path ...interface{}) Any
+ NewEncoder(writer io.Writer) *Encoder
+ NewDecoder(reader io.Reader) *Decoder
+ Valid(data []byte) bool
+ RegisterExtension(extension Extension)
+ DecoderOf(typ reflect2.Type) ValDecoder
+ EncoderOf(typ reflect2.Type) ValEncoder
+}
+
+// ConfigDefault is the default API
+var ConfigDefault = Config{
+ EscapeHTML: true,
+}.Froze()
+
+// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior
+var ConfigCompatibleWithStandardLibrary = Config{
+ EscapeHTML: true,
+ SortMapKeys: true,
+ ValidateJsonRawMessage: true,
+}.Froze()
+
+// ConfigFastest marshals float with only 6 digits precision
+var ConfigFastest = Config{
+ EscapeHTML: false,
+ MarshalFloatWith6Digits: true, // will lose precision
+ ObjectFieldMustBeSimpleString: true, // do not unescape object field
+}.Froze()
+
+type frozenConfig struct {
+ configBeforeFrozen Config
+ sortMapKeys bool
+ indentionStep int
+ objectFieldMustBeSimpleString bool
+ onlyTaggedField bool
+ disallowUnknownFields bool
+ decoderCache *concurrent.Map
+ encoderCache *concurrent.Map
+ encoderExtension Extension
+ decoderExtension Extension
+ extraExtensions []Extension
+ streamPool *sync.Pool
+ iteratorPool *sync.Pool
+ caseSensitive bool
+}
+
+func (cfg *frozenConfig) initCache() {
+ cfg.decoderCache = concurrent.NewMap()
+ cfg.encoderCache = concurrent.NewMap()
+}
+
+func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
+ cfg.decoderCache.Store(cacheKey, decoder)
+}
+
+func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
+ cfg.encoderCache.Store(cacheKey, encoder)
+}
+
+func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
+ decoder, found := cfg.decoderCache.Load(cacheKey)
+ if found {
+ return decoder.(ValDecoder)
+ }
+ return nil
+}
+
+func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
+ encoder, found := cfg.encoderCache.Load(cacheKey)
+ if found {
+ return encoder.(ValEncoder)
+ }
+ return nil
+}
+
+var cfgCache = concurrent.NewMap()
+
+func getFrozenConfigFromCache(cfg Config) *frozenConfig {
+ obj, found := cfgCache.Load(cfg)
+ if found {
+ return obj.(*frozenConfig)
+ }
+ return nil
+}
+
+func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
+ cfgCache.Store(cfg, frozenConfig)
+}
+
+// Froze forges an API from the config
+func (cfg Config) Froze() API {
+ api := &frozenConfig{
+ sortMapKeys: cfg.SortMapKeys,
+ indentionStep: cfg.IndentionStep,
+ objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
+ onlyTaggedField: cfg.OnlyTaggedField,
+ disallowUnknownFields: cfg.DisallowUnknownFields,
+ caseSensitive: cfg.CaseSensitive,
+ }
+ api.streamPool = &sync.Pool{
+ New: func() interface{} {
+ return NewStream(api, nil, 512)
+ },
+ }
+ api.iteratorPool = &sync.Pool{
+ New: func() interface{} {
+ return NewIterator(api)
+ },
+ }
+ api.initCache()
+ encoderExtension := EncoderExtension{}
+ decoderExtension := DecoderExtension{}
+ if cfg.MarshalFloatWith6Digits {
+ api.marshalFloatWith6Digits(encoderExtension)
+ }
+ if cfg.EscapeHTML {
+ api.escapeHTML(encoderExtension)
+ }
+ if cfg.UseNumber {
+ api.useNumber(decoderExtension)
+ }
+ if cfg.ValidateJsonRawMessage {
+ api.validateJsonRawMessage(encoderExtension)
+ }
+ api.encoderExtension = encoderExtension
+ api.decoderExtension = decoderExtension
+ api.configBeforeFrozen = cfg
+ return api
+}
+
+func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
+ api := getFrozenConfigFromCache(cfg)
+ if api != nil {
+ return api
+ }
+ api = cfg.Froze().(*frozenConfig)
+ for _, extension := range extraExtensions {
+ api.RegisterExtension(extension)
+ }
+ addFrozenConfigToCache(cfg, api)
+ return api
+}
+
+func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
+ encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
+ rawMessage := *(*json.RawMessage)(ptr)
+ iter := cfg.BorrowIterator([]byte(rawMessage))
+ iter.Read()
+ if iter.Error != nil {
+ stream.WriteRaw("null")
+ } else {
+ cfg.ReturnIterator(iter)
+ stream.WriteRaw(string(rawMessage))
+ }
+ }, func(ptr unsafe.Pointer) bool {
+ return len(*((*json.RawMessage)(ptr))) == 0
+ }}
+ extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
+ extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
+}
+
+func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
+ extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
+ exitingValue := *((*interface{})(ptr))
+ if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
+ iter.ReadVal(exitingValue)
+ return
+ }
+ if iter.WhatIsNext() == NumberValue {
+ *((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
+ } else {
+ *((*interface{})(ptr)) = iter.Read()
+ }
+ }}
+}
+func (cfg *frozenConfig) getTagKey() string {
+ tagKey := cfg.configBeforeFrozen.TagKey
+ if tagKey == "" {
+ return "json"
+ }
+ return tagKey
+}
+
+func (cfg *frozenConfig) RegisterExtension(extension Extension) {
+ cfg.extraExtensions = append(cfg.extraExtensions, extension)
+ copied := cfg.configBeforeFrozen
+ cfg.configBeforeFrozen = copied
+}
+
+type lossyFloat32Encoder struct {
+}
+
+func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat32Lossy(*((*float32)(ptr)))
+}
+
+func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float32)(ptr)) == 0
+}
+
+type lossyFloat64Encoder struct {
+}
+
+func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat64Lossy(*((*float64)(ptr)))
+}
+
+func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float64)(ptr)) == 0
+}
+
+// marshalFloatWith6Digits keeps 10**(-6) precision
+// for float variables for better performance.
+func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) {
+ // for better performance
+ extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{}
+ extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{}
+}
+
+type htmlEscapedStringEncoder struct {
+}
+
+func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ str := *((*string)(ptr))
+ stream.WriteStringWithHTMLEscaped(str)
+}
+
+func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*string)(ptr)) == ""
+}
+
+func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) {
+ encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{}
+}
+
+func (cfg *frozenConfig) cleanDecoders() {
+ typeDecoders = map[string]ValDecoder{}
+ fieldDecoders = map[string]ValDecoder{}
+ *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
+}
+
+func (cfg *frozenConfig) cleanEncoders() {
+ typeEncoders = map[string]ValEncoder{}
+ fieldEncoders = map[string]ValEncoder{}
+ *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
+}
+
+func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
+ stream := cfg.BorrowStream(nil)
+ defer cfg.ReturnStream(stream)
+ stream.WriteVal(v)
+ if stream.Error != nil {
+ return "", stream.Error
+ }
+ return string(stream.Buffer()), nil
+}
+
+func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
+ stream := cfg.BorrowStream(nil)
+ defer cfg.ReturnStream(stream)
+ stream.WriteVal(v)
+ if stream.Error != nil {
+ return nil, stream.Error
+ }
+ result := stream.Buffer()
+ copied := make([]byte, len(result))
+ copy(copied, result)
+ return copied, nil
+}
+
+func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ if prefix != "" {
+ panic("prefix is not supported")
+ }
+ for _, r := range indent {
+ if r != ' ' {
+ panic("indent can only be space")
+ }
+ }
+ newCfg := cfg.configBeforeFrozen
+ newCfg.IndentionStep = len(indent)
+ return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
+}
+
+func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
+ data := []byte(str)
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.ReadVal(v)
+ c := iter.nextToken()
+ if c == 0 {
+ if iter.Error == io.EOF {
+ return nil
+ }
+ return iter.Error
+ }
+ iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
+ return iter.Error
+}
+
+func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ return locatePath(iter, path)
+}
+
+func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.ReadVal(v)
+ c := iter.nextToken()
+ if c == 0 {
+ if iter.Error == io.EOF {
+ return nil
+ }
+ return iter.Error
+ }
+ iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
+ return iter.Error
+}
+
+func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
+ stream := NewStream(cfg, writer, 512)
+ return &Encoder{stream}
+}
+
+func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
+ iter := Parse(cfg, reader, 512)
+ return &Decoder{iter}
+}
+
+func (cfg *frozenConfig) Valid(data []byte) bool {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.Skip()
+ return iter.Error == nil
+}
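
The frozen-config methods above (Froze, MarshalToString, Unmarshal, Valid, and friends) are the public surface this vendored file provides. A minimal usage sketch, using only calls visible in this hunk plus the standard jsoniter import path; the Config fields chosen are illustrative:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    // Freeze a configuration once and reuse the resulting API;
    // Froze() is what produces the frozenConfig seen in this diff.
    api := jsoniter.Config{
        EscapeHTML:  true,
        SortMapKeys: true, // makes the map output below deterministic
    }.Froze()

    out, err := api.MarshalToString(map[string]int{"a": 1, "b": 2})
    if err != nil {
        panic(err)
    }
    fmt.Println(out) // {"a":1,"b":2}

    var decoded map[string]int
    if err := api.UnmarshalFromString(out, &decoded); err != nil {
        panic(err)
    }
    fmt.Println(decoded["b"], api.Valid([]byte(out))) // 2 true
}
```
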
diff --git a/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md
new file mode 100644
index 0000000..3095662
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md
@@ -0,0 +1,7 @@
+| json type \ dest type | bool | int | uint | float |string|
+| --- | --- | --- | --- |--|--|
+| number | positive => true <br/> negative => true <br/> zero => false| 23.2 => 23 <br/> -32.1 => -32| 12.1 => 12 <br/> -12.1 => 0|as normal|same as origin|
+| string | empty string => false <br/> string "0" => false <br/> other strings => true | "123.32" => 123 <br/> "-123.4" => -123 <br/> "123.23xxxw" => 123 <br/> "abcde12" => 0 <br/> "-32.1" => -32| 13.2 => 13 <br/> -1.1 => 0 |12.1 => 12.1 <br/> -12.3 => -12.3 <br/> 12.4xxa => 12.4 <br/> +1.1e2 =>110 |same as origin|
+| bool | true => true <br/> false => false| true => 1 <br/> false => 0 | true => 1 <br/> false => 0 |true => 1 <br/> false => 0|true => "true" <br/> false => "false"|
+| object | true | 0 | 0 |0|original json|
+| array | empty array => false <br/> nonempty array => true| [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 |[] => 0 <br/> [1,2] => 1|original json|
\ No newline at end of file
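
The table above documents jsoniter's fuzzy-mode conversions. They only take effect once fuzzy decoders are registered; as far as I know that is done through the extra sub-package's RegisterFuzzyDecoders, so the import path and registration call in this sketch are assumptions rather than something this diff vendors:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
    "github.com/json-iterator/go/extra" // assumed import path; not vendored in this diff
)

type payload struct {
    Count int     `json:"count"`
    Score float64 `json:"score"`
}

func main() {
    // Assumed entry point: registers the lossy conversions summarised in
    // fuzzy_mode_convert_table.md on the default configuration.
    extra.RegisterFuzzyDecoders()

    var p payload
    // "count" arrives as a string; fuzzy mode coerces it to an int per the table.
    err := jsoniter.Unmarshal([]byte(`{"count":"123","score":7}`), &p)
    fmt.Println(p.Count, p.Score, err) // 123 7 <nil>
}
```
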
diff --git a/vendor/github.com/json-iterator/go/iter.go b/vendor/github.com/json-iterator/go/iter.go
new file mode 100644
index 0000000..95ae54f
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter.go
@@ -0,0 +1,322 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+)
+
+// ValueType the type for JSON element
+type ValueType int
+
+const (
+ // InvalidValue invalid JSON element
+ InvalidValue ValueType = iota
+ // StringValue JSON element "string"
+ StringValue
+ // NumberValue JSON element 100 or 0.10
+ NumberValue
+ // NilValue JSON element null
+ NilValue
+ // BoolValue JSON element true or false
+ BoolValue
+ // ArrayValue JSON element []
+ ArrayValue
+ // ObjectValue JSON element {}
+ ObjectValue
+)
+
+var hexDigits []byte
+var valueTypes []ValueType
+
+func init() {
+ hexDigits = make([]byte, 256)
+ for i := 0; i < len(hexDigits); i++ {
+ hexDigits[i] = 255
+ }
+ for i := '0'; i <= '9'; i++ {
+ hexDigits[i] = byte(i - '0')
+ }
+ for i := 'a'; i <= 'f'; i++ {
+ hexDigits[i] = byte((i - 'a') + 10)
+ }
+ for i := 'A'; i <= 'F'; i++ {
+ hexDigits[i] = byte((i - 'A') + 10)
+ }
+ valueTypes = make([]ValueType, 256)
+ for i := 0; i < len(valueTypes); i++ {
+ valueTypes[i] = InvalidValue
+ }
+ valueTypes['"'] = StringValue
+ valueTypes['-'] = NumberValue
+ valueTypes['0'] = NumberValue
+ valueTypes['1'] = NumberValue
+ valueTypes['2'] = NumberValue
+ valueTypes['3'] = NumberValue
+ valueTypes['4'] = NumberValue
+ valueTypes['5'] = NumberValue
+ valueTypes['6'] = NumberValue
+ valueTypes['7'] = NumberValue
+ valueTypes['8'] = NumberValue
+ valueTypes['9'] = NumberValue
+ valueTypes['t'] = BoolValue
+ valueTypes['f'] = BoolValue
+ valueTypes['n'] = NilValue
+ valueTypes['['] = ArrayValue
+ valueTypes['{'] = ObjectValue
+}
+
+// Iterator is an io.Reader-like object with JSON-specific read functions.
+// Errors are not returned as return values; they are stored in the Error field of the iterator instance.
+type Iterator struct {
+ cfg *frozenConfig
+ reader io.Reader
+ buf []byte
+ head int
+ tail int
+ captureStartedAt int
+ captured []byte
+ Error error
+ Attachment interface{} // open for customized decoder
+}
+
+// NewIterator creates an empty Iterator instance
+func NewIterator(cfg API) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: nil,
+ buf: nil,
+ head: 0,
+ tail: 0,
+ }
+}
+
+// Parse creates an Iterator instance from io.Reader
+func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: reader,
+ buf: make([]byte, bufSize),
+ head: 0,
+ tail: 0,
+ }
+}
+
+// ParseBytes creates an Iterator instance from byte array
+func ParseBytes(cfg API, input []byte) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: nil,
+ buf: input,
+ head: 0,
+ tail: len(input),
+ }
+}
+
+// ParseString creates an Iterator instance from string
+func ParseString(cfg API, input string) *Iterator {
+ return ParseBytes(cfg, []byte(input))
+}
+
+// Pool returns a pool that can provide more iterators with the same configuration
+func (iter *Iterator) Pool() IteratorPool {
+ return iter.cfg
+}
+
+// Reset reuses the iterator instance with another reader as input
+func (iter *Iterator) Reset(reader io.Reader) *Iterator {
+ iter.reader = reader
+ iter.head = 0
+ iter.tail = 0
+ return iter
+}
+
+// ResetBytes reuses the iterator instance with another byte slice as input
+func (iter *Iterator) ResetBytes(input []byte) *Iterator {
+ iter.reader = nil
+ iter.buf = input
+ iter.head = 0
+ iter.tail = len(input)
+ return iter
+}
+
+// WhatIsNext gets the ValueType of the next JSON element without consuming it
+func (iter *Iterator) WhatIsNext() ValueType {
+ valueType := valueTypes[iter.nextToken()]
+ iter.unreadByte()
+ return valueType
+}
+
+func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\t', '\r':
+ continue
+ }
+ iter.head = i
+ return false
+ }
+ return true
+}
+
+func (iter *Iterator) isObjectEnd() bool {
+ c := iter.nextToken()
+ if c == ',' {
+ return false
+ }
+ if c == '}' {
+ return true
+ }
+ iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
+ return true
+}
+
+func (iter *Iterator) nextToken() byte {
+ // a variation of skip whitespaces, returning the next non-whitespace token
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\t', '\r':
+ continue
+ }
+ iter.head = i + 1
+ return c
+ }
+ if !iter.loadMore() {
+ return 0
+ }
+ }
+}
+
+// ReportError records an error on the iterator instance, including the current position.
+func (iter *Iterator) ReportError(operation string, msg string) {
+ if iter.Error != nil {
+ if iter.Error != io.EOF {
+ return
+ }
+ }
+ peekStart := iter.head - 10
+ if peekStart < 0 {
+ peekStart = 0
+ }
+ peekEnd := iter.head + 10
+ if peekEnd > iter.tail {
+ peekEnd = iter.tail
+ }
+ parsing := string(iter.buf[peekStart:peekEnd])
+ contextStart := iter.head - 50
+ if contextStart < 0 {
+ contextStart = 0
+ }
+ contextEnd := iter.head + 50
+ if contextEnd > iter.tail {
+ contextEnd = iter.tail
+ }
+ context := string(iter.buf[contextStart:contextEnd])
+ iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
+ operation, msg, iter.head-peekStart, parsing, context)
+}
+
+// CurrentBuffer gets the current buffer as a string, for debugging purposes
+func (iter *Iterator) CurrentBuffer() string {
+ peekStart := iter.head - 10
+ if peekStart < 0 {
+ peekStart = 0
+ }
+ return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
+ string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
+}
+
+func (iter *Iterator) readByte() (ret byte) {
+ if iter.head == iter.tail {
+ if iter.loadMore() {
+ ret = iter.buf[iter.head]
+ iter.head++
+ return ret
+ }
+ return 0
+ }
+ ret = iter.buf[iter.head]
+ iter.head++
+ return ret
+}
+
+func (iter *Iterator) loadMore() bool {
+ if iter.reader == nil {
+ if iter.Error == nil {
+ iter.head = iter.tail
+ iter.Error = io.EOF
+ }
+ return false
+ }
+ if iter.captured != nil {
+ iter.captured = append(iter.captured,
+ iter.buf[iter.captureStartedAt:iter.tail]...)
+ iter.captureStartedAt = 0
+ }
+ for {
+ n, err := iter.reader.Read(iter.buf)
+ if n == 0 {
+ if err != nil {
+ if iter.Error == nil {
+ iter.Error = err
+ }
+ return false
+ }
+ } else {
+ iter.head = 0
+ iter.tail = n
+ return true
+ }
+ }
+}
+
+func (iter *Iterator) unreadByte() {
+ if iter.Error != nil {
+ return
+ }
+ iter.head--
+ return
+}
+
+// Read reads the next JSON element as a generic interface{}.
+func (iter *Iterator) Read() interface{} {
+ valueType := iter.WhatIsNext()
+ switch valueType {
+ case StringValue:
+ return iter.ReadString()
+ case NumberValue:
+ if iter.cfg.configBeforeFrozen.UseNumber {
+ return json.Number(iter.readNumberAsString())
+ }
+ return iter.ReadFloat64()
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ return nil
+ case BoolValue:
+ return iter.ReadBool()
+ case ArrayValue:
+ arr := []interface{}{}
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ var elem interface{}
+ iter.ReadVal(&elem)
+ arr = append(arr, elem)
+ return true
+ })
+ return arr
+ case ObjectValue:
+ obj := map[string]interface{}{}
+ iter.ReadMapCB(func(Iter *Iterator, field string) bool {
+ var elem interface{}
+ iter.ReadVal(&elem)
+ obj[field] = elem
+ return true
+ })
+ return obj
+ default:
+ iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
+ return nil
+ }
+}
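
iter.go above defines the pull-style Iterator (ParseString, WhatIsNext, Read, and the exported Error field). A small sketch of how it is driven, assuming the standard import path:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"name":"gopher","tags":["a","b"]}`)

    // WhatIsNext peeks at the next element; Read materialises it generically
    // (objects become map[string]interface{}, arrays []interface{}).
    if iter.WhatIsNext() == jsoniter.ObjectValue {
        fmt.Printf("%#v\n", iter.Read())
    }
    if iter.Error != nil {
        fmt.Println("parse error:", iter.Error)
    }
}
```
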
diff --git a/vendor/github.com/json-iterator/go/iter_array.go b/vendor/github.com/json-iterator/go/iter_array.go
new file mode 100644
index 0000000..6188cb4
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_array.go
@@ -0,0 +1,58 @@
+package jsoniter
+
+// ReadArray reads one array element boundary and reports whether the array has more elements to read.
+func (iter *Iterator) ReadArray() (ret bool) {
+ c := iter.nextToken()
+ switch c {
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l')
+ return false // null
+ case '[':
+ c = iter.nextToken()
+ if c != ']' {
+ iter.unreadByte()
+ return true
+ }
+ return false
+ case ']':
+ return false
+ case ',':
+ return true
+ default:
+ iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
+ return
+ }
+}
+
+// ReadArrayCB reads an array, invoking the callback once per element
+func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
+ c := iter.nextToken()
+ if c == '[' {
+ c = iter.nextToken()
+ if c != ']' {
+ iter.unreadByte()
+ if !callback(iter) {
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ if !callback(iter) {
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != ']' {
+ iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
+ return false
+ }
+ return true
+ }
+ return true
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
+ return false
+}
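
ReadArray/ReadArrayCB above stream array elements without building a slice first. A brief sketch using ReadArrayCB:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1, 2, 3, 4]`)

    sum := 0
    // The callback runs once per element; returning false would stop early.
    ok := iter.ReadArrayCB(func(it *jsoniter.Iterator) bool {
        sum += it.ReadInt()
        return true
    })
    fmt.Println(ok, sum, iter.Error) // true 10 <nil>
}
```
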
diff --git a/vendor/github.com/json-iterator/go/iter_float.go b/vendor/github.com/json-iterator/go/iter_float.go
new file mode 100644
index 0000000..4f883c0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_float.go
@@ -0,0 +1,347 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "io"
+ "math/big"
+ "strconv"
+ "strings"
+ "unsafe"
+)
+
+var floatDigits []int8
+
+const invalidCharForNumber = int8(-1)
+const endOfNumber = int8(-2)
+const dotInNumber = int8(-3)
+
+func init() {
+ floatDigits = make([]int8, 256)
+ for i := 0; i < len(floatDigits); i++ {
+ floatDigits[i] = invalidCharForNumber
+ }
+ for i := int8('0'); i <= int8('9'); i++ {
+ floatDigits[i] = i - int8('0')
+ }
+ floatDigits[','] = endOfNumber
+ floatDigits[']'] = endOfNumber
+ floatDigits['}'] = endOfNumber
+ floatDigits[' '] = endOfNumber
+ floatDigits['\t'] = endOfNumber
+ floatDigits['\n'] = endOfNumber
+ floatDigits['.'] = dotInNumber
+}
+
+// ReadBigFloat reads a big.Float
+func (iter *Iterator) ReadBigFloat() (ret *big.Float) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return nil
+ }
+ prec := 64
+ if len(str) > prec {
+ prec = len(str)
+ }
+ val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero)
+ if err != nil {
+ iter.Error = err
+ return nil
+ }
+ return val
+}
+
+// ReadBigInt reads a big.Int
+func (iter *Iterator) ReadBigInt() (ret *big.Int) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return nil
+ }
+ ret = big.NewInt(0)
+ var success bool
+ ret, success = ret.SetString(str, 10)
+ if !success {
+ iter.ReportError("ReadBigInt", "invalid big int")
+ return nil
+ }
+ return ret
+}
+
+// ReadFloat32 reads a float32
+func (iter *Iterator) ReadFloat32() (ret float32) {
+ c := iter.nextToken()
+ if c == '-' {
+ return -iter.readPositiveFloat32()
+ }
+ iter.unreadByte()
+ return iter.readPositiveFloat32()
+}
+
+func (iter *Iterator) readPositiveFloat32() (ret float32) {
+ value := uint64(0)
+ c := byte(' ')
+ i := iter.head
+ // first char
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ c = iter.buf[i]
+ i++
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat32SlowPath()
+ case endOfNumber:
+ iter.ReportError("readFloat32", "empty number")
+ return
+ case dotInNumber:
+ iter.ReportError("readFloat32", "leading dot is invalid")
+ return
+ case 0:
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ c = iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.ReportError("readFloat32", "leading zero is invalid")
+ return
+ }
+ }
+ value = uint64(ind)
+ // chars before dot
+non_decimal_loop:
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat32SlowPath()
+ case endOfNumber:
+ iter.head = i
+ return float32(value)
+ case dotInNumber:
+ break non_decimal_loop
+ }
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat32SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
+ }
+ // chars after dot
+ if c == '.' {
+ i++
+ decimalPlaces := 0
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case endOfNumber:
+ if decimalPlaces > 0 && decimalPlaces < len(pow10) {
+ iter.head = i
+ return float32(float64(value) / float64(pow10[decimalPlaces]))
+ }
+ // too many decimal places
+ return iter.readFloat32SlowPath()
+ case invalidCharForNumber:
+ fallthrough
+ case dotInNumber:
+ return iter.readFloat32SlowPath()
+ }
+ decimalPlaces++
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat32SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ }
+ return iter.readFloat32SlowPath()
+}
+
+func (iter *Iterator) readNumberAsString() (ret string) {
+ strBuf := [16]byte{}
+ str := strBuf[0:0]
+load_loop:
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ str = append(str, c)
+ continue
+ default:
+ iter.head = i
+ break load_loop
+ }
+ }
+ if !iter.loadMore() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ if len(str) == 0 {
+ iter.ReportError("readNumberAsString", "invalid number")
+ }
+ return *(*string)(unsafe.Pointer(&str))
+}
+
+func (iter *Iterator) readFloat32SlowPath() (ret float32) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ errMsg := validateFloat(str)
+ if errMsg != "" {
+ iter.ReportError("readFloat32SlowPath", errMsg)
+ return
+ }
+ val, err := strconv.ParseFloat(str, 32)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ return float32(val)
+}
+
+// ReadFloat64 reads a float64
+func (iter *Iterator) ReadFloat64() (ret float64) {
+ c := iter.nextToken()
+ if c == '-' {
+ return -iter.readPositiveFloat64()
+ }
+ iter.unreadByte()
+ return iter.readPositiveFloat64()
+}
+
+func (iter *Iterator) readPositiveFloat64() (ret float64) {
+ value := uint64(0)
+ c := byte(' ')
+ i := iter.head
+ // first char
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ c = iter.buf[i]
+ i++
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat64SlowPath()
+ case endOfNumber:
+ iter.ReportError("readFloat64", "empty number")
+ return
+ case dotInNumber:
+ iter.ReportError("readFloat64", "leading dot is invalid")
+ return
+ case 0:
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ c = iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.ReportError("readFloat64", "leading zero is invalid")
+ return
+ }
+ }
+ value = uint64(ind)
+ // chars before dot
+non_decimal_loop:
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat64SlowPath()
+ case endOfNumber:
+ iter.head = i
+ return float64(value)
+ case dotInNumber:
+ break non_decimal_loop
+ }
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat64SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
+ }
+ // chars after dot
+ if c == '.' {
+ i++
+ decimalPlaces := 0
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case endOfNumber:
+ if decimalPlaces > 0 && decimalPlaces < len(pow10) {
+ iter.head = i
+ return float64(value) / float64(pow10[decimalPlaces])
+ }
+ // too many decimal places
+ return iter.readFloat64SlowPath()
+ case invalidCharForNumber:
+ fallthrough
+ case dotInNumber:
+ return iter.readFloat64SlowPath()
+ }
+ decimalPlaces++
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat64SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ }
+ return iter.readFloat64SlowPath()
+}
+
+func (iter *Iterator) readFloat64SlowPath() (ret float64) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ errMsg := validateFloat(str)
+ if errMsg != "" {
+ iter.ReportError("readFloat64SlowPath", errMsg)
+ return
+ }
+ val, err := strconv.ParseFloat(str, 64)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ return val
+}
+
+func validateFloat(str string) string {
+ // strconv.ParseFloat does not validate `1.` or `1.e1`
+ if len(str) == 0 {
+ return "empty number"
+ }
+ if str[0] == '-' {
+ return "-- is not valid"
+ }
+ dotPos := strings.IndexByte(str, '.')
+ if dotPos != -1 {
+ if dotPos == len(str)-1 {
+ return "dot can not be last character"
+ }
+ switch str[dotPos+1] {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ default:
+ return "missing digit after dot"
+ }
+ }
+ return ""
+}
+
+// ReadNumber reads a json.Number
+func (iter *Iterator) ReadNumber() (ret json.Number) {
+ return json.Number(iter.readNumberAsString())
+}
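
iter_float.go provides a fast in-buffer float path plus arbitrary-precision reads. A short sketch; the trailing space after each literal simply keeps the example on the fast path rather than the EOF fallback:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    // Fast path: the whole number is terminated inside the buffer.
    it := jsoniter.ParseString(jsoniter.ConfigDefault, `3.14159 `)
    fmt.Println(it.ReadFloat64()) // 3.14159

    // ReadBigFloat preserves more digits than float64 for long inputs.
    it = jsoniter.ParseString(jsoniter.ConfigDefault, `0.12345678901234567890123456789 `)
    f := it.ReadBigFloat()
    fmt.Println(f.Text('f', 29))
}
```
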
diff --git a/vendor/github.com/json-iterator/go/iter_int.go b/vendor/github.com/json-iterator/go/iter_int.go
new file mode 100644
index 0000000..2142320
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_int.go
@@ -0,0 +1,345 @@
+package jsoniter
+
+import (
+ "math"
+ "strconv"
+)
+
+var intDigits []int8
+
+const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
+const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
+
+func init() {
+ intDigits = make([]int8, 256)
+ for i := 0; i < len(intDigits); i++ {
+ intDigits[i] = invalidCharForNumber
+ }
+ for i := int8('0'); i <= int8('9'); i++ {
+ intDigits[i] = i - int8('0')
+ }
+}
+
+// ReadUint reads a uint
+func (iter *Iterator) ReadUint() uint {
+ if strconv.IntSize == 32 {
+ return uint(iter.ReadUint32())
+ }
+ return uint(iter.ReadUint64())
+}
+
+// ReadInt reads an int
+func (iter *Iterator) ReadInt() int {
+ if strconv.IntSize == 32 {
+ return int(iter.ReadInt32())
+ }
+ return int(iter.ReadInt64())
+}
+
+// ReadInt8 reads an int8
+func (iter *Iterator) ReadInt8() (ret int8) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt8+1 {
+ iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int8(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt8 {
+ iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int8(val)
+}
+
+// ReadUint8 reads a uint8
+func (iter *Iterator) ReadUint8() (ret uint8) {
+ val := iter.readUint32(iter.nextToken())
+ if val > math.MaxUint8 {
+ iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return uint8(val)
+}
+
+// ReadInt16 reads an int16
+func (iter *Iterator) ReadInt16() (ret int16) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt16+1 {
+ iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int16(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt16 {
+ iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int16(val)
+}
+
+// ReadUint16 reads a uint16
+func (iter *Iterator) ReadUint16() (ret uint16) {
+ val := iter.readUint32(iter.nextToken())
+ if val > math.MaxUint16 {
+ iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return uint16(val)
+}
+
+// ReadInt32 reads an int32
+func (iter *Iterator) ReadInt32() (ret int32) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt32+1 {
+ iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int32(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt32 {
+ iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int32(val)
+}
+
+// ReadUint32 reads a uint32
+func (iter *Iterator) ReadUint32() (ret uint32) {
+ return iter.readUint32(iter.nextToken())
+}
+
+func (iter *Iterator) readUint32(c byte) (ret uint32) {
+ ind := intDigits[c]
+ if ind == 0 {
+ iter.assertInteger()
+ return 0 // single zero
+ }
+ if ind == invalidCharForNumber {
+ iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
+ return
+ }
+ value := uint32(ind)
+ if iter.tail-iter.head > 10 {
+ i := iter.head
+ ind2 := intDigits[iter.buf[i]]
+ if ind2 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ i++
+ ind3 := intDigits[iter.buf[i]]
+ if ind3 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10 + uint32(ind2)
+ }
+ //iter.head = i + 1
+ //value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
+ i++
+ ind4 := intDigits[iter.buf[i]]
+ if ind4 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100 + uint32(ind2)*10 + uint32(ind3)
+ }
+ i++
+ ind5 := intDigits[iter.buf[i]]
+ if ind5 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
+ }
+ i++
+ ind6 := intDigits[iter.buf[i]]
+ if ind6 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
+ }
+ i++
+ ind7 := intDigits[iter.buf[i]]
+ if ind7 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
+ }
+ i++
+ ind8 := intDigits[iter.buf[i]]
+ if ind8 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
+ }
+ i++
+ ind9 := intDigits[iter.buf[i]]
+ value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
+ iter.head = i
+ if ind9 == invalidCharForNumber {
+ iter.assertInteger()
+ return value
+ }
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ ind = intDigits[iter.buf[i]]
+ if ind == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ if value > uint32SafeToMultiply10 {
+ value2 := (value << 3) + (value << 1) + uint32(ind)
+ if value2 < value {
+ iter.ReportError("readUint32", "overflow")
+ return
+ }
+ value = value2
+ continue
+ }
+ value = (value << 3) + (value << 1) + uint32(ind)
+ }
+ if !iter.loadMore() {
+ iter.assertInteger()
+ return value
+ }
+ }
+}
+
+// ReadInt64 reads an int64
+func (iter *Iterator) ReadInt64() (ret int64) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint64(iter.readByte())
+ if val > math.MaxInt64+1 {
+ iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
+ return
+ }
+ return -int64(val)
+ }
+ val := iter.readUint64(c)
+ if val > math.MaxInt64 {
+ iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
+ return
+ }
+ return int64(val)
+}
+
+// ReadUint64 reads a uint64
+func (iter *Iterator) ReadUint64() uint64 {
+ return iter.readUint64(iter.nextToken())
+}
+
+func (iter *Iterator) readUint64(c byte) (ret uint64) {
+ ind := intDigits[c]
+ if ind == 0 {
+ iter.assertInteger()
+ return 0 // single zero
+ }
+ if ind == invalidCharForNumber {
+ iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
+ return
+ }
+ value := uint64(ind)
+ if iter.tail-iter.head > 10 {
+ i := iter.head
+ ind2 := intDigits[iter.buf[i]]
+ if ind2 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ i++
+ ind3 := intDigits[iter.buf[i]]
+ if ind3 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10 + uint64(ind2)
+ }
+ //iter.head = i + 1
+ //value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
+ i++
+ ind4 := intDigits[iter.buf[i]]
+ if ind4 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100 + uint64(ind2)*10 + uint64(ind3)
+ }
+ i++
+ ind5 := intDigits[iter.buf[i]]
+ if ind5 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
+ }
+ i++
+ ind6 := intDigits[iter.buf[i]]
+ if ind6 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
+ }
+ i++
+ ind7 := intDigits[iter.buf[i]]
+ if ind7 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
+ }
+ i++
+ ind8 := intDigits[iter.buf[i]]
+ if ind8 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
+ }
+ i++
+ ind9 := intDigits[iter.buf[i]]
+ value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
+ iter.head = i
+ if ind9 == invalidCharForNumber {
+ iter.assertInteger()
+ return value
+ }
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ ind = intDigits[iter.buf[i]]
+ if ind == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ if value > uint64SafeToMultiple10 {
+ value2 := (value << 3) + (value << 1) + uint64(ind)
+ if value2 < value {
+ iter.ReportError("readUint64", "overflow")
+ return
+ }
+ value = value2
+ continue
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ if !iter.loadMore() {
+ iter.assertInteger()
+ return value
+ }
+ }
+}
+
+func (iter *Iterator) assertInteger() {
+ if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
+ iter.ReportError("assertInteger", "can not decode float as int")
+ }
+}
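
The integer readers above report overflow through iter.Error rather than a second return value. A small sketch of that behaviour with ReadInt8:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    // In range: the value is returned and iter.Error stays nil.
    it := jsoniter.ParseString(jsoniter.ConfigDefault, `100 `)
    fmt.Println(it.ReadInt8(), it.Error) // 100 <nil>

    // Out of range for int8: the zero value comes back and the overflow
    // is recorded on iter.Error.
    it = jsoniter.ParseString(jsoniter.ConfigDefault, `300 `)
    fmt.Println(it.ReadInt8(), it.Error != nil) // 0 true
}
```
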
diff --git a/vendor/github.com/json-iterator/go/iter_object.go b/vendor/github.com/json-iterator/go/iter_object.go
new file mode 100644
index 0000000..1c57576
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_object.go
@@ -0,0 +1,251 @@
+package jsoniter
+
+import (
+ "fmt"
+ "strings"
+)
+
+// ReadObject reads one field from the object.
+// If the object has ended, it returns an empty string.
+// Otherwise, it returns the field name.
+func (iter *Iterator) ReadObject() (ret string) {
+ c := iter.nextToken()
+ switch c {
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l')
+ return "" // null
+ case '{':
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field := iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ return field
+ }
+ if c == '}' {
+ return "" // end of object
+ }
+ iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
+ return
+ case ',':
+ field := iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ return field
+ case '}':
+ return "" // end of object
+ default:
+ iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
+ return
+ }
+}
+
+// readFieldHash hashes the next field name (case-insensitively unless the config is case sensitive)
+func (iter *Iterator) readFieldHash() int64 {
+ hash := int64(0x811c9dc5)
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c}))
+ return 0
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ // require ascii string and no escape
+ b := iter.buf[i]
+ if b == '\\' {
+ iter.head = i
+ for _, b := range iter.readStringSlowPath() {
+ if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
+ b += 'a' - 'A'
+ }
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+ return 0
+ }
+ return hash
+ }
+ if b == '"' {
+ iter.head = i + 1
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+ return 0
+ }
+ return hash
+ }
+ if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
+ b += 'a' - 'A'
+ }
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ if !iter.loadMore() {
+ iter.ReportError("readFieldHash", `incomplete field name`)
+ return 0
+ }
+ }
+}
+
+func calcHash(str string, caseSensitive bool) int64 {
+ if !caseSensitive {
+ str = strings.ToLower(str)
+ }
+ hash := int64(0x811c9dc5)
+ for _, b := range []byte(str) {
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ return int64(hash)
+}
+
+// ReadObjectCB reads an object with a callback; the key must be ASCII only and the field name is not copied
+func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
+ c := iter.nextToken()
+ var field string
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field = iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ field = iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != '}' {
+ iter.ReportError("ReadObjectCB", `object not ended with }`)
+ return false
+ }
+ return true
+ }
+ if c == '}' {
+ return true
+ }
+ iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
+ return false
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
+ return false
+}
+
+// ReadMapCB reads a map with a callback; the key can be any string
+func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
+ c := iter.nextToken()
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field := iter.ReadString()
+ if iter.nextToken() != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return false
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ field = iter.ReadString()
+ if iter.nextToken() != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return false
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != '}' {
+ iter.ReportError("ReadMapCB", `object not ended with }`)
+ return false
+ }
+ return true
+ }
+ if c == '}' {
+ return true
+ }
+ iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
+ return false
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
+ return false
+}
+
+func (iter *Iterator) readObjectStart() bool {
+ c := iter.nextToken()
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '}' {
+ return false
+ }
+ iter.unreadByte()
+ return true
+ } else if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return false
+ }
+ iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
+ return false
+}
+
+func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
+ str := iter.ReadStringAsSlice()
+ if iter.skipWhitespacesWithoutLoadMore() {
+ if ret == nil {
+ ret = make([]byte, len(str))
+ copy(ret, str)
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+ if iter.buf[iter.head] != ':' {
+ iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
+ return
+ }
+ iter.head++
+ if iter.skipWhitespacesWithoutLoadMore() {
+ if ret == nil {
+ ret = make([]byte, len(str))
+ copy(ret, str)
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+ if ret == nil {
+ return str
+ }
+ return ret
+}
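
ReadObjectCB/ReadMapCB above let callers stream key/value pairs without allocating an intermediate map. A brief sketch with ReadMapCB:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"a":1,"b":2,"c":3}`)

    total := 0
    // The callback receives each key; the value is read from the iterator.
    ok := iter.ReadMapCB(func(it *jsoniter.Iterator, key string) bool {
        total += it.ReadInt()
        return true // keep iterating
    })
    fmt.Println(ok, total, iter.Error) // true 6 <nil>
}
```
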
diff --git a/vendor/github.com/json-iterator/go/iter_skip.go b/vendor/github.com/json-iterator/go/iter_skip.go
new file mode 100644
index 0000000..f58beb9
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_skip.go
@@ -0,0 +1,129 @@
+package jsoniter
+
+import "fmt"
+
+// ReadNil reads the next JSON value as null and
+// reports whether it was null or not
+func (iter *Iterator) ReadNil() (ret bool) {
+ c := iter.nextToken()
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ return true
+ }
+ iter.unreadByte()
+ return false
+}
+
+// ReadBool reads the next JSON value as a bool
+func (iter *Iterator) ReadBool() (ret bool) {
+ c := iter.nextToken()
+ if c == 't' {
+ iter.skipThreeBytes('r', 'u', 'e')
+ return true
+ }
+ if c == 'f' {
+ iter.skipFourBytes('a', 'l', 's', 'e')
+ return false
+ }
+ iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
+ return
+}
+
+// SkipAndReturnBytes skips the next JSON element and returns its content as []byte.
+// The []byte can be kept; it is a copy of the data.
+func (iter *Iterator) SkipAndReturnBytes() []byte {
+ iter.startCapture(iter.head)
+ iter.Skip()
+ return iter.stopCapture()
+}
+
+type captureBuffer struct {
+ startedAt int
+ captured []byte
+}
+
+func (iter *Iterator) startCapture(captureStartedAt int) {
+ if iter.captured != nil {
+ panic("already in capture mode")
+ }
+ iter.captureStartedAt = captureStartedAt
+ iter.captured = make([]byte, 0, 32)
+}
+
+func (iter *Iterator) stopCapture() []byte {
+ if iter.captured == nil {
+ panic("not in capture mode")
+ }
+ captured := iter.captured
+ remaining := iter.buf[iter.captureStartedAt:iter.head]
+ iter.captureStartedAt = -1
+ iter.captured = nil
+ if len(captured) == 0 {
+ copied := make([]byte, len(remaining))
+ copy(copied, remaining)
+ return copied
+ }
+ captured = append(captured, remaining...)
+ return captured
+}
+
+// Skip skips a JSON value and positions the iterator at the next value
+func (iter *Iterator) Skip() {
+ c := iter.nextToken()
+ switch c {
+ case '"':
+ iter.skipString()
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ case 't':
+ iter.skipThreeBytes('r', 'u', 'e') // true
+ case 'f':
+ iter.skipFourBytes('a', 'l', 's', 'e') // false
+ case '0':
+ iter.unreadByte()
+ iter.ReadFloat32()
+ case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.skipNumber()
+ case '[':
+ iter.skipArray()
+ case '{':
+ iter.skipObject()
+ default:
+ iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
+ return
+ }
+}
+
+func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) {
+ if iter.readByte() != b1 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b2 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b3 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b4 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+}
+
+func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) {
+ if iter.readByte() != b1 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+ if iter.readByte() != b2 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+ if iter.readByte() != b3 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+}
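
Skip and SkipAndReturnBytes above make it cheap to extract one raw sub-document from a larger payload without decoding the rest. A short sketch:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault,
        `{"meta":{"version":1},"payload":[1,2,3]}`)

    var raw []byte
    iter.ReadObjectCB(func(it *jsoniter.Iterator, field string) bool {
        if field == "payload" {
            // Copy the raw bytes of this element instead of decoding it.
            raw = it.SkipAndReturnBytes()
        } else {
            it.Skip() // discard fields we do not care about
        }
        return true
    })
    fmt.Println(string(raw), iter.Error) // [1,2,3] <nil>
}
```
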
diff --git a/vendor/github.com/json-iterator/go/iter_skip_sloppy.go b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go
new file mode 100644
index 0000000..8fcdc3b
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go
@@ -0,0 +1,144 @@
+//+build jsoniter_sloppy
+
+package jsoniter
+
+// sloppy but faster implementation; does not validate the input JSON
+
+func (iter *Iterator) skipNumber() {
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\r', '\t', ',', '}', ']':
+ iter.head = i
+ return
+ }
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipArray() {
+ level := 1
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ switch iter.buf[i] {
+ case '"': // If inside string, skip it
+ iter.head = i + 1
+ iter.skipString()
+ i = iter.head - 1 // it will be i++ soon
+ case '[': // If open symbol, increase level
+ level++
+ case ']': // If close symbol, decrease level
+ level--
+
+ // If we have returned to the original level, we're done
+ if level == 0 {
+ iter.head = i + 1
+ return
+ }
+ }
+ }
+ if !iter.loadMore() {
+ iter.ReportError("skipObject", "incomplete array")
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipObject() {
+ level := 1
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ switch iter.buf[i] {
+ case '"': // If inside string, skip it
+ iter.head = i + 1
+ iter.skipString()
+ i = iter.head - 1 // it will be i++ soon
+ case '{': // If open symbol, increase level
+ level++
+ case '}': // If close symbol, decrease level
+ level--
+
+ // If we have returned to the original level, we're done
+ if level == 0 {
+ iter.head = i + 1
+ return
+ }
+ }
+ }
+ if !iter.loadMore() {
+ iter.ReportError("skipObject", "incomplete object")
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipString() {
+ for {
+ end, escaped := iter.findStringEnd()
+ if end == -1 {
+ if !iter.loadMore() {
+ iter.ReportError("skipString", "incomplete string")
+ return
+ }
+ if escaped {
+ iter.head = 1 // skip the first char as last char read is \
+ }
+ } else {
+ iter.head = end
+ return
+ }
+ }
+}
+
+// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go
+// Tries to find the end of the string.
+// Supports strings that contain escaped quote symbols.
+func (iter *Iterator) findStringEnd() (int, bool) {
+ escaped := false
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ if !escaped {
+ return i + 1, false
+ }
+ j := i - 1
+ for {
+ if j < iter.head || iter.buf[j] != '\\' {
+ // even number of backslashes
+ // either end of buffer, or " found
+ return i + 1, true
+ }
+ j--
+ if j < iter.head || iter.buf[j] != '\\' {
+ // odd number of backslashes
+ // it is \" or \\\"
+ break
+ }
+ j--
+ }
+ } else if c == '\\' {
+ escaped = true
+ }
+ }
+ j := iter.tail - 1
+ for {
+ if j < iter.head || iter.buf[j] != '\\' {
+ // even number of backslashes
+ // either end of buffer, or " found
+ return -1, false // do not end with \
+ }
+ j--
+ if j < iter.head || iter.buf[j] != '\\' {
+ // odd number of backslashes
+ // it is \" or \\\"
+ break
+ }
+ j--
+
+ }
+ return -1, true // end with \
+}
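
This file is only compiled under the jsoniter_sloppy build tag; by default the strict variant that follows is used instead. A sketch of the observable difference, using Valid as implemented earlier in this diff (run as-is it exercises the strict build; building with `-tags jsoniter_sloppy` skips the malformed number without complaint):

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    good := []byte(`{"a": 1.5}`)
    bad := []byte(`{"a": 1.5.5}`) // second dot is invalid JSON

    // Strict build: the malformed number inside the skipped field is rejected.
    // Sloppy build (-tags jsoniter_sloppy): skipping does not validate, so the
    // second result may be true there.
    fmt.Println(jsoniter.ConfigDefault.Valid(good), jsoniter.ConfigDefault.Valid(bad)) // true false
}
```
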
diff --git a/vendor/github.com/json-iterator/go/iter_skip_strict.go b/vendor/github.com/json-iterator/go/iter_skip_strict.go
new file mode 100644
index 0000000..f67bc2e
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_skip_strict.go
@@ -0,0 +1,89 @@
+//+build !jsoniter_sloppy
+
+package jsoniter
+
+import "fmt"
+
+func (iter *Iterator) skipNumber() {
+ if !iter.trySkipNumber() {
+ iter.unreadByte()
+ iter.ReadFloat32()
+ }
+}
+
+func (iter *Iterator) trySkipNumber() bool {
+ dotFound := false
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ case '.':
+ if dotFound {
+ iter.ReportError("validateNumber", `more than one dot found in number`)
+ return true // already failed
+ }
+ if i+1 == iter.tail {
+ return false
+ }
+ c = iter.buf[i+1]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ default:
+ iter.ReportError("validateNumber", `missing digit after dot`)
+ return true // already failed
+ }
+ dotFound = true
+ default:
+ switch c {
+ case ',', ']', '}', ' ', '\t', '\n', '\r':
+ if iter.head == i {
+ return false // if - without following digits
+ }
+ iter.head = i
+ return true // must be valid
+ }
+ return false // may be invalid
+ }
+ }
+ return false
+}
+
+func (iter *Iterator) skipString() {
+ if !iter.trySkipString() {
+ iter.unreadByte()
+ iter.ReadString()
+ }
+}
+
+func (iter *Iterator) trySkipString() bool {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ iter.head = i + 1
+ return true // valid
+ } else if c == '\\' {
+ return false
+ } else if c < ' ' {
+ iter.ReportError("trySkipString",
+ fmt.Sprintf(`invalid control character found: %d`, c))
+ return true // already failed
+ }
+ }
+ return false
+}
+
+func (iter *Iterator) skipObject() {
+ iter.unreadByte()
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ return true
+ })
+}
+
+func (iter *Iterator) skipArray() {
+ iter.unreadByte()
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ iter.Skip()
+ return true
+ })
+}
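
The strict skip logic above is also what backs the path-based Get helper seen earlier in config.go: it walks the document lazily and skips everything outside the requested path. A brief sketch (ToString on the returned Any comes from the upstream Any API, which is not part of this section):

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    data := []byte(`{"users":[{"name":"ann"},{"name":"bob"}]}`)

    // Get walks "users" -> index 1 -> "name" without decoding the whole document.
    name := jsoniter.ConfigDefault.Get(data, "users", 1, "name").ToString()
    fmt.Println(name) // bob
}
```
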
diff --git a/vendor/github.com/json-iterator/go/iter_str.go b/vendor/github.com/json-iterator/go/iter_str.go
new file mode 100644
index 0000000..adc487e
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_str.go
@@ -0,0 +1,215 @@
+package jsoniter
+
+import (
+ "fmt"
+ "unicode/utf16"
+)
+
+// ReadString reads a string from the iterator
+func (iter *Iterator) ReadString() (ret string) {
+ c := iter.nextToken()
+ if c == '"' {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ ret = string(iter.buf[iter.head:i])
+ iter.head = i + 1
+ return ret
+ } else if c == '\\' {
+ break
+ } else if c < ' ' {
+ iter.ReportError("ReadString",
+ fmt.Sprintf(`invalid control character found: %d`, c))
+ return
+ }
+ }
+ return iter.readStringSlowPath()
+ } else if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return ""
+ }
+ iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
+ return
+}
+
+func (iter *Iterator) readStringSlowPath() (ret string) {
+ var str []byte
+ var c byte
+ for iter.Error == nil {
+ c = iter.readByte()
+ if c == '"' {
+ return string(str)
+ }
+ if c == '\\' {
+ c = iter.readByte()
+ str = iter.readEscapedChar(c, str)
+ } else {
+ str = append(str, c)
+ }
+ }
+ iter.ReportError("readStringSlowPath", "unexpected end of input")
+ return
+}
+
+func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte {
+ switch c {
+ case 'u':
+ r := iter.readU4()
+ if utf16.IsSurrogate(r) {
+ c = iter.readByte()
+ if iter.Error != nil {
+ return nil
+ }
+ if c != '\\' {
+ iter.unreadByte()
+ str = appendRune(str, r)
+ return str
+ }
+ c = iter.readByte()
+ if iter.Error != nil {
+ return nil
+ }
+ if c != 'u' {
+ str = appendRune(str, r)
+ return iter.readEscapedChar(c, str)
+ }
+ r2 := iter.readU4()
+ if iter.Error != nil {
+ return nil
+ }
+ combined := utf16.DecodeRune(r, r2)
+ if combined == '\uFFFD' {
+ str = appendRune(str, r)
+ str = appendRune(str, r2)
+ } else {
+ str = appendRune(str, combined)
+ }
+ } else {
+ str = appendRune(str, r)
+ }
+ case '"':
+ str = append(str, '"')
+ case '\\':
+ str = append(str, '\\')
+ case '/':
+ str = append(str, '/')
+ case 'b':
+ str = append(str, '\b')
+ case 'f':
+ str = append(str, '\f')
+ case 'n':
+ str = append(str, '\n')
+ case 'r':
+ str = append(str, '\r')
+ case 't':
+ str = append(str, '\t')
+ default:
+ iter.ReportError("readEscapedChar",
+ `invalid escape char after \`)
+ return nil
+ }
+ return str
+}
+
+// ReadStringAsSlice reads a string from the iterator without copying it into string form.
+// The []byte can not be kept, as it will change after the next iterator call.
+func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
+ c := iter.nextToken()
+ if c == '"' {
+ for i := iter.head; i < iter.tail; i++ {
+ // require ascii string and no escape
+ // for: field name, base64, number
+ if iter.buf[i] == '"' {
+ // fast path: reuse the underlying buffer
+ ret = iter.buf[iter.head:i]
+ iter.head = i + 1
+ return ret
+ }
+ }
+ readLen := iter.tail - iter.head
+ copied := make([]byte, readLen, readLen*2)
+ copy(copied, iter.buf[iter.head:iter.tail])
+ iter.head = iter.tail
+ for iter.Error == nil {
+ c := iter.readByte()
+ if c == '"' {
+ return copied
+ }
+ copied = append(copied, c)
+ }
+ return copied
+ }
+ iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
+ return
+}
+
+func (iter *Iterator) readU4() (ret rune) {
+ for i := 0; i < 4; i++ {
+ c := iter.readByte()
+ if iter.Error != nil {
+ return
+ }
+ if c >= '0' && c <= '9' {
+ ret = ret*16 + rune(c-'0')
+ } else if c >= 'a' && c <= 'f' {
+ ret = ret*16 + rune(c-'a'+10)
+ } else if c >= 'A' && c <= 'F' {
+ ret = ret*16 + rune(c-'A'+10)
+ } else {
+ iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
+ return
+ }
+ }
+ return ret
+}
+
+const (
+ t1 = 0x00 // 0000 0000
+ tx = 0x80 // 1000 0000
+ t2 = 0xC0 // 1100 0000
+ t3 = 0xE0 // 1110 0000
+ t4 = 0xF0 // 1111 0000
+ t5 = 0xF8 // 1111 1000
+
+ maskx = 0x3F // 0011 1111
+ mask2 = 0x1F // 0001 1111
+ mask3 = 0x0F // 0000 1111
+ mask4 = 0x07 // 0000 0111
+
+ rune1Max = 1<<7 - 1
+ rune2Max = 1<<11 - 1
+ rune3Max = 1<<16 - 1
+
+ surrogateMin = 0xD800
+ surrogateMax = 0xDFFF
+
+ maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
+ runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
+)
+
+func appendRune(p []byte, r rune) []byte {
+ // Negative values are erroneous. Making it unsigned addresses the problem.
+ switch i := uint32(r); {
+ case i <= rune1Max:
+ p = append(p, byte(r))
+ return p
+ case i <= rune2Max:
+ p = append(p, t2|byte(r>>6))
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ case i > maxRune, surrogateMin <= i && i <= surrogateMax:
+ r = runeError
+ fallthrough
+ case i <= rune3Max:
+ p = append(p, t3|byte(r>>12))
+ p = append(p, tx|byte(r>>6)&maskx)
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ default:
+ p = append(p, t4|byte(r>>18))
+ p = append(p, tx|byte(r>>12)&maskx)
+ p = append(p, tx|byte(r>>6)&maskx)
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ }
+}
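
ReadString above handles escapes and \u sequences (including surrogate pairs) via readStringSlowPath/readEscapedChar. A small sketch:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    // The backslash escapes below are JSON escapes, kept literal by the
    // Go raw string; decoding them exercises the slow path above.
    it := jsoniter.ParseString(jsoniter.ConfigDefault, `"line1\nline2 \u00e9 \ud83d\ude00"`)
    fmt.Printf("%q\n", it.ReadString())
    fmt.Println(it.Error == nil) // true
}
```
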
diff --git a/vendor/github.com/json-iterator/go/jsoniter.go b/vendor/github.com/json-iterator/go/jsoniter.go
new file mode 100644
index 0000000..c2934f9
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter.go
@@ -0,0 +1,18 @@
+// Package jsoniter implements encoding and decoding of JSON as defined in
+// RFC 4627 and provides an API with syntax identical to the standard library's encoding/json.
+// Converting from encoding/json to jsoniter requires no more than replacing the imported package
+// and the variable type declarations (if any).
+// The jsoniter interfaces give 100% compatibility with code using the standard library.
+//
+// "JSON and Go"
+// (https://golang.org/doc/articles/json_and_go.html)
+// gives a description of how Marshal/Unmarshal operate
+// between arbitrary or predefined json objects and bytes,
+// and it applies to jsoniter.Marshal/Unmarshal as well.
+//
+// In addition, jsoniter.Iterator provides a different set of interfaces
+// for iterating over given bytes/string/reader
+// and yielding parsed elements one by one.
+// This set of interfaces reads input only as required and gives
+// better performance.
+package jsoniter
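
The package comment above advertises drop-in compatibility with encoding/json. The usual migration pattern is a package-level json variable, sketched here (ConfigCompatibleWithStandardLibrary is the upstream preset for this; it is defined outside this hunk):

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

// Existing call sites written against encoding/json keep compiling unchanged
// once `json` resolves to a jsoniter API value.
var json = jsoniter.ConfigCompatibleWithStandardLibrary

type user struct {
    Name string `json:"name"`
}

func main() {
    b, _ := json.Marshal(user{Name: "gopher"})
    fmt.Println(string(b)) // {"name":"gopher"}

    var u user
    _ = json.Unmarshal(b, &u)
    fmt.Println(u.Name) // gopher
}
```
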
diff --git a/vendor/github.com/json-iterator/go/pool.go b/vendor/github.com/json-iterator/go/pool.go
new file mode 100644
index 0000000..e2389b5
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/pool.go
@@ -0,0 +1,42 @@
+package jsoniter
+
+import (
+ "io"
+)
+
+// IteratorPool is a thread-safe pool of iterators with the same configuration
+type IteratorPool interface {
+ BorrowIterator(data []byte) *Iterator
+ ReturnIterator(iter *Iterator)
+}
+
+// StreamPool is a thread-safe pool of streams with the same configuration
+type StreamPool interface {
+ BorrowStream(writer io.Writer) *Stream
+ ReturnStream(stream *Stream)
+}
+
+func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
+ stream := cfg.streamPool.Get().(*Stream)
+ stream.Reset(writer)
+ return stream
+}
+
+func (cfg *frozenConfig) ReturnStream(stream *Stream) {
+ stream.out = nil
+ stream.Error = nil
+ stream.Attachment = nil
+ cfg.streamPool.Put(stream)
+}
+
+func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
+ iter := cfg.iteratorPool.Get().(*Iterator)
+ iter.ResetBytes(data)
+ return iter
+}
+
+func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
+ iter.Error = nil
+ iter.Attachment = nil
+ cfg.iteratorPool.Put(iter)
+}
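
pool.go above exposes the sync.Pool-backed Borrow/Return pairs that Marshal/Unmarshal use internally; callers can also use them directly to avoid per-call allocations. A short sketch:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    api := jsoniter.ConfigDefault

    // Borrow a stream, write into its internal buffer, and return it to the pool.
    stream := api.BorrowStream(nil)
    stream.WriteVal(map[string]int{"x": 1})
    fmt.Println(string(stream.Buffer())) // {"x":1}
    api.ReturnStream(stream)

    // Iterators are pooled the same way.
    iter := api.BorrowIterator([]byte(`[10, 20]`))
    fmt.Println(iter.ReadArray(), iter.ReadInt()) // true 10
    api.ReturnIterator(iter)
}
```
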
diff --git a/vendor/github.com/json-iterator/go/reflect.go b/vendor/github.com/json-iterator/go/reflect.go
new file mode 100644
index 0000000..4459e20
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect.go
@@ -0,0 +1,332 @@
+package jsoniter
+
+import (
+ "fmt"
+ "reflect"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
+// ValDecoder is an internal type registered to the cache as needed.
+// Don't confuse jsoniter.ValDecoder with json.Decoder.
+// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
+//
+// Reflection on the type is used to create decoders, which are then cached.
+// Reflection on the value is avoided where possible, since reflect.Value itself allocates, with the following exceptions:
+// 1. creating an instance of a new value, for example *int requires an int to be allocated
+// 2. appending to a slice, if the existing capacity is not enough, allocation is done via reflect.New
+// 3. assigning to a map, where both key and value will be reflect.Value
+// For a simple struct binding, decoding is reflect.Value-free and allocation-free
+type ValDecoder interface {
+ Decode(ptr unsafe.Pointer, iter *Iterator)
+}
+
+// ValEncoder is an internal type registered to the cache as needed.
+// Don't confuse jsoniter.ValEncoder with json.Encoder.
+// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
+type ValEncoder interface {
+ IsEmpty(ptr unsafe.Pointer) bool
+ Encode(ptr unsafe.Pointer, stream *Stream)
+}
+
+type checkIsEmpty interface {
+ IsEmpty(ptr unsafe.Pointer) bool
+}
+
+type ctx struct {
+ *frozenConfig
+ prefix string
+ encoders map[reflect2.Type]ValEncoder
+ decoders map[reflect2.Type]ValDecoder
+}
+
+func (b *ctx) caseSensitive() bool {
+ if b.frozenConfig == nil {
+ // default is case-insensitive
+ return false
+ }
+ return b.frozenConfig.caseSensitive
+}
+
+func (b *ctx) append(prefix string) *ctx {
+ return &ctx{
+ frozenConfig: b.frozenConfig,
+ prefix: b.prefix + " " + prefix,
+ encoders: b.encoders,
+ decoders: b.decoders,
+ }
+}
+
+// ReadVal copies the underlying JSON into a Go value, same as json.Unmarshal
+func (iter *Iterator) ReadVal(obj interface{}) {
+ cacheKey := reflect2.RTypeOf(obj)
+ decoder := iter.cfg.getDecoderFromCache(cacheKey)
+ if decoder == nil {
+ typ := reflect2.TypeOf(obj)
+ if typ.Kind() != reflect.Ptr {
+ iter.ReportError("ReadVal", "can only unmarshal into pointer")
+ return
+ }
+ decoder = iter.cfg.DecoderOf(typ)
+ }
+ ptr := reflect2.PtrOf(obj)
+ if ptr == nil {
+ iter.ReportError("ReadVal", "can not read into nil pointer")
+ return
+ }
+ decoder.Decode(ptr, iter)
+}
+
+// WriteVal copies the Go value into the underlying JSON, same as json.Marshal
+func (stream *Stream) WriteVal(val interface{}) {
+ if nil == val {
+ stream.WriteNil()
+ return
+ }
+ cacheKey := reflect2.RTypeOf(val)
+ encoder := stream.cfg.getEncoderFromCache(cacheKey)
+ if encoder == nil {
+ typ := reflect2.TypeOf(val)
+ encoder = stream.cfg.EncoderOf(typ)
+ }
+ encoder.Encode(reflect2.PtrOf(val), stream)
+}
+
+func (cfg *frozenConfig) DecoderOf(typ reflect2.Type) ValDecoder {
+ cacheKey := typ.RType()
+ decoder := cfg.getDecoderFromCache(cacheKey)
+ if decoder != nil {
+ return decoder
+ }
+ ctx := &ctx{
+ frozenConfig: cfg,
+ prefix: "",
+ decoders: map[reflect2.Type]ValDecoder{},
+ encoders: map[reflect2.Type]ValEncoder{},
+ }
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ decoder = decoderOfType(ctx, ptrType.Elem())
+ cfg.addDecoderToCache(cacheKey, decoder)
+ return decoder
+}
+
+func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := getTypeDecoderFromExtension(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfType(ctx, typ)
+ for _, extension := range extensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
+ for _, extension := range ctx.extraExtensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ return decoder
+}
+
+func createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := ctx.decoders[typ]
+ if decoder != nil {
+ return decoder
+ }
+ placeholder := &placeholderDecoder{}
+ ctx.decoders[typ] = placeholder
+ decoder = _createDecoderOfType(ctx, typ)
+ placeholder.decoder = decoder
+ return decoder
+}
+
+func _createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := createDecoderOfJsonRawMessage(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfJsonNumber(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfMarshaler(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfAny(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfNative(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ switch typ.Kind() {
+ case reflect.Interface:
+ ifaceType, isIFace := typ.(*reflect2.UnsafeIFaceType)
+ if isIFace {
+ return &ifaceDecoder{valType: ifaceType}
+ }
+ return &efaceDecoder{}
+ case reflect.Struct:
+ return decoderOfStruct(ctx, typ)
+ case reflect.Array:
+ return decoderOfArray(ctx, typ)
+ case reflect.Slice:
+ return decoderOfSlice(ctx, typ)
+ case reflect.Map:
+ return decoderOfMap(ctx, typ)
+ case reflect.Ptr:
+ return decoderOfOptional(ctx, typ)
+ default:
+ return &lazyErrorDecoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
+ }
+}
+
+func (cfg *frozenConfig) EncoderOf(typ reflect2.Type) ValEncoder {
+ cacheKey := typ.RType()
+ encoder := cfg.getEncoderFromCache(cacheKey)
+ if encoder != nil {
+ return encoder
+ }
+ ctx := &ctx{
+ frozenConfig: cfg,
+ prefix: "",
+ decoders: map[reflect2.Type]ValDecoder{},
+ encoders: map[reflect2.Type]ValEncoder{},
+ }
+ encoder = encoderOfType(ctx, typ)
+ if typ.LikePtr() {
+ encoder = &onePtrEncoder{encoder}
+ }
+ cfg.addEncoderToCache(cacheKey, encoder)
+ return encoder
+}
+
+type onePtrEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *onePtrEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
+}
+
+func (encoder *onePtrEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
+}
+
+func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := getTypeEncoderFromExtension(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfType(ctx, typ)
+ for _, extension := range extensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
+ for _, extension := range ctx.extraExtensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ return encoder
+}
+
+func createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := ctx.encoders[typ]
+ if encoder != nil {
+ return encoder
+ }
+ placeholder := &placeholderEncoder{}
+ ctx.encoders[typ] = placeholder
+ encoder = _createEncoderOfType(ctx, typ)
+ placeholder.encoder = encoder
+ return encoder
+}
+func _createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := createEncoderOfJsonRawMessage(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfJsonNumber(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfMarshaler(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfAny(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfNative(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ kind := typ.Kind()
+ switch kind {
+ case reflect.Interface:
+ return &dynamicEncoder{typ}
+ case reflect.Struct:
+ return encoderOfStruct(ctx, typ)
+ case reflect.Array:
+ return encoderOfArray(ctx, typ)
+ case reflect.Slice:
+ return encoderOfSlice(ctx, typ)
+ case reflect.Map:
+ return encoderOfMap(ctx, typ)
+ case reflect.Ptr:
+ return encoderOfOptional(ctx, typ)
+ default:
+ return &lazyErrorEncoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
+ }
+}
+
+type lazyErrorDecoder struct {
+ err error
+}
+
+func (decoder *lazyErrorDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.WhatIsNext() != NilValue {
+ if iter.Error == nil {
+ iter.Error = decoder.err
+ }
+ } else {
+ iter.Skip()
+ }
+}
+
+type lazyErrorEncoder struct {
+ err error
+}
+
+func (encoder *lazyErrorEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if ptr == nil {
+ stream.WriteNil()
+ } else if stream.Error == nil {
+ stream.Error = encoder.err
+ }
+}
+
+func (encoder *lazyErrorEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type placeholderDecoder struct {
+ decoder ValDecoder
+}
+
+func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.decoder.Decode(ptr, iter)
+}
+
+type placeholderEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.encoder.Encode(ptr, stream)
+}
+
+func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.encoder.IsEmpty(ptr)
+}
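
ReadVal and WriteVal above are the entry points behind the package-level Marshal/Unmarshal helpers: DecoderOf/EncoderOf build a codec once per concrete type and cache it on the frozen config. A minimal round-trip sketch of that path (the user struct and the values are illustrative, not part of this repository):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type user struct {
	Name string `json:"name"`
	Age  int    `json:"age,omitempty"`
}

func main() {
	json := jsoniter.ConfigCompatibleWithStandardLibrary

	// The first Marshal builds the struct encoder via EncoderOf and caches it.
	out, err := json.Marshal(user{Name: "alice", Age: 30})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"name":"alice","age":30}

	// ReadVal requires a pointer target, so Unmarshal must be given &u.
	var u user
	if err := json.Unmarshal(out, &u); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", u)
}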
diff --git a/vendor/github.com/json-iterator/go/reflect_array.go b/vendor/github.com/json-iterator/go/reflect_array.go
new file mode 100644
index 0000000..13a0b7b
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_array.go
@@ -0,0 +1,104 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "unsafe"
+)
+
+func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder {
+ arrayType := typ.(*reflect2.UnsafeArrayType)
+ decoder := decoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
+ return &arrayDecoder{arrayType, decoder}
+}
+
+func encoderOfArray(ctx *ctx, typ reflect2.Type) ValEncoder {
+ arrayType := typ.(*reflect2.UnsafeArrayType)
+ if arrayType.Len() == 0 {
+ return emptyArrayEncoder{}
+ }
+ encoder := encoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
+ return &arrayEncoder{arrayType, encoder}
+}
+
+type emptyArrayEncoder struct{}
+
+func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteEmptyArray()
+}
+
+func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return true
+}
+
+type arrayEncoder struct {
+ arrayType *reflect2.UnsafeArrayType
+ elemEncoder ValEncoder
+}
+
+func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteArrayStart()
+ elemPtr := unsafe.Pointer(ptr)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ for i := 1; i < encoder.arrayType.Len(); i++ {
+ stream.WriteMore()
+ elemPtr = encoder.arrayType.UnsafeGetIndex(ptr, i)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ }
+ stream.WriteArrayEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error())
+ }
+}
+
+func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type arrayDecoder struct {
+ arrayType *reflect2.UnsafeArrayType
+ elemDecoder ValDecoder
+}
+
+func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.doDecode(ptr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error())
+ }
+}
+
+func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ arrayType := decoder.arrayType
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return
+ }
+ if c != '[' {
+ iter.ReportError("decode array", "expect [ or n, but found "+string([]byte{c}))
+ return
+ }
+ c = iter.nextToken()
+ if c == ']' {
+ return
+ }
+ iter.unreadByte()
+ elemPtr := arrayType.UnsafeGetIndex(ptr, 0)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ length := 1
+ for c = iter.nextToken(); c == ','; c = iter.nextToken() {
+ if length >= arrayType.Len() {
+ iter.Skip()
+ continue
+ }
+ idx := length
+ length += 1
+ elemPtr = arrayType.UnsafeGetIndex(ptr, idx)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ }
+ if c != ']' {
+ iter.ReportError("decode array", "expect ], but found "+string([]byte{c}))
+ return
+ }
+}
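
The arrayDecoder above fills a fixed-length Go array in place: once the array is full, extra JSON elements are skipped, and a shorter JSON array simply leaves the remaining slots untouched. A small sketch of that behavior (illustrative values only):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Longer input: the fourth element is consumed with iter.Skip().
	var a [3]int
	_ = jsoniter.Unmarshal([]byte(`[1,2,3,4]`), &a)
	fmt.Println(a) // [1 2 3]

	// Shorter input: slots that are not decoded keep their zero values.
	var b [3]int
	_ = jsoniter.Unmarshal([]byte(`[7]`), &b)
	fmt.Println(b) // [7 0 0]
}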
diff --git a/vendor/github.com/json-iterator/go/reflect_dynamic.go b/vendor/github.com/json-iterator/go/reflect_dynamic.go
new file mode 100644
index 0000000..8b6bc8b
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_dynamic.go
@@ -0,0 +1,70 @@
+package jsoniter
+
+import (
+ "github.com/modern-go/reflect2"
+ "reflect"
+ "unsafe"
+)
+
+type dynamicEncoder struct {
+ valType reflect2.Type
+}
+
+func (encoder *dynamicEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ stream.WriteVal(obj)
+}
+
+func (encoder *dynamicEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.valType.UnsafeIndirect(ptr) == nil
+}
+
+type efaceDecoder struct {
+}
+
+func (decoder *efaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ pObj := (*interface{})(ptr)
+ obj := *pObj
+ if obj == nil {
+ *pObj = iter.Read()
+ return
+ }
+ typ := reflect2.TypeOf(obj)
+ if typ.Kind() != reflect.Ptr {
+ *pObj = iter.Read()
+ return
+ }
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ ptrElemType := ptrType.Elem()
+ if iter.WhatIsNext() == NilValue {
+ if ptrElemType.Kind() != reflect.Ptr {
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *pObj = nil
+ return
+ }
+ }
+ if reflect2.IsNil(obj) {
+ obj := ptrElemType.New()
+ iter.ReadVal(obj)
+ *pObj = obj
+ return
+ }
+ iter.ReadVal(obj)
+}
+
+type ifaceDecoder struct {
+ valType *reflect2.UnsafeIFaceType
+}
+
+func (decoder *ifaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ decoder.valType.UnsafeSet(ptr, decoder.valType.UnsafeNew())
+ return
+ }
+ obj := decoder.valType.UnsafeIndirect(ptr)
+ if reflect2.IsNil(obj) {
+ iter.ReportError("decode non empty interface", "can not unmarshal into nil")
+ return
+ }
+ iter.ReadVal(obj)
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_extension.go b/vendor/github.com/json-iterator/go/reflect_extension.go
new file mode 100644
index 0000000..04f6875
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_extension.go
@@ -0,0 +1,483 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "reflect"
+ "sort"
+ "strings"
+ "unicode"
+ "unsafe"
+)
+
+var typeDecoders = map[string]ValDecoder{}
+var fieldDecoders = map[string]ValDecoder{}
+var typeEncoders = map[string]ValEncoder{}
+var fieldEncoders = map[string]ValEncoder{}
+var extensions = []Extension{}
+
+// StructDescriptor describes how the struct should be encoded/decoded
+type StructDescriptor struct {
+ Type reflect2.Type
+ Fields []*Binding
+}
+
+// GetField returns one field from the descriptor by its name.
+// A map is not used here so that field order is preserved.
+func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding {
+ for _, binding := range structDescriptor.Fields {
+ if binding.Field.Name() == fieldName {
+ return binding
+ }
+ }
+ return nil
+}
+
+// Binding describes how a struct field should be encoded/decoded
+type Binding struct {
+ levels []int
+ Field reflect2.StructField
+ FromNames []string
+ ToNames []string
+ Encoder ValEncoder
+ Decoder ValDecoder
+}
+
+// Extension is the SPI for customizing encoding/decoding by supplying an alternate encoder/decoder.
+// Fields can also be renamed via UpdateStructDescriptor.
+type Extension interface {
+ UpdateStructDescriptor(structDescriptor *StructDescriptor)
+ CreateMapKeyDecoder(typ reflect2.Type) ValDecoder
+ CreateMapKeyEncoder(typ reflect2.Type) ValEncoder
+ CreateDecoder(typ reflect2.Type) ValDecoder
+ CreateEncoder(typ reflect2.Type) ValEncoder
+ DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder
+ DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder
+}
+
+// DummyExtension can be embedded to get no-op implementations of all Extension methods
+type DummyExtension struct {
+}
+
+// UpdateStructDescriptor No-op
+func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateMapKeyDecoder No-op
+func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// CreateDecoder No-op
+func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateEncoder No-op
+func (extension *DummyExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension *DummyExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension *DummyExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+type EncoderExtension map[reflect2.Type]ValEncoder
+
+// UpdateStructDescriptor No-op
+func (extension EncoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateDecoder No-op
+func (extension EncoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateEncoder gets the encoder from the map
+func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
+ return extension[typ]
+}
+
+// CreateMapKeyDecoder No-op
+func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension EncoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+type DecoderExtension map[reflect2.Type]ValDecoder
+
+// UpdateStructDescriptor No-op
+func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateMapKeyDecoder No-op
+func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// CreateDecoder gets the decoder from the map
+func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
+ return extension[typ]
+}
+
+// CreateEncoder No-op
+func (extension DecoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension DecoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension DecoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+type funcDecoder struct {
+ fun DecoderFunc
+}
+
+func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.fun(ptr, iter)
+}
+
+type funcEncoder struct {
+ fun EncoderFunc
+ isEmptyFunc func(ptr unsafe.Pointer) bool
+}
+
+func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.fun(ptr, stream)
+}
+
+func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ if encoder.isEmptyFunc == nil {
+ return false
+ }
+ return encoder.isEmptyFunc(ptr)
+}
+
+// DecoderFunc is the function form of a TypeDecoder
+type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
+
+// EncoderFunc is the function form of a TypeEncoder
+type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
+
+// RegisterTypeDecoderFunc registers a TypeDecoder for a type using a function
+func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) {
+ typeDecoders[typ] = &funcDecoder{fun}
+}
+
+// RegisterTypeDecoder registers a TypeDecoder for a type
+func RegisterTypeDecoder(typ string, decoder ValDecoder) {
+ typeDecoders[typ] = decoder
+}
+
+// RegisterFieldDecoderFunc registers a TypeDecoder for a struct field using a function
+func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) {
+ RegisterFieldDecoder(typ, field, &funcDecoder{fun})
+}
+
+// RegisterFieldDecoder registers a TypeDecoder for a struct field
+func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) {
+ fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder
+}
+
+// RegisterTypeEncoderFunc registers a TypeEncoder for a type using encode/isEmpty functions
+func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
+ typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc}
+}
+
+// RegisterTypeEncoder registers a TypeEncoder for a type
+func RegisterTypeEncoder(typ string, encoder ValEncoder) {
+ typeEncoders[typ] = encoder
+}
+
+// RegisterFieldEncoderFunc registers a TypeEncoder for a struct field using encode/isEmpty functions
+func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
+ RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc})
+}
+
+// RegisterFieldEncoder registers a TypeEncoder for a struct field
+func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) {
+ fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder
+}
+
+// RegisterExtension registers an extension
+func RegisterExtension(extension Extension) {
+ extensions = append(extensions, extension)
+}
+
+func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := _getTypeDecoderFromExtension(ctx, typ)
+ if decoder != nil {
+ for _, extension := range extensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
+ for _, extension := range ctx.extraExtensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ }
+ return decoder
+}
+func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
+ for _, extension := range extensions {
+ decoder := extension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+ decoder := ctx.decoderExtension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ decoder := extension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+ typeName := typ.String()
+ decoder = typeDecoders[typeName]
+ if decoder != nil {
+ return decoder
+ }
+ if typ.Kind() == reflect.Ptr {
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ decoder := typeDecoders[ptrType.Elem().String()]
+ if decoder != nil {
+ return &OptionalDecoder{ptrType.Elem(), decoder}
+ }
+ }
+ return nil
+}
+
+func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := _getTypeEncoderFromExtension(ctx, typ)
+ if encoder != nil {
+ for _, extension := range extensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
+ for _, extension := range ctx.extraExtensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ }
+ return encoder
+}
+
+func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
+ for _, extension := range extensions {
+ encoder := extension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+ encoder := ctx.encoderExtension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ encoder := extension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+ typeName := typ.String()
+ encoder = typeEncoders[typeName]
+ if encoder != nil {
+ return encoder
+ }
+ if typ.Kind() == reflect.Ptr {
+ typePtr := typ.(*reflect2.UnsafePtrType)
+ encoder := typeEncoders[typePtr.Elem().String()]
+ if encoder != nil {
+ return &OptionalEncoder{encoder}
+ }
+ }
+ return nil
+}
+
+func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
+ structType := typ.(*reflect2.UnsafeStructType)
+ embeddedBindings := []*Binding{}
+ bindings := []*Binding{}
+ for i := 0; i < structType.NumField(); i++ {
+ field := structType.Field(i)
+ tag, hastag := field.Tag().Lookup(ctx.getTagKey())
+ if ctx.onlyTaggedField && !hastag {
+ continue
+ }
+ tagParts := strings.Split(tag, ",")
+ if tag == "-" {
+ continue
+ }
+ if field.Anonymous() && (tag == "" || tagParts[0] == "") {
+ if field.Type().Kind() == reflect.Struct {
+ structDescriptor := describeStruct(ctx, field.Type())
+ for _, binding := range structDescriptor.Fields {
+ binding.levels = append([]int{i}, binding.levels...)
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty
+ binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
+ binding.Decoder = &structFieldDecoder{field, binding.Decoder}
+ embeddedBindings = append(embeddedBindings, binding)
+ }
+ continue
+ } else if field.Type().Kind() == reflect.Ptr {
+ ptrType := field.Type().(*reflect2.UnsafePtrType)
+ if ptrType.Elem().Kind() == reflect.Struct {
+ structDescriptor := describeStruct(ctx, ptrType.Elem())
+ for _, binding := range structDescriptor.Fields {
+ binding.levels = append([]int{i}, binding.levels...)
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty
+ binding.Encoder = &dereferenceEncoder{binding.Encoder}
+ binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
+ binding.Decoder = &dereferenceDecoder{ptrType.Elem(), binding.Decoder}
+ binding.Decoder = &structFieldDecoder{field, binding.Decoder}
+ embeddedBindings = append(embeddedBindings, binding)
+ }
+ continue
+ }
+ }
+ }
+ fieldNames := calcFieldNames(field.Name(), tagParts[0], tag)
+ fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name())
+ decoder := fieldDecoders[fieldCacheKey]
+ if decoder == nil {
+ decoder = decoderOfType(ctx.append(field.Name()), field.Type())
+ }
+ encoder := fieldEncoders[fieldCacheKey]
+ if encoder == nil {
+ encoder = encoderOfType(ctx.append(field.Name()), field.Type())
+ }
+ binding := &Binding{
+ Field: field,
+ FromNames: fieldNames,
+ ToNames: fieldNames,
+ Decoder: decoder,
+ Encoder: encoder,
+ }
+ binding.levels = []int{i}
+ bindings = append(bindings, binding)
+ }
+ return createStructDescriptor(ctx, typ, bindings, embeddedBindings)
+}
+func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
+ structDescriptor := &StructDescriptor{
+ Type: typ,
+ Fields: bindings,
+ }
+ for _, extension := range extensions {
+ extension.UpdateStructDescriptor(structDescriptor)
+ }
+ ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
+ ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
+ for _, extension := range ctx.extraExtensions {
+ extension.UpdateStructDescriptor(structDescriptor)
+ }
+ processTags(structDescriptor, ctx.frozenConfig)
+ // merge normal & embedded bindings & sort with original order
+ allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))
+ sort.Sort(allBindings)
+ structDescriptor.Fields = allBindings
+ return structDescriptor
+}
+
+type sortableBindings []*Binding
+
+func (bindings sortableBindings) Len() int {
+ return len(bindings)
+}
+
+func (bindings sortableBindings) Less(i, j int) bool {
+ left := bindings[i].levels
+ right := bindings[j].levels
+ k := 0
+ for {
+ if left[k] < right[k] {
+ return true
+ } else if left[k] > right[k] {
+ return false
+ }
+ k++
+ }
+}
+
+func (bindings sortableBindings) Swap(i, j int) {
+ bindings[i], bindings[j] = bindings[j], bindings[i]
+}
+
+func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) {
+ for _, binding := range structDescriptor.Fields {
+ shouldOmitEmpty := false
+ tagParts := strings.Split(binding.Field.Tag().Get(cfg.getTagKey()), ",")
+ for _, tagPart := range tagParts[1:] {
+ if tagPart == "omitempty" {
+ shouldOmitEmpty = true
+ } else if tagPart == "string" {
+ if binding.Field.Type().Kind() == reflect.String {
+ binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg}
+ binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg}
+ } else {
+ binding.Decoder = &stringModeNumberDecoder{binding.Decoder}
+ binding.Encoder = &stringModeNumberEncoder{binding.Encoder}
+ }
+ }
+ }
+ binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder}
+ binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty}
+ }
+}
+
+func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
+ // ignore?
+ if wholeTag == "-" {
+ return []string{}
+ }
+ // rename?
+ var fieldNames []string
+ if tagProvidedFieldName == "" {
+ fieldNames = []string{originalFieldName}
+ } else {
+ fieldNames = []string{tagProvidedFieldName}
+ }
+ // private?
+ isNotExported := unicode.IsLower(rune(originalFieldName[0]))
+ if isNotExported {
+ fieldNames = []string{}
+ }
+ return fieldNames
+}
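
The Register* helpers above are the public hooks for wiring in a custom codec without implementing a full Extension; the registry key is the type name as produced by typ.String(). A hedged sketch of a type-level decoder (the upperString type and the "main.upperString" key are illustrative assumptions):

package main

import (
	"fmt"
	"strings"
	"unsafe"

	jsoniter "github.com/json-iterator/go"
)

// upperString is a local type whose registered decoder upper-cases input strings.
type upperString string

func main() {
	// The key is the fully qualified type name reported by typ.String().
	jsoniter.RegisterTypeDecoderFunc("main.upperString",
		func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
			*(*upperString)(ptr) = upperString(strings.ToUpper(iter.ReadString()))
		})

	var s upperString
	if err := jsoniter.Unmarshal([]byte(`"hello"`), &s); err != nil {
		panic(err)
	}
	fmt.Println(s) // HELLO
}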
diff --git a/vendor/github.com/json-iterator/go/reflect_json_number.go b/vendor/github.com/json-iterator/go/reflect_json_number.go
new file mode 100644
index 0000000..98d45c1
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_json_number.go
@@ -0,0 +1,112 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "github.com/modern-go/reflect2"
+ "strconv"
+ "unsafe"
+)
+
+type Number string
+
+// String returns the literal text of the number.
+func (n Number) String() string { return string(n) }
+
+// Float64 returns the number as a float64.
+func (n Number) Float64() (float64, error) {
+ return strconv.ParseFloat(string(n), 64)
+}
+
+// Int64 returns the number as an int64.
+func (n Number) Int64() (int64, error) {
+ return strconv.ParseInt(string(n), 10, 64)
+}
+
+func CastJsonNumber(val interface{}) (string, bool) {
+ switch typedVal := val.(type) {
+ case json.Number:
+ return string(typedVal), true
+ case Number:
+ return string(typedVal), true
+ }
+ return "", false
+}
+
+var jsonNumberType = reflect2.TypeOfPtr((*json.Number)(nil)).Elem()
+var jsoniterNumberType = reflect2.TypeOfPtr((*Number)(nil)).Elem()
+
+func createDecoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ.AssignableTo(jsonNumberType) {
+ return &jsonNumberCodec{}
+ }
+ if typ.AssignableTo(jsoniterNumberType) {
+ return &jsoniterNumberCodec{}
+ }
+ return nil
+}
+
+func createEncoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ.AssignableTo(jsonNumberType) {
+ return &jsonNumberCodec{}
+ }
+ if typ.AssignableTo(jsoniterNumberType) {
+ return &jsoniterNumberCodec{}
+ }
+ return nil
+}
+
+type jsonNumberCodec struct {
+}
+
+func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ switch iter.WhatIsNext() {
+ case StringValue:
+ *((*json.Number)(ptr)) = json.Number(iter.ReadString())
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *((*json.Number)(ptr)) = ""
+ default:
+ *((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
+ }
+}
+
+func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ number := *((*json.Number)(ptr))
+ if len(number) == 0 {
+ stream.writeByte('0')
+ } else {
+ stream.WriteRaw(string(number))
+ }
+}
+
+func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*json.Number)(ptr))) == 0
+}
+
+type jsoniterNumberCodec struct {
+}
+
+func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ switch iter.WhatIsNext() {
+ case StringValue:
+ *((*Number)(ptr)) = Number(iter.ReadString())
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *((*Number)(ptr)) = ""
+ default:
+ *((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
+ }
+}
+
+func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ number := *((*Number)(ptr))
+ if len(number) == 0 {
+ stream.writeByte('0')
+ } else {
+ stream.WriteRaw(string(number))
+ }
+}
+
+func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*Number)(ptr))) == 0
+}
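
The codecs above handle values declared as json.Number or the package's own Number type. Related is the UseNumber config option, under which untyped numbers decoded into interface{} are kept as json.Number instead of being converted to float64, preserving the literal text. A sketch (assuming the library's public Config/Froze API):

package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	api := jsoniter.Config{UseNumber: true}.Froze()

	var v interface{}
	if err := api.Unmarshal([]byte(`{"id": 9007199254740993}`), &v); err != nil {
		panic(err)
	}

	// The value is a json.Number, so large integers keep full precision.
	id := v.(map[string]interface{})["id"].(json.Number)
	n, _ := id.Int64()
	fmt.Println(id.String(), n) // 9007199254740993 9007199254740993
}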
diff --git a/vendor/github.com/json-iterator/go/reflect_json_raw_message.go b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go
new file mode 100644
index 0000000..f261993
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go
@@ -0,0 +1,60 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "github.com/modern-go/reflect2"
+ "unsafe"
+)
+
+var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()
+var jsoniterRawMessageType = reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()
+
+func createEncoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ == jsonRawMessageType {
+ return &jsonRawMessageCodec{}
+ }
+ if typ == jsoniterRawMessageType {
+ return &jsoniterRawMessageCodec{}
+ }
+ return nil
+}
+
+func createDecoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ == jsonRawMessageType {
+ return &jsonRawMessageCodec{}
+ }
+ if typ == jsoniterRawMessageType {
+ return &jsoniterRawMessageCodec{}
+ }
+ return nil
+}
+
+type jsonRawMessageCodec struct {
+}
+
+func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
+}
+
+func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
+}
+
+func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*json.RawMessage)(ptr))) == 0
+}
+
+type jsoniterRawMessageCodec struct {
+}
+
+func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
+}
+
+func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteRaw(string(*((*RawMessage)(ptr))))
+}
+
+func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*RawMessage)(ptr))) == 0
+}
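
The raw-message codecs pass json.RawMessage (and the package's own RawMessage) bytes through untouched, which is useful for deferring part of a payload until a discriminator field has been read. A sketch (envelope is an illustrative type):

package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type envelope struct {
	Kind    string          `json:"kind"`
	Payload json.RawMessage `json:"payload"`
}

func main() {
	in := []byte(`{"kind":"user","payload":{"name":"bob"}}`)

	var env envelope
	if err := jsoniter.Unmarshal(in, &env); err != nil {
		panic(err)
	}
	// Payload is kept verbatim and can be decoded later, once Kind is known.
	fmt.Println(env.Kind, string(env.Payload)) // user {"name":"bob"}

	out, _ := jsoniter.Marshal(env)
	fmt.Println(string(out))
}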
diff --git a/vendor/github.com/json-iterator/go/reflect_map.go b/vendor/github.com/json-iterator/go/reflect_map.go
new file mode 100644
index 0000000..7f66a88
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_map.go
@@ -0,0 +1,326 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "reflect"
+ "sort"
+ "unsafe"
+)
+
+func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder {
+ mapType := typ.(*reflect2.UnsafeMapType)
+ keyDecoder := decoderOfMapKey(ctx.append("[mapKey]"), mapType.Key())
+ elemDecoder := decoderOfType(ctx.append("[mapElem]"), mapType.Elem())
+ return &mapDecoder{
+ mapType: mapType,
+ keyType: mapType.Key(),
+ elemType: mapType.Elem(),
+ keyDecoder: keyDecoder,
+ elemDecoder: elemDecoder,
+ }
+}
+
+func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
+ mapType := typ.(*reflect2.UnsafeMapType)
+ if ctx.sortMapKeys {
+ return &sortKeysMapEncoder{
+ mapType: mapType,
+ keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
+ elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
+ }
+ }
+ return &mapEncoder{
+ mapType: mapType,
+ keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
+ elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
+ }
+}
+
+func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ decoder := extension.CreateMapKeyDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+ switch typ.Kind() {
+ case reflect.String:
+ return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
+ case reflect.Bool,
+ reflect.Uint8, reflect.Int8,
+ reflect.Uint16, reflect.Int16,
+ reflect.Uint32, reflect.Int32,
+ reflect.Uint64, reflect.Int64,
+ reflect.Uint, reflect.Int,
+ reflect.Float32, reflect.Float64,
+ reflect.Uintptr:
+ typ = reflect2.DefaultTypeOfKind(typ.Kind())
+ return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
+ default:
+ ptrType := reflect2.PtrTo(typ)
+ if ptrType.Implements(textMarshalerType) {
+ return &referenceDecoder{
+ &textUnmarshalerDecoder{
+ valType: ptrType,
+ },
+ }
+ }
+ if typ.Implements(textMarshalerType) {
+ return &textUnmarshalerDecoder{
+ valType: typ,
+ }
+ }
+ return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
+ }
+}
+
+func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ encoder := extension.CreateMapKeyEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+ switch typ.Kind() {
+ case reflect.String:
+ return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
+ case reflect.Bool,
+ reflect.Uint8, reflect.Int8,
+ reflect.Uint16, reflect.Int16,
+ reflect.Uint32, reflect.Int32,
+ reflect.Uint64, reflect.Int64,
+ reflect.Uint, reflect.Int,
+ reflect.Float32, reflect.Float64,
+ reflect.Uintptr:
+ typ = reflect2.DefaultTypeOfKind(typ.Kind())
+ return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
+ default:
+ if typ == textMarshalerType {
+ return &directTextMarshalerEncoder{
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ }
+ }
+ if typ.Implements(textMarshalerType) {
+ return &textMarshalerEncoder{
+ valType: typ,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ }
+ }
+ if typ.Kind() == reflect.Interface {
+ return &dynamicMapKeyEncoder{ctx, typ}
+ }
+ return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
+ }
+}
+
+type mapDecoder struct {
+ mapType *reflect2.UnsafeMapType
+ keyType reflect2.Type
+ elemType reflect2.Type
+ keyDecoder ValDecoder
+ elemDecoder ValDecoder
+}
+
+func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ mapType := decoder.mapType
+ c := iter.nextToken()
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ *(*unsafe.Pointer)(ptr) = nil
+ mapType.UnsafeSet(ptr, mapType.UnsafeNew())
+ return
+ }
+ if mapType.UnsafeIsNil(ptr) {
+ mapType.UnsafeSet(ptr, mapType.UnsafeMakeMap(0))
+ }
+ if c != '{' {
+ iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
+ return
+ }
+ c = iter.nextToken()
+ if c == '}' {
+ return
+ }
+ if c != '"' {
+ iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
+ return
+ }
+ iter.unreadByte()
+ key := decoder.keyType.UnsafeNew()
+ decoder.keyDecoder.Decode(key, iter)
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return
+ }
+ elem := decoder.elemType.UnsafeNew()
+ decoder.elemDecoder.Decode(elem, iter)
+ decoder.mapType.UnsafeSetIndex(ptr, key, elem)
+ for c = iter.nextToken(); c == ','; c = iter.nextToken() {
+ key := decoder.keyType.UnsafeNew()
+ decoder.keyDecoder.Decode(key, iter)
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return
+ }
+ elem := decoder.elemType.UnsafeNew()
+ decoder.elemDecoder.Decode(elem, iter)
+ decoder.mapType.UnsafeSetIndex(ptr, key, elem)
+ }
+ if c != '}' {
+ iter.ReportError("ReadMapCB", `expect }, but found `+string([]byte{c}))
+ }
+}
+
+type numericMapKeyDecoder struct {
+ decoder ValDecoder
+}
+
+func (decoder *numericMapKeyDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
+ return
+ }
+ decoder.decoder.Decode(ptr, iter)
+ c = iter.nextToken()
+ if c != '"' {
+ iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
+ return
+ }
+}
+
+type numericMapKeyEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *numericMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.writeByte('"')
+ encoder.encoder.Encode(ptr, stream)
+ stream.writeByte('"')
+}
+
+func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type dynamicMapKeyEncoder struct {
+ ctx *ctx
+ valType reflect2.Type
+}
+
+func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream)
+}
+
+func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj))
+}
+
+type mapEncoder struct {
+ mapType *reflect2.UnsafeMapType
+ keyEncoder ValEncoder
+ elemEncoder ValEncoder
+}
+
+func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteObjectStart()
+ iter := encoder.mapType.UnsafeIterate(ptr)
+ for i := 0; iter.HasNext(); i++ {
+ if i != 0 {
+ stream.WriteMore()
+ }
+ key, elem := iter.UnsafeNext()
+ encoder.keyEncoder.Encode(key, stream)
+ if stream.indention > 0 {
+ stream.writeTwoBytes(byte(':'), byte(' '))
+ } else {
+ stream.writeByte(':')
+ }
+ encoder.elemEncoder.Encode(elem, stream)
+ }
+ stream.WriteObjectEnd()
+}
+
+func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ iter := encoder.mapType.UnsafeIterate(ptr)
+ return !iter.HasNext()
+}
+
+type sortKeysMapEncoder struct {
+ mapType *reflect2.UnsafeMapType
+ keyEncoder ValEncoder
+ elemEncoder ValEncoder
+}
+
+func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *(*unsafe.Pointer)(ptr) == nil {
+ stream.WriteNil()
+ return
+ }
+ stream.WriteObjectStart()
+ mapIter := encoder.mapType.UnsafeIterate(ptr)
+ subStream := stream.cfg.BorrowStream(nil)
+ subIter := stream.cfg.BorrowIterator(nil)
+ keyValues := encodedKeyValues{}
+ for mapIter.HasNext() {
+ subStream.buf = make([]byte, 0, 64)
+ key, elem := mapIter.UnsafeNext()
+ encoder.keyEncoder.Encode(key, subStream)
+ if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
+ stream.Error = subStream.Error
+ }
+ encodedKey := subStream.Buffer()
+ subIter.ResetBytes(encodedKey)
+ decodedKey := subIter.ReadString()
+ if stream.indention > 0 {
+ subStream.writeTwoBytes(byte(':'), byte(' '))
+ } else {
+ subStream.writeByte(':')
+ }
+ encoder.elemEncoder.Encode(elem, subStream)
+ keyValues = append(keyValues, encodedKV{
+ key: decodedKey,
+ keyValue: subStream.Buffer(),
+ })
+ }
+ sort.Sort(keyValues)
+ for i, keyValue := range keyValues {
+ if i != 0 {
+ stream.WriteMore()
+ }
+ stream.Write(keyValue.keyValue)
+ }
+ stream.WriteObjectEnd()
+ stream.cfg.ReturnStream(subStream)
+ stream.cfg.ReturnIterator(subIter)
+}
+
+func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ iter := encoder.mapType.UnsafeIterate(ptr)
+ return !iter.HasNext()
+}
+
+type encodedKeyValues []encodedKV
+
+type encodedKV struct {
+ key string
+ keyValue []byte
+}
+
+func (sv encodedKeyValues) Len() int { return len(sv) }
+func (sv encodedKeyValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
+func (sv encodedKeyValues) Less(i, j int) bool { return sv[i].key < sv[j].key }
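
Whether mapEncoder or sortKeysMapEncoder is used depends on the SortMapKeys setting of the frozen config; with sorting enabled, each key/value pair is rendered to a scratch stream, ordered by the decoded key, and then written out. A sketch comparing the two (ConfigFastest is the library's preset with sorting disabled):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	m := map[string]int{"b": 2, "a": 1, "c": 3}

	sorted := jsoniter.Config{SortMapKeys: true}.Froze()
	out, _ := sorted.Marshal(m)
	fmt.Println(string(out)) // {"a":1,"b":2,"c":3}

	// Without SortMapKeys the iteration order, and hence the output, is unspecified.
	fast, _ := jsoniter.ConfigFastest.Marshal(m)
	fmt.Println(string(fast))
}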
diff --git a/vendor/github.com/json-iterator/go/reflect_marshaler.go b/vendor/github.com/json-iterator/go/reflect_marshaler.go
new file mode 100644
index 0000000..58ac959
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_marshaler.go
@@ -0,0 +1,218 @@
+package jsoniter
+
+import (
+ "encoding"
+ "encoding/json"
+ "github.com/modern-go/reflect2"
+ "unsafe"
+)
+
+var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
+var unmarshalerType = reflect2.TypeOfPtr((*json.Unmarshaler)(nil)).Elem()
+var textMarshalerType = reflect2.TypeOfPtr((*encoding.TextMarshaler)(nil)).Elem()
+var textUnmarshalerType = reflect2.TypeOfPtr((*encoding.TextUnmarshaler)(nil)).Elem()
+
+func createDecoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValDecoder {
+ ptrType := reflect2.PtrTo(typ)
+ if ptrType.Implements(unmarshalerType) {
+ return &referenceDecoder{
+ &unmarshalerDecoder{ptrType},
+ }
+ }
+ if ptrType.Implements(textUnmarshalerType) {
+ return &referenceDecoder{
+ &textUnmarshalerDecoder{ptrType},
+ }
+ }
+ return nil
+}
+
+func createEncoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ == marshalerType {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &directMarshalerEncoder{
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder
+ }
+ if typ.Implements(marshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &marshalerEncoder{
+ valType: typ,
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder
+ }
+ ptrType := reflect2.PtrTo(typ)
+ if ctx.prefix != "" && ptrType.Implements(marshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
+ var encoder ValEncoder = &marshalerEncoder{
+ valType: ptrType,
+ checkIsEmpty: checkIsEmpty,
+ }
+ return &referenceEncoder{encoder}
+ }
+ if typ == textMarshalerType {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &directTextMarshalerEncoder{
+ checkIsEmpty: checkIsEmpty,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ }
+ return encoder
+ }
+ if typ.Implements(textMarshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &textMarshalerEncoder{
+ valType: typ,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder
+ }
+ // if prefix is empty, the type is the root type
+ if ctx.prefix != "" && ptrType.Implements(textMarshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
+ var encoder ValEncoder = &textMarshalerEncoder{
+ valType: ptrType,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ checkIsEmpty: checkIsEmpty,
+ }
+ return &referenceEncoder{encoder}
+ }
+ return nil
+}
+
+type marshalerEncoder struct {
+ checkIsEmpty checkIsEmpty
+ valType reflect2.Type
+}
+
+func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
+ stream.WriteNil()
+ return
+ }
+ marshaler := obj.(json.Marshaler)
+ bytes, err := marshaler.MarshalJSON()
+ if err != nil {
+ stream.Error = err
+ } else {
+ stream.Write(bytes)
+ }
+}
+
+func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type directMarshalerEncoder struct {
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *directMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ marshaler := *(*json.Marshaler)(ptr)
+ if marshaler == nil {
+ stream.WriteNil()
+ return
+ }
+ bytes, err := marshaler.MarshalJSON()
+ if err != nil {
+ stream.Error = err
+ } else {
+ stream.Write(bytes)
+ }
+}
+
+func (encoder *directMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type textMarshalerEncoder struct {
+ valType reflect2.Type
+ stringEncoder ValEncoder
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
+ stream.WriteNil()
+ return
+ }
+ marshaler := (obj).(encoding.TextMarshaler)
+ bytes, err := marshaler.MarshalText()
+ if err != nil {
+ stream.Error = err
+ } else {
+ str := string(bytes)
+ encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
+ }
+}
+
+func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type directTextMarshalerEncoder struct {
+ stringEncoder ValEncoder
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *directTextMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ marshaler := *(*encoding.TextMarshaler)(ptr)
+ if marshaler == nil {
+ stream.WriteNil()
+ return
+ }
+ bytes, err := marshaler.MarshalText()
+ if err != nil {
+ stream.Error = err
+ } else {
+ str := string(bytes)
+ encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
+ }
+}
+
+func (encoder *directTextMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type unmarshalerDecoder struct {
+ valType reflect2.Type
+}
+
+func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valType := decoder.valType
+ obj := valType.UnsafeIndirect(ptr)
+ unmarshaler := obj.(json.Unmarshaler)
+ iter.nextToken()
+ iter.unreadByte() // skip spaces
+ bytes := iter.SkipAndReturnBytes()
+ err := unmarshaler.UnmarshalJSON(bytes)
+ if err != nil {
+ iter.ReportError("unmarshalerDecoder", err.Error())
+ }
+}
+
+type textUnmarshalerDecoder struct {
+ valType reflect2.Type
+}
+
+func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valType := decoder.valType
+ obj := valType.UnsafeIndirect(ptr)
+ if reflect2.IsNil(obj) {
+ ptrType := valType.(*reflect2.UnsafePtrType)
+ elemType := ptrType.Elem()
+ elem := elemType.UnsafeNew()
+ ptrType.UnsafeSet(ptr, unsafe.Pointer(&elem))
+ obj = valType.UnsafeIndirect(ptr)
+ }
+ unmarshaler := (obj).(encoding.TextUnmarshaler)
+ str := iter.ReadString()
+ err := unmarshaler.UnmarshalText([]byte(str))
+ if err != nil {
+ iter.ReportError("textUnmarshalerDecoder", err.Error())
+ }
+}
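
These marshaler codecs make types that implement json.Marshaler/Unmarshaler or encoding.TextMarshaler/TextUnmarshaler behave under this library as they do under encoding/json. A sketch with a hypothetical TextMarshaler type:

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// csv encodes itself as a single comma-joined JSON string.
type csv []string

func (c csv) MarshalText() ([]byte, error)  { return []byte(strings.Join(c, ",")), nil }
func (c *csv) UnmarshalText(b []byte) error { *c = strings.Split(string(b), ","); return nil }

func main() {
	out, _ := jsoniter.Marshal(csv{"a", "b", "c"})
	fmt.Println(string(out)) // "a,b,c"

	var c csv
	_ = jsoniter.Unmarshal([]byte(`"x,y"`), &c)
	fmt.Println(c) // [x y]
}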
diff --git a/vendor/github.com/json-iterator/go/reflect_native.go b/vendor/github.com/json-iterator/go/reflect_native.go
new file mode 100644
index 0000000..9042eb0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_native.go
@@ -0,0 +1,451 @@
+package jsoniter
+
+import (
+ "encoding/base64"
+ "reflect"
+ "strconv"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
+const ptrSize = 32 << uintptr(^uintptr(0)>>63)
+
+func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
+ sliceDecoder := decoderOfSlice(ctx, typ)
+ return &base64Codec{sliceDecoder: sliceDecoder}
+ }
+ typeName := typ.String()
+ kind := typ.Kind()
+ switch kind {
+ case reflect.String:
+ if typeName != "string" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
+ }
+ return &stringCodec{}
+ case reflect.Int:
+ if typeName != "int" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &int32Codec{}
+ }
+ return &int64Codec{}
+ case reflect.Int8:
+ if typeName != "int8" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
+ }
+ return &int8Codec{}
+ case reflect.Int16:
+ if typeName != "int16" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
+ }
+ return &int16Codec{}
+ case reflect.Int32:
+ if typeName != "int32" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
+ }
+ return &int32Codec{}
+ case reflect.Int64:
+ if typeName != "int64" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
+ }
+ return &int64Codec{}
+ case reflect.Uint:
+ if typeName != "uint" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint8:
+ if typeName != "uint8" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
+ }
+ return &uint8Codec{}
+ case reflect.Uint16:
+ if typeName != "uint16" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
+ }
+ return &uint16Codec{}
+ case reflect.Uint32:
+ if typeName != "uint32" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
+ }
+ return &uint32Codec{}
+ case reflect.Uintptr:
+ if typeName != "uintptr" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
+ }
+ if ptrSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint64:
+ if typeName != "uint64" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
+ }
+ return &uint64Codec{}
+ case reflect.Float32:
+ if typeName != "float32" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
+ }
+ return &float32Codec{}
+ case reflect.Float64:
+ if typeName != "float64" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
+ }
+ return &float64Codec{}
+ case reflect.Bool:
+ if typeName != "bool" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
+ }
+ return &boolCodec{}
+ }
+ return nil
+}
+
+func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
+ sliceDecoder := decoderOfSlice(ctx, typ)
+ return &base64Codec{sliceDecoder: sliceDecoder}
+ }
+ typeName := typ.String()
+ switch typ.Kind() {
+ case reflect.String:
+ if typeName != "string" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
+ }
+ return &stringCodec{}
+ case reflect.Int:
+ if typeName != "int" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &int32Codec{}
+ }
+ return &int64Codec{}
+ case reflect.Int8:
+ if typeName != "int8" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
+ }
+ return &int8Codec{}
+ case reflect.Int16:
+ if typeName != "int16" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
+ }
+ return &int16Codec{}
+ case reflect.Int32:
+ if typeName != "int32" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
+ }
+ return &int32Codec{}
+ case reflect.Int64:
+ if typeName != "int64" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
+ }
+ return &int64Codec{}
+ case reflect.Uint:
+ if typeName != "uint" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint8:
+ if typeName != "uint8" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
+ }
+ return &uint8Codec{}
+ case reflect.Uint16:
+ if typeName != "uint16" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
+ }
+ return &uint16Codec{}
+ case reflect.Uint32:
+ if typeName != "uint32" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
+ }
+ return &uint32Codec{}
+ case reflect.Uintptr:
+ if typeName != "uintptr" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
+ }
+ if ptrSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint64:
+ if typeName != "uint64" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
+ }
+ return &uint64Codec{}
+ case reflect.Float32:
+ if typeName != "float32" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
+ }
+ return &float32Codec{}
+ case reflect.Float64:
+ if typeName != "float64" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
+ }
+ return &float64Codec{}
+ case reflect.Bool:
+ if typeName != "bool" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
+ }
+ return &boolCodec{}
+ }
+ return nil
+}
+
+type stringCodec struct {
+}
+
+func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*string)(ptr)) = iter.ReadString()
+}
+
+func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ str := *((*string)(ptr))
+ stream.WriteString(str)
+}
+
+func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*string)(ptr)) == ""
+}
+
+type int8Codec struct {
+}
+
+func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int8)(ptr)) = iter.ReadInt8()
+ }
+}
+
+func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt8(*((*int8)(ptr)))
+}
+
+func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int8)(ptr)) == 0
+}
+
+type int16Codec struct {
+}
+
+func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int16)(ptr)) = iter.ReadInt16()
+ }
+}
+
+func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt16(*((*int16)(ptr)))
+}
+
+func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int16)(ptr)) == 0
+}
+
+type int32Codec struct {
+}
+
+func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int32)(ptr)) = iter.ReadInt32()
+ }
+}
+
+func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt32(*((*int32)(ptr)))
+}
+
+func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int32)(ptr)) == 0
+}
+
+type int64Codec struct {
+}
+
+func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int64)(ptr)) = iter.ReadInt64()
+ }
+}
+
+func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt64(*((*int64)(ptr)))
+}
+
+func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int64)(ptr)) == 0
+}
+
+type uint8Codec struct {
+}
+
+func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint8)(ptr)) = iter.ReadUint8()
+ }
+}
+
+func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint8(*((*uint8)(ptr)))
+}
+
+func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint8)(ptr)) == 0
+}
+
+type uint16Codec struct {
+}
+
+func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint16)(ptr)) = iter.ReadUint16()
+ }
+}
+
+func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint16(*((*uint16)(ptr)))
+}
+
+func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint16)(ptr)) == 0
+}
+
+type uint32Codec struct {
+}
+
+func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint32)(ptr)) = iter.ReadUint32()
+ }
+}
+
+func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint32(*((*uint32)(ptr)))
+}
+
+func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint32)(ptr)) == 0
+}
+
+type uint64Codec struct {
+}
+
+func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint64)(ptr)) = iter.ReadUint64()
+ }
+}
+
+func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint64(*((*uint64)(ptr)))
+}
+
+func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint64)(ptr)) == 0
+}
+
+type float32Codec struct {
+}
+
+func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*float32)(ptr)) = iter.ReadFloat32()
+ }
+}
+
+func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat32(*((*float32)(ptr)))
+}
+
+func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float32)(ptr)) == 0
+}
+
+type float64Codec struct {
+}
+
+func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*float64)(ptr)) = iter.ReadFloat64()
+ }
+}
+
+func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat64(*((*float64)(ptr)))
+}
+
+func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float64)(ptr)) == 0
+}
+
+type boolCodec struct {
+}
+
+func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*bool)(ptr)) = iter.ReadBool()
+ }
+}
+
+func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteBool(*((*bool)(ptr)))
+}
+
+func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return !(*((*bool)(ptr)))
+}
+
+type base64Codec struct {
+ sliceType *reflect2.UnsafeSliceType
+ sliceDecoder ValDecoder
+}
+
+func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ codec.sliceType.UnsafeSetNil(ptr)
+ return
+ }
+ switch iter.WhatIsNext() {
+ case StringValue:
+ src := iter.ReadString()
+ dst, err := base64.StdEncoding.DecodeString(src)
+ if err != nil {
+ iter.ReportError("decode base64", err.Error())
+ } else {
+ codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
+ }
+ case ArrayValue:
+ codec.sliceDecoder.Decode(ptr, iter)
+ default:
+ iter.ReportError("base64Codec", "invalid input")
+ }
+}
+
+func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ src := *((*[]byte)(ptr))
+ if len(src) == 0 {
+ stream.WriteNil()
+ return
+ }
+ encoding := base64.StdEncoding
+ stream.writeByte('"')
+ size := encoding.EncodedLen(len(src))
+ buf := make([]byte, size)
+ encoding.Encode(buf, src)
+ stream.buf = append(stream.buf, buf...)
+ stream.writeByte('"')
+}
+
+func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*[]byte)(ptr))) == 0
+}
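
As with encoding/json, the base64Codec above means a []byte value round-trips as a base64 string, while a plain JSON array of numbers is still accepted on decode via the fallback slice decoder. A sketch:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	out, _ := jsoniter.Marshal([]byte("hi"))
	fmt.Println(string(out)) // "aGk="

	var b []byte
	_ = jsoniter.Unmarshal([]byte(`"aGk="`), &b)
	fmt.Println(string(b)) // hi

	// The array form is routed to the regular slice decoder.
	_ = jsoniter.Unmarshal([]byte(`[104,105]`), &b)
	fmt.Println(string(b)) // hi
}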
diff --git a/vendor/github.com/json-iterator/go/reflect_optional.go b/vendor/github.com/json-iterator/go/reflect_optional.go
new file mode 100644
index 0000000..43ec71d
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_optional.go
@@ -0,0 +1,133 @@
+package jsoniter
+
+import (
+ "github.com/modern-go/reflect2"
+ "reflect"
+ "unsafe"
+)
+
+func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ elemType := ptrType.Elem()
+ decoder := decoderOfType(ctx, elemType)
+ if ctx.prefix == "" && elemType.Kind() == reflect.Ptr {
+ return &dereferenceDecoder{elemType, decoder}
+ }
+ return &OptionalDecoder{elemType, decoder}
+}
+
+func encoderOfOptional(ctx *ctx, typ reflect2.Type) ValEncoder {
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ elemType := ptrType.Elem()
+ elemEncoder := encoderOfType(ctx, elemType)
+ encoder := &OptionalEncoder{elemEncoder}
+ return encoder
+}
+
+type OptionalDecoder struct {
+ ValueType reflect2.Type
+ ValueDecoder ValDecoder
+}
+
+func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ *((*unsafe.Pointer)(ptr)) = nil
+ } else {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ // the pointer is nil, so allocate memory to hold the decoded value
+ newPtr := decoder.ValueType.UnsafeNew()
+ decoder.ValueDecoder.Decode(newPtr, iter)
+ *((*unsafe.Pointer)(ptr)) = newPtr
+ } else {
+ //reuse existing instance
+ decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
+ }
+ }
+}
+
+type dereferenceDecoder struct {
+ // only used to dereference a pointer
+ valueType reflect2.Type
+ valueDecoder ValDecoder
+}
+
+func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ // the pointer is nil, so allocate memory to hold the decoded value
+ newPtr := decoder.valueType.UnsafeNew()
+ decoder.valueDecoder.Decode(newPtr, iter)
+ *((*unsafe.Pointer)(ptr)) = newPtr
+ } else {
+ //reuse existing instance
+ decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
+ }
+}
+
+type OptionalEncoder struct {
+ ValueEncoder ValEncoder
+}
+
+func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
+ }
+}
+
+func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*unsafe.Pointer)(ptr)) == nil
+}
+
+type dereferenceEncoder struct {
+ ValueEncoder ValEncoder
+}
+
+func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
+ }
+}
+
+func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ dePtr := *((*unsafe.Pointer)(ptr))
+ if dePtr == nil {
+ return true
+ }
+ return encoder.ValueEncoder.IsEmpty(dePtr)
+}
+
+func (encoder *dereferenceEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
+ deReferenced := *((*unsafe.Pointer)(ptr))
+ if deReferenced == nil {
+ return true
+ }
+ isEmbeddedPtrNil, converted := encoder.ValueEncoder.(IsEmbeddedPtrNil)
+ if !converted {
+ return false
+ }
+ fieldPtr := unsafe.Pointer(deReferenced)
+ return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
+}
+
+type referenceEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *referenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
+}
+
+func (encoder *referenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
+}
+
+type referenceDecoder struct {
+ decoder ValDecoder
+}
+
+func (decoder *referenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.decoder.Decode(unsafe.Pointer(&ptr), iter)
+}
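The optional codecs above back pointer fields: OptionalEncoder writes null for a nil pointer, and OptionalDecoder allocates a value when decoding into a nil pointer or reuses the existing one. A hedged usage sketch, again assuming the package's standard Marshal/Unmarshal entry points:

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    type Profile struct {
        Age *int `json:"age"`
    }

    func main() {
        // nil pointer -> OptionalEncoder writes null: {"age":null}
        out, _ := jsoniter.Marshal(Profile{})
        fmt.Println(string(out))

        // Decoding into a nil pointer allocates a new value (OptionalDecoder).
        var p Profile
        _ = jsoniter.Unmarshal([]byte(`{"age":42}`), &p)
        fmt.Println(*p.Age) // 42
    }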
diff --git a/vendor/github.com/json-iterator/go/reflect_slice.go b/vendor/github.com/json-iterator/go/reflect_slice.go
new file mode 100644
index 0000000..9441d79
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_slice.go
@@ -0,0 +1,99 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "unsafe"
+)
+
+func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder {
+ sliceType := typ.(*reflect2.UnsafeSliceType)
+ decoder := decoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
+ return &sliceDecoder{sliceType, decoder}
+}
+
+func encoderOfSlice(ctx *ctx, typ reflect2.Type) ValEncoder {
+ sliceType := typ.(*reflect2.UnsafeSliceType)
+ encoder := encoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
+ return &sliceEncoder{sliceType, encoder}
+}
+
+type sliceEncoder struct {
+ sliceType *reflect2.UnsafeSliceType
+ elemEncoder ValEncoder
+}
+
+func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if encoder.sliceType.UnsafeIsNil(ptr) {
+ stream.WriteNil()
+ return
+ }
+ length := encoder.sliceType.UnsafeLengthOf(ptr)
+ if length == 0 {
+ stream.WriteEmptyArray()
+ return
+ }
+ stream.WriteArrayStart()
+ encoder.elemEncoder.Encode(encoder.sliceType.UnsafeGetIndex(ptr, 0), stream)
+ for i := 1; i < length; i++ {
+ stream.WriteMore()
+ elemPtr := encoder.sliceType.UnsafeGetIndex(ptr, i)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ }
+ stream.WriteArrayEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
+ }
+}
+
+func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.sliceType.UnsafeLengthOf(ptr) == 0
+}
+
+type sliceDecoder struct {
+ sliceType *reflect2.UnsafeSliceType
+ elemDecoder ValDecoder
+}
+
+func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.doDecode(ptr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
+ }
+}
+
+func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ sliceType := decoder.sliceType
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ sliceType.UnsafeSetNil(ptr)
+ return
+ }
+ if c != '[' {
+ iter.ReportError("decode slice", "expect [ or n, but found "+string([]byte{c}))
+ return
+ }
+ c = iter.nextToken()
+ if c == ']' {
+ sliceType.UnsafeSet(ptr, sliceType.UnsafeMakeSlice(0, 0))
+ return
+ }
+ iter.unreadByte()
+ sliceType.UnsafeGrow(ptr, 1)
+ elemPtr := sliceType.UnsafeGetIndex(ptr, 0)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ length := 1
+ for c = iter.nextToken(); c == ','; c = iter.nextToken() {
+ idx := length
+ length += 1
+ sliceType.UnsafeGrow(ptr, length)
+ elemPtr = sliceType.UnsafeGetIndex(ptr, idx)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ }
+ if c != ']' {
+ iter.ReportError("decode slice", "expect ], but found "+string([]byte{c}))
+ return
+ }
+}
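sliceDecoder distinguishes null from []: null sets the slice to nil via UnsafeSetNil, while [] produces an empty, non-nil slice; otherwise elements are decoded one at a time as the slice is grown. A small illustration (the public Unmarshal entry point is assumed):

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    func main() {
        var a, b []int

        // "null" -> the slice is set to nil.
        _ = jsoniter.Unmarshal([]byte(`null`), &a)
        fmt.Println(a == nil) // true

        // "[]" -> empty but non-nil slice.
        _ = jsoniter.Unmarshal([]byte(`[]`), &b)
        fmt.Println(b == nil, len(b)) // false 0

        // Elements are decoded one by one while the slice grows.
        _ = jsoniter.Unmarshal([]byte(`[1,2,3]`), &b)
        fmt.Println(b) // [1 2 3]
    }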
diff --git a/vendor/github.com/json-iterator/go/reflect_struct_decoder.go b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go
new file mode 100644
index 0000000..355d2d1
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go
@@ -0,0 +1,1048 @@
+package jsoniter
+
+import (
+ "fmt"
+ "io"
+ "strings"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
+func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
+ bindings := map[string]*Binding{}
+ structDescriptor := describeStruct(ctx, typ)
+ for _, binding := range structDescriptor.Fields {
+ for _, fromName := range binding.FromNames {
+ old := bindings[fromName]
+ if old == nil {
+ bindings[fromName] = binding
+ continue
+ }
+ ignoreOld, ignoreNew := resolveConflictBinding(ctx.frozenConfig, old, binding)
+ if ignoreOld {
+ delete(bindings, fromName)
+ }
+ if !ignoreNew {
+ bindings[fromName] = binding
+ }
+ }
+ }
+ fields := map[string]*structFieldDecoder{}
+ for k, binding := range bindings {
+ fields[k] = binding.Decoder.(*structFieldDecoder)
+ }
+
+ if !ctx.caseSensitive() {
+ for k, binding := range bindings {
+ if _, found := fields[strings.ToLower(k)]; !found {
+ fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder)
+ }
+ }
+ }
+
+ return createStructDecoder(ctx, typ, fields)
+}
+
+func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structFieldDecoder) ValDecoder {
+ if ctx.disallowUnknownFields {
+ return &generalStructDecoder{typ: typ, fields: fields, disallowUnknownFields: true}
+ }
+ knownHash := map[int64]struct{}{
+ 0: {},
+ }
+
+ switch len(fields) {
+ case 0:
+ return &skipObjectDecoder{typ}
+ case 1:
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}
+ }
+ case 2:
+ var fieldHash1 int64
+ var fieldHash2 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldHash1 == 0 {
+ fieldHash1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else {
+ fieldHash2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ }
+ }
+ return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}
+ case 3:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ }
+ }
+ return &threeFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3}
+ case 4:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ }
+ }
+ return &fourFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4}
+ case 5:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ }
+ }
+ return &fiveFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5}
+ case 6:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ }
+ }
+ return &sixFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6}
+ case 7:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ }
+ }
+ return &sevenFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7}
+ case 8:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldName8 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ }
+ }
+ return &eightFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7,
+ fieldName8, fieldDecoder8}
+ case 9:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldName8 int64
+ var fieldName9 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ var fieldDecoder9 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else if fieldName8 == 0 {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ } else {
+ fieldName9 = fieldHash
+ fieldDecoder9 = fieldDecoder
+ }
+ }
+ return &nineFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7,
+ fieldName8, fieldDecoder8,
+ fieldName9, fieldDecoder9}
+ case 10:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldName8 int64
+ var fieldName9 int64
+ var fieldName10 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ var fieldDecoder9 *structFieldDecoder
+ var fieldDecoder10 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else if fieldName8 == 0 {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ } else if fieldName9 == 0 {
+ fieldName9 = fieldHash
+ fieldDecoder9 = fieldDecoder
+ } else {
+ fieldName10 = fieldHash
+ fieldDecoder10 = fieldDecoder
+ }
+ }
+ return &tenFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7,
+ fieldName8, fieldDecoder8,
+ fieldName9, fieldDecoder9,
+ fieldName10, fieldDecoder10}
+ }
+ return &generalStructDecoder{typ, fields, false}
+}
+
+type generalStructDecoder struct {
+ typ reflect2.Type
+ fields map[string]*structFieldDecoder
+ disallowUnknownFields bool
+}
+
+func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ var c byte
+ for c = ','; c == ','; c = iter.nextToken() {
+ decoder.decodeOneField(ptr, iter)
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ if c != '}' {
+ iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
+ }
+}
+
+func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
+ var field string
+ var fieldDecoder *structFieldDecoder
+ if iter.cfg.objectFieldMustBeSimpleString {
+ fieldBytes := iter.ReadStringAsSlice()
+ field = *(*string)(unsafe.Pointer(&fieldBytes))
+ fieldDecoder = decoder.fields[field]
+ if fieldDecoder == nil && !iter.cfg.caseSensitive {
+ fieldDecoder = decoder.fields[strings.ToLower(field)]
+ }
+ } else {
+ field = iter.ReadString()
+ fieldDecoder = decoder.fields[field]
+ if fieldDecoder == nil && !iter.cfg.caseSensitive {
+ fieldDecoder = decoder.fields[strings.ToLower(field)]
+ }
+ }
+ if fieldDecoder == nil {
+ msg := "found unknown field: " + field
+ if decoder.disallowUnknownFields {
+ iter.ReportError("ReadObject", msg)
+ }
+ c := iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ iter.Skip()
+ return
+ }
+ c := iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ fieldDecoder.Decode(ptr, iter)
+}
+
+type skipObjectDecoder struct {
+ typ reflect2.Type
+}
+
+func (decoder *skipObjectDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valueType := iter.WhatIsNext()
+ if valueType != ObjectValue && valueType != NilValue {
+ iter.ReportError("skipObjectDecoder", "expect object or null")
+ return
+ }
+ iter.Skip()
+}
+
+type oneFieldStructDecoder struct {
+ typ reflect2.Type
+ fieldHash int64
+ fieldDecoder *structFieldDecoder
+}
+
+func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ if iter.readFieldHash() == decoder.fieldHash {
+ decoder.fieldDecoder.Decode(ptr, iter)
+ } else {
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type twoFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+}
+
+func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type threeFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+}
+
+func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type fourFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+}
+
+func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type fiveFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+}
+
+func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type sixFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+}
+
+func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type sevenFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+}
+
+func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type eightFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int64
+ fieldDecoder8 *structFieldDecoder
+}
+
+func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type nineFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int64
+ fieldDecoder8 *structFieldDecoder
+ fieldHash9 int64
+ fieldDecoder9 *structFieldDecoder
+}
+
+func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ case decoder.fieldHash9:
+ decoder.fieldDecoder9.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type tenFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int64
+ fieldDecoder8 *structFieldDecoder
+ fieldHash9 int64
+ fieldDecoder9 *structFieldDecoder
+ fieldHash10 int64
+ fieldDecoder10 *structFieldDecoder
+}
+
+func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ case decoder.fieldHash9:
+ decoder.fieldDecoder9.Decode(ptr, iter)
+ case decoder.fieldHash10:
+ decoder.fieldDecoder10.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type structFieldDecoder struct {
+ field reflect2.StructField
+ fieldDecoder ValDecoder
+}
+
+func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ fieldPtr := decoder.field.UnsafeGet(ptr)
+ decoder.fieldDecoder.Decode(fieldPtr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%s: %s", decoder.field.Name(), iter.Error.Error())
+ }
+}
+
+type stringModeStringDecoder struct {
+ elemDecoder ValDecoder
+ cfg *frozenConfig
+}
+
+func (decoder *stringModeStringDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.elemDecoder.Decode(ptr, iter)
+ str := *((*string)(ptr))
+ tempIter := decoder.cfg.BorrowIterator([]byte(str))
+ defer decoder.cfg.ReturnIterator(tempIter)
+ *((*string)(ptr)) = tempIter.ReadString()
+}
+
+type stringModeNumberDecoder struct {
+ elemDecoder ValDecoder
+}
+
+func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
+ return
+ }
+ decoder.elemDecoder.Decode(ptr, iter)
+ if iter.Error != nil {
+ return
+ }
+ c = iter.readByte()
+ if c != '"' {
+ iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
+ return
+ }
+}
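createStructDecoder above picks a specialised decoder for structs with 1 to 10 uniquely hashed field names and falls back to generalStructDecoder on hash collisions or when unknown fields must be rejected. A sketch of the two user-visible behaviours, assuming the vendored package's Config API:

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    type User struct {
        Name string `json:"name"`
        Age  int    `json:"age"`
    }

    func main() {
        // Two bound fields -> twoFieldsStructDecoder dispatches on field-name hashes.
        var u User
        _ = jsoniter.Unmarshal([]byte(`{"name":"amy","age":3}`), &u)
        fmt.Println(u) // {amy 3}

        // DisallowUnknownFields forces the general decoder, which reports extras.
        strict := jsoniter.Config{DisallowUnknownFields: true}.Froze()
        err := strict.Unmarshal([]byte(`{"name":"amy","extra":1}`), &u)
        fmt.Println(err != nil) // true
    }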
diff --git a/vendor/github.com/json-iterator/go/reflect_struct_encoder.go b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go
new file mode 100644
index 0000000..d0759cf
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go
@@ -0,0 +1,210 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "reflect"
+ "unsafe"
+)
+
+func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
+ type bindingTo struct {
+ binding *Binding
+ toName string
+ ignored bool
+ }
+ orderedBindings := []*bindingTo{}
+ structDescriptor := describeStruct(ctx, typ)
+ for _, binding := range structDescriptor.Fields {
+ for _, toName := range binding.ToNames {
+ new := &bindingTo{
+ binding: binding,
+ toName: toName,
+ }
+ for _, old := range orderedBindings {
+ if old.toName != toName {
+ continue
+ }
+ old.ignored, new.ignored = resolveConflictBinding(ctx.frozenConfig, old.binding, new.binding)
+ }
+ orderedBindings = append(orderedBindings, new)
+ }
+ }
+ if len(orderedBindings) == 0 {
+ return &emptyStructEncoder{}
+ }
+ finalOrderedFields := []structFieldTo{}
+ for _, bindingTo := range orderedBindings {
+ if !bindingTo.ignored {
+ finalOrderedFields = append(finalOrderedFields, structFieldTo{
+ encoder: bindingTo.binding.Encoder.(*structFieldEncoder),
+ toName: bindingTo.toName,
+ })
+ }
+ }
+ return &structEncoder{typ, finalOrderedFields}
+}
+
+func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty {
+ encoder := createEncoderOfNative(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ kind := typ.Kind()
+ switch kind {
+ case reflect.Interface:
+ return &dynamicEncoder{typ}
+ case reflect.Struct:
+ return &structEncoder{typ: typ}
+ case reflect.Array:
+ return &arrayEncoder{}
+ case reflect.Slice:
+ return &sliceEncoder{}
+ case reflect.Map:
+ return encoderOfMap(ctx, typ)
+ case reflect.Ptr:
+ return &OptionalEncoder{}
+ default:
+ return &lazyErrorEncoder{err: fmt.Errorf("unsupported type: %v", typ)}
+ }
+}
+
+func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
+ newTagged := new.Field.Tag().Get(cfg.getTagKey()) != ""
+ oldTagged := old.Field.Tag().Get(cfg.getTagKey()) != ""
+ if newTagged {
+ if oldTagged {
+ if len(old.levels) > len(new.levels) {
+ return true, false
+ } else if len(new.levels) > len(old.levels) {
+ return false, true
+ } else {
+ return true, true
+ }
+ } else {
+ return true, false
+ }
+ } else {
+ if oldTagged {
+ return true, false
+ }
+ if len(old.levels) > len(new.levels) {
+ return true, false
+ } else if len(new.levels) > len(old.levels) {
+ return false, true
+ } else {
+ return true, true
+ }
+ }
+}
+
+type structFieldEncoder struct {
+ field reflect2.StructField
+ fieldEncoder ValEncoder
+ omitempty bool
+}
+
+func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ fieldPtr := encoder.field.UnsafeGet(ptr)
+ encoder.fieldEncoder.Encode(fieldPtr, stream)
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%s: %s", encoder.field.Name(), stream.Error.Error())
+ }
+}
+
+func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ fieldPtr := encoder.field.UnsafeGet(ptr)
+ return encoder.fieldEncoder.IsEmpty(fieldPtr)
+}
+
+func (encoder *structFieldEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
+ isEmbeddedPtrNil, converted := encoder.fieldEncoder.(IsEmbeddedPtrNil)
+ if !converted {
+ return false
+ }
+ fieldPtr := encoder.field.UnsafeGet(ptr)
+ return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
+}
+
+type IsEmbeddedPtrNil interface {
+ IsEmbeddedPtrNil(ptr unsafe.Pointer) bool
+}
+
+type structEncoder struct {
+ typ reflect2.Type
+ fields []structFieldTo
+}
+
+type structFieldTo struct {
+ encoder *structFieldEncoder
+ toName string
+}
+
+func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteObjectStart()
+ isNotFirst := false
+ for _, field := range encoder.fields {
+ if field.encoder.omitempty && field.encoder.IsEmpty(ptr) {
+ continue
+ }
+ if field.encoder.IsEmbeddedPtrNil(ptr) {
+ continue
+ }
+ if isNotFirst {
+ stream.WriteMore()
+ }
+ stream.WriteObjectField(field.toName)
+ field.encoder.Encode(ptr, stream)
+ isNotFirst = true
+ }
+ stream.WriteObjectEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error())
+ }
+}
+
+func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type emptyStructEncoder struct {
+}
+
+func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteEmptyObject()
+}
+
+func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type stringModeNumberEncoder struct {
+ elemEncoder ValEncoder
+}
+
+func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.writeByte('"')
+ encoder.elemEncoder.Encode(ptr, stream)
+ stream.writeByte('"')
+}
+
+func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.elemEncoder.IsEmpty(ptr)
+}
+
+type stringModeStringEncoder struct {
+ elemEncoder ValEncoder
+ cfg *frozenConfig
+}
+
+func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ tempStream := encoder.cfg.BorrowStream(nil)
+ defer encoder.cfg.ReturnStream(tempStream)
+ encoder.elemEncoder.Encode(ptr, tempStream)
+ stream.WriteString(string(tempStream.Buffer()))
+}
+
+func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.elemEncoder.IsEmpty(ptr)
+}
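On the encoding side, structFieldEncoder honours omitempty via IsEmpty, and the stringMode encoders implement the `,string` tag option. A brief sketch (Marshal assumed):

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    type Item struct {
        Name  string `json:"name"`
        Count int    `json:"count,omitempty"` // dropped when IsEmpty reports the zero value
        Price int    `json:"price,string"`    // quoted by stringModeNumberEncoder
    }

    func main() {
        out, _ := jsoniter.Marshal(Item{Name: "pen", Price: 12})
        fmt.Println(string(out)) // {"name":"pen","price":"12"}
    }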
diff --git a/vendor/github.com/json-iterator/go/stream.go b/vendor/github.com/json-iterator/go/stream.go
new file mode 100644
index 0000000..17662fd
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream.go
@@ -0,0 +1,211 @@
+package jsoniter
+
+import (
+ "io"
+)
+
+// Stream is an io.Writer-like object with JSON-specific write functions.
+// Errors are not returned as values; they are stored in the Error field of the Stream instance.
+type Stream struct {
+ cfg *frozenConfig
+ out io.Writer
+ buf []byte
+ Error error
+ indention int
+ Attachment interface{} // open for customized encoder
+}
+
+// NewStream creates a new Stream instance.
+// cfg can be jsoniter.ConfigDefault.
+// out can be nil to write only to the internal buffer.
+// bufSize is the initial size of the internal buffer in bytes.
+func NewStream(cfg API, out io.Writer, bufSize int) *Stream {
+ return &Stream{
+ cfg: cfg.(*frozenConfig),
+ out: out,
+ buf: make([]byte, 0, bufSize),
+ Error: nil,
+ indention: 0,
+ }
+}
+
+// Pool returns a pool that can provide more streams with the same configuration
+func (stream *Stream) Pool() StreamPool {
+ return stream.cfg
+}
+
+// Reset reuses this stream instance by assigning a new writer
+func (stream *Stream) Reset(out io.Writer) {
+ stream.out = out
+ stream.buf = stream.buf[:0]
+}
+
+// Available returns how many bytes are unused in the buffer.
+func (stream *Stream) Available() int {
+ return cap(stream.buf) - len(stream.buf)
+}
+
+// Buffered returns the number of bytes that have been written into the current buffer.
+func (stream *Stream) Buffered() int {
+ return len(stream.buf)
+}
+
+// Buffer returns the internal buffer; if the writer is nil, use this method to take the result
+func (stream *Stream) Buffer() []byte {
+ return stream.buf
+}
+
+// SetBuffer allows appending to the internal buffer directly
+func (stream *Stream) SetBuffer(buf []byte) {
+ stream.buf = buf
+}
+
+// Write writes the contents of p into the buffer.
+// It returns the number of bytes written.
+// If nn < len(p), it also returns an error explaining
+// why the write is short.
+func (stream *Stream) Write(p []byte) (nn int, err error) {
+ stream.buf = append(stream.buf, p...)
+ if stream.out != nil {
+ nn, err = stream.out.Write(stream.buf)
+ stream.buf = stream.buf[nn:]
+ return
+ }
+ return len(p), nil
+}
+
+// writeByte appends a single byte to the internal buffer.
+func (stream *Stream) writeByte(c byte) {
+ stream.buf = append(stream.buf, c)
+}
+
+func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) {
+ stream.buf = append(stream.buf, c1, c2)
+}
+
+func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
+ stream.buf = append(stream.buf, c1, c2, c3)
+}
+
+func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
+ stream.buf = append(stream.buf, c1, c2, c3, c4)
+}
+
+func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
+ stream.buf = append(stream.buf, c1, c2, c3, c4, c5)
+}
+
+// Flush writes any buffered data to the underlying io.Writer.
+func (stream *Stream) Flush() error {
+ if stream.out == nil {
+ return nil
+ }
+ if stream.Error != nil {
+ return stream.Error
+ }
+ n, err := stream.out.Write(stream.buf)
+ if err != nil {
+ if stream.Error == nil {
+ stream.Error = err
+ }
+ return err
+ }
+ stream.buf = stream.buf[n:]
+ return nil
+}
+
+// WriteRaw writes the string out without quotes, as raw bytes
+func (stream *Stream) WriteRaw(s string) {
+ stream.buf = append(stream.buf, s...)
+}
+
+// WriteNil writes null to the stream
+func (stream *Stream) WriteNil() {
+ stream.writeFourBytes('n', 'u', 'l', 'l')
+}
+
+// WriteTrue writes true to the stream
+func (stream *Stream) WriteTrue() {
+ stream.writeFourBytes('t', 'r', 'u', 'e')
+}
+
+// WriteFalse writes false to the stream
+func (stream *Stream) WriteFalse() {
+ stream.writeFiveBytes('f', 'a', 'l', 's', 'e')
+}
+
+// WriteBool writes true or false to the stream
+func (stream *Stream) WriteBool(val bool) {
+ if val {
+ stream.WriteTrue()
+ } else {
+ stream.WriteFalse()
+ }
+}
+
+// WriteObjectStart writes { with possible indention
+func (stream *Stream) WriteObjectStart() {
+ stream.indention += stream.cfg.indentionStep
+ stream.writeByte('{')
+ stream.writeIndention(0)
+}
+
+// WriteObjectField writes "field": with possible indention
+func (stream *Stream) WriteObjectField(field string) {
+ stream.WriteString(field)
+ if stream.indention > 0 {
+ stream.writeTwoBytes(':', ' ')
+ } else {
+ stream.writeByte(':')
+ }
+}
+
+// WriteObjectEnd writes } with possible indention
+func (stream *Stream) WriteObjectEnd() {
+ stream.writeIndention(stream.cfg.indentionStep)
+ stream.indention -= stream.cfg.indentionStep
+ stream.writeByte('}')
+}
+
+// WriteEmptyObject writes {}
+func (stream *Stream) WriteEmptyObject() {
+ stream.writeByte('{')
+ stream.writeByte('}')
+}
+
+// WriteMore writes , with possible indention
+func (stream *Stream) WriteMore() {
+ stream.writeByte(',')
+ stream.writeIndention(0)
+ stream.Flush()
+}
+
+// WriteArrayStart writes [ with possible indention
+func (stream *Stream) WriteArrayStart() {
+ stream.indention += stream.cfg.indentionStep
+ stream.writeByte('[')
+ stream.writeIndention(0)
+}
+
+// WriteEmptyArray writes []
+func (stream *Stream) WriteEmptyArray() {
+ stream.writeTwoBytes('[', ']')
+}
+
+// WriteArrayEnd writes ] with possible indention
+func (stream *Stream) WriteArrayEnd() {
+ stream.writeIndention(stream.cfg.indentionStep)
+ stream.indention -= stream.cfg.indentionStep
+ stream.writeByte(']')
+}
+
+func (stream *Stream) writeIndention(delta int) {
+ if stream.indention == 0 {
+ return
+ }
+ stream.writeByte('\n')
+ toWrite := stream.indention - delta
+ for i := 0; i < toWrite; i++ {
+ stream.buf = append(stream.buf, ' ')
+ }
+}
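Stream accumulates output in its internal buffer and only touches the underlying io.Writer on Write, Flush, or WriteMore; with a nil writer, Buffer() hands the bytes back directly. A minimal usage sketch of the exported methods added above (jsoniter.ConfigDefault is assumed as the configuration):

    package main

    import (
        "fmt"
        "os"

        jsoniter "github.com/json-iterator/go"
    )

    func main() {
        // With an io.Writer, Flush pushes the buffered bytes out: {"ok":true}
        s := jsoniter.NewStream(jsoniter.ConfigDefault, os.Stdout, 64)
        s.WriteObjectStart()
        s.WriteObjectField("ok")
        s.WriteBool(true)
        s.WriteObjectEnd()
        _ = s.Flush()
        fmt.Println()

        // With out == nil, Buffer() returns the accumulated bytes instead: [1,2]
        buf := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 64)
        buf.WriteArrayStart()
        buf.WriteInt(1)
        buf.WriteMore()
        buf.WriteInt(2)
        buf.WriteArrayEnd()
        fmt.Println(string(buf.Buffer()))
    }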
diff --git a/vendor/github.com/json-iterator/go/stream_float.go b/vendor/github.com/json-iterator/go/stream_float.go
new file mode 100644
index 0000000..f318d2c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream_float.go
@@ -0,0 +1,94 @@
+package jsoniter
+
+import (
+ "math"
+ "strconv"
+)
+
+var pow10 []uint64
+
+func init() {
+ pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
+}
+
+// WriteFloat32 writes a float32 to the stream
+func (stream *Stream) WriteFloat32(val float32) {
+ abs := math.Abs(float64(val))
+ fmt := byte('f')
+ // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
+ if abs != 0 {
+ if float32(abs) < 1e-6 || float32(abs) >= 1e21 {
+ fmt = 'e'
+ }
+ }
+ stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32)
+}
+
+// WriteFloat32Lossy writes a float32 to the stream with only 6 digits of precision, but is much faster
+func (stream *Stream) WriteFloat32Lossy(val float32) {
+ if val < 0 {
+ stream.writeByte('-')
+ val = -val
+ }
+ if val > 0x4ffffff {
+ stream.WriteFloat32(val)
+ return
+ }
+ precision := 6
+ exp := uint64(1000000) // 6
+ lval := uint64(float64(val)*float64(exp) + 0.5)
+ stream.WriteUint64(lval / exp)
+ fval := lval % exp
+ if fval == 0 {
+ return
+ }
+ stream.writeByte('.')
+ for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
+ stream.writeByte('0')
+ }
+ stream.WriteUint64(fval)
+ for stream.buf[len(stream.buf)-1] == '0' {
+ stream.buf = stream.buf[:len(stream.buf)-1]
+ }
+}
+
+// WriteFloat64 writes a float64 to the stream
+func (stream *Stream) WriteFloat64(val float64) {
+ abs := math.Abs(val)
+ fmt := byte('f')
+ // Values outside [1e-6, 1e21) are written in exponent notation (the same cutoffs as encoding/json).
+ if abs != 0 {
+ if abs < 1e-6 || abs >= 1e21 {
+ fmt = 'e'
+ }
+ }
+ stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 64)
+}
+
+// WriteFloat64Lossy writes a float64 to the stream with only 6 digits of precision, but is much faster
+func (stream *Stream) WriteFloat64Lossy(val float64) {
+ if val < 0 {
+ stream.writeByte('-')
+ val = -val
+ }
+ if val > 0x4ffffff {
+ stream.WriteFloat64(val)
+ return
+ }
+ precision := 6
+ exp := uint64(1000000) // 6
+ lval := uint64(val*float64(exp) + 0.5)
+ stream.WriteUint64(lval / exp)
+ fval := lval % exp
+ if fval == 0 {
+ return
+ }
+ stream.writeByte('.')
+ for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
+ stream.writeByte('0')
+ }
+ stream.WriteUint64(fval)
+ for stream.buf[len(stream.buf)-1] == '0' {
+ stream.buf = stream.buf[:len(stream.buf)-1]
+ }
+}
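WriteFloat64Lossy trades precision for speed: it prints at most six fractional digits using integer arithmetic and trims trailing zeros, deferring to the exact formatter for large magnitudes. A short sketch of the difference:

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    func main() {
        s := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)

        s.WriteFloat64(1.0 / 3.0) // exact: 0.3333333333333333
        s.WriteRaw(" ")
        s.WriteFloat64Lossy(1.0 / 3.0) // six digits, trailing zeros trimmed: 0.333333

        fmt.Println(string(s.Buffer()))
    }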
diff --git a/vendor/github.com/json-iterator/go/stream_int.go b/vendor/github.com/json-iterator/go/stream_int.go
new file mode 100644
index 0000000..d1059ee
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream_int.go
@@ -0,0 +1,190 @@
+package jsoniter
+
+var digits []uint32
+
+func init() {
+ digits = make([]uint32, 1000)
+ for i := uint32(0); i < 1000; i++ {
+ digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
+ if i < 10 {
+ digits[i] += 2 << 24
+ } else if i < 100 {
+ digits[i] += 1 << 24
+ }
+ }
+}
+
+func writeFirstBuf(space []byte, v uint32) []byte {
+ start := v >> 24
+ if start == 0 {
+ space = append(space, byte(v>>16), byte(v>>8))
+ } else if start == 1 {
+ space = append(space, byte(v>>8))
+ }
+ space = append(space, byte(v))
+ return space
+}
+
+func writeBuf(buf []byte, v uint32) []byte {
+ return append(buf, byte(v>>16), byte(v>>8), byte(v))
+}
+
+// WriteUint8 writes a uint8 to the stream
+func (stream *Stream) WriteUint8(val uint8) {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+}
+
+// WriteInt8 writes an int8 to the stream
+func (stream *Stream) WriteInt8(nval int8) {
+ var val uint8
+ if nval < 0 {
+ val = uint8(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint8(nval)
+ }
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+}
+
+// WriteUint16 writes a uint16 to the stream
+func (stream *Stream) WriteUint16(val uint16) {
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+ return
+ }
+ r1 := val - q1*1000
+ stream.buf = writeFirstBuf(stream.buf, digits[q1])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+}
+
+// WriteInt16 writes an int16 to the stream
+func (stream *Stream) WriteInt16(nval int16) {
+ var val uint16
+ if nval < 0 {
+ val = uint16(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint16(nval)
+ }
+ stream.WriteUint16(val)
+}
+
+// WriteUint32 writes a uint32 to the stream
+func (stream *Stream) WriteUint32(val uint32) {
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q1])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q2])
+ } else {
+ r3 := q2 - q3*1000
+ stream.buf = append(stream.buf, byte(q3+'0'))
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ }
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+}
+
+// WriteInt32 writes an int32 to the stream
+func (stream *Stream) WriteInt32(nval int32) {
+ var val uint32
+ if nval < 0 {
+ val = uint32(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint32(nval)
+ }
+ stream.WriteUint32(val)
+}
+
+// WriteUint64 writes a uint64 to the stream
+func (stream *Stream) WriteUint64(val uint64) {
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q1])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q2])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r3 := q2 - q3*1000
+ q4 := q3 / 1000
+ if q4 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q3])
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r4 := q3 - q4*1000
+ q5 := q4 / 1000
+ if q5 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q4])
+ stream.buf = writeBuf(stream.buf, digits[r4])
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r5 := q4 - q5*1000
+ q6 := q5 / 1000
+ if q6 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q5])
+ } else {
+ stream.buf = writeFirstBuf(stream.buf, digits[q6])
+ r6 := q5 - q6*1000
+ stream.buf = writeBuf(stream.buf, digits[r6])
+ }
+ stream.buf = writeBuf(stream.buf, digits[r5])
+ stream.buf = writeBuf(stream.buf, digits[r4])
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+}
+
+// WriteInt64 writes an int64 to the stream
+func (stream *Stream) WriteInt64(nval int64) {
+ var val uint64
+ if nval < 0 {
+ val = uint64(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint64(nval)
+ }
+ stream.WriteUint64(val)
+}
+
+// WriteInt writes an int to the stream
+func (stream *Stream) WriteInt(val int) {
+ stream.WriteInt64(int64(val))
+}
+
+// WriteUint writes a uint to the stream
+func (stream *Stream) WriteUint(val uint) {
+ stream.WriteUint64(uint64(val))
+}
diff --git a/vendor/github.com/json-iterator/go/stream_str.go b/vendor/github.com/json-iterator/go/stream_str.go
new file mode 100644
index 0000000..54c2ba0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream_str.go
@@ -0,0 +1,372 @@
+package jsoniter
+
+import (
+ "unicode/utf8"
+)
+
+// htmlSafeSet holds the value true if the ASCII character with the given
+// array position can be safely represented inside a JSON string, embedded
+// inside of HTML