Mirror of https://github.com/jesseduffield/lazygit.git, synced 2025-05-12 12:55:47 +02:00
Commit df5b3693d6 (parent b123719107): Add invopop/jsonschema fork

38 changed files with 6087 additions and 4 deletions
vendor/github.com/buger/jsonparser/.gitignore (new file, 12 lines, generated, vendored, normal file)
@@ -0,0 +1,12 @@
*.test

*.out

*.mprof

.idea

vendor/github.com/buger/goterm/
prof.cpu
prof.mem
vendor/github.com/buger/jsonparser/Dockerfile (new file, 12 lines, generated, vendored, normal file)
@@ -0,0 +1,12 @@
FROM golang:1.6

RUN go get github.com/Jeffail/gabs
RUN go get github.com/bitly/go-simplejson
RUN go get github.com/pquerna/ffjson
RUN go get github.com/antonholmquist/jason
RUN go get github.com/mreiferson/go-ujson
RUN go get -tags=unsafe -u github.com/ugorji/go/codec
RUN go get github.com/mailru/easyjson

WORKDIR /go/src/github.com/buger/jsonparser
ADD . /go/src/github.com/buger/jsonparser
vendor/github.com/buger/jsonparser/LICENSE (new file, 21 lines, generated, vendored, normal file)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2016 Leonid Bugaev

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
vendor/github.com/buger/jsonparser/Makefile (new file, 36 lines, generated, vendored, normal file)
@@ -0,0 +1,36 @@
SOURCE = parser.go
CONTAINER = jsonparser
SOURCE_PATH = /go/src/github.com/buger/jsonparser
BENCHMARK = JsonParser
BENCHTIME = 5s
TEST = .
DRUN = docker run -v `pwd`:$(SOURCE_PATH) -i -t $(CONTAINER)

build:
	docker build -t $(CONTAINER) .

race:
	$(DRUN) --env GORACE="halt_on_error=1" go test ./. $(ARGS) -v -race -timeout 15s

bench:
	$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -benchtime $(BENCHTIME) -v

bench_local:
	$(DRUN) go test $(LDFLAGS) -test.benchmem -bench . $(ARGS) -benchtime $(BENCHTIME) -v

profile:
	$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -memprofile mem.mprof -v
	$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -cpuprofile cpu.out -v
	$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -c

test:
	$(DRUN) go test $(LDFLAGS) ./ -run $(TEST) -timeout 10s $(ARGS) -v

fmt:
	$(DRUN) go fmt ./...

vet:
	$(DRUN) go vet ./.

bash:
	$(DRUN) /bin/bash
vendor/github.com/buger/jsonparser/README.md (new file, 365 lines, generated, vendored, normal file)
@@ -0,0 +1,365 @@
[Go Report Card](https://goreportcard.com/report/github.com/buger/jsonparser)

# Alternative JSON parser for Go (up to 10x faster than the standard library)

It does not require you to know the structure of the payload (e.g. create structs), and allows accessing fields by providing the path to them. It is up to **10 times faster** than the standard `encoding/json` package (depending on payload size and usage), and **allocates no memory**. See benchmarks below.

## Rationale
Originally I made this for a project that relies on a lot of 3rd party APIs that can be unpredictable and complex.
I love simplicity and prefer to avoid external dependencies. `encoding/json` requires you to know your data structures exactly, and if you prefer to use `map[string]interface{}` instead, it is very slow and hard to manage.
I investigated what's on the market and found that most libraries are just wrappers around `encoding/json`; there are a few options with their own parsers (`ffjson`, `easyjson`), but they still require you to create data structures.

The goal of this project is to push the JSON parser to its performance limits without sacrificing compliance or developer experience.

## Example
For the given JSON our goal is to extract the user's full name, number of GitHub followers and avatar.
```go
import "github.com/buger/jsonparser"

...

data := []byte(`{
  "person": {
    "name": {
      "first": "Leonid",
      "last": "Bugaev",
      "fullName": "Leonid Bugaev"
    },
    "github": {
      "handle": "buger",
      "followers": 109
    },
    "avatars": [
      { "url": "https://avatars1.githubusercontent.com/u/14009?v=3&s=460", "type": "thumbnail" }
    ]
  },
  "company": {
    "name": "Acme"
  }
}`)

// You can specify the key path by providing arguments to the Get function
jsonparser.Get(data, "person", "name", "fullName")

// There are `GetInt` and `GetBoolean` helpers if you know the key's data type exactly
jsonparser.GetInt(data, "person", "github", "followers")

// When you try to get an object, it will return a []byte slice pointing to the data containing it
// For `company` it will be `{"name": "Acme"}`
jsonparser.Get(data, "company")

// If the key doesn't exist, it will return an error
var size int64
if value, err := jsonparser.GetInt(data, "company", "size"); err == nil {
    size = value
}

// You can use the `ArrayEach` helper to iterate over items [item1, item2 .... itemN]
jsonparser.ArrayEach(data, func(value []byte, dataType jsonparser.ValueType, offset int, err error) {
    fmt.Println(jsonparser.Get(value, "url"))
}, "person", "avatars")

// Or you can access fields by index!
jsonparser.GetString(data, "person", "avatars", "[0]", "url")

// You can use the `ObjectEach` helper to iterate over objects { "key1":object1, "key2":object2, .... "keyN":objectN }
jsonparser.ObjectEach(data, func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
    fmt.Printf("Key: '%s'\n Value: '%s'\n Type: %s\n", string(key), string(value), dataType)
    return nil
}, "person", "name")

// The most efficient way to extract multiple keys is `EachKey`

paths := [][]string{
    []string{"person", "name", "fullName"},
    []string{"person", "avatars", "[0]", "url"},
    []string{"company", "url"},
}
jsonparser.EachKey(data, func(idx int, value []byte, vt jsonparser.ValueType, err error){
    switch idx {
    case 0: // []string{"person", "name", "fullName"}
        ...
    case 1: // []string{"person", "avatars", "[0]", "url"}
        ...
    case 2: // []string{"company", "url"}
        ...
    }
}, paths...)

// For more information see the docs below
```
## Need to speed up your app?

I'm available for consulting and can help you push your app performance to the limits. Ping me at: leonsbox@gmail.com.

## Reference

The library API is really simple. You just need the `Get` method to perform any operation. The rest is just helpers around it.

You can also view the API at [godoc.org](https://godoc.org/github.com/buger/jsonparser)
### **`Get`**
```go
func Get(data []byte, keys ...string) (value []byte, dataType jsonparser.ValueType, offset int, err error)
```
Receives the data structure and the key path of the value to extract.

Returns:
* `value` - Pointer into the original data structure containing the key's value, or an empty slice if nothing was found or an error occurred
* `dataType` - Can be: `NotExist`, `String`, `Number`, `Object`, `Array`, `Boolean` or `Null`
* `offset` - Offset from the provided data structure where the key's value ends. Used mostly internally, for example by the `ArrayEach` helper.
* `err` - Non-nil if the key is not found or there is any other parsing issue. If the key is not found, `dataType` is also set to `NotExist`.

Accepts multiple keys to specify the path to a JSON value (for querying nested structures).
If no keys are provided it will try to extract the closest JSON value (simple ones or object/array), which is useful for reading streams or arrays; see the `ArrayEach` implementation.

Note that keys can be array indexes: `jsonparser.GetInt(data, "person", "avatars", "[0]", "url")`, pretty cool, yeah?
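For illustration, here is a minimal sketch of checking `Get`'s return values (it assumes the `data` payload and imports from the example at the top of this README):

```go
value, dataType, _, err := jsonparser.Get(data, "company", "name")
if err != nil {
    // Key not found or malformed JSON
    return
}
if dataType == jsonparser.String {
    fmt.Println(string(value)) // Acme
}
```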
### **`GetString`**
```go
func GetString(data []byte, keys ...string) (val string, err error)
```
Returns strings, properly handling escaped and unicode characters. Note that this will cause additional memory allocations.
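A small sketch, again assuming the `data` payload from the example above:

```go
fullName, err := jsonparser.GetString(data, "person", "name", "fullName")
if err == nil {
    fmt.Println(fullName) // Leonid Bugaev
}
```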
### **`GetUnsafeString`**
If you need a string in your app, and are ready to sacrifice support for escaped symbols in favor of speed, it returns a string mapped to the existing byte slice memory, without any allocations:
```go
s, _ := jsonparser.GetUnsafeString(data, "person", "name", "title")
switch s {
case "CEO":
    ...
case "Engineer":
    ...
...
}
```
Note that `unsafe` here means that your string only exists until the GC frees the underlying byte slice. For most cases it means that you can use this string only in the current context, and should not pass it anywhere externally: through channels or any other way.
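If you do need to keep such a value around, one option (a sketch, not part of the library API) is to copy it into a regular string first; the copy allocates, but the result no longer aliases the original payload:

```go
title, _ := jsonparser.GetUnsafeString(data, "person", "name", "title")
safeTitle := string([]byte(title)) // forces a copy; safe to store or send elsewhere
```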
### **`GetBoolean`**, **`GetInt`** and **`GetFloat`**
```go
func GetBoolean(data []byte, keys ...string) (val bool, err error)

func GetFloat(data []byte, keys ...string) (val float64, err error)

func GetInt(data []byte, keys ...string) (val int64, err error)
```
If you know the key type, you can use the helpers above.
If the key's data type does not match, it will return an error.
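For example (a sketch based on the sample `data` above):

```go
followers, err := jsonparser.GetInt(data, "person", "github", "followers")
if err == nil {
    fmt.Println(followers) // 109
}

// Asking for the same key as a boolean fails, because the stored value is a number
if _, err := jsonparser.GetBoolean(data, "person", "github", "followers"); err != nil {
    fmt.Println("not a boolean:", err)
}
```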
### **`ArrayEach`**
```go
func ArrayEach(data []byte, cb func(value []byte, dataType jsonparser.ValueType, offset int, err error), keys ...string)
```
Needed for iterating over arrays; accepts a callback function with the same return arguments as `Get`.
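A minimal sketch, assuming the `data` payload from the example above, that collects every avatar URL:

```go
var urls []string
jsonparser.ArrayEach(data, func(value []byte, dataType jsonparser.ValueType, offset int, err error) {
    if url, err := jsonparser.GetString(value, "url"); err == nil {
        urls = append(urls, url)
    }
}, "person", "avatars")
```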
### **`ObjectEach`**
```go
func ObjectEach(data []byte, callback func(key []byte, value []byte, dataType ValueType, offset int) error, keys ...string) (err error)
```
Needed for iterating over objects; accepts a callback function. Example:
```go
var handler func([]byte, []byte, jsonparser.ValueType, int) error
handler = func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
    // do stuff here
    return nil
}
jsonparser.ObjectEach(myJson, handler)
```
### **`EachKey`**
```go
func EachKey(data []byte, cb func(idx int, value []byte, dataType jsonparser.ValueType, err error), paths ...[]string)
```
When you need to read multiple keys and you are not afraid of a low-level API, `EachKey` is your friend. It reads the payload only a single time and calls the callback function once a path is found. In contrast, when you call `Get` multiple times, it has to process the payload each time you call it. Depending on the payload, `EachKey` can be multiple times faster than `Get`. Paths can use nested keys as well!

```go
paths := [][]string{
    []string{"uuid"},
    []string{"tz"},
    []string{"ua"},
    []string{"st"},
}
var data SmallPayload

jsonparser.EachKey(smallFixture, func(idx int, value []byte, vt jsonparser.ValueType, err error){
    switch idx {
    case 0:
        data.Uuid, _ = jsonparser.ParseString(value)
    case 1:
        v, _ := jsonparser.ParseInt(value)
        data.Tz = int(v)
    case 2:
        data.Ua, _ = jsonparser.ParseString(value)
    case 3:
        v, _ := jsonparser.ParseInt(value)
        data.St = int(v)
    }
}, paths...)
```
### **`Set`**
```go
func Set(data []byte, setValue []byte, keys ...string) (value []byte, err error)
```
Receives an existing data structure, a key path to set, and the value to set at that key. *This functionality is experimental.*

Returns:
* `value` - Pointer to the original data structure with the key value updated or added.
* `err` - Non-nil if there is any parsing issue.

Accepts multiple keys to specify the path to the JSON value (for updating or creating nested structures).

Note that keys can be array indexes: `jsonparser.Set(data, []byte("http://github.com"), "person", "avatars", "[0]", "url")`
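A minimal sketch, assuming the sample `data` from the example above; the `size` field is made up for illustration and the new value is supplied as a byte slice:

```go
updated, err := jsonparser.Set(data, []byte("250"), "company", "size")
if err == nil {
    fmt.Println(string(updated)) // company should now contain a "size" entry as well
}
```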
### **`Delete`**
```go
func Delete(data []byte, keys ...string) (value []byte)
```
Receives an existing data structure and a key path to delete. *This functionality is experimental.*

Returns:
* `value` - Pointer to the original data structure with the key path deleted if it can be found. If there is no key path, then the whole data structure is deleted.

Accepts multiple keys to specify the path to the JSON value (for updating or creating nested structures).

Note that keys can be array indexes: `jsonparser.Delete(data, "person", "avatars", "[0]", "url")`
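A minimal sketch using the sample `data` from the example above:

```go
// Remove the whole "avatars" array from the person object
trimmed := jsonparser.Delete(data, "person", "avatars")
fmt.Println(string(trimmed))
```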
## What makes it so fast?
* It does not rely on `encoding/json`, `reflection` or `interface{}`; the only real package dependency is `bytes`.
* Operates on the JSON payload at the byte level, giving you pointers into the original data structure: no memory allocation.
* No automatic type conversions; by default everything is a []byte, but it provides the value type, so you can convert it yourself (a few helpers are included).
* Does not parse the full record, only the keys you specify.
## Benchmarks

There are 3 benchmark types, trying to simulate real-life usage for small, medium and large JSON payloads.
For each metric, a lower value is better. Time/op is in nanoseconds. Values better than standard encoding/json are marked in bold.
Benchmarks were run on a standard Linode 1024 box.

Compared libraries:
* https://golang.org/pkg/encoding/json
* https://github.com/Jeffail/gabs
* https://github.com/a8m/djson
* https://github.com/bitly/go-simplejson
* https://github.com/antonholmquist/jason
* https://github.com/mreiferson/go-ujson
* https://github.com/ugorji/go/codec
* https://github.com/pquerna/ffjson
* https://github.com/mailru/easyjson
* https://github.com/buger/jsonparser

#### TLDR
If you want to skip the next sections, we have 2 winners: `jsonparser` and `easyjson`.
`jsonparser` is up to 10 times faster than the standard `encoding/json` package (depending on payload size and usage), and almost infinitely (literally) better in memory consumption because it operates on the data at the byte level and provides direct slice pointers.
`easyjson` wins on CPU in the medium tests, and frankly I'm impressed with this package: these are remarkable results considering that it is almost a drop-in replacement for `encoding/json` (it requires some code generation).

It's hard to fully compare `jsonparser` with `easyjson` (or `ffjson`): they are true parsers and fully process the record, unlike `jsonparser`, which parses only the keys you specify.

If you are searching for a replacement for `encoding/json` while keeping structs, `easyjson` is an amazing choice. If you want to process dynamic JSON, have memory constraints, or want more control over your data, you should try `jsonparser`.

`jsonparser` performance heavily depends on usage, and it works best when you do not need to process the full record, only some keys. The more calls you need to make, the slower it will be; in contrast, `easyjson` (or `ffjson`, `encoding/json`) parses the record only once, and then you can make as many calls as you want.

With great power comes great responsibility! :)
#### Small payload

Each test processes 190 bytes of http log as a JSON record.
It should read multiple fields.
https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_small_payload_test.go

Library | time/op | bytes/op | allocs/op
------ | ------- | -------- | -------
encoding/json struct | 7879 | 880 | 18
encoding/json interface{} | 8946 | 1521 | 38
Jeffail/gabs | 10053 | 1649 | 46
bitly/go-simplejson | 10128 | 2241 | 36
antonholmquist/jason | 27152 | 7237 | 101
github.com/ugorji/go/codec | 8806 | 2176 | 31
mreiferson/go-ujson | **7008** | **1409** | 37
a8m/djson | 3862 | 1249 | 30
pquerna/ffjson | **3769** | **624** | **15**
mailru/easyjson | **2002** | **192** | **9**
buger/jsonparser | **1367** | **0** | **0**
buger/jsonparser (EachKey API) | **809** | **0** | **0**

Winners are ffjson, easyjson and jsonparser, where jsonparser is up to 9.8x faster than encoding/json, 4.6x faster than ffjson, and slightly faster than easyjson.
If you look at memory allocations, jsonparser has no rivals, as it makes no data copies and operates on raw []byte structures and pointers into them.
#### Medium payload

Each test processes a 2.4kb JSON record (based on Clearbit API).
It should read multiple nested fields and 1 array.

https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_medium_payload_test.go

| Library | time/op | bytes/op | allocs/op |
| ------- | ------- | -------- | --------- |
| encoding/json struct | 57749 | 1336 | 29 |
| encoding/json interface{} | 79297 | 10627 | 215 |
| Jeffail/gabs | 83807 | 11202 | 235 |
| bitly/go-simplejson | 88187 | 17187 | 220 |
| antonholmquist/jason | 94099 | 19013 | 247 |
| github.com/ugorji/go/codec | 114719 | 6712 | 152 |
| mreiferson/go-ujson | **56972** | 11547 | 270 |
| a8m/djson | 28525 | 10196 | 198 |
| pquerna/ffjson | **20298** | **856** | **20** |
| mailru/easyjson | **10512** | **336** | **12** |
| buger/jsonparser | **15955** | **0** | **0** |
| buger/jsonparser (EachKey API) | **8916** | **0** | **0** |

The difference between ffjson and jsonparser in CPU usage is smaller, while the memory consumption difference is growing. On the other hand, `easyjson` shows remarkable performance for the medium payload.

`gabs`, `go-simplejson` and `jason` are based on encoding/json and map[string]interface{} and are really only helpers for unstructured JSON; their performance correlates with `encoding/json interface{}`, and they will skip the next round.
`go-ujson`, while it has its own parser, shows the same performance as `encoding/json` and also skips the next round. Same situation with `ugorji/go/codec`, but it showed unexpectedly bad performance for complex payloads.
#### Large payload

Each test processes a 24kb JSON record (based on Discourse API).
It should read 2 arrays, and for each item in an array get a few fields.
Basically it means processing a full JSON file.

https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_large_payload_test.go

| Library | time/op | bytes/op | allocs/op |
| --- | --- | --- | --- |
| encoding/json struct | 748336 | 8272 | 307 |
| encoding/json interface{} | 1224271 | 215425 | 3395 |
| a8m/djson | 510082 | 213682 | 2845 |
| pquerna/ffjson | **312271** | **7792** | **298** |
| mailru/easyjson | **154186** | **6992** | **288** |
| buger/jsonparser | **85308** | **0** | **0** |

`jsonparser` is now the winner, but do not forget that it is a far more lightweight parser than `ffjson` or `easyjson`: they have to parse all the data, while `jsonparser` parses only what you need. `ffjson`, `easyjson` and `jsonparser` all have their own parsing code and do not depend on `encoding/json` or `interface{}`; that's one of the reasons why they are so fast. `easyjson` also uses a bit of the `unsafe` package to reduce memory consumption (in theory this can lead to some unexpected GC issues, but I have not tested it enough).

Also, the last benchmark did not include an `EachKey` test, because in this particular case we need to read a lot of array values, and using `ArrayEach` is more efficient.
## Questions and support

All bug reports and suggestions should go through GitHub Issues.

## Contributing

1. Fork it
2. Create your feature branch (git checkout -b my-new-feature)
3. Commit your changes (git commit -am 'Added some feature')
4. Push to the branch (git push origin my-new-feature)
5. Create a new Pull Request

## Development

All my development happens using Docker, and the repo includes some Make tasks to simplify development.

* `make build` - builds the docker image, usually only needs to be called once
* `make test` - run tests
* `make fmt` - run go fmt
* `make bench` - run benchmarks (if you need to run only a single benchmark, modify the `BENCHMARK` variable in the Makefile)
* `make profile` - runs benchmarks and generates 3 files - `cpu.out`, `mem.mprof` and the `benchmark.test` binary, which can be used with `go tool pprof`
* `make bash` - enter the container (I use it for running `go tool pprof` above)
vendor/github.com/buger/jsonparser/bytes.go (new file, 47 lines, generated, vendored, normal file)
@@ -0,0 +1,47 @@
package jsonparser

import (
    bio "bytes"
)

// minInt64 '-9223372036854775808' is the smallest representable number in int64
const minInt64 = `9223372036854775808`

// About 2x faster than strconv.ParseInt because it only supports base 10, which is enough for JSON
func parseInt(bytes []byte) (v int64, ok bool, overflow bool) {
    if len(bytes) == 0 {
        return 0, false, false
    }

    var neg bool = false
    if bytes[0] == '-' {
        neg = true
        bytes = bytes[1:]
    }

    var b int64 = 0
    for _, c := range bytes {
        if c >= '0' && c <= '9' {
            b = (10 * v) + int64(c-'0')
        } else {
            return 0, false, false
        }
        if overflow = (b < v); overflow {
            break
        }
        v = b
    }

    if overflow {
        if neg && bio.Equal(bytes, []byte(minInt64)) {
            return b, true, false
        }
        return 0, false, true
    }

    if neg {
        return -v, true, false
    } else {
        return v, true, false
    }
}
vendor/github.com/buger/jsonparser/bytes_safe.go (new file, 25 lines, generated, vendored, normal file)
@@ -0,0 +1,25 @@
// +build appengine appenginevm

package jsonparser

import (
    "strconv"
)

// See fastbytes_unsafe.go for explanation on why *[]byte is used (signatures must be consistent with those in that file)

func equalStr(b *[]byte, s string) bool {
    return string(*b) == s
}

func parseFloat(b *[]byte) (float64, error) {
    return strconv.ParseFloat(string(*b), 64)
}

func bytesToString(b *[]byte) string {
    return string(*b)
}

func StringToBytes(s string) []byte {
    return []byte(s)
}
vendor/github.com/buger/jsonparser/bytes_unsafe.go (new file, 44 lines, generated, vendored, normal file)
@@ -0,0 +1,44 @@
// +build !appengine,!appenginevm

package jsonparser

import (
    "reflect"
    "strconv"
    "unsafe"
    "runtime"
)

//
// The reason for using *[]byte rather than []byte in parameters is an optimization. As of Go 1.6,
// the compiler cannot perfectly inline the function when using a non-pointer slice. That is,
// the non-pointer []byte parameter version is slower than if its function body is manually
// inlined, whereas the pointer []byte version is equally fast to the manually inlined
// version. Instruction count in assembly taken from "go tool compile" confirms this difference.
//
// TODO: Remove hack after Go 1.7 release
//
func equalStr(b *[]byte, s string) bool {
    return *(*string)(unsafe.Pointer(b)) == s
}

func parseFloat(b *[]byte) (float64, error) {
    return strconv.ParseFloat(*(*string)(unsafe.Pointer(b)), 64)
}

// A hack until issue golang/go#2632 is fixed.
// See: https://github.com/golang/go/issues/2632
func bytesToString(b *[]byte) string {
    return *(*string)(unsafe.Pointer(b))
}

func StringToBytes(s string) []byte {
    b := make([]byte, 0, 0)
    bh := (*reflect.SliceHeader)(unsafe.Pointer(&b))
    sh := (*reflect.StringHeader)(unsafe.Pointer(&s))
    bh.Data = sh.Data
    bh.Cap = sh.Len
    bh.Len = sh.Len
    runtime.KeepAlive(s)
    return b
}
vendor/github.com/buger/jsonparser/escape.go (new file, 173 lines, generated, vendored, normal file)
@@ -0,0 +1,173 @@
package jsonparser

import (
    "bytes"
    "unicode/utf8"
)

// JSON Unicode stuff: see https://tools.ietf.org/html/rfc7159#section-7

const supplementalPlanesOffset = 0x10000
const highSurrogateOffset = 0xD800
const lowSurrogateOffset = 0xDC00

const basicMultilingualPlaneReservedOffset = 0xDFFF
const basicMultilingualPlaneOffset = 0xFFFF

func combineUTF16Surrogates(high, low rune) rune {
    return supplementalPlanesOffset + (high-highSurrogateOffset)<<10 + (low - lowSurrogateOffset)
}

const badHex = -1

func h2I(c byte) int {
    switch {
    case c >= '0' && c <= '9':
        return int(c - '0')
    case c >= 'A' && c <= 'F':
        return int(c - 'A' + 10)
    case c >= 'a' && c <= 'f':
        return int(c - 'a' + 10)
    }
    return badHex
}

// decodeSingleUnicodeEscape decodes a single \uXXXX escape sequence. The prefix \u is assumed to be present and
// is not checked.
// In JSON, these escapes can either come alone or as part of "UTF16 surrogate pairs" that must be handled together.
// This function only handles one; decodeUnicodeEscape handles this more complex case.
func decodeSingleUnicodeEscape(in []byte) (rune, bool) {
    // We need at least 6 characters total
    if len(in) < 6 {
        return utf8.RuneError, false
    }

    // Convert hex to decimal
    h1, h2, h3, h4 := h2I(in[2]), h2I(in[3]), h2I(in[4]), h2I(in[5])
    if h1 == badHex || h2 == badHex || h3 == badHex || h4 == badHex {
        return utf8.RuneError, false
    }

    // Compose the hex digits
    return rune(h1<<12 + h2<<8 + h3<<4 + h4), true
}

// isUTF16EncodedRune checks if a rune is in the range for non-BMP characters,
// which is used to describe UTF16 chars.
// Source: https://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane
func isUTF16EncodedRune(r rune) bool {
    return highSurrogateOffset <= r && r <= basicMultilingualPlaneReservedOffset
}

func decodeUnicodeEscape(in []byte) (rune, int) {
    if r, ok := decodeSingleUnicodeEscape(in); !ok {
        // Invalid Unicode escape
        return utf8.RuneError, -1
    } else if r <= basicMultilingualPlaneOffset && !isUTF16EncodedRune(r) {
        // Valid Unicode escape in Basic Multilingual Plane
        return r, 6
    } else if r2, ok := decodeSingleUnicodeEscape(in[6:]); !ok { // Note: previous decodeSingleUnicodeEscape success guarantees at least 6 bytes remain
        // UTF16 "high surrogate" without mandatory valid following Unicode escape for the "low surrogate"
        return utf8.RuneError, -1
    } else if r2 < lowSurrogateOffset {
        // Invalid UTF16 "low surrogate"
        return utf8.RuneError, -1
    } else {
        // Valid UTF16 surrogate pair
        return combineUTF16Surrogates(r, r2), 12
    }
}

// backslashCharEscapeTable: when '\X' is found for some byte X, it is to be replaced with backslashCharEscapeTable[X]
var backslashCharEscapeTable = [...]byte{
    '"':  '"',
    '\\': '\\',
    '/':  '/',
    'b':  '\b',
    'f':  '\f',
    'n':  '\n',
    'r':  '\r',
    't':  '\t',
}

// unescapeToUTF8 unescapes the single escape sequence starting at 'in' into 'out' and returns
// how many characters were consumed from 'in' and emitted into 'out'.
// If a valid escape sequence does not appear as a prefix of 'in', (-1, -1) is returned to signal the error.
func unescapeToUTF8(in, out []byte) (inLen int, outLen int) {
    if len(in) < 2 || in[0] != '\\' {
        // Invalid escape due to insufficient characters for any escape or no initial backslash
        return -1, -1
    }

    // https://tools.ietf.org/html/rfc7159#section-7
    switch e := in[1]; e {
    case '"', '\\', '/', 'b', 'f', 'n', 'r', 't':
        // Valid basic 2-character escapes (use lookup table)
        out[0] = backslashCharEscapeTable[e]
        return 2, 1
    case 'u':
        // Unicode escape
        if r, inLen := decodeUnicodeEscape(in); inLen == -1 {
            // Invalid Unicode escape
            return -1, -1
        } else {
            // Valid Unicode escape; re-encode as UTF8
            outLen := utf8.EncodeRune(out, r)
            return inLen, outLen
        }
    }

    return -1, -1
}

// unescape unescapes the string contained in 'in' and returns it as a slice.
// If 'in' contains no escaped characters:
//   Returns 'in'.
// Else, if 'out' is of sufficient capacity (guaranteed if cap(out) >= len(in)):
//   'out' is used to build the unescaped string and is returned with no extra allocation
// Else:
//   A new slice is allocated and returned.
func Unescape(in, out []byte) ([]byte, error) {
    firstBackslash := bytes.IndexByte(in, '\\')
    if firstBackslash == -1 {
        return in, nil
    }

    // Get a buffer of sufficient size (allocate if needed)
    if cap(out) < len(in) {
        out = make([]byte, len(in))
    } else {
        out = out[0:len(in)]
    }

    // Copy the first sequence of unescaped bytes to the output and obtain a buffer pointer (subslice)
    copy(out, in[:firstBackslash])
    in = in[firstBackslash:]
    buf := out[firstBackslash:]

    for len(in) > 0 {
        // Unescape the next escaped character
        inLen, bufLen := unescapeToUTF8(in, buf)
        if inLen == -1 {
            return nil, MalformedStringEscapeError
        }

        in = in[inLen:]
        buf = buf[bufLen:]

        // Copy everything up until the next backslash
        nextBackslash := bytes.IndexByte(in, '\\')
        if nextBackslash == -1 {
            copy(buf, in)
            buf = buf[len(in):]
            break
        } else {
            copy(buf, in[:nextBackslash])
            buf = buf[nextBackslash:]
            in = in[nextBackslash:]
        }
    }

    // Trim the out buffer to the amount that was actually emitted
    return out[:len(out)-len(buf)], nil
}
vendor/github.com/buger/jsonparser/fuzz.go (new file, 117 lines, generated, vendored, normal file)
@@ -0,0 +1,117 @@
package jsonparser

func FuzzParseString(data []byte) int {
    r, err := ParseString(data)
    if err != nil || r == "" {
        return 0
    }
    return 1
}

func FuzzEachKey(data []byte) int {
    paths := [][]string{
        {"name"},
        {"order"},
        {"nested", "a"},
        {"nested", "b"},
        {"nested2", "a"},
        {"nested", "nested3", "b"},
        {"arr", "[1]", "b"},
        {"arrInt", "[3]"},
        {"arrInt", "[5]"},
        {"nested"},
        {"arr", "["},
        {"a\n", "b\n"},
    }
    EachKey(data, func(idx int, value []byte, vt ValueType, err error) {}, paths...)
    return 1
}

func FuzzDelete(data []byte) int {
    Delete(data, "test")
    return 1
}

func FuzzSet(data []byte) int {
    _, err := Set(data, []byte(`"new value"`), "test")
    if err != nil {
        return 0
    }
    return 1
}

func FuzzObjectEach(data []byte) int {
    _ = ObjectEach(data, func(key, value []byte, valueType ValueType, off int) error {
        return nil
    })
    return 1
}

func FuzzParseFloat(data []byte) int {
    _, err := ParseFloat(data)
    if err != nil {
        return 0
    }
    return 1
}

func FuzzParseInt(data []byte) int {
    _, err := ParseInt(data)
    if err != nil {
        return 0
    }
    return 1
}

func FuzzParseBool(data []byte) int {
    _, err := ParseBoolean(data)
    if err != nil {
        return 0
    }
    return 1
}

func FuzzTokenStart(data []byte) int {
    _ = tokenStart(data)
    return 1
}

func FuzzGetString(data []byte) int {
    _, err := GetString(data, "test")
    if err != nil {
        return 0
    }
    return 1
}

func FuzzGetFloat(data []byte) int {
    _, err := GetFloat(data, "test")
    if err != nil {
        return 0
    }
    return 1
}

func FuzzGetInt(data []byte) int {
    _, err := GetInt(data, "test")
    if err != nil {
        return 0
    }
    return 1
}

func FuzzGetBoolean(data []byte) int {
    _, err := GetBoolean(data, "test")
    if err != nil {
        return 0
    }
    return 1
}

func FuzzGetUnsafeString(data []byte) int {
    _, err := GetUnsafeString(data, "test")
    if err != nil {
        return 0
    }
    return 1
}
vendor/github.com/buger/jsonparser/oss-fuzz-build.sh (new file, 47 lines, generated, vendored, normal file)
@@ -0,0 +1,47 @@
#!/bin/bash -eu

git clone https://github.com/dvyukov/go-fuzz-corpus
zip corpus.zip go-fuzz-corpus/json/corpus/*

cp corpus.zip $OUT/fuzzparsestring_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzParseString fuzzparsestring

cp corpus.zip $OUT/fuzzeachkey_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzEachKey fuzzeachkey

cp corpus.zip $OUT/fuzzdelete_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzDelete fuzzdelete

cp corpus.zip $OUT/fuzzset_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzSet fuzzset

cp corpus.zip $OUT/fuzzobjecteach_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzObjectEach fuzzobjecteach

cp corpus.zip $OUT/fuzzparsefloat_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzParseFloat fuzzparsefloat

cp corpus.zip $OUT/fuzzparseint_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzParseInt fuzzparseint

cp corpus.zip $OUT/fuzzparsebool_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzParseBool fuzzparsebool

cp corpus.zip $OUT/fuzztokenstart_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzTokenStart fuzztokenstart

cp corpus.zip $OUT/fuzzgetstring_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetString fuzzgetstring

cp corpus.zip $OUT/fuzzgetfloat_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetFloat fuzzgetfloat

cp corpus.zip $OUT/fuzzgetint_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetInt fuzzgetint

cp corpus.zip $OUT/fuzzgetboolean_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetBoolean fuzzgetboolean

cp corpus.zip $OUT/fuzzgetunsafestring_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetUnsafeString fuzzgetunsafestring
vendor/github.com/buger/jsonparser/parser.go (new file, 1283 lines, generated, vendored, normal file)
File diff suppressed because it is too large.