
Commit 9addec9

Merge pull request #109 from stoplightio/master
Add delete implementation
2 parents bb14bb6 + 5ef65f4

File tree

8 files changed (+382 lines, -41 lines)

.gitignore

Lines changed: 2 additions & 0 deletions
@@ -6,3 +6,5 @@
 *.mprof
 
 vendor/github.com/buger/goterm/
+prof.cpu
+prof.mem

README.md

Lines changed: 15 additions & 2 deletions
@@ -215,6 +215,19 @@ Accepts multiple keys to specify path to JSON value (in case of updating or crea
 
 Note that keys can be an array indexes: `jsonparser.Set(data, []byte("http://github.com"), "person", "avatars", "[0]", "url")`
 
+### **`Delete`**
+```go
+func Delete(data []byte, keys ...string) value []byte
+```
+Receives existing data structure, and key path to delete. *This functionality is experimental.*
+
+Returns:
+* `value` - Pointer to original data structure with key path deleted if it can be found. If there is no key path, then the whole data structure is deleted.
+
+Accepts multiple keys to specify path to JSON value (in case of updating or creating nested structures).
+
+Note that keys can be an array indexes: `jsonparser.Delete(data, "person", "avatars", "[0]", "url")`
+
 
 ## What makes it so fast?
 * It does not rely on `encoding/json`, `reflection` or `interface{}`, the only real package dependency is `bytes`.

@@ -248,7 +261,7 @@ If you want to skip next sections we have 2 winner: `jsonparser` and `easyjson`.
 
 It's hard to fully compare `jsonparser` and `easyjson` (or `ffson`), they a true parsers and fully process record, unlike `jsonparser` which parse only keys you specified.
 
-If you searching for replacement of `encoding/json` while keeping structs, `easyjson` is an amazing choise. If you want to process dynamic JSON, have memory constrains, or more control over your data you should try `jsonparser`.
+If you searching for replacement of `encoding/json` while keeping structs, `easyjson` is an amazing choice. If you want to process dynamic JSON, have memory constrains, or more control over your data you should try `jsonparser`.
 
 `jsonparser` performance heavily depends on usage, and it works best when you do not need to process full record, only some keys. The more calls you need to make, the slower it will be, in contrast `easyjson` (or `ffjson`, `encoding/json`) parser record only 1 time, and then you can make as many calls as you want.
 

@@ -324,7 +337,7 @@ https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_large_payloa
 | mailru/easyjson | **154186** | **6992** | **288** |
 | buger/jsonparser | **85308** | **0** | **0** |
 
-`jsonparser` now is a winner, but do not forget that it is way more lighweight parser than `ffson` or `easyjson`, and they have to parser all the data, while `jsonparser` parse only what you need. All `ffjson`, `easysjon` and `jsonparser` have their own parsing code, and does not depend on `encoding/json` or `interface{}`, thats one of the reasons why they are so fast. `easyjson` also use a bit of `unsafe` package to reduce memory consuption (in theory it can lead to some unexpected GC issue, but i did not tested enough)
+`jsonparser` now is a winner, but do not forget that it is way more lightweight parser than `ffson` or `easyjson`, and they have to parser all the data, while `jsonparser` parse only what you need. All `ffjson`, `easysjon` and `jsonparser` have their own parsing code, and does not depend on `encoding/json` or `interface{}`, thats one of the reasons why they are so fast. `easyjson` also use a bit of `unsafe` package to reduce memory consuption (in theory it can lead to some unexpected GC issue, but i did not tested enough)
 
 Also last benchmark did not included `EachKey` test, because in this particular case we need to read lot of Array values, and using `ArrayEach` is more efficient.
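For readers of the new README section, here is a minimal, self-contained sketch of how `Delete` is meant to be called; the payload and key paths below are invented for illustration and are not part of the repository:

```go
package main

import (
	"fmt"

	"github.com/buger/jsonparser"
)

func main() {
	// Illustrative payload, not taken from the project's fixtures.
	data := []byte(`{"person":{"name":{"fullName":"Leonid"},"avatars":[{"url":"http://example.com/a.png"}]},"company":"X"}`)

	// Delete returns the (modified) original buffer, so reassign the result,
	// the same pattern the new benchmarks use.
	data = jsonparser.Delete(data, "person", "avatars", "[0]", "url")
	data = jsonparser.Delete(data, "company")

	fmt.Println(string(data))
}
```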

benchmark/benchmark_medium_payload_test.go

Lines changed: 15 additions & 1 deletion
@@ -6,6 +6,8 @@ package benchmark
 
 import (
 	"encoding/json"
+	"testing"
+
 	"github.com/Jeffail/gabs"
 	"github.com/a8m/djson"
 	"github.com/antonholmquist/jason"

@@ -15,7 +17,6 @@ import (
 	"github.com/mreiferson/go-ujson"
 	"github.com/pquerna/ffjson/ffjson"
 	"github.com/ugorji/go/codec"
-	"testing"
 	// "fmt"
 	"bytes"
 	"errors"

@@ -37,6 +38,19 @@ func BenchmarkJsonParserMedium(b *testing.B) {
 	}
 }
 
+func BenchmarkJsonParserDeleteMedium(b *testing.B) {
+	fixture := make([]byte, 0, len(mediumFixture))
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		fixture = append(fixture[:0], mediumFixture...)
+		fixture = jsonparser.Delete(fixture, "person", "name", "fullName")
+		fixture = jsonparser.Delete(fixture, "person", "github", "followers")
+		fixture = jsonparser.Delete(fixture, "company")
+
+		nothing()
+	}
+}
+
 func BenchmarkJsonParserEachKeyManualMedium(b *testing.B) {
 	paths := [][]string{
 		[]string{"person", "name", "fullName"},

benchmark/benchmark_small_payload_test.go

Lines changed: 16 additions & 1 deletion
@@ -6,6 +6,8 @@ package benchmark
 
 import (
 	"encoding/json"
+	"testing"
+
 	"github.com/Jeffail/gabs"
 	"github.com/a8m/djson"
 	"github.com/antonholmquist/jason"

@@ -15,7 +17,6 @@ import (
 	"github.com/mreiferson/go-ujson"
 	"github.com/pquerna/ffjson/ffjson"
 	"github.com/ugorji/go/codec"
-	"testing"
 	// "fmt"
 	"bytes"
 	"errors"

@@ -141,6 +142,20 @@ func BenchmarkJsonParserSetSmall(b *testing.B) {
 	}
 }
 
+func BenchmarkJsonParserDelSmall(b *testing.B) {
+	fixture := make([]byte, 0, len(smallFixture))
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		fixture = append(fixture[:0], smallFixture...)
+		fixture = jsonparser.Delete(fixture, "uuid")
+		fixture = jsonparser.Delete(fixture, "tz")
+		fixture = jsonparser.Delete(fixture, "ua")
+		fixture = jsonparser.Delete(fixture, "stt")
+
+		nothing()
+	}
+}
+
 /*
    encoding/json
 */
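If you want to run just the new Delete benchmarks, an invocation along the lines of `go test -run=^$ -bench=JsonParserDel -benchmem ./benchmark/` should pick up both `BenchmarkJsonParserDeleteMedium` and `BenchmarkJsonParserDelSmall`, assuming the `benchmark` package builds cleanly with its dependencies installed.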

bytes_safe.go

Lines changed: 4 additions & 0 deletions
@@ -19,3 +19,7 @@ func parseFloat(b *[]byte) (float64, error) {
 func bytesToString(b *[]byte) string {
 	return string(*b)
 }
+
+func StringToBytes(s string) []byte {
+	return []byte(s)
+}

bytes_unsafe.go

Lines changed: 11 additions & 0 deletions
@@ -3,6 +3,7 @@
 package jsonparser
 
 import (
+	"reflect"
 	"strconv"
 	"unsafe"
 )

@@ -29,3 +30,13 @@ func parseFloat(b *[]byte) (float64, error) {
 func bytesToString(b *[]byte) string {
 	return *(*string)(unsafe.Pointer(b))
 }
+
+func StringToBytes(s string) []byte {
+	sh := (*reflect.StringHeader)(unsafe.Pointer(&s))
+	bh := reflect.SliceHeader{
+		Data: sh.Data,
+		Len:  sh.Len,
+		Cap:  sh.Len,
+	}
+	return *(*[]byte)(unsafe.Pointer(&bh))
+}
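A note on the unsafe variant: `StringToBytes` returns a slice that aliases the string's backing array rather than copying it, so the result must be treated as read-only. Here is a small sketch of typical use; the surrounding `main` and the key comparison are illustrative, not from the repository:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/buger/jsonparser"
)

func main() {
	// rawKey stands in for a key slice found while scanning a JSON buffer.
	rawKey := []byte("person")
	key := "person"

	// Zero-copy view of the string's bytes in the unsafe build;
	// the safe build falls back to a plain []byte(s) copy.
	// Do not mutate the result: it may alias immutable string memory.
	kb := jsonparser.StringToBytes(key)

	fmt.Println(bytes.Equal(kb, rawKey)) // true
}
```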
