Skip to content

Commit

Permalink
fmt: move third-party tests into another module (#299)
Browse files Browse the repository at this point in the history
* fix: move benchmarkTests and unitTests related to external json library

* remove extra spaces

* merge go.mod files

* refactor: add external_jsonlib_test into go.work and CI

Co-authored-by: Yi Duan <[email protected]>
  • Loading branch information
cyn6315 and AsterDY authored Sep 20, 2022
1 parent ccc0f3f commit 5e54c02
Show file tree
Hide file tree
Showing 25 changed files with 1,786 additions and 830 deletions.
5 changes: 4 additions & 1 deletion .github/workflows/benchmark-linux-amd64.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,11 @@ jobs:
restore-keys: |
${{ runner.os }}-go-
- name: Benchmark
- name: Benchmark sonic
run: sh bench.sh

# - name: Benchmark third-party
# run: go test -benchmem -run=^$ -bench . -v ./generic_test

# - name: Diff
# run: ./bench.py -b '"^Benchmark.*Sonic"' -c
9 changes: 4 additions & 5 deletions .github/workflows/push-check-go118.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,9 @@ jobs:
${{ runner.os }}-go-
- name: Unit Test
run: GOMAXPROCS=4 go test -v -gcflags=-d=checkptr=0 -race ./...
run: |
GOMAXPROCS=4 go test -v -gcflags=-d=checkptr=0 -race ./...
GOMAXPROCS=4 go test -v -gcflags=-d=checkptr=0 -race ./external_jsonlib_test/...
- name: Generic Test
run: GOMAXPROCS=4 go test -v -gcflags=-d=checkptr=0 -race ./generic_test

- name: Benchmark
run: go test -benchmem -run=^$ -bench . -v ./generic_test
run: GOMAXPROCS=4 go test -v -gcflags=-d=checkptr=0 -race ./generic_test
5 changes: 4 additions & 1 deletion .github/workflows/push-check-linux-amd64.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,4 +24,7 @@ jobs:
${{ runner.os }}-go-
- name: Unit Test
run: go test -v -gcflags=-d=checkptr=0 ./...
run: |
go test -v -gcflags=-d=checkptr=0 ./...
cd ./external_jsonlib_test
go test -v -gcflags=-d=checkptr=0 ./...
226 changes: 6 additions & 220 deletions ast/parser_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,17 +17,15 @@
package ast

import (
`os`
`encoding/json`
`testing`
`time`
`os`
`runtime`
`runtime/debug`
`sync`
`testing`
`time`

jsoniter `github.com/json-iterator/go`
`github.com/stretchr/testify/assert`
`github.com/tidwall/gjson`
)

var (
Expand All @@ -43,7 +41,7 @@ func TestMain(m *testing.M) {
println("Begin GC looping...")
for {
runtime.GC()
debug.FreeOSMemory()
debug.FreeOSMemory()
}
println("stop GC looping!")
}()
Expand Down Expand Up @@ -108,7 +106,7 @@ func runDecoderTestUseNumber(t *testing.T, src string, expect interface{}) {
}
}

func n2f64(i json.Number) float64{
func n2f64(i json.Number) float64{
x, err := i.Float64()
if err != nil {
panic(err)
Expand Down Expand Up @@ -257,39 +255,6 @@ func BenchmarkParser_Sonic(b *testing.B) {
}
}

func BenchmarkParser_Gjson(b *testing.B) {
gjson.Parse(_TwitterJson).ForEach(func(key, value gjson.Result) bool {
if !value.Exists() {
b.Fatal(value.Index)
}
_ = value.Value()
return true
})
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
for i := 0; i < b.N; i++ {
gjson.Parse(_TwitterJson).ForEach(func(key, value gjson.Result) bool {
if !value.Exists() {
b.Fatal(value.Index)
}
_ = value.Value()
return true
})
}
}

func BenchmarkParser_Jsoniter(b *testing.B) {
v := jsoniter.Get([]byte(_TwitterJson)).GetInterface()
if v == nil {
b.Fatal(v)
}
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
for i := 0; i < b.N; i++ {
_ = jsoniter.Get([]byte(_TwitterJson)).GetInterface()
}
}

func BenchmarkParser_Parallel_Sonic(b *testing.B) {
r, _ := NewParser(_TwitterJson).Parse()
if err := r.LoadAll(); err != nil {
Expand All @@ -305,40 +270,6 @@ func BenchmarkParser_Parallel_Sonic(b *testing.B) {
})
}

func BenchmarkParser_Parallel_Gjson(b *testing.B) {
gjson.Parse(_TwitterJson).ForEach(func(key, value gjson.Result) bool {
if !value.Exists() {
b.Fatal(value.Index)
}
return true
})
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
gjson.Parse(_TwitterJson).ForEach(func(key, value gjson.Result) bool {
if !value.Exists() {
b.Fatal(value.Index)
}
_ = value.Value()
return true
})
}
})
}

func BenchmarkParser_Parallel_Jsoniter(b *testing.B) {
var bv = []byte(_TwitterJson)
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
var out interface{}
_ = jsoniter.Unmarshal(bv, &out)
}
})
}

func BenchmarkParseOne_Sonic(b *testing.B) {
ast, _ := NewParser(_TwitterJson).Parse()
node, _ := ast.Get("statuses").Index(2).Get("id").Int64()
Expand All @@ -353,37 +284,6 @@ func BenchmarkParseOne_Sonic(b *testing.B) {
}
}

func BenchmarkParseOne_Gjson(b *testing.B) {
ast := gjson.Parse(_TwitterJson)
node := ast.Get("statuses.2.id")
v := node.Int()
if v != 249289491129438208 {
b.Fatal(node)
}
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
for i := 0; i < b.N; i++ {
ast := gjson.Parse(_TwitterJson)
node := ast.Get("statuses.2.id")
_ = node.Int()
}
}

func BenchmarkParseOne_Jsoniter(b *testing.B) {
data := []byte(_TwitterJson)
ast := jsoniter.Get(data, "statuses", 2, "id")
node := ast.ToInt()
if node != 249289491129438208 {
b.Fail()
}
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
for i := 0; i < b.N; i++ {
ast := jsoniter.Get(data, "statuses", 2, "id")
_ = ast.ToInt()
}
}

func BenchmarkParseOne_Parallel_Sonic(b *testing.B) {
ast, _ := NewParser(_TwitterJson).Parse()
node, _ := ast.Get("statuses").Index(2).Get("id").Int64()
Expand All @@ -400,42 +300,6 @@ func BenchmarkParseOne_Parallel_Sonic(b *testing.B) {
})
}

func BenchmarkParseOne_Parallel_Gjson(b *testing.B) {
ast := gjson.Parse(_TwitterJson)
node := ast.Get("statuses.2.id")
v := node.Int()
if v != 249289491129438208 {
b.Fatal(node)
}
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
ast := gjson.Parse(_TwitterJson)
node := ast.Get("statuses.2.id")
_ = node.Int()
}
})
}

func BenchmarkParseOne_Parallel_Jsoniter(b *testing.B) {
data := []byte(_TwitterJson)
ast := jsoniter.Get(data, "statuses", 2, "id")
node := ast.ToInt()
if node != 249289491129438208 {
b.Fail()
}
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
data := []byte(_TwitterJson)
ast := jsoniter.Get(data, "statuses", 2, "id")
_ = ast.ToInt()
}
})
}

func BenchmarkParseSeven_Sonic(b *testing.B) {
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
Expand All @@ -454,43 +318,6 @@ func BenchmarkParseSeven_Sonic(b *testing.B) {
}
}

func BenchmarkParseSeven_Gjson(b *testing.B) {
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
for i := 0; i < b.N; i++ {
ast := gjson.Parse(_TwitterJson)
node := ast.Get("statuses.3.id")
node = ast.Get( "statuses.3.user.entities.description")
node = ast.Get( "statuses.3.user.entities.url.urls")
node = ast.Get( "statuses.3.user.entities.url")
node = ast.Get( "statuses.3.user.created_at")
node = ast.Get( "statuses.3.user.name")
node = ast.Get( "statuses.3.text")
if node.Value() == nil {
b.Fail()
}
}
}

func BenchmarkParseSeven_Jsoniter(b *testing.B) {
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
data := []byte(_TwitterJson)
for i := 0; i < b.N; i++ {
ast := jsoniter.Get(data)
node := ast.Get("statuses", 3, "id")
node = ast.Get("statuses", 3, "user", "entities","description")
node = ast.Get("statuses", 3, "user", "entities","url","urls")
node = ast.Get("statuses", 3, "user", "entities","url")
node = ast.Get("statuses", 3, "user", "created_at")
node = ast.Get("statuses", 3, "user", "name")
node = ast.Get("statuses", 3, "text")
if node.LastError() != nil {
b.Fail()
}
}
}

func BenchmarkParseSeven_Parallel_Sonic(b *testing.B) {
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
Expand All @@ -509,45 +336,4 @@ func BenchmarkParseSeven_Parallel_Sonic(b *testing.B) {
}
}
})
}

func BenchmarkParseSeven_Parallel_Gjson(b *testing.B) {
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
ast := gjson.Parse(_TwitterJson)
node := ast.Get("statuses.3.id")
node = ast.Get( "statuses.3.user.entities.description")
node = ast.Get( "statuses.3.user.entities.url.urls")
node = ast.Get( "statuses.3.user.entities.url")
node = ast.Get( "statuses.3.user.created_at")
node = ast.Get( "statuses.3.user.name")
node = ast.Get( "statuses.3.text")
if node.Value() == nil {
b.Fail()
}
}
})
}

func BenchmarkParseSeven_Parallel_Jsoniter(b *testing.B) {
b.SetBytes(int64(len(_TwitterJson)))
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
data := []byte(_TwitterJson)
ast := jsoniter.Get(data)
node := ast.Get("statuses", 3, "id")
node = ast.Get("statuses", 3, "user", "entities","description")
node = ast.Get("statuses", 3, "user", "entities","url","urls")
node = ast.Get("statuses", 3, "user", "entities","url")
node = ast.Get("statuses", 3, "user", "created_at")
node = ast.Get("statuses", 3, "user", "name")
node = ast.Get("statuses", 3, "text")
if node.LastError() != nil {
b.Fail()
}
}
})
}
}
Loading

0 comments on commit 5e54c02

Please sign in to comment.