@@ -17,8 +17,10 @@
 package main
 
 import (
+    "encoding/json"
     "fmt"
     "math/rand"
+    "os"
     "slices"
     "sort"
     "time"
@@ -41,7 +43,7 @@
     }
 )
 
-const passCount = 1
+const passCount = 3
 
 func filterPerfCmd(ctx *cli.Context) error {
     cfg := testConfigFromCLI(ctx)
@@ -61,7 +63,10 @@ func filterPerfCmd(ctx *cli.Context) error {
     }
 
     // Run test queries.
-    var failed, mismatch int
+    var (
+        failed, pruned, mismatch int
+        errors                   []*filterQuery
+    )
     for i := 1; i <= passCount; i++ {
         fmt.Println("Performance test pass", i, "/", passCount)
         for len(queries) > 0 {
@@ -71,27 +76,35 @@ func filterPerfCmd(ctx *cli.Context) error {
             queries = queries[:len(queries)-1]
             start := time.Now()
             qt.query.run(cfg.client, cfg.historyPruneBlock)
+            if qt.query.Err == errPrunedHistory {
+                pruned++
+                continue
+            }
             qt.runtime = append(qt.runtime, time.Since(start))
             slices.Sort(qt.runtime)
             qt.medianTime = qt.runtime[len(qt.runtime)/2]
             if qt.query.Err != nil {
+                qt.query.printError()
+                errors = append(errors, qt.query)
                 failed++
                 continue
             }
             if rhash := qt.query.calculateHash(); *qt.query.ResultHash != rhash {
                 fmt.Printf("Filter query result mismatch: fromBlock: %d toBlock: %d addresses: %v topics: %v expected hash: %064x calculated hash: %064x\n", qt.query.FromBlock, qt.query.ToBlock, qt.query.Address, qt.query.Topics, *qt.query.ResultHash, rhash)
+                errors = append(errors, qt.query)
+                mismatch++
                 continue
             }
             processed = append(processed, qt)
             if len(processed)%50 == 0 {
-                fmt.Println(" processed:", len(processed), "remaining", len(queries), "failed:", failed, "result mismatch:", mismatch)
+                fmt.Println(" processed:", len(processed), "remaining", len(queries), "failed:", failed, "pruned:", pruned, "result mismatch:", mismatch)
             }
         }
         queries, processed = processed, nil
     }
 
     // Show results and stats.
-    fmt.Println("Performance test finished; processed:", len(queries), "failed:", failed, "result mismatch:", mismatch)
+    fmt.Println("Performance test finished; processed:", len(queries), "failed:", failed, "pruned:", pruned, "result mismatch:", mismatch)
     stats := make([]bucketStats, len(f.queries))
     var wildcardStats bucketStats
     for _, qt := range queries {
@@ -114,11 +127,14 @@ func filterPerfCmd(ctx *cli.Context) error {
     sort.Slice(queries, func(i, j int) bool {
         return queries[i].medianTime > queries[j].medianTime
     })
-    for i := 0; i < 10; i++ {
-        q := queries[i]
+    for i, q := range queries {
+        if i >= 10 {
+            break
+        }
         fmt.Printf("Most expensive query #%-2d median runtime: %13v max runtime: %13v result count: %4d fromBlock: %9d toBlock: %9d addresses: %v topics: %v\n",
             i+1, q.medianTime, q.runtime[len(q.runtime)-1], len(q.query.results), q.query.FromBlock, q.query.ToBlock, q.query.Address, q.query.Topics)
     }
+    writeErrors(ctx.String(filterErrorFileFlag.Name), errors)
     return nil
 }
 
@@ -135,3 +151,14 @@ func (st *bucketStats) print(name string) {
     fmt.Printf("%-20s queries: %4d average block length: %12.2f average log count: %7.2f average runtime: %13v\n",
         name, st.count, float64(st.blocks)/float64(st.count), float64(st.logs)/float64(st.count), st.runtime/time.Duration(st.count))
 }
+
+// writeErrors serializes the generated errors to the error file.
+func writeErrors(errorFile string, errors []*filterQuery) {
+    file, err := os.Create(errorFile)
+    if err != nil {
+        exit(fmt.Errorf("Error creating filter error file %s: %v", errorFile, err))
+        return
+    }
+    defer file.Close()
+    json.NewEncoder(file).Encode(errors)
+}
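
The error file written by writeErrors is a single JSON array of the failed and mismatching queries. Below is a minimal sketch of how such a file could be read back for a later retry run; readErrors, the failedQuery mirror type, and the "filter_errors.json" filename are hypothetical, and the field types are assumptions based only on the fields referenced in filterPerfCmd, not on the actual filterQuery definition.

package main

import (
    "encoding/json"
    "fmt"
    "os"
)

// failedQuery mirrors only the exported filterQuery fields visible in the
// diff above; the real type in the workload package may differ.
type failedQuery struct {
    FromBlock, ToBlock uint64
    Address            []string
    Topics             [][]string
    ResultHash         *string
}

// readErrors decodes the JSON array written by writeErrors back into a
// slice of queries so a later run could retry only the failing ones.
func readErrors(errorFile string) ([]*failedQuery, error) {
    file, err := os.Open(errorFile)
    if err != nil {
        return nil, fmt.Errorf("opening filter error file %s: %w", errorFile, err)
    }
    defer file.Close()

    var queries []*failedQuery
    if err := json.NewDecoder(file).Decode(&queries); err != nil {
        return nil, fmt.Errorf("decoding filter error file %s: %w", errorFile, err)
    }
    return queries, nil
}

func main() {
    // Hypothetical default filename; the real path comes from filterErrorFileFlag.
    queries, err := readErrors("filter_errors.json")
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println("failed queries to retry:", len(queries))
}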