Improved tokenizer benchmarks

This commit is contained in:
Eduard Urbach 2024-06-28 19:33:02 +02:00
parent 98f5f021f0
commit 6852cbb24e
Signed by: akyoto
GPG Key ID: C874F672B1AF20C0

View File

@ -1,15 +1,26 @@
package token_test
import (
"bytes"
"testing"
"git.akyoto.dev/cli/q/src/build/token"
)
func BenchmarkTokenize(b *testing.B) {
input := []byte("hello := 123\nworld := 456")
// BenchmarkLines measures tokenization across inputs of 1, 10 and 100
// lines. Sub-benchmark names are padded so the output columns align.
func BenchmarkLines(b *testing.B) {
	cases := []struct {
		name  string
		lines int
	}{
		{"__1", 1},
		{"_10", 10},
		{"100", 100},
	}

	for _, c := range cases {
		b.Run(c.name, bench(c.lines))
	}
}
for i := 0; i < b.N; i++ {
token.Tokenize(input)
// bench builds a benchmark function that tokenizes an input made of
// n copies of a single assignment line. The input is constructed once
// per sub-benchmark, outside the timed loop.
func bench(n int) func(b *testing.B) {
	single := []byte("hello := 123\n")

	return func(b *testing.B) {
		source := bytes.Repeat(single, n)

		for i := 0; i < b.N; i++ {
			token.Tokenize(source)
		}
	}
}