From 6852cbb24e866cc382035cb94de844d78e3f00b8 Mon Sep 17 00:00:00 2001
From: Eduard Urbach
Date: Fri, 28 Jun 2024 19:33:02 +0200
Subject: [PATCH] Improved tokenizer benchmarks

---
 src/build/token/bench_test.go | 19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

diff --git a/src/build/token/bench_test.go b/src/build/token/bench_test.go
index b6db849..3ec6007 100644
--- a/src/build/token/bench_test.go
+++ b/src/build/token/bench_test.go
@@ -1,15 +1,26 @@
 package token_test
 
 import (
+	"bytes"
 	"testing"
 
 	"git.akyoto.dev/cli/q/src/build/token"
 )
 
-func BenchmarkTokenize(b *testing.B) {
-	input := []byte("hello := 123\nworld := 456")
+func BenchmarkLines(b *testing.B) {
+	b.Run("__1", bench(1))
+	b.Run("_10", bench(10))
+	b.Run("100", bench(100))
+}
 
-	for i := 0; i < b.N; i++ {
-		token.Tokenize(input)
+func bench(n int) func(b *testing.B) {
+	line := []byte("hello := 123\n")
+
+	return func(b *testing.B) {
+		input := bytes.Repeat(line, n)
+
+		for i := 0; i < b.N; i++ {
+			token.Tokenize(input)
+		}
 	}
 }
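
Usage note (assuming the repository root is also the Go module root, which is not stated in the patch): the new sub-benchmarks can be run with the standard Go tooling, e.g.

    go test -bench=BenchmarkLines -benchmem ./src/build/token

The underscore padding in the sub-benchmark names ("__1", "_10", "100") keeps the names the same width, so the benchmark output columns stay aligned when comparing the 1-, 10-, and 100-line inputs.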