Improved tokenizer performance

Eduard Urbach 2024-07-24 11:55:35 +02:00
parent 89fbc233eb
commit b9dd5c1b59
Signed by: akyoto
GPG Key ID: C874F672B1AF20C0

@@ -1,7 +1,5 @@
 package token
 
-import "bytes"
-
 // Tokenize turns the file contents into a list of tokens.
 func Tokenize(buffer []byte) List {
 	var (
@@ -87,24 +85,15 @@ func Tokenize(buffer []byte) List {
 	identifier := buffer[position:i]
 	kind := Identifier
 
-	switch identifier[0] {
-	case 'i':
-		switch {
-		case bytes.Equal(identifier, []byte("if")):
-			kind = If
-		case bytes.Equal(identifier, []byte("import")):
-			kind = Import
-		}
-	case 'l':
-		switch {
-		case bytes.Equal(identifier, []byte("loop")):
-			kind = Loop
-		}
-	case 'r':
-		switch {
-		case bytes.Equal(identifier, []byte("return")):
-			kind = Return
-		}
+	switch string(identifier) {
+	case "if":
+		kind = If
+	case "import":
+		kind = Import
+	case "loop":
+		kind = Loop
+	case "return":
+		kind = Return
 	}
 
 	tokens = append(tokens, Token{Kind: kind, Position: position, Length: Length(len(identifier))})
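
Note (not part of the original commit): the likely reason this is faster is that the Go compiler recognizes a string(b) conversion used directly as a switch operand and compares the bytes in place without allocating a temporary string, so the single switch string(identifier) replaces the hand-rolled first-byte dispatch and the bytes.Equal calls at no allocation cost. A minimal benchmark sketch under that assumption; the BenchmarkKeywordKind name and the sample identifiers are hypothetical, while Identifier, If, Import, Loop, and Return are the token kinds from the diff above:

package token

import "testing"

// BenchmarkKeywordKind is a hypothetical micro-benchmark, not part of this
// commit. It exercises the keyword lookup that Tokenize performs for each
// identifier, cycling through keywords and one non-keyword.
func BenchmarkKeywordKind(b *testing.B) {
	identifiers := [][]byte{
		[]byte("if"),
		[]byte("import"),
		[]byte("loop"),
		[]byte("return"),
		[]byte("someVariable"),
	}

	for i := 0; i < b.N; i++ {
		identifier := identifiers[i%len(identifiers)]
		kind := Identifier

		// The compiler optimizes string(identifier) in a switch:
		// direct byte comparison, no string allocation.
		switch string(identifier) {
		case "if":
			kind = If
		case "import":
			kind = Import
		case "loop":
			kind = Loop
		case "return":
			kind = Return
		}

		_ = kind
	}
}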