Improved tokenizer performance

This commit is contained in:
Eduard Urbach 2024-07-24 11:50:23 +02:00
parent 975b4711d3
commit 89fbc233eb
Signed by: akyoto
GPG Key ID: C874F672B1AF20C0
2 changed files with 20 additions and 12 deletions

View File

@ -1,9 +0,0 @@
package token
// Keywords maps the source text of every language keyword to its token Kind.
// During tokenization, an identifier found in this map is re-tagged with the
// corresponding keyword kind instead of the generic Identifier kind.
var Keywords = map[string]Kind{
"if": If,
"import": Import,
"loop": Loop,
"return": Return,
}

View File

@ -1,5 +1,7 @@
package token
import "bytes"
// Tokenize turns the file contents into a list of tokens.
func Tokenize(buffer []byte) List {
var (
@ -84,10 +86,25 @@ func Tokenize(buffer []byte) List {
identifier := buffer[position:i]
kind := Identifier
keyword, isKeyword := Keywords[string(identifier)]
if isKeyword {
kind = keyword
switch identifier[0] {
case 'i':
switch {
case bytes.Equal(identifier, []byte("if")):
kind = If
case bytes.Equal(identifier, []byte("import")):
kind = Import
}
case 'l':
switch {
case bytes.Equal(identifier, []byte("loop")):
kind = Loop
}
case 'r':
switch {
case bytes.Equal(identifier, []byte("return")):
kind = Return
}
}
tokens = append(tokens, Token{Kind: kind, Position: position, Length: Length(len(identifier))})