diff --git a/src/build/token/Keywords.go b/src/build/token/Keywords.go
deleted file mode 100644
index c75fd75..0000000
--- a/src/build/token/Keywords.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package token
-
-// Keywords is a map of all keywords used in the language.
-var Keywords = map[string]Kind{
-	"if":     If,
-	"import": Import,
-	"loop":   Loop,
-	"return": Return,
-}
diff --git a/src/build/token/Tokenize.go b/src/build/token/Tokenize.go
index f0d282c..05fa306 100644
--- a/src/build/token/Tokenize.go
+++ b/src/build/token/Tokenize.go
@@ -1,5 +1,7 @@
 package token
 
+import "bytes"
+
 // Tokenize turns the file contents into a list of tokens.
 func Tokenize(buffer []byte) List {
 	var (
@@ -84,10 +86,25 @@ func Tokenize(buffer []byte) List {
 			identifier := buffer[position:i]
 			kind := Identifier
 
-			keyword, isKeyword := Keywords[string(identifier)]
-			if isKeyword {
-				kind = keyword
+			switch identifier[0] {
+			case 'i':
+				switch {
+				case bytes.Equal(identifier, []byte("if")):
+					kind = If
+				case bytes.Equal(identifier, []byte("import")):
+					kind = Import
+				}
+			case 'l':
+				switch {
+				case bytes.Equal(identifier, []byte("loop")):
+					kind = Loop
+				}
+			case 'r':
+				switch {
+				case bytes.Equal(identifier, []byte("return")):
+					kind = Return
+				}
 			}
 
 			tokens = append(tokens, Token{Kind: kind, Position: position, Length: Length(len(identifier))})
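
A minimal benchmark sketch of the two keyword-lookup strategies this diff trades between, for anyone who wants to verify the change locally. It is not part of the patch: the names isKeywordMap, isKeywordSwitch, and inputs are hypothetical helpers, and Kind, Identifier, If, Import, Loop, and Return are assumed to be defined in the token package as in the original files.

package token

import (
	"bytes"
	"testing"
)

// keywords mirrors the table from the deleted Keywords.go.
var keywords = map[string]Kind{
	"if":     If,
	"import": Import,
	"loop":   Loop,
	"return": Return,
}

// isKeywordMap is the old strategy: hash the identifier and look it up.
// Go elides the []byte-to-string allocation in a map index expression,
// so the remaining per-call cost is the hash and bucket probe.
func isKeywordMap(identifier []byte) Kind {
	if keyword, isKeyword := keywords[string(identifier)]; isKeyword {
		return keyword
	}
	return Identifier
}

// isKeywordSwitch is the new strategy: dispatch on the first byte, then
// compare against the few keywords that start with that byte.
func isKeywordSwitch(identifier []byte) Kind {
	switch identifier[0] {
	case 'i':
		switch {
		case bytes.Equal(identifier, []byte("if")):
			return If
		case bytes.Equal(identifier, []byte("import")):
			return Import
		}
	case 'l':
		if bytes.Equal(identifier, []byte("loop")) {
			return Loop
		}
	case 'r':
		if bytes.Equal(identifier, []byte("return")) {
			return Return
		}
	}
	return Identifier
}

// inputs mixes keywords with a plain identifier, roughly like real source.
var inputs = [][]byte{
	[]byte("if"),
	[]byte("import"),
	[]byte("loop"),
	[]byte("return"),
	[]byte("myVariable"),
}

func BenchmarkKeywordMap(b *testing.B) {
	for i := 0; i < b.N; i++ {
		isKeywordMap(inputs[i%len(inputs)])
	}
}

func BenchmarkKeywordSwitch(b *testing.B) {
	for i := 0; i < b.N; i++ {
		isKeywordSwitch(inputs[i%len(inputs)])
	}
}

Run with "go test -bench Keyword" inside the token package directory. With only four keywords the first-byte switch needs at most two byte-slice comparisons and no hashing, which is why the patch can drop the map entirely; if the keyword set grows, the map (or a perfect-hash table) may become attractive again.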