Improved Windows support in the tokenizer: treat '\r' (carriage return) as skippable whitespace so CRLF line endings are tokenized correctly

This commit is contained in:
2025-04-18 14:48:59 +02:00
parent a023b058f8
commit 9107a06df5

View File

@ -9,7 +9,7 @@ func Tokenize(buffer []byte) List {
for i < Position(len(buffer)) {
switch buffer[i] {
case ' ', '\t':
case ' ', '\t', '\r':
case ',':
tokens = append(tokens, Token{Kind: Separator, Position: i, Length: 1})
case '(':