Implemented error messages

2024-06-13 12:13:32 +02:00
parent 2d990b0bee
commit 9458253f31
16 changed files with 362 additions and 60 deletions

View File

@@ -7,6 +7,9 @@ const (
// Invalid represents an invalid token.
Invalid Kind = iota
// EOF represents the end of file.
EOF
// NewLine represents the newline character.
NewLine
@@ -54,6 +57,7 @@ const (
func (kind Kind) String() string {
return [...]string{
"Invalid",
"EOF",
"NewLine",
"Identifier",
"Keyword",

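As a quick illustration (a sketch, not part of the commit): because the Kind constants are declared with `iota`, the `String` lookup table must stay in the same order as the constants, which is why the new `"EOF"` entry sits directly after `"Invalid"`. The helper below is hypothetical and assumes it lives inside the same `token` package.

```go
// describeEOF is a hypothetical helper inside the token package.
// It relies on the String table entry added above, so EOF.String()
// returns "EOF" in line with the iota-ordered Kind constants.
func describeEOF() string {
	return "kind " + EOF.String() // "kind EOF"
}
```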
View File

@@ -35,6 +35,11 @@ func TestFunction(t *testing.T) {
Bytes: []byte("}"),
Position: 7,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
}
@@ -51,6 +56,11 @@ func TestKeyword(t *testing.T) {
Bytes: []byte("x"),
Position: 7,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
}
@@ -77,6 +87,11 @@ func TestArray(t *testing.T) {
Bytes: []byte("]"),
Position: 7,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
}
@@ -93,6 +108,11 @@ func TestNewline(t *testing.T) {
Bytes: []byte("\n"),
Position: 1,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 2,
},
})
}
@@ -109,6 +129,11 @@ func TestNumber(t *testing.T) {
Bytes: []byte("-456"),
Position: 4,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
}
@@ -140,6 +165,11 @@ func TestSeparator(t *testing.T) {
Bytes: []byte("c"),
Position: 4,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 5,
},
})
}
@@ -156,6 +186,11 @@ func TestString(t *testing.T) {
Bytes: []byte(`"World"`),
Position: 8,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 15,
},
})
}
@@ -167,6 +202,11 @@ func TestStringMultiline(t *testing.T) {
Bytes: []byte("\"Hello\nWorld\""),
Position: 0,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 13,
},
})
}
@@ -178,6 +218,11 @@ func TestStringEOF(t *testing.T) {
Bytes: []byte(`"EOF`),
Position: 0,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 4,
},
})
}
@@ -195,6 +240,7 @@ func TestTokenText(t *testing.T) {
func TestTokenKind(t *testing.T) {
assert.Equal(t, token.Invalid.String(), "Invalid")
assert.Equal(t, token.EOF.String(), "EOF")
assert.Equal(t, token.NewLine.String(), "NewLine")
assert.Equal(t, token.Identifier.String(), "Identifier")
assert.Equal(t, token.Keyword.String(), "Keyword")

View File

@@ -31,6 +31,7 @@ func Tokenize(buffer []byte) List {
for i < len(buffer) {
if buffer[i] == '"' {
end = i + 1
i++
break
}
@@ -43,6 +44,8 @@ func Tokenize(buffer []byte) List {
buffer[start:end],
})
continue
// Parentheses start
case '(':
tokens = append(tokens, Token{GroupStart, i, groupStartBytes})
@@ -121,6 +124,7 @@ func Tokenize(buffer []byte) List {
i++
}
tokens = append(tokens, Token{EOF, i, nil})
return tokens
}
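A short usage sketch (assuming `List` is a slice of `Token`, as the `append` calls suggest, and that the code sits in the same `token` package as the tests above): with this change, `Tokenize` always terminates its result with an `EOF` token carrying nil bytes, which is exactly what the updated test expectations assert.

```go
// endsWithEOF is a hypothetical helper inside the token package.
// Tokenize now appends Token{EOF, i, nil} after the main loop, so the
// last element of every result is an EOF token with nil Bytes.
func endsWithEOF(buffer []byte) bool {
	tokens := Tokenize(buffer)
	last := tokens[len(tokens)-1]
	return last.Kind == EOF && last.Bytes == nil
}
```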