Added more tokenizer tests

This commit is contained in:
Eduard Urbach 2025-01-31 11:42:34 +01:00
parent be1b8723f4
commit eba0176ae6
Signed by: akyoto
GPG Key ID: C874F672B1AF20C0
2 changed files with 89 additions and 7 deletions

View File

@ -228,10 +228,10 @@ func Tokenize(buffer []byte) List {
// kind = ReturnType
case ".":
kind = Period
case "/":
kind = Div
case "/=":
kind = DivAssign
// case "/":
// kind = Div
// case "/=":
// kind = DivAssign
case ":=":
kind = Define
case "<":

View File

@ -25,7 +25,7 @@ func TestFunction(t *testing.T) {
}
func TestKeyword(t *testing.T) {
tokens := token.Tokenize([]byte("assert if import else loop return"))
tokens := token.Tokenize([]byte("assert if import else loop return switch"))
expected := []token.Kind{
token.Assert,
@ -34,6 +34,7 @@ func TestKeyword(t *testing.T) {
token.Else,
token.Loop,
token.Return,
token.Switch,
token.EOF,
}
@ -87,7 +88,7 @@ func TestNumber(t *testing.T) {
}
func TestOperator(t *testing.T) {
tokens := token.Tokenize([]byte(`a + b - c * d / e`))
tokens := token.Tokenize([]byte(`a + b - c * d / e % f << g >> h & i | j ^ k`))
expected := []token.Kind{
token.Identifier,
@ -99,6 +100,18 @@ func TestOperator(t *testing.T) {
token.Identifier,
token.Div,
token.Identifier,
token.Mod,
token.Identifier,
token.Shl,
token.Identifier,
token.Shr,
token.Identifier,
token.And,
token.Identifier,
token.Or,
token.Identifier,
token.Xor,
token.Identifier,
token.EOF,
}
@ -108,7 +121,7 @@ func TestOperator(t *testing.T) {
}
func TestOperatorAssign(t *testing.T) {
tokens := token.Tokenize([]byte(`a += b -= c *= d /= e &= f |= g ^= h <<= i >>= j`))
tokens := token.Tokenize([]byte(`a += b -= c *= d /= e %= f &= g |= h ^= i <<= j >>= k`))
expected := []token.Kind{
token.Identifier,
@ -120,6 +133,8 @@ func TestOperatorAssign(t *testing.T) {
token.Identifier,
token.DivAssign,
token.Identifier,
token.ModAssign,
token.Identifier,
token.AndAssign,
token.Identifier,
token.OrAssign,
@ -138,6 +153,31 @@ func TestOperatorAssign(t *testing.T) {
}
}
// TestOperatorEquality verifies that all comparison operators
// are tokenized correctly when separated by identifiers.
func TestOperatorEquality(t *testing.T) {
	tokens := token.Tokenize([]byte(`a == b != c <= d >= e < f > g`))

	// The source alternates identifiers with comparison operators,
	// so build the expected token stream from the operator list.
	operators := []token.Kind{
		token.Equal,
		token.NotEqual,
		token.LessEqual,
		token.GreaterEqual,
		token.Less,
		token.Greater,
	}

	expected := make([]token.Kind, 0, 2*len(operators)+2)
	expected = append(expected, token.Identifier)

	for _, operator := range operators {
		expected = append(expected, operator, token.Identifier)
	}

	expected = append(expected, token.EOF)

	for i, kind := range expected {
		assert.Equal(t, tokens[i].Kind, kind)
	}
}
func TestNegateFirstToken(t *testing.T) {
tokens := token.Tokenize([]byte(`-a`))
@ -409,3 +449,45 @@ func TestStringEOF(t *testing.T) {
assert.Equal(t, tokens[i].Kind, kind)
}
}
// TestReturnType checks that "()->" is tokenized as a group
// followed by the return type arrow.
func TestReturnType(t *testing.T) {
	tokens := token.Tokenize([]byte("()->"))

	for i, kind := range []token.Kind{
		token.GroupStart,
		token.GroupEnd,
		token.ReturnType,
		token.EOF,
	} {
		assert.Equal(t, tokens[i].Kind, kind)
	}
}
// TestMinusAtEOF checks that a trailing minus sign at the end of
// the input is tokenized as a subtraction operator.
func TestMinusAtEOF(t *testing.T) {
	tokens := token.Tokenize([]byte("1-"))

	for i, kind := range []token.Kind{
		token.Number,
		token.Sub,
		token.EOF,
	} {
		assert.Equal(t, tokens[i].Kind, kind)
	}
}
// TestRune checks that a single-quoted character literal
// is tokenized as a rune token.
func TestRune(t *testing.T) {
	tokens := token.Tokenize([]byte("'a'"))

	for i, kind := range []token.Kind{
		token.Rune,
		token.EOF,
	} {
		assert.Equal(t, tokens[i].Kind, kind)
	}
}