Improved tokenizer test coverage

This commit is contained in:
Eduard Urbach 2024-07-16 23:32:39 +02:00
parent f9d72fe490
commit 8ec0e02dbe
Signed by: akyoto
GPG Key ID: C874F672B1AF20C0
5 changed files with 489 additions and 414 deletions

View File

@ -0,0 +1,16 @@
package token_test
import (
"testing"
"git.akyoto.dev/cli/q/src/build/token"
"git.akyoto.dev/go/assert"
)
// TestCount verifies that Count tallies identifier tokens by their text,
// returning 0 for identifiers that never occur in the input.
func TestCount(t *testing.T) {
	tokens := token.Tokenize([]byte(`a b b c c c`))

	for _, c := range []struct {
		name  string
		count int
	}{
		{"a", 1},
		{"b", 2},
		{"c", 3},
		{"d", 0}, // absent identifier
	} {
		assert.Equal(t, token.Count(tokens, token.Identifier, c.name), c.count)
	}
}

View File

@ -0,0 +1,27 @@
package token_test
import (
"testing"
"git.akyoto.dev/cli/q/src/build/token"
"git.akyoto.dev/go/assert"
)
// TestTokenKind verifies the human-readable name of every token kind.
func TestTokenKind(t *testing.T) {
	for _, c := range []struct {
		kind interface{ String() string }
		name string
	}{
		{token.Invalid, "Invalid"},
		{token.EOF, "EOF"},
		{token.NewLine, "NewLine"},
		{token.Identifier, "Identifier"},
		{token.Keyword, "Keyword"},
		{token.String, "String"},
		{token.Number, "Number"},
		{token.Operator, "Operator"},
		{token.Separator, "Separator"},
		{token.Comment, "Comment"},
		{token.GroupStart, "GroupStart"},
		{token.GroupEnd, "GroupEnd"},
		{token.BlockStart, "BlockStart"},
		{token.BlockEnd, "BlockEnd"},
		{token.ArrayStart, "ArrayStart"},
		{token.ArrayEnd, "ArrayEnd"},
	} {
		assert.Equal(t, c.kind.String(), c.name)
	}
}

View File

@ -0,0 +1,18 @@
package token_test
import (
"testing"
"git.akyoto.dev/cli/q/src/build/token"
"git.akyoto.dev/go/assert"
)
// TestIndexKind checks that IndexKind reports the first index of a token
// of the given kind, LastIndexKind reports the last such index, and both
// return -1 when no token of that kind exists.
func TestIndexKind(t *testing.T) {
	// "a{{}}" yields: Identifier(0), BlockStart(1), BlockStart(2),
	// BlockEnd(3), BlockEnd(4) — per the index assertions below.
	tokens := token.Tokenize([]byte("a{{}}"))

	// The input contains no newline, so both searches miss.
	assert.Equal(t, tokens.IndexKind(token.NewLine), -1)
	assert.Equal(t, tokens.LastIndexKind(token.NewLine), -1)

	assert.Equal(t, tokens.IndexKind(token.BlockStart), 1)
	assert.Equal(t, tokens.LastIndexKind(token.BlockStart), 2)
	assert.Equal(t, tokens.IndexKind(token.BlockEnd), 3)
	assert.Equal(t, tokens.LastIndexKind(token.BlockEnd), 4)
}

View File

@ -7,406 +7,37 @@ import (
"git.akyoto.dev/go/assert"
)
func TestFunction(t *testing.T) {
tokens := token.Tokenize([]byte("main(){}"))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Identifier,
Bytes: []byte("main"),
Position: 0,
},
{
Kind: token.GroupStart,
Bytes: []byte("("),
Position: 4,
},
{
Kind: token.GroupEnd,
Bytes: []byte(")"),
Position: 5,
},
{
Kind: token.BlockStart,
Bytes: []byte("{"),
Position: 6,
},
{
Kind: token.BlockEnd,
Bytes: []byte("}"),
Position: 7,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
// TestTokenEnd checks that End reports the offset one past the token's
// last byte: a 5-byte token starting at position 0 ends at 5.
func TestTokenEnd(t *testing.T) {
	hello := token.Token{
		Kind:     token.Identifier,
		Bytes:    []byte("hello"),
		Position: 0,
	}
	assert.Equal(t, hello.End(), 5)
}
func TestKeyword(t *testing.T) {
tokens := token.Tokenize([]byte("return x"))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Keyword,
Bytes: []byte("return"),
Position: 0,
},
{
Kind: token.Identifier,
Bytes: []byte("x"),
Position: 7,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
// TestTokenReset checks that Reset returns a populated token to its zero
// state: nil bytes, position 0, and the Invalid kind.
func TestTokenReset(t *testing.T) {
	hello := token.Token{
		Kind:     token.Identifier,
		Bytes:    []byte("hello"),
		Position: 1, // non-zero so the reset of Position is observable
	}
	hello.Reset()
	assert.Nil(t, hello.Bytes)
	assert.Equal(t, hello.Position, 0)
	assert.Equal(t, hello.Kind, token.Invalid)
}
func TestArray(t *testing.T) {
tokens := token.Tokenize([]byte("array[i]"))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Identifier,
Bytes: []byte("array"),
Position: 0,
},
{
Kind: token.ArrayStart,
Bytes: []byte("["),
Position: 5,
},
{
Kind: token.Identifier,
Bytes: []byte("i"),
Position: 6,
},
{
Kind: token.ArrayEnd,
Bytes: []byte("]"),
Position: 7,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
}
func TestTokenString(t *testing.T) {
hello := token.Token{
Kind: token.Identifier,
Bytes: []byte("hello"),
Position: 0,
}
func TestCount(t *testing.T) {
tokens := token.Tokenize([]byte(`a b b c c c`))
assert.Equal(t, token.Count(tokens, token.Identifier, "a"), 1)
assert.Equal(t, token.Count(tokens, token.Identifier, "b"), 2)
assert.Equal(t, token.Count(tokens, token.Identifier, "c"), 3)
assert.Equal(t, token.Count(tokens, token.Identifier, "d"), 0)
}
func TestNewline(t *testing.T) {
tokens := token.Tokenize([]byte("\n\n"))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.NewLine,
Bytes: []byte("\n"),
Position: 0,
},
{
Kind: token.NewLine,
Bytes: []byte("\n"),
Position: 1,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 2,
},
})
}
func TestNumber(t *testing.T) {
tokens := token.Tokenize([]byte(`123 456`))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Number,
Bytes: []byte("123"),
Position: 0,
},
{
Kind: token.Number,
Bytes: []byte("456"),
Position: 4,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 7,
},
})
}
func TestOperator(t *testing.T) {
tokens := token.Tokenize([]byte(`+ - * /`))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Operator,
Bytes: []byte("+"),
Position: 0,
},
{
Kind: token.Operator,
Bytes: []byte("-"),
Position: 2,
},
{
Kind: token.Operator,
Bytes: []byte("*"),
Position: 4,
},
{
Kind: token.Operator,
Bytes: []byte("/"),
Position: 6,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 7,
},
})
}
func TestOperatorAssign(t *testing.T) {
tokens := token.Tokenize([]byte(`+= -= *= /= ==`))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Operator,
Bytes: []byte("+="),
Position: 0,
},
{
Kind: token.Operator,
Bytes: []byte("-="),
Position: 3,
},
{
Kind: token.Operator,
Bytes: []byte("*="),
Position: 6,
},
{
Kind: token.Operator,
Bytes: []byte("/="),
Position: 9,
},
{
Kind: token.Operator,
Bytes: []byte("=="),
Position: 12,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 14,
},
})
}
func TestSeparator(t *testing.T) {
tokens := token.Tokenize([]byte("a,b,c"))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Identifier,
Bytes: []byte("a"),
Position: 0,
},
{
Kind: token.Separator,
Bytes: []byte(","),
Position: 1,
},
{
Kind: token.Identifier,
Bytes: []byte("b"),
Position: 2,
},
{
Kind: token.Separator,
Bytes: []byte(","),
Position: 3,
},
{
Kind: token.Identifier,
Bytes: []byte("c"),
Position: 4,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 5,
},
})
}
func TestComment(t *testing.T) {
tokens := token.Tokenize([]byte("// Hello\n// World"))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Comment,
Bytes: []byte(`// Hello`),
Position: 0,
},
{
Kind: token.NewLine,
Bytes: []byte("\n"),
Position: 8,
},
{
Kind: token.Comment,
Bytes: []byte(`// World`),
Position: 9,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 17,
},
})
tokens = token.Tokenize([]byte("// Hello\n"))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Comment,
Bytes: []byte(`// Hello`),
Position: 0,
},
{
Kind: token.NewLine,
Bytes: []byte("\n"),
Position: 8,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 9,
},
})
tokens = token.Tokenize([]byte(`// Hello`))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Comment,
Bytes: []byte(`// Hello`),
Position: 0,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
tokens = token.Tokenize([]byte(`//`))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Comment,
Bytes: []byte(`//`),
Position: 0,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 2,
},
})
tokens = token.Tokenize([]byte(`/`))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Operator,
Bytes: []byte(`/`),
Position: 0,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 1,
},
})
}
func TestInvalid(t *testing.T) {
tokens := token.Tokenize([]byte(`@#`))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.Invalid,
Bytes: []byte(`@`),
Position: 0,
},
{
Kind: token.Invalid,
Bytes: []byte(`#`),
Position: 1,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 2,
},
})
}
func TestString(t *testing.T) {
tokens := token.Tokenize([]byte(`"Hello" "World"`))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.String,
Bytes: []byte(`"Hello"`),
Position: 0,
},
{
Kind: token.String,
Bytes: []byte(`"World"`),
Position: 8,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 15,
},
})
}
func TestStringMultiline(t *testing.T) {
tokens := token.Tokenize([]byte("\"Hello\nWorld\""))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.String,
Bytes: []byte("\"Hello\nWorld\""),
Position: 0,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 13,
},
})
}
func TestStringEOF(t *testing.T) {
tokens := token.Tokenize([]byte(`"EOF`))
assert.DeepEqual(t, tokens, token.List{
{
Kind: token.String,
Bytes: []byte(`"EOF`),
Position: 0,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 4,
},
})
assert.Equal(t, hello.String(), "Identifier hello")
}
func TestTokenText(t *testing.T) {
@ -420,22 +51,3 @@ func TestTokenText(t *testing.T) {
list := token.List{hello, comma, world}
assert.Equal(t, list.String(), "hello, world")
}
func TestTokenKind(t *testing.T) {
assert.Equal(t, token.Invalid.String(), "Invalid")
assert.Equal(t, token.EOF.String(), "EOF")
assert.Equal(t, token.NewLine.String(), "NewLine")
assert.Equal(t, token.Identifier.String(), "Identifier")
assert.Equal(t, token.Keyword.String(), "Keyword")
assert.Equal(t, token.String.String(), "String")
assert.Equal(t, token.Number.String(), "Number")
assert.Equal(t, token.Operator.String(), "Operator")
assert.Equal(t, token.Separator.String(), "Separator")
assert.Equal(t, token.Comment.String(), "Comment")
assert.Equal(t, token.GroupStart.String(), "GroupStart")
assert.Equal(t, token.GroupEnd.String(), "GroupEnd")
assert.Equal(t, token.BlockStart.String(), "BlockStart")
assert.Equal(t, token.BlockEnd.String(), "BlockEnd")
assert.Equal(t, token.ArrayStart.String(), "ArrayStart")
assert.Equal(t, token.ArrayEnd.String(), "ArrayEnd")
}

View File

@ -0,0 +1,402 @@
package token_test
import (
"testing"
"git.akyoto.dev/cli/q/src/build/token"
"git.akyoto.dev/go/assert"
)
// TestFunction ensures a minimal function definition is split into
// identifier, group and block delimiters, followed by an EOF token
// positioned one past the end of the input.
func TestFunction(t *testing.T) {
	src := []byte("main(){}")

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.Identifier, Bytes: []byte("main"), Position: 0},
		{Kind: token.GroupStart, Bytes: []byte("("), Position: 4},
		{Kind: token.GroupEnd, Bytes: []byte(")"), Position: 5},
		{Kind: token.BlockStart, Bytes: []byte("{"), Position: 6},
		{Kind: token.BlockEnd, Bytes: []byte("}"), Position: 7},
		{Kind: token.EOF, Bytes: nil, Position: 8},
	})
}
// TestKeyword ensures keywords are classified separately from plain
// identifiers.
func TestKeyword(t *testing.T) {
	src := []byte("return x")

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.Keyword, Bytes: []byte("return"), Position: 0},
		{Kind: token.Identifier, Bytes: []byte("x"), Position: 7},
		{Kind: token.EOF, Bytes: nil, Position: 8},
	})
}
// TestArray ensures square brackets produce ArrayStart/ArrayEnd tokens
// around the index expression.
func TestArray(t *testing.T) {
	src := []byte("array[i]")

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.Identifier, Bytes: []byte("array"), Position: 0},
		{Kind: token.ArrayStart, Bytes: []byte("["), Position: 5},
		{Kind: token.Identifier, Bytes: []byte("i"), Position: 6},
		{Kind: token.ArrayEnd, Bytes: []byte("]"), Position: 7},
		{Kind: token.EOF, Bytes: nil, Position: 8},
	})
}
// TestNewline ensures each newline byte becomes its own NewLine token.
func TestNewline(t *testing.T) {
	src := []byte("\n\n")

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.NewLine, Bytes: []byte("\n"), Position: 0},
		{Kind: token.NewLine, Bytes: []byte("\n"), Position: 1},
		{Kind: token.EOF, Bytes: nil, Position: 2},
	})
}
// TestNumber ensures digit runs separated by whitespace become distinct
// Number tokens with correct positions.
func TestNumber(t *testing.T) {
	src := []byte(`123 456`)

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.Number, Bytes: []byte("123"), Position: 0},
		{Kind: token.Number, Bytes: []byte("456"), Position: 4},
		{Kind: token.EOF, Bytes: nil, Position: 7},
	})
}
// TestOperator ensures the four basic arithmetic operators are each
// tokenized as a single-byte Operator token.
func TestOperator(t *testing.T) {
	src := []byte(`+ - * /`)

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.Operator, Bytes: []byte("+"), Position: 0},
		{Kind: token.Operator, Bytes: []byte("-"), Position: 2},
		{Kind: token.Operator, Bytes: []byte("*"), Position: 4},
		{Kind: token.Operator, Bytes: []byte("/"), Position: 6},
		{Kind: token.EOF, Bytes: nil, Position: 7},
	})
}
// TestOperatorAssign ensures two-character operators (compound assignment
// and equality) are kept together as single Operator tokens.
func TestOperatorAssign(t *testing.T) {
	src := []byte(`+= -= *= /= ==`)

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.Operator, Bytes: []byte("+="), Position: 0},
		{Kind: token.Operator, Bytes: []byte("-="), Position: 3},
		{Kind: token.Operator, Bytes: []byte("*="), Position: 6},
		{Kind: token.Operator, Bytes: []byte("/="), Position: 9},
		{Kind: token.Operator, Bytes: []byte("=="), Position: 12},
		{Kind: token.EOF, Bytes: nil, Position: 14},
	})
}
// TestSeparator ensures commas become Separator tokens between the
// identifiers they separate.
func TestSeparator(t *testing.T) {
	src := []byte("a,b,c")

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.Identifier, Bytes: []byte("a"), Position: 0},
		{Kind: token.Separator, Bytes: []byte(","), Position: 1},
		{Kind: token.Identifier, Bytes: []byte("b"), Position: 2},
		{Kind: token.Separator, Bytes: []byte(","), Position: 3},
		{Kind: token.Identifier, Bytes: []byte("c"), Position: 4},
		{Kind: token.EOF, Bytes: nil, Position: 5},
	})
}
// TestComment covers line comments in several contexts: two comment lines
// separated by a newline, a comment with a trailing newline, a comment
// ending at EOF, an empty comment, and a lone slash — which is an
// Operator, not a Comment.
func TestComment(t *testing.T) {
	cases := []struct {
		source   string
		expected token.List
	}{
		{"// Hello\n// World", token.List{
			{Kind: token.Comment, Bytes: []byte(`// Hello`), Position: 0},
			{Kind: token.NewLine, Bytes: []byte("\n"), Position: 8},
			{Kind: token.Comment, Bytes: []byte(`// World`), Position: 9},
			{Kind: token.EOF, Bytes: nil, Position: 17},
		}},
		{"// Hello\n", token.List{
			{Kind: token.Comment, Bytes: []byte(`// Hello`), Position: 0},
			{Kind: token.NewLine, Bytes: []byte("\n"), Position: 8},
			{Kind: token.EOF, Bytes: nil, Position: 9},
		}},
		{`// Hello`, token.List{
			{Kind: token.Comment, Bytes: []byte(`// Hello`), Position: 0},
			{Kind: token.EOF, Bytes: nil, Position: 8},
		}},
		{`//`, token.List{
			{Kind: token.Comment, Bytes: []byte(`//`), Position: 0},
			{Kind: token.EOF, Bytes: nil, Position: 2},
		}},
		{`/`, token.List{
			{Kind: token.Operator, Bytes: []byte(`/`), Position: 0},
			{Kind: token.EOF, Bytes: nil, Position: 1},
		}},
	}

	for _, c := range cases {
		assert.DeepEqual(t, token.Tokenize([]byte(c.source)), c.expected)
	}
}
// TestInvalid ensures unrecognized bytes are preserved as individual
// Invalid tokens rather than dropped.
func TestInvalid(t *testing.T) {
	src := []byte(`@#`)

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.Invalid, Bytes: []byte(`@`), Position: 0},
		{Kind: token.Invalid, Bytes: []byte(`#`), Position: 1},
		{Kind: token.EOF, Bytes: nil, Position: 2},
	})
}
// TestString ensures quoted literals keep their surrounding quotes and
// that adjacent strings become separate tokens.
func TestString(t *testing.T) {
	src := []byte(`"Hello" "World"`)

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.String, Bytes: []byte(`"Hello"`), Position: 0},
		{Kind: token.String, Bytes: []byte(`"World"`), Position: 8},
		{Kind: token.EOF, Bytes: nil, Position: 15},
	})
}
// TestStringMultiline ensures a string literal may span a newline and is
// still emitted as one String token.
func TestStringMultiline(t *testing.T) {
	src := []byte("\"Hello\nWorld\"")

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.String, Bytes: []byte("\"Hello\nWorld\""), Position: 0},
		{Kind: token.EOF, Bytes: nil, Position: 13},
	})
}
// TestStringEOF ensures an unterminated string at end of input is still
// returned as a String token containing everything up to EOF.
func TestStringEOF(t *testing.T) {
	src := []byte(`"EOF`)

	assert.DeepEqual(t, token.Tokenize(src), token.List{
		{Kind: token.String, Bytes: []byte(`"EOF`), Position: 0},
		{Kind: token.EOF, Bytes: nil, Position: 4},
	})
}