Added single line comments

Eduard Urbach 2024-06-28 20:29:07 +02:00
parent 3664e74074
commit 2520681ad3
Signed by: akyoto
GPG Key ID: C874F672B1AF20C0
3 changed files with 104 additions and 0 deletions

View File

@@ -1,3 +1,4 @@
// Comment
main() {
	address := 4194304 + 1
	length := (0 + 50 - 20) * 10 / 100
@@ -7,10 +8,12 @@ main() {
	}
}

// Comment
print(address, length) {
	write(length-2, address, length)
}

// Comment
write(fd, address, length) {
	syscall(1, fd, address, length)
}
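
Note: the example relies on the lexical rule this commit introduces: "//" starts a comment that runs to the end of the line. As a rough illustration of that rule, here is a minimal, standalone Go sketch (illustrative only, not part of the compiler; stripLineComments is a hypothetical helper):

package main

import (
	"bytes"
	"fmt"
)

// stripLineComments drops everything from "//" to the end of each line.
// A real lexer does this inside its token loop, so "//" inside a string
// literal would not be misread as a comment.
func stripLineComments(src []byte) []byte {
	var out bytes.Buffer

	for _, line := range bytes.Split(src, []byte{'\n'}) {
		if i := bytes.Index(line, []byte("//")); i != -1 {
			line = line[:i]
		}

		out.Write(line)
		out.WriteByte('\n')
	}

	return out.Bytes()
}

func main() {
	fmt.Printf("%s", stripLineComments([]byte("// Comment\nmain() {\n}")))
}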

View File

@@ -209,6 +209,93 @@ func TestSeparator(t *testing.T) {
	})
}

func TestComment(t *testing.T) {
	tokens := token.Tokenize([]byte("// Hello\n// World"))
	assert.DeepEqual(t, tokens, token.List{
		{
			Kind:     token.Comment,
			Bytes:    []byte(`// Hello`),
			Position: 0,
		},
		{
			Kind:     token.NewLine,
			Bytes:    []byte("\n"),
			Position: 8,
		},
		{
			Kind:     token.Comment,
			Bytes:    []byte(`// World`),
			Position: 9,
		},
		{
			Kind:     token.EOF,
			Bytes:    nil,
			Position: 17,
		},
	})

	tokens = token.Tokenize([]byte("// Hello\n"))
	assert.DeepEqual(t, tokens, token.List{
		{
			Kind:     token.Comment,
			Bytes:    []byte(`// Hello`),
			Position: 0,
		},
		{
			Kind:     token.NewLine,
			Bytes:    []byte("\n"),
			Position: 8,
		},
		{
			Kind:     token.EOF,
			Bytes:    nil,
			Position: 9,
		},
	})

	tokens = token.Tokenize([]byte(`// Hello`))
	assert.DeepEqual(t, tokens, token.List{
		{
			Kind:     token.Comment,
			Bytes:    []byte(`// Hello`),
			Position: 0,
		},
		{
			Kind:     token.EOF,
			Bytes:    nil,
			Position: 8,
		},
	})

	tokens = token.Tokenize([]byte(`//`))
	assert.DeepEqual(t, tokens, token.List{
		{
			Kind:     token.Comment,
			Bytes:    []byte(`//`),
			Position: 0,
		},
		{
			Kind:     token.EOF,
			Bytes:    nil,
			Position: 2,
		},
	})

	tokens = token.Tokenize([]byte(`/`))
	assert.DeepEqual(t, tokens, token.List{
		{
			Kind:     token.Operator,
			Bytes:    []byte(`/`),
			Position: 0,
		},
		{
			Kind:     token.EOF,
			Bytes:    nil,
			Position: 1,
		},
	})
}

func TestString(t *testing.T) {
	tokens := token.Tokenize([]byte(`"Hello" "World"`))
	assert.DeepEqual(t, tokens, token.List{

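The Position fields above are plain byte offsets into the input: "// Hello" covers bytes 0 through 7, the newline sits at offset 8, "// World" starts at 9, and EOF is reported at 17, one past the last byte. A quick way to print these offsets for any input is sketched below; the import path is a placeholder, not this repository's real one:

package main

import (
	"fmt"

	"example.invalid/token" // placeholder path; use this repository's token package
)

func main() {
	for _, t := range token.Tokenize([]byte("// Hello\n// World")) {
		fmt.Printf("kind=%v position=%d bytes=%q\n", t.Kind, t.Position, t.Bytes)
	}
}
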
View File

@@ -75,6 +75,20 @@ func Tokenize(buffer []byte) List {
		case '\n':
			tokens = append(tokens, Token{NewLine, i, newLineBytes})

		// Comment
		case '/':
			if i+1 >= len(buffer) || buffer[i+1] != '/' {
				tokens = append(tokens, Token{Operator, i, buffer[i : i+1]})
				i++
				continue
			}

			position := i

			for i < len(buffer) && buffer[i] != '\n' {
				i++
			}

			tokens = append(tokens, Token{Comment, position, buffer[position:i]})
			continue

		default:
			// Identifiers
			if isIdentifierStart(buffer[i]) {
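
The case '/' branch needs exactly one byte of lookahead: a lone '/' (including one at the very end of the input) remains the division operator, while "//" consumes everything up to, but not including, the next '\n', so the newline after a comment still becomes its own token. The following self-contained sketch isolates that technique with a simplified Token type (scanSlash is an illustrative name, not the compiler's):

package main

import "fmt"

type Kind int

const (
	Operator Kind = iota
	Comment
)

type Token struct {
	Kind     Kind
	Position int
	Bytes    []byte
}

// scanSlash handles a '/' at buffer[i] with one byte of lookahead:
// "//" starts a comment that runs until the next '\n' (exclusive),
// anything else is the single-character division operator.
// It returns the token and the index of the first unconsumed byte.
func scanSlash(buffer []byte, i int) (Token, int) {
	if i+1 >= len(buffer) || buffer[i+1] != '/' {
		return Token{Operator, i, buffer[i : i+1]}, i + 1
	}

	position := i

	for i < len(buffer) && buffer[i] != '\n' {
		i++
	}

	return Token{Comment, position, buffer[position:i]}, i
}

func main() {
	buffer := []byte("a / b // trailing comment\n")

	for i := 0; i < len(buffer); i++ {
		if buffer[i] != '/' {
			continue
		}

		t, next := scanSlash(buffer, i)
		fmt.Printf("%d: %q\n", t.Position, t.Bytes)
		i = next - 1 // the loop's i++ advances past the token
	}
}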