Implemented unary operator parsing

2024-07-27 12:49:39 +02:00
parent d001e4e55f
commit 944bacf4e1
8 changed files with 142 additions and 16 deletions

View File

@ -38,10 +38,11 @@ const (
Shr // >>
LogicalAnd // &&
LogicalOr // ||
Not // ! (unary)
Negate // - (unary)
Equal // ==
Less // <
Greater // >
Not // !
NotEqual // !=
LessEqual // <=
GreaterEqual // >=
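
The parser-side changes that consume these two new kinds are presumably among the 8 changed files, but they are not shown in this excerpt. As a rough, self-contained sketch only (not code from this repository; the Kind values are redeclared locally, and the Node type and parsePrefix function are hypothetical names), a prefix parser would typically nest a Negate or Not token over whatever expression follows it:

package main

import "fmt"

// Toy kinds that mirror the constant names above; only what the sketch needs.
type Kind int

const (
	Identifier Kind = iota
	Not    // !
	Negate // - (unary)
)

// Node is a hypothetical AST node, not a type from this repository.
type Node struct {
	Kind    Kind
	Operand *Node
}

// parsePrefix consumes any leading unary operators, then a single operand.
func parsePrefix(kinds []Kind) (*Node, []Kind) {
	switch kinds[0] {
	case Not, Negate:
		operand, rest := parsePrefix(kinds[1:])
		return &Node{Kind: kinds[0], Operand: operand}, rest
	default:
		return &Node{Kind: kinds[0]}, kinds[1:]
	}
}

func main() {
	// Per TestNegateFirstToken further down, `-a` tokenizes to Negate, Identifier;
	// the sketch nests the identifier under the unary node.
	node, _ := parsePrefix([]Kind{Negate, Identifier})
	fmt.Println(node.Kind, node.Operand.Kind) // 2 0
}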

View File

@ -28,6 +28,11 @@ func (t Token) IsAssignment() bool {
return t.Kind > _assignments && t.Kind < _assignmentsEnd
}
// IsExpressionStart returns true if the token starts an expression.
func (t Token) IsExpressionStart() bool {
return t.Kind == GroupStart || t.Kind == ArrayStart || t.Kind == BlockStart
}
// IsKeyword returns true if the token is a keyword.
func (t Token) IsKeyword() bool {
return t.Kind > _keywords && t.Kind < _keywordsEnd
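
The new IsExpressionStart helper is what the rewritten '-' case in the tokenizer (next file) uses to tell a prefix minus from subtraction: directly after an opening token such as '(' a minus cannot be binary. A small illustration in test form, sticking to the standard testing package because the test file's imports are outside this excerpt (the test name is made up for illustration):

func TestIsExpressionStart(t *testing.T) {
	opening := token.Token{Kind: token.GroupStart} // '('
	if !opening.IsExpressionStart() {
		t.Error("a '-' right after '(' should be tokenized as Negate")
	}
	name := token.Token{Kind: token.Identifier} // e.g. 'a'
	if name.IsExpressionStart() {
		t.Error("a '-' right after an identifier should stay Sub")
	}
}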

View File

@ -26,8 +26,28 @@ func Tokenize(buffer []byte) List {
tokens = append(tokens, Token{Kind: ArrayEnd, Position: i, Length: 1})
case '\n':
tokens = append(tokens, Token{Kind: NewLine, Position: i, Length: 1})
case '-':
if len(tokens) == 0 || tokens[len(tokens)-1].IsOperator() || tokens[len(tokens)-1].IsExpressionStart() {
tokens = append(tokens, Token{Kind: Negate, Position: i, Length: 1})
} else {
if i+1 < Position(len(buffer)) && buffer[i+1] == '=' {
tokens = append(tokens, Token{Kind: SubAssign, Position: i, Length: 2})
i++
} else {
tokens = append(tokens, Token{Kind: Sub, Position: i, Length: 1})
}
}
case '/':
if i+1 >= Position(len(buffer)) || buffer[i+1] != '/' {
if i+1 < Position(len(buffer)) && buffer[i+1] == '/' {
position := i
for i < Position(len(buffer)) && buffer[i] != '\n' {
i++
}
tokens = append(tokens, Token{Kind: Comment, Position: position, Length: Length(i - position)})
} else {
position := i
i++
@ -45,14 +65,6 @@ func Tokenize(buffer []byte) List {
}
tokens = append(tokens, Token{Kind: kind, Position: position, Length: Length(i - position)})
} else {
position := i
for i < Position(len(buffer)) && buffer[i] != '\n' {
i++
}
tokens = append(tokens, Token{Kind: Comment, Position: position, Length: Length(i - position)})
}
continue
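
Putting the rewritten '-' and '/' cases together, an illustrative extra test in the same style as the ones in the next file (not part of this commit): a leading '-' becomes Negate, a '-' after an identifier stays Sub, and '//' still yields a Comment token.

func TestNegateSubComment(t *testing.T) {
	tokens := token.Tokenize([]byte(`-x - y // negate, then subtract`))
	expected := []token.Kind{
		token.Negate,
		token.Identifier,
		token.Sub,
		token.Identifier,
		token.Comment,
		token.EOF,
	}
	for i, kind := range expected {
		assert.Equal(t, tokens[i].Kind, kind)
	}
}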

View File

@ -83,13 +83,81 @@ func TestNumber(t *testing.T) {
}
func TestOperator(t *testing.T) {
tokens := token.Tokenize([]byte(`+ - * /`))
tokens := token.Tokenize([]byte(`a + b - c * d / e`))
expected := []token.Kind{
token.Identifier,
token.Add,
token.Identifier,
token.Sub,
token.Identifier,
token.Mul,
token.Identifier,
token.Div,
token.Identifier,
token.EOF,
}
for i, kind := range expected {
assert.Equal(t, tokens[i].Kind, kind)
}
}
func TestNegateFirstToken(t *testing.T) {
tokens := token.Tokenize([]byte(`-a`))
expected := []token.Kind{
token.Negate,
token.Identifier,
token.EOF,
}
for i, kind := range expected {
assert.Equal(t, tokens[i].Kind, kind)
}
}
func TestNegateAfterGroupStart(t *testing.T) {
tokens := token.Tokenize([]byte(`(-a)`))
expected := []token.Kind{
token.GroupStart,
token.Negate,
token.Identifier,
token.GroupEnd,
token.EOF,
}
for i, kind := range expected {
assert.Equal(t, tokens[i].Kind, kind)
}
}
func TestNegateSub(t *testing.T) {
tokens := token.Tokenize([]byte(`-a-b`))
expected := []token.Kind{
token.Negate,
token.Identifier,
token.Sub,
token.Identifier,
token.EOF,
}
for i, kind := range expected {
assert.Equal(t, tokens[i].Kind, kind)
}
}
func TestNegateAfterOperator(t *testing.T) {
tokens := token.Tokenize([]byte(`-a + -b`))
expected := []token.Kind{
token.Negate,
token.Identifier,
token.Add,
token.Negate,
token.Identifier,
token.EOF,
}
@ -99,18 +167,28 @@ func TestOperator(t *testing.T) {
}
func TestOperatorAssign(t *testing.T) {
tokens := token.Tokenize([]byte(`+= -= *= /= &= |= ^= <<= >>=`))
tokens := token.Tokenize([]byte(`a += b -= c *= d /= e &= f |= g ^= h <<= i >>= j`))
expected := []token.Kind{
token.Identifier,
token.AddAssign,
token.Identifier,
token.SubAssign,
token.Identifier,
token.MulAssign,
token.Identifier,
token.DivAssign,
token.Identifier,
token.AndAssign,
token.Identifier,
token.OrAssign,
token.Identifier,
token.XorAssign,
token.Identifier,
token.ShlAssign,
token.Identifier,
token.ShrAssign,
token.Identifier,
token.EOF,
}