Implemented error messages

This commit is contained in:
2024-06-13 12:13:32 +02:00
parent 2d990b0bee
commit 9458253f31
16 changed files with 362 additions and 60 deletions

View File

@@ -7,34 +7,20 @@ import (
// Build describes a compiler build.
type Build struct {
Files []string
WriteExecutable bool
Files []string
}
// New creates a new build.
func New(files ...string) *Build {
return &Build{
Files: files,
WriteExecutable: true,
Files: files,
}
}
// Run parses the input files and generates an executable file.
func (build *Build) Run() error {
func (build *Build) Run() (map[string]*Function, error) {
functions, errors := Scan(build.Files)
allFunctions, err := Compile(functions, errors)
if err != nil {
return err
}
if !build.WriteExecutable {
return nil
}
path := build.Executable()
code, data := Finalize(allFunctions)
return Write(path, code, data)
return Compile(functions, errors)
}
// Executable returns the path to the executable.

View File

@@ -1,6 +1,7 @@
package build_test
import (
"path/filepath"
"testing"
"git.akyoto.dev/cli/q/src/build"
@@ -9,16 +10,17 @@ import (
func TestBuild(t *testing.T) {
b := build.New("../../examples/hello")
assert.Nil(t, b.Run())
_, err := b.Run()
assert.Nil(t, err)
}
func TestSkipExecutable(t *testing.T) {
func TestExecutable(t *testing.T) {
b := build.New("../../examples/hello")
b.WriteExecutable = false
assert.Nil(t, b.Run())
assert.Equal(t, filepath.Base(b.Executable()), "hello")
}
func TestNonExisting(t *testing.T) {
b := build.New("does-not-exist")
assert.NotNil(t, b.Run())
_, err := b.Run()
assert.NotNil(t, err)
}

View File

@@ -8,6 +8,7 @@ import (
"git.akyoto.dev/cli/q/src/build/directory"
"git.akyoto.dev/cli/q/src/build/token"
"git.akyoto.dev/cli/q/src/errors"
)
// Scan scans the directory.
@@ -86,46 +87,141 @@ func scanFile(path string, functions chan<- *Function) error {
tokens := token.Tokenize(contents)
var (
i = 0
groupLevel = 0
blockLevel = 0
headerStart = -1
nameStart = -1
paramsStart = -1
bodyStart = -1
)
for i, t := range tokens {
switch t.Kind {
case token.Identifier:
if blockLevel == 0 && groupLevel == 0 {
headerStart = i
for {
// Function name
for i < len(tokens) {
if tokens[i].Kind == token.Identifier {
nameStart = i
i++
break
}
case token.GroupStart:
groupLevel++
case token.GroupEnd:
groupLevel--
case token.BlockStart:
blockLevel++
if blockLevel == 1 {
bodyStart = i
if tokens[i].Kind == token.NewLine {
i++
continue
}
case token.BlockEnd:
blockLevel--
if tokens[i].Kind == token.EOF {
return nil
}
if blockLevel == 0 {
function := &Function{
Name: tokens[headerStart].Text(),
Head: tokens[headerStart:bodyStart],
Body: tokens[bodyStart : i+1],
return errors.New(errors.ExpectedFunctionName, path, tokens, i)
}
// Function parameters
for i < len(tokens) {
if tokens[i].Kind == token.GroupStart {
groupLevel++
if groupLevel == 1 {
paramsStart = i
}
functions <- function
i++
continue
}
}
}
return nil
if tokens[i].Kind == token.GroupEnd {
groupLevel--
if groupLevel < 0 {
return errors.New(errors.MissingGroupStart, path, tokens, i)
}
i++
if groupLevel == 0 {
break
}
continue
}
if tokens[i].Kind == token.EOF {
if groupLevel > 0 {
return errors.New(errors.MissingGroupEnd, path, tokens, i)
}
if paramsStart == -1 {
return errors.New(errors.ExpectedFunctionParameters, path, tokens, i)
}
return nil
}
if groupLevel > 0 {
i++
continue
}
return errors.New(errors.ExpectedFunctionParameters, path, tokens, i)
}
// Function definition
for i < len(tokens) {
if tokens[i].Kind == token.BlockStart {
blockLevel++
if blockLevel == 1 {
bodyStart = i
}
i++
continue
}
if tokens[i].Kind == token.BlockEnd {
blockLevel--
if blockLevel < 0 {
return errors.New(errors.MissingBlockStart, path, tokens, i)
}
i++
if blockLevel == 0 {
break
}
continue
}
if tokens[i].Kind == token.EOF {
if blockLevel > 0 {
return errors.New(errors.MissingBlockEnd, path, tokens, i)
}
if bodyStart == -1 {
return errors.New(errors.ExpectedFunctionDefinition, path, tokens, i)
}
return nil
}
if blockLevel > 0 {
i++
continue
}
return errors.New(errors.ExpectedFunctionDefinition, path, tokens, i)
}
functions <- &Function{
Name: tokens[nameStart].Text(),
Head: tokens[paramsStart:bodyStart],
Body: tokens[bodyStart : i+1],
}
nameStart = -1
paramsStart = -1
bodyStart = -1
}
}

View File

@@ -7,6 +7,9 @@ const (
// Invalid represents an invalid token.
Invalid Kind = iota
// EOF represents the end of file.
EOF
// NewLine represents the newline character.
NewLine
@@ -54,6 +57,7 @@ const (
func (kind Kind) String() string {
return [...]string{
"Invalid",
"EOF",
"NewLine",
"Identifier",
"Keyword",

View File

@@ -35,6 +35,11 @@ func TestFunction(t *testing.T) {
Bytes: []byte("}"),
Position: 7,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
}
@@ -51,6 +56,11 @@ func TestKeyword(t *testing.T) {
Bytes: []byte("x"),
Position: 7,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
}
@@ -77,6 +87,11 @@ func TestArray(t *testing.T) {
Bytes: []byte("]"),
Position: 7,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
}
@@ -93,6 +108,11 @@ func TestNewline(t *testing.T) {
Bytes: []byte("\n"),
Position: 1,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 2,
},
})
}
@@ -109,6 +129,11 @@ func TestNumber(t *testing.T) {
Bytes: []byte("-456"),
Position: 4,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 8,
},
})
}
@@ -140,6 +165,11 @@ func TestSeparator(t *testing.T) {
Bytes: []byte("c"),
Position: 4,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 5,
},
})
}
@@ -156,6 +186,11 @@ func TestString(t *testing.T) {
Bytes: []byte(`"World"`),
Position: 8,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 15,
},
})
}
@@ -167,6 +202,11 @@ func TestStringMultiline(t *testing.T) {
Bytes: []byte("\"Hello\nWorld\""),
Position: 0,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 13,
},
})
}
@@ -178,6 +218,11 @@ func TestStringEOF(t *testing.T) {
Bytes: []byte(`"EOF`),
Position: 0,
},
{
Kind: token.EOF,
Bytes: nil,
Position: 4,
},
})
}
@@ -195,6 +240,7 @@ func TestTokenText(t *testing.T) {
func TestTokenKind(t *testing.T) {
assert.Equal(t, token.Invalid.String(), "Invalid")
assert.Equal(t, token.EOF.String(), "EOF")
assert.Equal(t, token.NewLine.String(), "NewLine")
assert.Equal(t, token.Identifier.String(), "Identifier")
assert.Equal(t, token.Keyword.String(), "Keyword")

View File

@@ -31,6 +31,7 @@ func Tokenize(buffer []byte) List {
for i < len(buffer) {
if buffer[i] == '"' {
end = i + 1
i++
break
}
@@ -43,6 +44,8 @@ func Tokenize(buffer []byte) List {
buffer[start:end],
})
continue
// Parentheses start
case '(':
tokens = append(tokens, Token{GroupStart, i, groupStartBytes})
@@ -121,6 +124,7 @@ func Tokenize(buffer []byte) List {
i++
}
tokens = append(tokens, Token{EOF, i, nil})
return tokens
}