Implemented struct parser

2025-02-04 14:41:04 +01:00
parent fc1b970f7f
commit 51e3c1ba0e
19 changed files with 388 additions and 252 deletions


@@ -2,16 +2,10 @@ package scanner
import (
"os"
"path/filepath"
"git.akyoto.dev/cli/q/src/config"
"git.akyoto.dev/cli/q/src/core"
"git.akyoto.dev/cli/q/src/errors"
"git.akyoto.dev/cli/q/src/fs"
"git.akyoto.dev/cli/q/src/scope"
"git.akyoto.dev/cli/q/src/token"
"git.akyoto.dev/cli/q/src/types"
"git.akyoto.dev/cli/q/src/x64"
)
// scanFile scans a single file.
@@ -32,250 +26,31 @@ func (s *Scanner) scanFile(path string, pkg string) error {
}
s.files <- file
i := 0
var (
i = 0
groupLevel = 0
blockLevel = 0
nameStart = -1
paramsStart = -1
paramsEnd = -1
bodyStart = -1
typeStart = -1
typeEnd = -1
)
for {
for i < len(tokens) && tokens[i].Kind == token.Import {
i++
if tokens[i].Kind != token.Identifier {
panic("expected package name")
}
packageName := tokens[i].Text(contents)
if file.Imports == nil {
file.Imports = map[string]*fs.Import{}
}
fullPath := filepath.Join(config.Library, packageName)
file.Imports[packageName] = &fs.Import{
Path: packageName,
FullPath: fullPath,
Position: tokens[i].Position,
}
s.queueDirectory(fullPath, packageName)
i++
if tokens[i].Kind != token.NewLine && tokens[i].Kind != token.EOF {
panic("expected newline or eof")
}
i++
}
// Function name
for i < len(tokens) {
if tokens[i].Kind == token.Identifier {
nameStart = i
i++
break
}
if tokens[i].Kind == token.NewLine || tokens[i].Kind == token.Comment {
i++
continue
}
if tokens[i].Kind == token.Invalid {
return errors.New(&errors.InvalidCharacter{Character: tokens[i].Text(contents)}, file, tokens[i].Position)
}
if tokens[i].Kind == token.EOF {
return nil
}
return errors.New(errors.ExpectedFunctionName, file, tokens[i].Position)
}
// Function parameters
for i < len(tokens) {
if tokens[i].Kind == token.GroupStart {
groupLevel++
i++
if groupLevel == 1 {
paramsStart = i
}
continue
}
if tokens[i].Kind == token.GroupEnd {
groupLevel--
if groupLevel < 0 {
return errors.New(errors.MissingGroupStart, file, tokens[i].Position)
}
if groupLevel == 0 {
paramsEnd = i
i++
break
}
i++
continue
}
if tokens[i].Kind == token.Invalid {
return errors.New(&errors.InvalidCharacter{Character: tokens[i].Text(contents)}, file, tokens[i].Position)
}
if tokens[i].Kind == token.EOF {
if groupLevel > 0 {
return errors.New(errors.MissingGroupEnd, file, tokens[i].Position)
}
if paramsStart == -1 {
return errors.New(errors.ExpectedFunctionParameters, file, tokens[i].Position)
}
return nil
}
if groupLevel > 0 {
i++
continue
}
return errors.New(errors.ExpectedFunctionParameters, file, tokens[i].Position)
}
// Return type
if i < len(tokens) && tokens[i].Kind == token.ReturnType {
typeStart = i + 1
for i < len(tokens) && tokens[i].Kind != token.BlockStart {
i++
}
typeEnd = i
}
// Function definition
for i < len(tokens) {
if tokens[i].Kind == token.ReturnType {
i++
continue
}
if tokens[i].Kind == token.BlockStart {
blockLevel++
i++
if blockLevel == 1 {
bodyStart = i
}
continue
}
if tokens[i].Kind == token.BlockEnd {
blockLevel--
if blockLevel < 0 {
return errors.New(errors.MissingBlockStart, file, tokens[i].Position)
}
if blockLevel == 0 {
break
}
i++
continue
}
if tokens[i].Kind == token.Invalid {
return errors.New(&errors.InvalidCharacter{Character: tokens[i].Text(contents)}, file, tokens[i].Position)
}
if tokens[i].Kind == token.EOF {
if blockLevel > 0 {
return errors.New(errors.MissingBlockEnd, file, tokens[i].Position)
}
if bodyStart == -1 {
return errors.New(errors.ExpectedFunctionDefinition, file, tokens[i].Position)
}
return nil
}
if blockLevel > 0 {
i++
continue
}
return errors.New(errors.ExpectedFunctionDefinition, file, tokens[i].Position)
}
name := tokens[nameStart].Text(contents)
body := tokens[bodyStart:i]
function := core.NewFunction(pkg, name, file, body)
if typeStart != -1 {
if tokens[typeStart].Kind == token.GroupStart && tokens[typeEnd-1].Kind == token.GroupEnd {
typeStart++
typeEnd--
}
function.ReturnTypes = types.ParseList(tokens[typeStart:typeEnd], contents)
}
parameters := tokens[paramsStart:paramsEnd]
count := 0
err := parameters.Split(func(tokens token.List) error {
if len(tokens) < 2 {
return errors.New(errors.MissingType, file, tokens[0].End())
}
name := tokens[0].Text(contents)
dataType := types.Parse(tokens[1:].Text(contents))
register := x64.InputRegisters[count]
uses := token.Count(function.Body, contents, token.Identifier, name)
if uses == 0 && name != "_" {
return errors.New(&errors.UnusedVariable{Name: name}, file, tokens[0].Position)
}
variable := &scope.Variable{
Name: name,
Type: dataType,
Register: register,
Alive: uses,
}
function.Parameters = append(function.Parameters, variable)
function.AddVariable(variable)
count++
for i < len(tokens) {
switch tokens[i].Kind {
case token.NewLine:
case token.Import:
i, err = s.scanImport(file, tokens, i)
case token.Struct:
i, err = s.scanStruct(file, tokens, i)
case token.Identifier:
i, err = s.scanFunction(file, tokens, i)
case token.EOF:
return nil
})
case token.Invalid:
return errors.New(&errors.InvalidCharacter{Character: tokens[i].Text(file.Bytes)}, file, tokens[i].Position)
default:
return errors.New(&errors.InvalidInstruction{Instruction: tokens[i].Text(file.Bytes)}, file, tokens[i].Position)
}
if err != nil {
return err
}
s.functions <- function
nameStart = -1
paramsStart = -1
bodyStart = -1
typeStart = -1
typeEnd = -1
i++
}
return nil
}
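
The rewritten scanFile above replaces the old monolithic parsing loop with a dispatch loop: each top-level token is handed off to a dedicated scanner (scanImport, scanStruct, scanFunction), and every helper returns the updated token index plus an error. Below is a minimal, self-contained sketch of that dispatch shape. The Token, Kind and Scanner types and the helper bodies are simplified stand-ins for illustration only, not the project's real definitions from the token, fs or core packages, and scanStruct here is a hypothetical placeholder rather than the struct parser added in this commit.

package main

import "fmt"

type Kind int

const (
	NewLine Kind = iota
	Struct
	Identifier
	EOF
)

type Token struct {
	Kind Kind
	Text string
}

type Scanner struct{}

// scanStruct is a hypothetical placeholder for the new struct parser:
// it consumes the `struct` keyword and the name that follows, then
// returns the index of the last token it consumed.
func (s *Scanner) scanStruct(tokens []Token, i int) (int, error) {
	if i+1 >= len(tokens) || tokens[i+1].Kind != Identifier {
		return i, fmt.Errorf("expected struct name")
	}

	fmt.Println("struct:", tokens[i+1].Text)
	return i + 1, nil
}

// scanFunction is a placeholder for the full function scanner.
func (s *Scanner) scanFunction(tokens []Token, i int) (int, error) {
	fmt.Println("func:", tokens[i].Text)
	return i, nil
}

// scanFile mirrors the new top-level loop: every iteration dispatches
// on the current token kind, and each helper reports where scanning
// should continue.
func (s *Scanner) scanFile(tokens []Token) error {
	i := 0
	var err error

	for i < len(tokens) {
		switch tokens[i].Kind {
		case NewLine:
			// blank lines are skipped
		case Struct:
			i, err = s.scanStruct(tokens, i)
		case Identifier:
			i, err = s.scanFunction(tokens, i)
		case EOF:
			return nil
		default:
			return fmt.Errorf("invalid instruction: %q", tokens[i].Text)
		}

		if err != nil {
			return err
		}

		i++
	}

	return nil
}

func main() {
	tokens := []Token{
		{Struct, "struct"},
		{Identifier, "Point"},
		{NewLine, "\n"},
		{Identifier, "main"},
		{EOF, ""},
	}

	if err := (&Scanner{}).scanFile(tokens); err != nil {
		fmt.Println(err)
	}
}

The trailing i++ mirrors the diff: each helper returns the index of the last token it consumed, and the loop advances past it before dispatching on the next token.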