Improved tokenizer
@@ -33,6 +33,10 @@ func (t *Reader) Next() byte {
 }
 
 func (t *Reader) read() error {
+	if t.Position >= t.Size {
+		return nil
+	}
+
 	n, err := t.File.Read(t.Buffer[len(t.Buffer):cap(t.Buffer)])
 	t.Buffer = t.Buffer[:len(t.Buffer)+n]
 
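
For orientation, here is a minimal, self-contained sketch of how the Reader and the reworked read method plausibly look after this hunk. The struct fields and the Position type are inferred from the lines above; the error handling after the Read call is an assumption, since the hunk cuts off before it:

package tokenizer

import (
	"io"
	"os"
)

// Position indexes into the input. The concrete type is an assumption,
// inferred from the conversion Position(info.Size()) in the next hunk.
type Position int

// Reader streams a file through a growable byte buffer; these are the
// fields the diff touches (File, Buffer, Position, Size).
type Reader struct {
	File     *os.File
	Buffer   []byte
	Position Position
	Size     Position
}

// read appends the next chunk of the file to the spare capacity of Buffer.
func (t *Reader) read() error {
	// Nothing left to buffer once the cursor has reached the file size.
	if t.Position >= t.Size {
		return nil
	}

	// Read into the unused capacity, then extend the slice by what arrived.
	n, err := t.File.Read(t.Buffer[len(t.Buffer):cap(t.Buffer)])
	t.Buffer = t.Buffer[:len(t.Buffer)+n]

	// Assumed tail: the hunk ends above, but EOF is presumably not an error.
	if err == io.EOF {
		return nil
	}
	return err
}
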
@@ -66,6 +70,6 @@ func (t *Reader) Open(path string) (err error) {
 	}
 
 	t.Size = Position(info.Size())
-	t.Buffer = make([]byte, 0, t.Size+1)
-	return nil
+	t.Buffer = make([]byte, 0, 4096)
+	return t.read()
 }
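
With read guarded as above, Open no longer reserves capacity for the whole file (t.Size+1); it allocates one fixed 4096-byte chunk and primes it immediately via read. A sketch of the full method, continuing the previous one; everything before the shown context (opening and stat-ing the file) is assumed, and how the buffer grows past the first chunk is not visible in this diff:

// Open prepares the Reader: it stats the file for its total size, then
// buffers only the first 4 KiB chunk instead of the entire file.
func (t *Reader) Open(path string) (err error) {
	// Assumed prologue: the diff only shows the tail of this method.
	t.File, err = os.Open(path)
	if err != nil {
		return err
	}

	info, err := t.File.Stat()
	if err != nil {
		return err
	}

	t.Size = Position(info.Size())
	t.Buffer = make([]byte, 0, 4096)
	return t.read()
}
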
@@ -6,8 +6,6 @@ func Tokenize(reader *Reader) List {
 		tokens = make(List, 0, 8+reader.Size/2)
 	)
 
-	reader.read()
-
 	for reader.Position < reader.Size {
 		switch reader.Current() {
 		case ' ', '\t':
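
Because Open now ends in return t.read(), Tokenize can drop the explicit priming call it used to make before its loop. A hypothetical driver under the sketches above; the file name is a placeholder, and len(tokens) relies only on List being a slice, which make(List, 0, …) implies:

package main

import (
	"fmt"
	"log"
)

func main() {
	var reader Reader
	// "input.txt" is a placeholder path for illustration.
	if err := reader.Open("input.txt"); err != nil {
		log.Fatal(err)
	}
	defer reader.File.Close()

	// Open has already buffered the first chunk, so tokenizing starts at once.
	tokens := Tokenize(&reader)
	fmt.Println("token count:", len(tokens))
}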