Improved tokenizer
@@ -33,6 +33,10 @@ func (t *Reader) Next() byte {
 }
 
 func (t *Reader) read() error {
+	if t.Position >= t.Size {
+		return nil
+	}
+
 	n, err := t.File.Read(t.Buffer[len(t.Buffer):cap(t.Buffer)])
 	t.Buffer = t.Buffer[:len(t.Buffer)+n]
 
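The new guard at the top of read() skips the Read call once every byte has been consumed. A minimal, self-contained sketch of the pattern, assuming the Reader fields implied by the diff (File, Buffer, Position, Size) and hypothetical io.EOF handling, since the hunk ends before read()'s error path:

package tokenizer // hypothetical package name

import (
	"io"
	"os"
)

type Position int64

type Reader struct {
	File     *os.File
	Buffer   []byte
	Position Position
	Size     Position
}

func (t *Reader) read() error {
	// New early return: nothing is left to fetch once Position has
	// reached Size, so skip the syscall entirely.
	if t.Position >= t.Size {
		return nil
	}

	// Read into the unused capacity between len and cap, then extend
	// the slice header over the n bytes that actually arrived.
	n, err := t.File.Read(t.Buffer[len(t.Buffer):cap(t.Buffer)])
	t.Buffer = t.Buffer[:len(t.Buffer)+n]
	if err == io.EOF {
		err = nil // assumption: end of file is a clean stop, not an error
	}
	return err
}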
@@ -66,6 +70,6 @@ func (t *Reader) Open(path string) (err error) {
 	}
 
 	t.Size = Position(info.Size())
-	t.Buffer = make([]byte, 0, t.Size+1)
-	return nil
+	t.Buffer = make([]byte, 0, 4096)
+	return t.read()
 }
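Open now allocates a fixed 4096-byte chunk instead of sizing the buffer to the whole file (t.Size+1), and it primes the buffer with an initial read() instead of returning nil and leaving the first read to the caller. A hypothetical reconstruction of the full function, continuing the sketch above; the diff shows only its tail, so the os.Open/Stat prologue is an assumption:

func (t *Reader) Open(path string) (err error) {
	t.File, err = os.Open(path) // assumed: not shown in the diff
	if err != nil {
		return err
	}

	info, err := t.File.Stat() // assumed source of info
	if err != nil {
		return err
	}

	t.Size = Position(info.Size())
	t.Buffer = make([]byte, 0, 4096) // one chunk, not the whole file
	return t.read()                  // prime the buffer before returning
}

How the buffer is recycled between chunks once it fills is not visible in these hunks.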
@@ -6,8 +6,6 @@ func Tokenize(reader *Reader) List {
 		tokens = make(List, 0, 8+reader.Size/2)
 	)
 
-	reader.read()
-
 	for reader.Position < reader.Size {
 		switch reader.Current() {
 		case ' ', '\t':
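Because Open now ends with return t.read(), the explicit priming call in Tokenize is redundant and is dropped. A hypothetical caller, continuing the sketch above and assuming the Tokenize and List names from the diff, with log imported:

func main() {
	var r Reader
	if err := r.Open("input.txt"); err != nil {
		log.Fatal(err) // Open already performed the first read
	}
	tokens := Tokenize(&r) // the loop starts with data in the buffer
	_ = tokens
}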