Skip to content

Commit

Permalink
Allow PeekingLexer returning current Token as a pointer for performance
Browse files Browse the repository at this point in the history
The gain is relatively tiny factoring in the other overhead, but it's
there.
  • Loading branch information
Peter Dolak committed Nov 10, 2022
1 parent 98197fe commit fce21ae
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 8 deletions.
13 changes: 9 additions & 4 deletions lexer/peek.go
Original file line number Diff line number Diff line change
Expand Up @@ -60,22 +60,27 @@ func (c Checkpoint) RawCursor() RawCursor {

// Next consumes and returns the next non-elided token.
//
// The token is read through a pointer into p.tokens to avoid copying the
// struct twice (once on index, once on return); the caller still receives
// a value copy, so the internal slice is never aliased by the result.
func (p *PeekingLexer) Next() Token {
	t := &p.tokens[p.nextCursor]
	// At EOF there is nothing to consume; return the EOF token without
	// advancing any cursor.
	if t.EOF() {
		return *t
	}
	p.nextCursor++
	p.rawCursor = p.nextCursor
	p.cursor++
	// Skip any elided tokens so the next Peek/Next lands on a real token.
	p.advanceToNonElided()
	return *t
}

// Peek returns the next non-elided token without consuming it.
func (p *PeekingLexer) Peek() Token {
	tok := p.tokens[p.nextCursor]
	return tok
}

// Current is a version of Peek returning a pointer as it's more efficient
// in some cases (the Token struct is not copied on return).
//
// NOTE(review): the returned pointer aliases the lexer's internal token
// slice — callers presumably must not mutate it; confirm intended contract.
func (p *PeekingLexer) Current() *Token {
	tok := &p.tokens[p.nextCursor]
	return tok
}

// RawPeek peeks ahead at the next raw token.
//
// Unlike Peek, this will include elided tokens.
Expand All @@ -86,7 +91,7 @@ func (p *PeekingLexer) RawPeek() Token {
// advanceToNonElided advances nextCursor to the closest non-elided token
func (p *PeekingLexer) advanceToNonElided() {
for ; ; p.nextCursor++ {
t := p.tokens[p.nextCursor]
t := &p.tokens[p.nextCursor]
if t.EOF() || !p.elide[t.Type] {
return
}
Expand Down
8 changes: 4 additions & 4 deletions lexer/peek_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -56,18 +56,18 @@ func TestPeekingLexer_Peek_Next_Checkpoint(t *testing.T) {
require.Equal(t, expected[0], plex.Peek(), "should have reverted to pre-Next state")
}

func BenchmarkPeekingLexer_Peek(b *testing.B) {
func BenchmarkPeekingLexer_Current(b *testing.B) {
tokens := []lexer.Token{{Type: 1, Value: "x"}, {Type: 3, Value: " "}, {Type: 2, Value: "y"}}
l, err := lexer.Upgrade(&staticLexer{tokens: tokens}, 3)
require.NoError(b, err)
l.Next()
t := l.Peek()
t := l.Current()
b.ResetTimer()
for i := 0; i < b.N; i++ {
t = l.Peek()
t = l.Current()
if t.EOF() {
return
}
}
require.Equal(b, lexer.Token{Type: 2, Value: "y"}, t)
require.Equal(b, lexer.Token{Type: 2, Value: "y"}, *t)
}

0 comments on commit fce21ae

Please sign in to comment.