generated from ianlewis/repo-template
-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathlexer.go
More file actions
88 lines (71 loc) · 2.54 KB
/
lexer.go
File metadata and controls
88 lines (71 loc) · 2.54 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
// Copyright 2025 Ian Lewis
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lexparse
import (
"context"
"fmt"
"strconv"
)
// TokenType is a user-defined Token type. Concrete values (such as the
// TokenTypeEOF referenced by Token.String) are presumably declared
// elsewhere in this package or by the user — not in this file.
type TokenType int
// Position identifies a location within an input stream, optionally
// qualified by the name of the file being read.
type Position struct {
	// Filename is the name of the file being read. It can be empty if the
	// input is not from a file.
	Filename string

	// Offset is the byte offset in the input stream, starting at 0.
	Offset int

	// Line is the line number in the input stream, starting at 1.
	Line int

	// Column is the column number in the line, starting at 1. It counts
	// characters in the line, including whitespace and newlines.
	Column int
}

// String renders the position as "filename:line:column", omitting the
// leading "filename:" part when Filename is empty. Offset is not included.
func (p Position) String() string {
	loc := strconv.Itoa(p.Line) + ":" + strconv.Itoa(p.Column)
	if p.Filename == "" {
		return loc
	}
	return p.Filename + ":" + loc
}
// Token is a tokenized input which can be emitted by a Lexer.
type Token struct {
	// Type is the Token's type.
	Type TokenType

	// Value is the Token's value.
	Value string

	// Start is the start position in the byte stream where the Token was found.
	Start Position

	// End is the end position in the byte stream where the Token was found.
	End Position
}

// String formats the token as "start:end: value", where start and end use
// Position's string form. EOF tokens display the placeholder "<EOF>" in
// place of their (typically empty) value.
func (t Token) String() string {
	value := "<EOF>"
	if t.Type != TokenTypeEOF {
		value = t.Value
	}
	return fmt.Sprintf("%s:%s: %s", t.Start, t.End, value)
}
// Lexer is an interface that defines the methods for a lexer that tokenizes
// input streams. It reads from an input stream and emits tokens.
type Lexer interface {
	// NextToken returns the next token from the input. If there are no more
	// tokens, the context is canceled, or an error occurs, it returns a Token
	// with Type set to TokenTypeEOF. Lexing can be canceled by ctx.
	//
	// NOTE(review): callers cannot distinguish "clean EOF" from "error" by
	// the returned token alone — they must consult Err() after receiving
	// an EOF-typed token.
	NextToken(ctx context.Context) *Token

	// Err returns the error encountered by the lexer, if any. If the error
	// encountered is io.EOF, it will return nil.
	Err() error
}