@@ -78,34 +78,6 @@ func (l *Lexer) Consume() {
 	}
 }
 
-// consumeComment consumes all characters from `#` to the first encountered line terminator.
-// The characters are appended to `l.descComment`.
-func (l *Lexer) consumeComment() {
-	if l.next != '#' {
-		return
-	}
-
-	// TODO: count and trim whitespace so we can dedent any following lines.
-	if l.sc.Peek() == ' ' {
-		l.sc.Next()
-	}
-
-	if l.descComment != "" {
-		// TODO: use a bytes.Buffer or strings.Builder instead of this.
-		l.descComment += "\n"
-	}
-
-	for {
-		next := l.sc.Next()
-		if next == '\r' || next == '\n' || next == scanner.EOF {
-			break
-		}
-
-		// TODO: use a bytes.Buffer or strings.Builder instead of this.
-		l.descComment += string(next)
-	}
-}
-
 func (l *Lexer) ConsumeIdent() string {
 	name := l.sc.TokenText()
 	l.ConsumeToken(scanner.Ident)
@@ -153,3 +125,31 @@ func (l *Lexer) Location() errors.Location {
 		Column: l.sc.Column,
 	}
 }
+
+// consumeComment consumes all characters from `#` to the first encountered line terminator.
+// The characters are appended to `l.descComment`.
+func (l *Lexer) consumeComment() {
+	if l.next != '#' {
+		return
+	}
+
+	// TODO: count and trim whitespace so we can dedent any following lines.
+	if l.sc.Peek() == ' ' {
+		l.sc.Next()
+	}
+
+	if l.descComment != "" {
+		// TODO: use a bytes.Buffer or strings.Builder instead of this.
+		l.descComment += "\n"
+	}
+
+	for {
+		next := l.sc.Next()
+		if next == '\r' || next == '\n' || next == scanner.EOF {
+			break
+		}
+
+		// TODO: use a bytes.Buffer or strings.Builder instead of this.
+		l.descComment += string(next)
+	}
+}
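
The TODOs in consumeComment suggest replacing the repeated string concatenation with a bytes.Buffer or strings.Builder. Below is a rough strings.Builder sketch of that idea, not part of this commit: the Lexer stand-in only carries the fields consumeComment touches (sc, next, descComment), and the main function is purely illustrative.

package main

import (
	"fmt"
	"strings"
	"text/scanner"
)

// Minimal stand-in for the lexer in this file: just the fields that
// consumeComment touches. Field names are taken from the diff above.
type Lexer struct {
	sc          *scanner.Scanner
	next        rune
	descComment string
}

// consumeComment, rewritten to accumulate the comment text in a
// strings.Builder instead of concatenating strings inside the loop.
func (l *Lexer) consumeComment() {
	if l.next != '#' {
		return
	}

	// Skip a single space after '#', mirroring the original.
	if l.sc.Peek() == ' ' {
		l.sc.Next()
	}

	var b strings.Builder
	b.WriteString(l.descComment)
	if l.descComment != "" {
		b.WriteByte('\n')
	}

	for {
		next := l.sc.Next()
		if next == '\r' || next == '\n' || next == scanner.EOF {
			break
		}
		b.WriteRune(next)
	}
	l.descComment = b.String()
}

func main() {
	sc := new(scanner.Scanner)
	// The '#' itself has already been consumed into l.next; the scanner
	// is positioned just after it.
	sc.Init(strings.NewReader(" a description comment\nrest"))
	l := &Lexer{sc: sc, next: '#'}
	l.consumeComment()
	fmt.Printf("%q\n", l.descComment) // "a description comment"
}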