@@ -23,7 +23,7 @@ interface LexToken {
 /**
  * Tokenize input string.
  */
-function lexer(str: string): LexToken[] {
+function lexer(str: string) {
   const tokens: LexToken[] = [];
   let i = 0;

@@ -125,7 +125,7 @@ function lexer(str: string): LexToken[] {

   tokens.push({ type: "END", index: i, value: "" });

-  return tokens;
+  return new Iter(tokens);
 }

 export interface ParseOptions {
@@ -139,6 +139,41 @@ export interface ParseOptions {
   prefixes?: string;
 }

+class Iter {
+  index = 0;
+
+  constructor(private tokens: LexToken[]) {}
+
+  peek(): LexToken {
+    return this.tokens[this.index];
+  }
+
+  tryConsume(type: LexToken["type"]): string | undefined {
+    const token = this.peek();
+    if (token.type !== type) return;
+    this.index++;
+    return token.value;
+  }
+
+  consume(type: LexToken["type"]): string {
+    const value = this.tryConsume(type);
+    if (value !== undefined) return value;
+    const { type: nextType, index } = this.peek();
+    throw new TypeError(`Unexpected ${nextType} at ${index}, expected ${type}`);
+  }
+
+  text(): string {
+    let result = "";
+    let value: string | undefined;
+    while (
+      (value = this.tryConsume("CHAR") || this.tryConsume("ESCAPED_CHAR"))
+    ) {
+      result += value;
+    }
+    return result;
+  }
+}
+
 /**
  * Parse a string for the raw tokens.
  */
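As a quick sketch of the new iterator contract (not part of the diff; the token stream below is hand-built to approximate what lexer() would emit for "/:id"):

const it = new Iter([
  { type: "CHAR", index: 0, value: "/" },
  { type: "NAME", index: 1, value: "id" },
  { type: "END", index: 4, value: "" },
]);

it.tryConsume("CHAR");    // "/"        — type matches, cursor advances
it.tryConsume("PATTERN"); // undefined  — wrong type, cursor stays put
it.consume("NAME");       // "id"       — would throw a TypeError on a mismatch
it.peek().type;           // "END"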
@@ -149,33 +184,12 @@ export function parse(str: string, options: ParseOptions = {}): Token[] {
   const result: Token[] = [];
   const tokens = lexer(str);
   let key = 0;
-  let i = 0;
   let path = "";

-  const tryConsume = (type: LexToken["type"]): string | undefined => {
-    if (i < tokens.length && tokens[i].type === type) return tokens[i++].value;
-  };
-
-  const mustConsume = (type: LexToken["type"]): string => {
-    const value = tryConsume(type);
-    if (value !== undefined) return value;
-    const { type: nextType, index } = tokens[i];
-    throw new TypeError(`Unexpected ${nextType} at ${index}, expected ${type}`);
-  };
-
-  const consumeText = (): string => {
-    let result = "";
-    let value: string | undefined;
-    while ((value = tryConsume("CHAR") || tryConsume("ESCAPED_CHAR"))) {
-      result += value;
-    }
-    return result;
-  };
-
-  while (i < tokens.length) {
-    const char = tryConsume("CHAR");
-    const name = tryConsume("NAME");
-    const pattern = tryConsume("PATTERN");
+  do {
+    const char = tokens.tryConsume("CHAR");
+    const name = tokens.tryConsume("NAME");
+    const pattern = tokens.tryConsume("PATTERN");

     if (name || pattern) {
       let prefix = char || "";
@@ -195,12 +209,12 @@ export function parse(str: string, options: ParseOptions = {}): Token[] {
         prefix,
         suffix: "",
         pattern: pattern || defaultPattern,
-        modifier: tryConsume("MODIFIER") || "",
+        modifier: tokens.tryConsume("MODIFIER") || "",
       });
       continue;
     }

-    const value = char || tryConsume("ESCAPED_CHAR");
+    const value = char || tokens.tryConsume("ESCAPED_CHAR");
     if (value) {
       path += value;
       continue;
@@ -211,27 +225,28 @@ export function parse(str: string, options: ParseOptions = {}): Token[] {
       path = "";
     }

-    const open = tryConsume("OPEN");
+    const open = tokens.tryConsume("OPEN");
     if (open) {
-      const prefix = consumeText();
-      const name = tryConsume("NAME") || "";
-      const pattern = tryConsume("PATTERN") || "";
-      const suffix = consumeText();
+      const prefix = tokens.text();
+      const name = tokens.tryConsume("NAME") || "";
+      const pattern = tokens.tryConsume("PATTERN") || "";
+      const suffix = tokens.text();

-      mustConsume("CLOSE");
+      tokens.consume("CLOSE");

       result.push({
         name: name || (pattern ? key++ : ""),
         pattern: name && !pattern ? defaultPattern : pattern,
         prefix,
         suffix,
-        modifier: tryConsume("MODIFIER") || "",
+        modifier: tokens.tryConsume("MODIFIER") || "",
       });
       continue;
     }

-    mustConsume("END");
-  }
+    tokens.consume("END");
+    break;
+  } while (true);

   return result;
 }
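For orientation, the refactor keeps parse() returning the same token shape as before; a hedged example (the default pattern string is illustrative — the exact value depends on the delimiter option):

parse("/users/:id");
// => [
//      "/users",
//      { name: "id", prefix: "/", suffix: "", pattern: "[^\\/#\\?]+?", modifier: "" },
//    ]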
@@ -559,15 +574,11 @@ export function tokensToRegexp(
       if (token.pattern) {
         if (keys) keys.push(token);

-        if (prefix || suffix) {
-          if (token.modifier === "+" || token.modifier === "*") {
-            const mod = token.modifier === "*" ? "?" : "";
-            route += `(?:${prefix}((?:${token.pattern})(?:${suffix}${prefix}(?:${token.pattern}))*)${suffix})${mod}`;
-          } else {
-            route += `(?:${prefix}(${token.pattern})${suffix})${token.modifier}`;
-          }
+        if (token.modifier === "+" || token.modifier === "*") {
+          const mod = token.modifier === "*" ? "?" : "";
+          route += `(?:${prefix}((?:${token.pattern})(?:${suffix}${prefix}(?:${token.pattern}))*)${suffix})${mod}`;
         } else {
-          route += `((?:${token.pattern})${token.modifier})`;
+          route += `(?:${prefix}(${token.pattern})${suffix})${token.modifier}`;
         }
       } else {
         route += `(?:${prefix}${suffix})${token.modifier}`;
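To see what the now-unconditional repeat branch builds, here is the template from the hunk above expanded by hand with illustrative values (not actual library output):

const prefix = "/";
const suffix = "";
const pattern = "[^\\/]+?"; // illustrative segment pattern
const mod = "?";            // token.modifier === "*"
const group = `(?:${prefix}((?:${pattern})(?:${suffix}${prefix}(?:${pattern}))*)${suffix})${mod}`;
// => "(?:/((?:[^\/]+?)(?:/(?:[^\/]+?))*))?" — one capture spanning every repeated "/"-prefixed segment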
@@ -577,8 +588,7 @@ export function tokensToRegexp(

   if (end) {
     if (!strict) route += `${delimiterRe}?`;
-
-    route += options.endsWith ? `(?=${endsWithRe})` : "$";
+    route += endsWith ? `(?=${endsWithRe})` : "$";
   } else {
     const endToken = tokens[tokens.length - 1];
     const isEndDelimited =