@@ -210,7 +210,7 @@ pub struct Tokenizer<'a> {
210
210
/// Counted in bytes, not code points. From 0.
211
211
position : usize ,
212
212
/// Cache for `source_location()`
213
- last_known_line_break : Cell < ( usize , usize ) > ,
213
+ last_known_source_location : Cell < ( SourcePosition , SourceLocation ) > ,
214
214
var_functions : VarFunctions ,
215
215
}
216
216
@@ -228,7 +228,8 @@ impl<'a> Tokenizer<'a> {
228
228
Tokenizer {
229
229
input : input,
230
230
position : 0 ,
231
- last_known_line_break : Cell :: new ( ( 1 , 0 ) ) ,
231
+ last_known_source_location : Cell :: new ( ( SourcePosition ( 0 ) ,
232
+ SourceLocation { line : 1 , column : 1 } ) ) ,
232
233
var_functions : VarFunctions :: DontCare ,
233
234
}
234
235
}
@@ -278,19 +279,22 @@ impl<'a> Tokenizer<'a> {
278
279
279
280
pub fn source_location ( & self , position : SourcePosition ) -> SourceLocation {
280
281
let target = position. 0 ;
281
- let mut line_number ;
282
+ let mut location ;
282
283
let mut position;
283
- let ( last_known_line_number , position_after_last_known_newline ) =
284
- self . last_known_line_break . get ( ) ;
285
- if target >= position_after_last_known_newline {
286
- position = position_after_last_known_newline ;
287
- line_number = last_known_line_number ;
284
+ let ( SourcePosition ( last_known_position ) , last_known_location ) =
285
+ self . last_known_source_location . get ( ) ;
286
+ if target >= last_known_position {
287
+ position = last_known_position ;
288
+ location = last_known_location ;
288
289
} else {
290
+ // For now we’re only traversing the source *forwards* to count newlines.
291
+ // So if the requested position is before the last known one,
292
+ // start over from the beginning.
289
293
position = 0 ;
290
- line_number = 1 ;
294
+ location = SourceLocation { line : 1 , column : 1 } ;
291
295
}
292
296
let mut source = & self . input [ position..target] ;
293
- while let Some ( newline_position) = source. find ( & [ '\n' , '\r' , '\x0C' ] [ .. ] ) {
297
+ while let Some ( newline_position) = source. find ( |c| matches ! ( c , '\n' | '\r' | '\x0C' ) ) {
294
298
let offset = newline_position +
295
299
if source[ newline_position..] . starts_with ( "\r \n " ) {
296
300
2
@@ -299,16 +303,13 @@ impl<'a> Tokenizer<'a> {
299
303
} ;
300
304
source = & source[ offset..] ;
301
305
position += offset;
302
- line_number += 1 ;
306
+ location. line += 1 ;
307
+ location. column = 1 ;
303
308
}
304
309
debug_assert ! ( position <= target) ;
305
- self . last_known_line_break . set ( ( line_number, position) ) ;
306
- SourceLocation {
307
- line : line_number,
308
- // `target == position` when `target` is at the beginning of the line,
309
- // so add 1 so that the column numbers start at 1.
310
- column : target - position + 1 ,
311
- }
310
+ location. column += target - position;
311
+ self . last_known_source_location . set ( ( SourcePosition ( target) , location) ) ;
312
+ location
312
313
}
313
314
314
315
#[ inline]
@@ -371,7 +372,7 @@ pub struct SourceLocation {
371
372
/// The line number, starting at 1 for the first line.
372
373
pub line : usize ,
373
374
374
- /// The column number within a line, starting at 1 for the character of the line.
375
+ /// The column number within a line, starting at 1 for the first character of the line.
375
376
pub column : usize ,
376
377
}
377
378
0 commit comments