@@ -18,8 +18,6 @@ pub fn tokenize(input: &str) -> Tokenizer {
         length: input.len(),
         input: input,
         position: 0,
-        line: 1,
-        last_line_start: 0,
     }
 }

@@ -43,8 +41,6 @@ pub struct Tokenizer {
     input: String,
     length: uint,  // All counted in bytes, not characters
     position: uint,  // All counted in bytes, not characters
-    line: uint,
-    last_line_start: uint,  // All counted in bytes, not characters
 }


@@ -72,15 +68,6 @@ impl Tokenizer {
     fn starts_with(&self, needle: &str) -> bool {
         self.input.as_slice().slice_from(self.position).starts_with(needle)
     }
-
-    #[inline]
-    fn new_line(&mut self) {
-        if cfg!(test) {
-            assert!(self.input.as_slice().char_at(self.position - 1) == '\n')
-        }
-        self.line += 1;
-        self.last_line_start = self.position;
-    }
 }

 macro_rules! is_match(
@@ -93,27 +80,19 @@ macro_rules! is_match(
 fn next_component_value(tokenizer: &mut Tokenizer) -> Option<Node> {
     consume_comments(tokenizer);
     if tokenizer.is_eof() {
-        if cfg!(test) {
-            assert!(tokenizer.line == tokenizer.input.as_slice().split('\n').count(),
-                    "The tokenizer is missing a tokenizer.new_line() call somewhere.")
-        }
         return None
     }
     let start_location = SourceLocation {
-        line: tokenizer.line,
-        // The start of the line is column 1:
-        column: tokenizer.position - tokenizer.last_line_start + 1,
+        // FIXME
+        line: 0,
+        column: tokenizer.position,
     };
     let c = tokenizer.current_char();
     let component_value = match c {
         '\t' | '\n' | ' ' => {
             while !tokenizer.is_eof() {
                 match tokenizer.current_char() {
-                    ' ' | '\t' => tokenizer.position += 1,
-                    '\n' => {
-                        tokenizer.position += 1;
-                        tokenizer.new_line();
-                    },
+                    ' ' | '\t' | '\n' => tokenizer.position += 1,
                     _ => break,
                 }
             }
@@ -251,15 +230,11 @@ fn consume_comments(tokenizer: &mut Tokenizer) {
     while tokenizer.starts_with("/*") {
         tokenizer.position += 2;  // +2 to consume "/*"
         while !tokenizer.is_eof() {
-            match tokenizer.consume_char() {
-                '*' => {
-                    if !tokenizer.is_eof() && tokenizer.current_char() == '/' {
-                        tokenizer.position += 1;
-                        break
-                    }
-                },
-                '\n' => tokenizer.new_line(),
-                _ => ()
+            if tokenizer.consume_char() == '*' &&
+               !tokenizer.is_eof() &&
+               tokenizer.current_char() == '/' {
+                tokenizer.position += 1;
+                break
             }
         }
     }
@@ -322,7 +297,6 @@ fn consume_quoted_string(tokenizer: &mut Tokenizer, single_quote: bool) -> Resul
                 if !tokenizer.is_eof() {
                     if tokenizer.current_char() == '\n' {  // Escaped newline
                         tokenizer.position += 1;
-                        tokenizer.new_line();
                     }
                     else { string.push(consume_escape(tokenizer)) }
                 }
@@ -461,11 +435,7 @@ fn consume_url(tokenizer: &mut Tokenizer) -> ComponentValue {
     tokenizer.position += 1;  // Skip the ( of url(
     while !tokenizer.is_eof() {
         match tokenizer.current_char() {
-            ' ' | '\t' => tokenizer.position += 1,
-            '\n' => {
-                tokenizer.position += 1;
-                tokenizer.new_line();
-            },
+            ' ' | '\t' | '\n' => tokenizer.position += 1,
             '"' => return consume_quoted_url(tokenizer, false),
             '\'' => return consume_quoted_url(tokenizer, true),
             ')' => { tokenizer.position += 1; break },
@@ -485,11 +455,7 @@ fn consume_url(tokenizer: &mut Tokenizer) -> ComponentValue {
         let mut string = String::new();
         while !tokenizer.is_eof() {
             let next_char = match tokenizer.consume_char() {
-                ' ' | '\t' => return consume_url_end(tokenizer, string),
-                '\n' => {
-                    tokenizer.new_line();
-                    return consume_url_end(tokenizer, string)
-                },
+                ' ' | '\t' | '\n' => return consume_url_end(tokenizer, string),
                 ')' => break,
                 '\x01'...'\x08' | '\x0B' | '\x0E'...'\x1F' | '\x7F'  // non-printable
                 | '"' | '\'' | '(' => return consume_bad_url(tokenizer),
@@ -510,8 +476,7 @@ fn consume_url(tokenizer: &mut Tokenizer) -> ComponentValue {
     fn consume_url_end(tokenizer: &mut Tokenizer, string: String) -> ComponentValue {
         while !tokenizer.is_eof() {
             match tokenizer.consume_char() {
-                ' ' | '\t' => (),
-                '\n' => tokenizer.new_line(),
+                ' ' | '\t' | '\n' => (),
                 ')' => break,
                 _ => return consume_bad_url(tokenizer)
             }
@@ -525,7 +490,6 @@ fn consume_url(tokenizer: &mut Tokenizer) -> ComponentValue {
             match tokenizer.consume_char() {
                 ')' => break,
                 '\\' => tokenizer.position += 1,  // Skip an escaped ')' or '\'
-                '\n' => tokenizer.new_line(),
                 _ => ()
             }
         }
@@ -593,8 +557,7 @@ fn consume_escape(tokenizer: &mut Tokenizer) -> char {
            }
            if !tokenizer.is_eof() {
                match tokenizer.current_char() {
-                    ' ' | '\t' => tokenizer.position += 1,
-                    '\n' => { tokenizer.position += 1; tokenizer.new_line() },
+                    ' ' | '\t' | '\n' => tokenizer.position += 1,
                    _ => ()
                }
            }
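
Note on the // FIXME left in next_component_value: after this change, SourceLocation carries a placeholder line of 0 and a raw byte offset in the column field. One way the FIXME could later be resolved, sketched below in current Rust syntax, is to recompute line and column lazily from the byte offset only when a caller actually asks for a location. location_from_offset is a hypothetical helper written for illustration; it is not part of this commit.

    // Hypothetical helper: derive a 1-based (line, column) pair from a byte
    // offset, replacing the eager new_line() bookkeeping removed above.
    fn location_from_offset(input: &str, position: usize) -> (usize, usize) {
        let before = &input[..position];
        // Count the newlines before `position` to get the 1-based line number.
        let line = before.bytes().filter(|&b| b == b'\n').count() + 1;
        // The column is measured from the last line start; the start of a
        // line is column 1, matching the convention of the removed code.
        let last_line_start = before.rfind('\n').map(|i| i + 1).unwrap_or(0);
        (line, position - last_line_start + 1)
    }

    fn main() {
        let input = "a {\n  color: red;\n}";
        // Byte offset 6 points at `color`: line 2, column 3.
        assert_eq!(location_from_offset(input, 6), (2, 3));
    }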