 
 import {AtRule, Comment, Declaration, Discarded, Rule, Rulelist, Ruleset, Stylesheet} from './common';
 import {NodeFactory} from './node-factory';
-import {Token} from './token';
+import {TokenType} from './token';
 import {Tokenizer} from './tokenizer';
 
 /**
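The import swap above is the whole shape of this change: call sites stop reaching through the `Token` class for a static `type` map and instead use a standalone `TokenType` exported by `./token`. For orientation only, the shapes the code below relies on look roughly like the following sketch; the member names and `is()` are taken from the diff itself, while the numeric flag values and everything else are illustrative assumptions, not the module's literal contents.

```ts
// Sketch of the shapes assumed by the call sites in this diff. Only the
// TokenType member names and Token#is() appear in the diff; the bit-flag
// values and extra fields below are hypothetical.
export enum TokenType {
  none = 0,
  whitespace = 1 << 0,
  comment = 1 << 1,
  word = 1 << 2,
  boundary = 1 << 3,
  propertyBoundary = 1 << 4,
  // Compound members: each concrete boundary also carries the generic
  // boundary bit, so is(TokenType.boundary) matches all of them.
  openParenthesis = (1 << 5) | boundary,
  closeParenthesis = (1 << 6) | boundary,
  at = (1 << 7) | boundary,
  openBrace = (1 << 8) | boundary,
  closeBrace = (1 << 9) | boundary | propertyBoundary,
  semicolon = (1 << 10) | boundary | propertyBoundary,
  colon = (1 << 11) | boundary,
}

export class Token {
  constructor(
      readonly type: TokenType,
      readonly start: number,
      readonly end: number) {}

  /** True when this token carries every bit of the given type. */
  is(type: TokenType): boolean {
    return (this.type & type) === type;
  }
}
```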
@@ -81,20 +81,20 @@ class Parser {
     if (token === null) {
       return null;
     }
-    if (token.is(Token.type.whitespace)) {
+    if (token.is(TokenType.whitespace)) {
       tokenizer.advance();
       return null;
 
-    } else if (token.is(Token.type.comment)) {
+    } else if (token.is(TokenType.comment)) {
       return this.parseComment(tokenizer);
 
-    } else if (token.is(Token.type.word)) {
+    } else if (token.is(TokenType.word)) {
       return this.parseDeclarationOrRuleset(tokenizer);
 
-    } else if (token.is(Token.type.propertyBoundary)) {
+    } else if (token.is(TokenType.propertyBoundary)) {
       return this.parseUnknown(tokenizer);
 
-    } else if (token.is(Token.type.at)) {
+    } else if (token.is(TokenType.at)) {
       return this.parseAtRule(tokenizer);
 
     } else {
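These branches are the parser's per-token dispatch: each token category is routed to the matching `parse*` method. With `TokenType` exported on its own, the same kind of category check is available to any code that walks the token stream directly. A minimal sketch, assuming `new Tokenizer(cssText)` and treating the exact return types of `advance()`/`currentToken` (both seen used throughout this diff) as assumptions:

```ts
import {TokenType} from './token';
import {Tokenizer} from './tokenizer';

// Walk a token stream and count how many tokens are "trivia"
// (whitespace or comments) versus everything else.
function countTrivia(cssText: string): {trivia: number, other: number} {
  const tokenizer = new Tokenizer(cssText);
  const counts = {trivia: 0, other: 0};
  while (tokenizer.currentToken) {
    const token = tokenizer.advance();
    if (!token) {
      break;  // defensive: advance()'s exact return type is assumed here
    }
    if (token.is(TokenType.whitespace) || token.is(TokenType.comment)) {
      counts.trivia++;
    } else {
      counts.other++;
    }
  }
  return counts;
}
```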
@@ -130,7 +130,7 @@ class Parser {
     }
 
     while (tokenizer.currentToken &&
-           tokenizer.currentToken.is(Token.type.boundary)) {
+           tokenizer.currentToken.is(TokenType.boundary)) {
       end = tokenizer.advance();
     }
 
@@ -155,27 +155,27 @@ class Parser {
     const start = tokenizer.currentToken.start;
 
     while (tokenizer.currentToken) {
-      if (tokenizer.currentToken.is(Token.type.whitespace)) {
+      if (tokenizer.currentToken.is(TokenType.whitespace)) {
         tokenizer.advance();
-      } else if (!name && tokenizer.currentToken.is(Token.type.at)) {
+      } else if (!name && tokenizer.currentToken.is(TokenType.at)) {
         // Discard the @:
         tokenizer.advance();
         const start = tokenizer.currentToken;
         let end;
 
         while (tokenizer.currentToken &&
-               tokenizer.currentToken.is(Token.type.word)) {
+               tokenizer.currentToken.is(TokenType.word)) {
           end = tokenizer.advance();
         }
         nameRange = tokenizer.getRange(start, end);
         name = tokenizer.cssText.slice(nameRange.start, nameRange.end);
-      } else if (tokenizer.currentToken.is(Token.type.openBrace)) {
+      } else if (tokenizer.currentToken.is(TokenType.openBrace)) {
         rulelist = this.parseRulelist(tokenizer);
         break;
-      } else if (tokenizer.currentToken.is(Token.type.semicolon)) {
+      } else if (tokenizer.currentToken.is(TokenType.semicolon)) {
         tokenizer.advance();
         break;
-      } else if (tokenizer.currentToken.is(Token.type.closeBrace)) {
+      } else if (tokenizer.currentToken.is(TokenType.closeBrace)) {
         break;
       } else {
         if (parametersStart == null) {
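Note that the at-rule's name is recovered with the `getRange`/`slice` idiom above rather than by re-assembling tokens, so the text between the two offsets is preserved exactly as written. A reduced, hypothetical sketch of that idiom (the `Range` shape is assumed from the `start`/`end` offsets used here; `sliceRange` is not repo code):

```ts
// Reduced form of the name-extraction idiom above: a range is a pair of
// offsets into the original cssText, and the text is sliced out verbatim.
interface Range {
  start: number;
  end: number;
}

function sliceRange(cssText: string, range: Range): string {
  return cssText.slice(range.start, range.end);
}

// e.g. for '@media screen { ... }', offsets 1..6 yield the at-rule name
// exactly as it appears in the source:
console.log(sliceRange('@media screen { ... }', {start: 1, end: 6}));  // 'media'
```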
@@ -217,7 +217,7 @@ class Parser {
     tokenizer.advance();
 
     while (tokenizer.currentToken) {
-      if (tokenizer.currentToken.is(Token.type.closeBrace)) {
+      if (tokenizer.currentToken.is(TokenType.closeBrace)) {
         endToken = tokenizer.currentToken;
         tokenizer.advance();
         break;
@@ -250,20 +250,20 @@ class Parser {
     // property boundary.. though that may be impossible.
 
     while (tokenizer.currentToken) {
-      if (tokenizer.currentToken.is(Token.type.whitespace)) {
+      if (tokenizer.currentToken.is(TokenType.whitespace)) {
         tokenizer.advance();
-      } else if (tokenizer.currentToken.is(Token.type.openParenthesis)) {
+      } else if (tokenizer.currentToken.is(TokenType.openParenthesis)) {
         // skip until close paren
         while (tokenizer.currentToken &&
-               !tokenizer.currentToken.is(Token.type.closeParenthesis)) {
+               !tokenizer.currentToken.is(TokenType.closeParenthesis)) {
           tokenizer.advance();
         }
       } else if (
-          tokenizer.currentToken.is(Token.type.openBrace) ||
-          tokenizer.currentToken.is(Token.type.propertyBoundary)) {
+          tokenizer.currentToken.is(TokenType.openBrace) ||
+          tokenizer.currentToken.is(TokenType.propertyBoundary)) {
         break;
       } else {
-        if (tokenizer.currentToken.is(Token.type.colon)) {
+        if (tokenizer.currentToken.is(TokenType.colon)) {
           colon = tokenizer.currentToken;
         }
 
@@ -282,7 +282,7 @@ class Parser {
     }
 
     // A ruleset never contains or ends with a semi-colon.
-    if (tokenizer.currentToken.is(Token.type.propertyBoundary)) {
+    if (tokenizer.currentToken.is(TokenType.propertyBoundary)) {
       const nameRange =
           tokenizer.getRange(ruleStart!, colon ? colon.previous : ruleEnd);
       const declarationName =
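The `propertyBoundary` check above is what makes the "never ends with a semi-colon" comment work: a declaration can be terminated either by `;` or by the `}` that closes its enclosing block, and both are expected to satisfy the same single check. A quick illustration, assuming the compound-flag layout from the hypothetical sketch under the imports rather than the module's real values:

```ts
import {TokenType} from './token';

// Assumes the earlier sketch: both `semicolon` and `closeBrace` carry the
// `propertyBoundary` bit, so one check covers either way a declaration ends.
const endsProperty = (t: TokenType): boolean =>
    (t & TokenType.propertyBoundary) === TokenType.propertyBoundary;

console.log(endsProperty(TokenType.semicolon));   // expected: true
console.log(endsProperty(TokenType.closeBrace));  // expected: true
console.log(endsProperty(TokenType.openBrace));   // expected: false
```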
@@ -298,7 +298,7 @@ class Parser {
             this.nodeFactory.expression(expressionValue, expressionRange);
       }
 
-      if (tokenizer.currentToken.is(Token.type.semicolon)) {
+      if (tokenizer.currentToken.is(TokenType.semicolon)) {
         tokenizer.advance();
       }
 
@@ -313,7 +313,7 @@ class Parser {
     } else if (colon && colon === ruleEnd) {
       const rulelist = this.parseRulelist(tokenizer);
 
-      if (tokenizer.currentToken.is(Token.type.semicolon)) {
+      if (tokenizer.currentToken.is(TokenType.semicolon)) {
         tokenizer.advance();
       }
 
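For reference, every hunk above lives inside the `Parser` class named in the hunk headers, and end-to-end usage is unchanged by the rename. A minimal sketch, assuming `./parser` exports that class and that `parse()` returns the `Stylesheet` node assembled via the imported `NodeFactory` (both the export path and the method name are assumptions, not spelled out in this diff):

```ts
import {Parser} from './parser';

// Parse a small stylesheet and dump the resulting AST. The exact node
// shapes come from NodeFactory and are not shown in this diff, so the
// result is serialized rather than accessed field by field.
const parser = new Parser();
const stylesheet = parser.parse('.container { color: hotpink; }');
console.log(JSON.stringify(stylesheet, null, 2));
```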