@@ -88,7 +88,8 @@ class Parser {
     } else if (token.is(TokenType.comment)) {
       return this.parseComment(tokenizer);

-    } else if (token.is(TokenType.word)) {
+    } else if (token.is(TokenType.word) ||
+               token.is(TokenType.openBrace)) {
       return this.parseDeclarationOrRuleset(tokenizer);

     } else if (token.is(TokenType.propertyBoundary)) {
@@ -241,13 +242,13 @@ class Parser {
    * @param tokenizer A Tokenizer node.
    */
   parseDeclarationOrRuleset(tokenizer: Tokenizer): Declaration|Ruleset|null {
-    let ruleStart = null;
-    let ruleEnd = null;
-    let colon = null;
+    if (!tokenizer.currentToken) {
+      return null;
+    }

-    // This code is not obviously correct. e.g. there's what looks to be a
-    // null-dereference if the declaration starts with an open brace or
-    // property boundary.. though that may be impossible.
+    let ruleStart = tokenizer.currentToken;
+    let ruleEnd = ruleStart.previous;
+    let colon = null;

     while (tokenizer.currentToken) {
       if (tokenizer.currentToken.is(TokenType.whitespace)) {
@@ -266,25 +267,15 @@ class Parser {
         if (tokenizer.currentToken.is(TokenType.colon)) {
           colon = tokenizer.currentToken;
         }
-
-        if (ruleStart === null) {
-          ruleStart = tokenizer.advance();
-          ruleEnd = ruleStart;
-        } else {
-          ruleEnd = tokenizer.advance();
-        }
+        ruleEnd = tokenizer.advance();
       }
     }

-    if (tokenizer.currentToken === null) {
-      // terminated early
-      return null;
-    }
-
     // A ruleset never contains or ends with a semi-colon.
-    if (tokenizer.currentToken.is(TokenType.propertyBoundary)) {
+    if (!tokenizer.currentToken ||
+        tokenizer.currentToken.is(TokenType.propertyBoundary)) {
       const nameRange =
-          tokenizer.getRange(ruleStart!, colon ? colon.previous : ruleEnd);
+          tokenizer.getRange(ruleStart, colon ? colon.previous : ruleEnd);
       const declarationName =
           tokenizer.cssText.slice(nameRange.start, nameRange.end);

@@ -298,12 +289,13 @@ class Parser {
             this.nodeFactory.expression(expressionValue, expressionRange);
       }

-      if (tokenizer.currentToken.is(TokenType.semicolon)) {
+      if (tokenizer.currentToken &&
+          tokenizer.currentToken.is(TokenType.semicolon)) {
         tokenizer.advance();
       }

       const range = tokenizer.trimRange(tokenizer.getRange(
-          ruleStart!,
+          ruleStart,
          tokenizer.currentToken && tokenizer.currentToken.previous ||
              ruleEnd));

@@ -313,33 +305,34 @@ class Parser {
     } else if (colon && colon === ruleEnd) {
       const rulelist = this.parseRulelist(tokenizer);

-      if (tokenizer.currentToken.is(TokenType.semicolon)) {
+      if (tokenizer.currentToken &&
+          tokenizer.currentToken.is(TokenType.semicolon)) {
         tokenizer.advance();
       }

-      const nameRange = tokenizer.getRange(ruleStart!, ruleEnd.previous);
+      const nameRange = tokenizer.getRange(ruleStart, ruleEnd.previous);
       const declarationName =
           tokenizer.cssText.slice(nameRange.start, nameRange.end);

       const range = tokenizer.trimRange(tokenizer.getRange(
-          ruleStart!,
+          ruleStart,
          tokenizer.currentToken && tokenizer.currentToken.previous ||
              ruleEnd));

       return this.nodeFactory.declaration(
           declarationName, rulelist, nameRange, range);
       // Otherwise, this is a ruleset:
     } else {
-      const selectorRange = tokenizer.getRange(ruleStart!, ruleEnd);
+      const selectorRange = tokenizer.getRange(ruleStart, ruleEnd);
       const selector =
           tokenizer.cssText.slice(selectorRange.start, selectorRange.end);
       const rulelist = this.parseRulelist(tokenizer);
-      const start = ruleStart!.start;
+      const start = ruleStart.start;
       let end;
       if (tokenizer.currentToken) {
         end = tokenizer.currentToken.previous ?
             tokenizer.currentToken.previous.end :
-            ruleStart!.end;
+            ruleStart.end;
       } else {
         // no current token? must have reached the end of input, so go up
         // until there
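
The recurring change in this diff is to check tokenizer.currentToken once at the top of parseDeclarationOrRuleset (and before each later use), so that TypeScript's control-flow narrowing treats ruleStart as a definite token and the non-null assertions (the trailing "!") can be dropped. The following is a minimal sketch of that guard-first shape, using simplified stand-in Token and Tokenizer types of my own rather than the parser's real classes:

// Simplified stand-ins for illustration; the real Token/Tokenizer have more members.
class Token {
  constructor(
      public start: number,
      public end: number,
      public previous: Token|null = null) {}
}

class Tokenizer {
  currentToken: Token|null;

  constructor(private tokens: Token[]) {
    this.currentToken = tokens.length > 0 ? tokens[0] : null;
  }

  // Return the token that was current and step to the next one (or null at the end).
  advance(): Token|null {
    const token = this.currentToken;
    if (token !== null) {
      const index = this.tokens.indexOf(token);
      this.currentToken = this.tokens[index + 1] || null;
    }
    return token;
  }
}

// Guard first, then work with values TypeScript has narrowed to non-null,
// so no `!` assertions are needed later in the function.
function parseSpan(tokenizer: Tokenizer): {start: number, end: number}|null {
  if (!tokenizer.currentToken) {
    return null;
  }

  const ruleStart = tokenizer.currentToken;      // narrowed to Token
  let ruleEnd: Token|null = ruleStart.previous;  // may legitimately be null

  while (tokenizer.currentToken) {
    ruleEnd = tokenizer.advance();
  }

  return {start: ruleStart.start, end: ruleEnd ? ruleEnd.end : ruleStart.end};
}

In this sketch, parseSpan(new Tokenizer([])) returns null, mirroring the early return the patch adds for an exhausted tokenizer.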