import { expect } from 'chai';
import { Token, TokenType } from '../shady-css/token';
import { Tokenizer } from '../shady-css/tokenizer';
import * as fixtures from './fixtures';
@@ -27,123 +27,123 @@ describe('Tokenizer', () => {
27
27
28
28
it ( 'can identify comments' , ( ) => {
29
29
expect ( new Tokenizer ( '/*foo*/' ) . flush ( ) ) . to . be . eql ( helpers . linkedTokens ( [
30
- new Token ( Token . type . comment , 0 , 7 )
30
+ new Token ( TokenType . comment , 0 , 7 )
31
31
] ) ) ;
32
32
} ) ;
33
33
34
34
it ( 'can identify words' , ( ) => {
35
35
expect ( new Tokenizer ( 'font-family' ) . flush ( ) )
36
- . to . be . eql ( helpers . linkedTokens ( [ new Token ( Token . type . word , 0 , 11 ) ] ) ) ;
36
+ . to . be . eql ( helpers . linkedTokens ( [ new Token ( TokenType . word , 0 , 11 ) ] ) ) ;
37
37
} ) ;
38
38
39
39
it ( 'can identify boundaries' , ( ) => {
40
40
expect ( new Tokenizer ( '@{};()' ) . flush ( ) ) . to . be . eql ( helpers . linkedTokens ( [
41
- new Token ( Token . type . at , 0 , 1 ) ,
42
- new Token ( Token . type . openBrace , 1 , 2 ) ,
43
- new Token ( Token . type . closeBrace , 2 , 3 ) ,
44
- new Token ( Token . type . semicolon , 3 , 4 ) ,
45
- new Token ( Token . type . openParenthesis , 4 , 5 ) ,
46
- new Token ( Token . type . closeParenthesis , 5 , 6 )
41
+ new Token ( TokenType . at , 0 , 1 ) ,
42
+ new Token ( TokenType . openBrace , 1 , 2 ) ,
43
+ new Token ( TokenType . closeBrace , 2 , 3 ) ,
44
+ new Token ( TokenType . semicolon , 3 , 4 ) ,
45
+ new Token ( TokenType . openParenthesis , 4 , 5 ) ,
46
+ new Token ( TokenType . closeParenthesis , 5 , 6 )
47
47
] ) ) ;
48
48
} ) ;
49
49
});
50
50
51
51
describe ( 'when tokenizing standard CSS structures' , ( ) => {
52
52
it ( 'can tokenize a basic ruleset' , ( ) => {
53
53
helpers . expectTokenSequence ( new Tokenizer ( fixtures . basicRuleset ) , [
54
- Token . type . whitespace , '\n' , Token . type . word , 'body' ,
55
- Token . type . whitespace , ' ' , Token . type . openBrace , '{' ,
56
- Token . type . whitespace , '\n ' , Token . type . word , 'margin' ,
57
- Token . type . colon , ':' , Token . type . whitespace , ' ' ,
58
- Token . type . word , '0' , Token . type . semicolon , ';' ,
59
- Token . type . whitespace , '\n ' , Token . type . word , 'padding' ,
60
- Token . type . colon , ':' , Token . type . whitespace , ' ' ,
61
- Token . type . word , '0px' , Token . type . whitespace , '\n' ,
62
- Token . type . closeBrace , '}' , Token . type . whitespace , '\n'
54
+ TokenType . whitespace , '\n' , TokenType . word , 'body' ,
55
+ TokenType . whitespace , ' ' , TokenType . openBrace , '{' ,
56
+ TokenType . whitespace , '\n ' , TokenType . word , 'margin' ,
57
+ TokenType . colon , ':' , TokenType . whitespace , ' ' ,
58
+ TokenType . word , '0' , TokenType . semicolon , ';' ,
59
+ TokenType . whitespace , '\n ' , TokenType . word , 'padding' ,
60
+ TokenType . colon , ':' , TokenType . whitespace , ' ' ,
61
+ TokenType . word , '0px' , TokenType . whitespace , '\n' ,
62
+ TokenType . closeBrace , '}' , TokenType . whitespace , '\n'
63
63
] ) ;
64
64
} ) ;
65
65
66
66
it ( 'can tokenize @rules' , ( ) => {
67
67
helpers . expectTokenSequence ( new Tokenizer ( fixtures . atRules ) , [
68
- Token . type . whitespace ,
68
+ TokenType . whitespace ,
69
69
'\n' ,
70
- Token . type . at ,
70
+ TokenType . at ,
71
71
'@' ,
72
- Token . type . word ,
72
+ TokenType . word ,
73
73
'import' ,
74
- Token . type . whitespace ,
74
+ TokenType . whitespace ,
75
75
' ' ,
76
- Token . type . word ,
76
+ TokenType . word ,
77
77
'url' ,
78
- Token . type . openParenthesis ,
78
+ TokenType . openParenthesis ,
79
79
'(' ,
80
- Token . type . string ,
80
+ TokenType . string ,
81
81
'\'foo.css\'' ,
82
- Token . type . closeParenthesis ,
82
+ TokenType . closeParenthesis ,
83
83
')' ,
84
- Token . type . semicolon ,
84
+ TokenType . semicolon ,
85
85
';' ,
86
- Token . type . whitespace ,
86
+ TokenType . whitespace ,
87
87
'\n\n' ,
88
- Token . type . at ,
88
+ TokenType . at ,
89
89
'@' ,
90
- Token . type . word ,
90
+ TokenType . word ,
91
91
'font-face' ,
92
- Token . type . whitespace ,
92
+ TokenType . whitespace ,
93
93
' ' ,
94
- Token . type . openBrace ,
94
+ TokenType . openBrace ,
95
95
'{' ,
96
- Token . type . whitespace ,
96
+ TokenType . whitespace ,
97
97
'\n ' ,
98
- Token . type . word ,
98
+ TokenType . word ,
99
99
'font-family' ,
100
- Token . type . colon ,
100
+ TokenType . colon ,
101
101
':' ,
102
- Token . type . whitespace ,
102
+ TokenType . whitespace ,
103
103
' ' ,
104
- Token . type . word ,
104
+ TokenType . word ,
105
105
'foo' ,
106
- Token . type . semicolon ,
106
+ TokenType . semicolon ,
107
107
';' ,
108
- Token . type . whitespace ,
108
+ TokenType . whitespace ,
109
109
'\n' ,
110
- Token . type . closeBrace ,
110
+ TokenType . closeBrace ,
111
111
'}' ,
112
- Token . type . whitespace ,
112
+ TokenType . whitespace ,
113
113
'\n\n' ,
114
- Token . type . at ,
114
+ TokenType . at ,
115
115
'@' ,
116
- Token . type . word ,
116
+ TokenType . word ,
117
117
'charset' ,
118
- Token . type . whitespace ,
118
+ TokenType . whitespace ,
119
119
' ' ,
120
- Token . type . string ,
120
+ TokenType . string ,
121
121
'\'foo\'' ,
122
- Token . type . semicolon ,
122
+ TokenType . semicolon ,
123
123
';' ,
124
- Token . type . whitespace ,
124
+ TokenType . whitespace ,
125
125
'\n'
126
126
] ) ;
127
127
} ) ;
128
128
129
129
it ( 'navigates pathological boundary usage' , ( ) => {
130
130
helpers . expectTokenSequence ( new Tokenizer ( fixtures . extraSemicolons ) , [
131
- Token . type . whitespace , '\n' , Token . type . colon , ':' ,
132
- Token . type . word , 'host' , Token . type . whitespace , ' ' ,
133
- Token . type . openBrace , '{' , Token . type . whitespace , '\n ' ,
134
- Token . type . word , 'margin' , Token . type . colon , ':' ,
135
- Token . type . whitespace , ' ' , Token . type . word , '0' ,
136
- Token . type . semicolon , ';' , Token . type . semicolon , ';' ,
137
- Token . type . semicolon , ';' , Token . type . whitespace , '\n ' ,
138
- Token . type . word , 'padding' , Token . type . colon , ':' ,
139
- Token . type . whitespace , ' ' , Token . type . word , '0' ,
140
- Token . type . semicolon , ';' , Token . type . semicolon , ';' ,
141
- Token . type . whitespace , '\n ' , Token . type . semicolon , ';' ,
142
- Token . type . word , 'display' , Token . type . colon , ':' ,
143
- Token . type . whitespace , ' ' , Token . type . word , 'block' ,
144
- Token . type . semicolon , ';' , Token . type . whitespace , '\n' ,
145
- Token . type . closeBrace , '}' , Token . type . semicolon , ';' ,
146
- Token . type . whitespace , '\n'
131
+ TokenType . whitespace , '\n' , TokenType . colon , ':' ,
132
+ TokenType . word , 'host' , TokenType . whitespace , ' ' ,
133
+ TokenType . openBrace , '{' , TokenType . whitespace , '\n ' ,
134
+ TokenType . word , 'margin' , TokenType . colon , ':' ,
135
+ TokenType . whitespace , ' ' , TokenType . word , '0' ,
136
+ TokenType . semicolon , ';' , TokenType . semicolon , ';' ,
137
+ TokenType . semicolon , ';' , TokenType . whitespace , '\n ' ,
138
+ TokenType . word , 'padding' , TokenType . colon , ':' ,
139
+ TokenType . whitespace , ' ' , TokenType . word , '0' ,
140
+ TokenType . semicolon , ';' , TokenType . semicolon , ';' ,
141
+ TokenType . whitespace , '\n ' , TokenType . semicolon , ';' ,
142
+ TokenType . word , 'display' , TokenType . colon , ':' ,
143
+ TokenType . whitespace , ' ' , TokenType . word , 'block' ,
144
+ TokenType . semicolon , ';' , TokenType . whitespace , '\n' ,
145
+ TokenType . closeBrace , '}' , TokenType . semicolon , ';' ,
146
+ TokenType . whitespace , '\n'
147
147
] ) ;
148
148
} ) ;
149
149
} ) ;
@@ -152,7 +152,7 @@ describe('Tokenizer', () => {
152
152
it ( 'can slice the string using tokens' , ( ) => {
153
153
const tokenizer = new Tokenizer ( 'foo bar' ) ;
154
154
const substring = tokenizer . slice (
155
- new Token ( Token . type . word , 2 , 3 ) , new Token ( Token . type . word , 5 , 6 ) ) ;
155
+ new Token ( TokenType . word , 2 , 3 ) , new Token ( TokenType . word , 5 , 6 ) ) ;
156
156
expect ( substring ) . to . be . eql ( 'o ba' ) ;
157
157
} ) ;
158
158
} ) ;
0 commit comments