Commit 9b127a8

Update tests for TokenType as a const enum
1 parent 21a30f9 · commit 9b127a8

File tree: 2 files changed (+65 −65 lines)

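The change this commit tracks: in `../shady-css/token`, the token type constants move from a runtime lookup object (`Token.type`) to an exported `TokenType` const enum, so every `Token.type.*` reference in the tests becomes `TokenType.*`. A sketch of the declaration the updated tests assume follows; the member names are taken from the types the tests reference, and the auto-assigned numeric values are illustrative, not necessarily the library's.

// Sketch of the assumed declaration in src/shady-css/token.ts.
// Member names come from the tests in this commit; the numeric
// values TypeScript auto-assigns here are illustrative only.
export const enum TokenType {
  none,
  whitespace,
  string,
  comment,
  word,
  at,
  colon,
  semicolon,
  openBrace,
  closeBrace,
  openParenthesis,
  closeParenthesis,
}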

src/test/helpers.ts

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ function linkedTokens(tokens: Token[]) {
     }
 
     return r;
-  }, new Token(Token.type.none, 0, 0));
+  }, new Token(TokenType.none, 0, 0));
 
   return tokens;
 }
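For context, the hunk above is the tail of the `linkedTokens` test helper, which threads an array of tokens into a doubly linked list before the tests compare against it; the `TokenType.none` token is only the reduce seed. A plausible reconstruction of the full helper, assuming `Token` exposes mutable `previous` and `next` fields (the guard condition is a guess, since the top of the callback sits outside the hunk):

// Plausible reconstruction, not the verbatim source: reduce walks
// the array pairwise and links each token to its left neighbor,
// skipping the synthetic TokenType.none seed.
function linkedTokens(tokens: Token[]) {
  tokens.reduce((l, r) => {
    if (l.type !== TokenType.none) {
      l.next = r;
      r.previous = l;
    }

    return r;
  }, new Token(TokenType.none, 0, 0));

  return tokens;
}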

src/test/tokenizer-test.ts

Lines changed: 64 additions & 64 deletions
@@ -11,7 +11,7 @@
 
 import {expect} from 'chai';
 
-import {Token} from '../shady-css/token';
+import {Token, TokenType} from '../shady-css/token';
 import {Tokenizer} from '../shady-css/tokenizer';
 
 import * as fixtures from './fixtures';
@@ -27,123 +27,123 @@ describe('Tokenizer', () => {
 
     it('can identify comments', () => {
       expect(new Tokenizer('/*foo*/').flush()).to.be.eql(helpers.linkedTokens([
-        new Token(Token.type.comment, 0, 7)
+        new Token(TokenType.comment, 0, 7)
       ]));
     });
 
     it('can identify words', () => {
       expect(new Tokenizer('font-family').flush())
-          .to.be.eql(helpers.linkedTokens([new Token(Token.type.word, 0, 11)]));
+          .to.be.eql(helpers.linkedTokens([new Token(TokenType.word, 0, 11)]));
     });
 
     it('can identify boundaries', () => {
       expect(new Tokenizer('@{};()').flush()).to.be.eql(helpers.linkedTokens([
-        new Token(Token.type.at, 0, 1),
-        new Token(Token.type.openBrace, 1, 2),
-        new Token(Token.type.closeBrace, 2, 3),
-        new Token(Token.type.semicolon, 3, 4),
-        new Token(Token.type.openParenthesis, 4, 5),
-        new Token(Token.type.closeParenthesis, 5, 6)
+        new Token(TokenType.at, 0, 1),
+        new Token(TokenType.openBrace, 1, 2),
+        new Token(TokenType.closeBrace, 2, 3),
+        new Token(TokenType.semicolon, 3, 4),
+        new Token(TokenType.openParenthesis, 4, 5),
+        new Token(TokenType.closeParenthesis, 5, 6)
      ]));
    });
  });

  describe('when tokenizing standard CSS structures', () => {
    it('can tokenize a basic ruleset', () => {
      helpers.expectTokenSequence(new Tokenizer(fixtures.basicRuleset), [
-        Token.type.whitespace, '\n', Token.type.word, 'body',
-        Token.type.whitespace, ' ', Token.type.openBrace, '{',
-        Token.type.whitespace, '\n ', Token.type.word, 'margin',
-        Token.type.colon, ':', Token.type.whitespace, ' ',
-        Token.type.word, '0', Token.type.semicolon, ';',
-        Token.type.whitespace, '\n ', Token.type.word, 'padding',
-        Token.type.colon, ':', Token.type.whitespace, ' ',
-        Token.type.word, '0px', Token.type.whitespace, '\n',
-        Token.type.closeBrace, '}', Token.type.whitespace, '\n'
+        TokenType.whitespace, '\n', TokenType.word, 'body',
+        TokenType.whitespace, ' ', TokenType.openBrace, '{',
+        TokenType.whitespace, '\n ', TokenType.word, 'margin',
+        TokenType.colon, ':', TokenType.whitespace, ' ',
+        TokenType.word, '0', TokenType.semicolon, ';',
+        TokenType.whitespace, '\n ', TokenType.word, 'padding',
+        TokenType.colon, ':', TokenType.whitespace, ' ',
+        TokenType.word, '0px', TokenType.whitespace, '\n',
+        TokenType.closeBrace, '}', TokenType.whitespace, '\n'
      ]);
    });

    it('can tokenize @rules', () => {
      helpers.expectTokenSequence(new Tokenizer(fixtures.atRules), [
-        Token.type.whitespace,
+        TokenType.whitespace,
         '\n',
-        Token.type.at,
+        TokenType.at,
         '@',
-        Token.type.word,
+        TokenType.word,
         'import',
-        Token.type.whitespace,
+        TokenType.whitespace,
         ' ',
-        Token.type.word,
+        TokenType.word,
         'url',
-        Token.type.openParenthesis,
+        TokenType.openParenthesis,
         '(',
-        Token.type.string,
+        TokenType.string,
         '\'foo.css\'',
-        Token.type.closeParenthesis,
+        TokenType.closeParenthesis,
         ')',
-        Token.type.semicolon,
+        TokenType.semicolon,
         ';',
-        Token.type.whitespace,
+        TokenType.whitespace,
         '\n\n',
-        Token.type.at,
+        TokenType.at,
         '@',
-        Token.type.word,
+        TokenType.word,
         'font-face',
-        Token.type.whitespace,
+        TokenType.whitespace,
         ' ',
-        Token.type.openBrace,
+        TokenType.openBrace,
         '{',
-        Token.type.whitespace,
+        TokenType.whitespace,
         '\n ',
-        Token.type.word,
+        TokenType.word,
         'font-family',
-        Token.type.colon,
+        TokenType.colon,
         ':',
-        Token.type.whitespace,
+        TokenType.whitespace,
         ' ',
-        Token.type.word,
+        TokenType.word,
         'foo',
-        Token.type.semicolon,
+        TokenType.semicolon,
         ';',
-        Token.type.whitespace,
+        TokenType.whitespace,
         '\n',
-        Token.type.closeBrace,
+        TokenType.closeBrace,
         '}',
-        Token.type.whitespace,
+        TokenType.whitespace,
         '\n\n',
-        Token.type.at,
+        TokenType.at,
         '@',
-        Token.type.word,
+        TokenType.word,
         'charset',
-        Token.type.whitespace,
+        TokenType.whitespace,
         ' ',
-        Token.type.string,
+        TokenType.string,
         '\'foo\'',
-        Token.type.semicolon,
+        TokenType.semicolon,
         ';',
-        Token.type.whitespace,
+        TokenType.whitespace,
         '\n'
      ]);
    });

    it('navigates pathological boundary usage', () => {
      helpers.expectTokenSequence(new Tokenizer(fixtures.extraSemicolons), [
-        Token.type.whitespace, '\n', Token.type.colon, ':',
-        Token.type.word, 'host', Token.type.whitespace, ' ',
-        Token.type.openBrace, '{', Token.type.whitespace, '\n ',
-        Token.type.word, 'margin', Token.type.colon, ':',
-        Token.type.whitespace, ' ', Token.type.word, '0',
-        Token.type.semicolon, ';', Token.type.semicolon, ';',
-        Token.type.semicolon, ';', Token.type.whitespace, '\n ',
-        Token.type.word, 'padding', Token.type.colon, ':',
-        Token.type.whitespace, ' ', Token.type.word, '0',
-        Token.type.semicolon, ';', Token.type.semicolon, ';',
-        Token.type.whitespace, '\n ', Token.type.semicolon, ';',
-        Token.type.word, 'display', Token.type.colon, ':',
-        Token.type.whitespace, ' ', Token.type.word, 'block',
-        Token.type.semicolon, ';', Token.type.whitespace, '\n',
-        Token.type.closeBrace, '}', Token.type.semicolon, ';',
-        Token.type.whitespace, '\n'
+        TokenType.whitespace, '\n', TokenType.colon, ':',
+        TokenType.word, 'host', TokenType.whitespace, ' ',
+        TokenType.openBrace, '{', TokenType.whitespace, '\n ',
+        TokenType.word, 'margin', TokenType.colon, ':',
+        TokenType.whitespace, ' ', TokenType.word, '0',
+        TokenType.semicolon, ';', TokenType.semicolon, ';',
+        TokenType.semicolon, ';', TokenType.whitespace, '\n ',
+        TokenType.word, 'padding', TokenType.colon, ':',
+        TokenType.whitespace, ' ', TokenType.word, '0',
+        TokenType.semicolon, ';', TokenType.semicolon, ';',
+        TokenType.whitespace, '\n ', TokenType.semicolon, ';',
+        TokenType.word, 'display', TokenType.colon, ':',
+        TokenType.whitespace, ' ', TokenType.word, 'block',
+        TokenType.semicolon, ';', TokenType.whitespace, '\n',
+        TokenType.closeBrace, '}', TokenType.semicolon, ';',
+        TokenType.whitespace, '\n'
      ]);
    });
  });
@@ -152,7 +152,7 @@ describe('Tokenizer', () => {
     it('can slice the string using tokens', () => {
       const tokenizer = new Tokenizer('foo bar');
       const substring = tokenizer.slice(
-          new Token(Token.type.word, 2, 3), new Token(Token.type.word, 5, 6));
+          new Token(TokenType.word, 2, 3), new Token(TokenType.word, 5, 6));
       expect(substring).to.be.eql('o ba');
     });
   });
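Two notes on why the rename is this mechanical. First, `expectTokenSequence` takes an array that alternates an expected token type with the source text that token should cover, so only the type entries change. Second, a `const enum` has no runtime object under default compiler settings: each member access is inlined as a literal at compile time, which is why the members cannot be exposed through a runtime property like the old `Token.type` lookup. A standalone illustration of the inlining (a made-up example, not from the commit):

// `Direction` is a hypothetical const enum used only to show the
// compile-time inlining behavior; it is not part of shady-css.
const enum Direction {
  up,    // 0
  down,  // 1
}

// With default settings the next line compiles to roughly
// `const d = 0 /* up */;`. The enum object itself is erased, so
// nothing named `Direction` exists at runtime to index dynamically.
const d: Direction = Direction.up;
console.log(d);  // prints 0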
