@@ -18,7 +18,11 @@ pub enum Element {
 }
 
 lazy_static! {
-    static ref MARKUP: Regex = Regex::new("\\{%.*?%\\}|\\{\\{.*?\\}\\}").unwrap();
+    static ref MARKUP: Regex = {
+        let t = "(?:[[:space:]]*\\{\\{-|\\{\\{).*?(?:-\\}\\}[[:space:]]*|\\}\\})";
+        let e = "(?:[[:space:]]*\\{%-|\\{%).*?(?:-%\\}[[:space:]]*|%\\})";
+        Regex::new(&format!("{}|{}", t, e)).unwrap()
+    };
 }
 
 fn split_blocks(text: &str) -> Vec<&str> {
@@ -42,8 +46,14 @@ fn split_blocks(text: &str) -> Vec<&str> {
 }
 
 lazy_static! {
-    static ref EXPRESSION: Regex = Regex::new("\\{\\{(.*?)\\}\\}").unwrap();
-    static ref TAG: Regex = Regex::new("\\{%(.*?)%\\}").unwrap();
+    static ref EXPRESSION: Regex = {
+        let t = "(?:[[:space:]]*\\{\\{-|\\{\\{)(.*?)(?:-\\}\\}[[:space:]]*|\\}\\})";
+        Regex::new(t).unwrap()
+    };
+    static ref TAG: Regex = {
+        let e = "(?:[[:space:]]*\\{%-|\\{%)(.*?)(?:-%\\}[[:space:]]*|%\\})";
+        Regex::new(e).unwrap()
+    };
 }
 
 pub fn tokenize(text: &str) -> Result<Vec<Element>> {
@@ -150,6 +160,21 @@ fn test_split_blocks() {
     assert_eq!(split_blocks("asdlkjfn\n{%askdljfbalkjsdbf%} asdjlfb"),
                vec!["asdlkjfn\n", "{%askdljfbalkjsdbf%}", " asdjlfb"]);
 }
+#[test]
+fn test_whitespace_control() {
+    assert_eq!(split_blocks("foo {{ bar }} 2000"),
+               vec!["foo ", "{{ bar }}", " 2000"]);
+    assert_eq!(split_blocks("foo {{- bar -}} 2000"),
+               vec!["foo", " {{- bar -}} ", "2000"]);
+    assert_eq!(split_blocks("foo \n{{- bar }} 2000"),
+               vec!["foo", " \n{{- bar }}", " 2000"]);
+    assert_eq!(split_blocks("foo {% bar %} 2000"),
+               vec!["foo ", "{% bar %}", " 2000"]);
+    assert_eq!(split_blocks("foo {%- bar -%} 2000"),
+               vec!["foo", " {%- bar -%} ", "2000"]);
+    assert_eq!(split_blocks("foo \n{%- bar %} 2000"),
+               vec!["foo", " \n{%- bar %}", " 2000"]);
+}
 
 #[test]
 fn test_split_atom() {
@@ -182,6 +207,12 @@ fn test_tokenize() {
                                     StringLiteral("world".to_owned())],
                                "{{hello 'world'}}".to_owned()),
                     Raw(" test".to_owned())]);
+    assert_eq!(tokenize("wat \n{{-hello 'world'-}} test").unwrap(),
+               vec![Raw("wat".to_owned()),
+                    Expression(vec![Identifier("hello".to_owned()),
+                                    StringLiteral("world".to_owned())],
+                               " \n{{-hello 'world'-}} ".to_owned()),
+                    Raw("test".to_owned())]);
 }
 
 #[test]
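For context on what the new patterns do: the dash variants of the delimiters (`{{-`, `-}}`, `{%-`, `-%}`) make the block itself absorb adjacent whitespace. The `[[:space:]]*` prefix and suffix only apply to those variants, so any spaces and newlines before `{{-` or after `-}}` become part of the matched block rather than of the surrounding raw text, which is exactly what the `test_whitespace_control` cases assert. Below is a minimal standalone sketch of that behaviour (not part of the patch); it only assumes the `regex` crate the file already uses, and the pattern strings are copied from the new MARKUP definition above.

// Standalone sketch, not from the patch: shows how the whitespace-control
// delimiters pull surrounding whitespace into the match.
use regex::Regex;

fn main() {
    // Same alternation as the new MARKUP: expression delimiters, then tag delimiters.
    let t = "(?:[[:space:]]*\\{\\{-|\\{\\{).*?(?:-\\}\\}[[:space:]]*|\\}\\})";
    let e = "(?:[[:space:]]*\\{%-|\\{%).*?(?:-%\\}[[:space:]]*|%\\})";
    let markup = Regex::new(&format!("{}|{}", t, e)).unwrap();

    // Plain delimiters: the match stops at the braces, so the surrounding
    // whitespace stays with the raw text on either side.
    assert_eq!(markup.find("foo {{ bar }} 2000").unwrap().as_str(),
               "{{ bar }}");

    // Dash variants: the match swallows the whitespace on both sides,
    // so the neighbouring raw segments come out trimmed.
    assert_eq!(markup.find("foo {{- bar -}} 2000").unwrap().as_str(),
               " {{- bar -}} ");
}

EXPRESSION and TAG follow the same shape but keep the inner `(.*?)` capture group, so the tokenizer still extracts just the contents between the delimiters while the whitespace ends up inside the matched block.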