@@ -1,5 +1,7 @@
+import contextlib
 import os
 import re
+import tempfile
 import token
 import tokenize
 import unittest
@@ -3178,5 +3180,83 @@ def test_newline_at_the_end_of_buffer(self):
         run_test_script(file_name)
 
 
+class CommandLineTest(unittest.TestCase):
+    def setUp(self):
+        self.filename = tempfile.mktemp()
+        self.addCleanup(os_helper.unlink, self.filename)
+
+    @staticmethod
+    def text_normalize(string):
+        """Collapse whitespace in *string* and strip surrounding whitespace.
+
+        This helper is used by the other utility methods so that any
+        string to write or to match against can be freely indented.
+        """
+        return re.sub(r'\s+', ' ', string).strip()
+
+    def set_source(self, content):
+        with open(self.filename, 'w') as fp:
+            fp.write(content)
+
+    def invoke_tokenize(self, *flags):
+        output = StringIO()
+        with contextlib.redirect_stdout(output):
+            tokenize._main(args=[*flags, self.filename])
+        return self.text_normalize(output.getvalue())
+
+    def check_output(self, source, expect, *flags):
+        with self.subTest(source=source, flags=flags):
+            self.set_source(source)
+            res = self.invoke_tokenize(*flags)
+            expect = self.text_normalize(expect)
+            self.assertListEqual(res.splitlines(), expect.splitlines())
+
+    def test_invocation(self):
+        # test various combinations of parameters
+        base_flags = ('-e', '--exact')
+
+        self.set_source('''
+        def f():
+            print(x)
+            return None
+        ''')
+
+        for flag in base_flags:
+            with self.subTest(args=flag):
+                _ = self.invoke_tokenize(flag)
+
+        with self.assertRaises(SystemExit):
+            # suppress argparse error message
+            with contextlib.redirect_stderr(StringIO()):
+                _ = self.invoke_tokenize('--unknown')
+
+    def test_without_flag(self):
+        # test 'python -m tokenize source.py'
+        source = 'a = 1'
+        expect = '''
+            0,0-0,0: ENCODING 'utf-8'
+            1,0-1,1: NAME 'a'
+            1,2-1,3: OP '='
+            1,4-1,5: NUMBER '1'
+            1,5-1,6: NEWLINE ''
+            2,0-2,0: ENDMARKER ''
+        '''
+        self.check_output(source, expect)
+
+    def test_exact_flag(self):
+        # test 'python -m tokenize -e/--exact source.py'
+        source = 'a = 1'
+        expect = '''
+            0,0-0,0: ENCODING 'utf-8'
+            1,0-1,1: NAME 'a'
+            1,2-1,3: EQUAL '='
+            1,4-1,5: NUMBER '1'
+            1,5-1,6: NEWLINE ''
+            2,0-2,0: ENDMARKER ''
+        '''
+        for flag in ['-e', '--exact']:
+            self.check_output(source, expect, flag)
+
+
 if __name__ == "__main__":
     unittest.main()
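
For context, a minimal sketch (not part of the diff above) of exercising the same tokenize command-line interface by hand, using the documented `-e`/`--exact` flag that the new tests drive through `tokenize._main()`; the file name and contents here are illustrative only.

# Rough sketch: run the tokenize CLI on a throwaway file, first with the
# default output, then with -e/--exact so '=' is reported as EQUAL instead of OP.
import subprocess
import sys
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as fp:
    fp.write('a = 1\n')

# Equivalent of 'python -m tokenize source.py'
subprocess.run([sys.executable, '-m', 'tokenize', fp.name], check=True)
# Equivalent of 'python -m tokenize -e source.py'
subprocess.run([sys.executable, '-m', 'tokenize', '-e', fp.name], check=True)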