diff options
| author | Greg Twohig <greg@deepvantage.com> | 2021-08-09 08:16:29 +0200 |
|---|---|---|
| committer | Mariusz Felisiak <felisiak.mariusz@gmail.com> | 2021-08-09 08:21:37 +0200 |
| commit | c99aaf14eef2a18b27bc7c2f2b17ae425ca2be49 (patch) | |
| tree | 404b836e47c5cc54ab9c688a48ad77430227b54c /tests/template_tests/test_base.py | |
| parent | 259b28706e5084f5207792e066a33e35c7a661d4 (diff) | |
Refs #32990 -- Added tests for DebugLexer/Lexer.tokenize().
Diffstat (limited to 'tests/template_tests/test_base.py')
| -rw-r--r-- | tests/template_tests/test_base.py | 40 |
1 file changed, 40 insertions(+), 0 deletions(-)
diff --git a/tests/template_tests/test_base.py b/tests/template_tests/test_base.py
index 2b206a8968..72836337b8 100644
--- a/tests/template_tests/test_base.py
+++ b/tests/template_tests/test_base.py
@@ -1,8 +1,48 @@
 from django.template import Context, Template, Variable, VariableDoesNotExist
+from django.template.base import DebugLexer, Lexer, TokenType
 from django.test import SimpleTestCase
 from django.utils.translation import gettext_lazy
 
 
+class LexerTestMixin:
+    template_string = (
+        'text\n'
+        '{% if test %}{{ varvalue }}{% endif %}'
+        '{#comment {{not a var}} %{not a block}% #}'
+    )
+    expected_token_tuples = [
+        # (token_type, contents, lineno, position)
+        (TokenType.TEXT, 'text\n', 1, (0, 5)),
+        (TokenType.BLOCK, 'if test', 2, (5, 18)),
+        (TokenType.VAR, 'varvalue', 2, (18, 32)),
+        (TokenType.BLOCK, 'endif', 2, (32, 43)),
+        (TokenType.COMMENT, 'comment {{not a var}} %{not a block}%', 2, (43, 85)),
+    ]
+
+    def test_tokenize(self):
+        tokens = self.lexer_class(self.template_string).tokenize()
+        token_tuples = [(t.token_type, t.contents, t.lineno, t.position) for t in tokens]
+        self.assertEqual(token_tuples, self.make_expected())
+
+    def make_expected(self):
+        raise NotImplementedError('This method must be implemented by a subclass.')
+
+
+class LexerTests(LexerTestMixin, SimpleTestCase):
+    lexer_class = Lexer
+
+    def make_expected(self):
+        # The non-debug lexer does not record position.
+        return [t[:-1] + (None,) for t in self.expected_token_tuples]
+
+
+class DebugLexerTests(LexerTestMixin, SimpleTestCase):
+    lexer_class = DebugLexer
+
+    def make_expected(self):
+        return self.expected_token_tuples
+
+
 class TemplateTests(SimpleTestCase):
     def test_lazy_template_string(self):
         template_string = gettext_lazy('lazy string')
