From 35681ece13f7f372dadfc814ee7c0413ea92d337 Mon Sep 17 00:00:00 2001
From: Timothée Mazzucotelli
Date: Thu, 10 Oct 2024 18:19:12 +0200
Subject: [PATCH] fixup! Add option to preserve comments when parsing templates

---
 CHANGES.rst               | 1 +
 src/jinja2/environment.py | 3 +++
 src/jinja2/lexer.py       | 9 ++++++++-
 3 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/CHANGES.rst b/CHANGES.rst
index f23b6c96f..a10fa652d 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -9,6 +9,7 @@ Unreleased
 - Use modern packaging metadata with ``pyproject.toml`` instead of
   ``setup.cfg``. :pr:`1793`
 - Use ``flit_core`` instead of ``setuptools`` as build backend.
+- Add the ``preserve_comments`` parameter to ``Environment.parse`` to preserve comments in template ASTs. :pr:`2037`
 
 
 Version 3.1.5
diff --git a/src/jinja2/environment.py b/src/jinja2/environment.py
index 672874269..7705913f2 100644
--- a/src/jinja2/environment.py
+++ b/src/jinja2/environment.py
@@ -609,6 +609,9 @@ def parse(
 
         If you are :ref:`developing Jinja extensions <writing-extensions>`
         this gives you a good overview of the node tree generated.
+
+        .. versionchanged:: 3.2
+            Added the `preserve_comments` parameter.
         """
         try:
             return self._parse(source, name, filename, preserve_comments)
diff --git a/src/jinja2/lexer.py b/src/jinja2/lexer.py
index 1b30922a7..6b2980061 100644
--- a/src/jinja2/lexer.py
+++ b/src/jinja2/lexer.py
@@ -614,7 +614,11 @@ def tokenize(
         state: t.Optional[str] = None,
         preserve_comments: bool = False,
     ) -> TokenStream:
-        """Calls tokeniter + tokenize and wraps it in a token stream."""
+        """Calls tokeniter + tokenize and wraps it in a token stream.
+
+        .. versionchanged:: 3.2
+            Added the `preserve_comments` parameter.
+        """
         stream = self.tokeniter(source, name, filename, state)
         return TokenStream(
             self.wrap(stream, name, filename, preserve_comments), name, filename
@@ -629,6 +633,9 @@ def wrap(
     ) -> t.Iterator[Token]:
         """This is called with the stream as returned by `tokenize` and wraps
         every token in a :class:`Token` and converts the value.
+
+        .. versionchanged:: 3.2
+            Added the `preserve_comments` parameter.
         """
         ignored = ignored_tokens
         if preserve_comments:
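
A minimal sketch of how the new parameter could be exercised once this lands.
The ``preserve_comments`` flag on ``Environment.parse`` comes straight from
this patch; the ``nodes.Comment`` node class used to inspect the result is an
assumption (the parent commit being fixed up, not shown here, would have to
provide it).

    from jinja2 import Environment, nodes

    env = Environment()
    source = "{# TODO: greet guests too #}Hello, {{ name }}!"

    # Default behavior is unchanged: Lexer.wrap still filters comment
    # tokens out, so the AST carries no trace of the comment.
    ast_default = env.parse(source)

    # With preserve_comments=True, comment tokens survive Lexer.wrap and
    # reach the parser, ending up in the template AST.
    ast_with_comments = env.parse(source, preserve_comments=True)

    # Assumption: preserved comments surface as nodes.Comment nodes,
    # which Node.find_all can then collect.
    for comment in ast_with_comments.find_all(nodes.Comment):
        print(comment)

Since the flag defaults to False at every level (``parse``, ``tokenize``,
``wrap``), existing callers see no behavior change.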