Skip to content

back to Reference (Gold) summary

Reference (Gold): jinja

Pytest Summary for test tests

status count
passed 851
total 851
collected 851

Failed pytests: (none)

Patch diff

diff --git a/src/jinja2/_identifier.py b/src/jinja2/_identifier.py
index 503c0e8..928c150 100644
--- a/src/jinja2/_identifier.py
+++ b/src/jinja2/_identifier.py
@@ -1,4 +1,6 @@
 import re
+
+# generated by scripts/generate_identifier_pattern.py
 pattern = re.compile(
-    '[\\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+'
-    )
+    r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+"  # noqa: B950
+)
diff --git a/src/jinja2/async_utils.py b/src/jinja2/async_utils.py
index f60e3f3..e65219e 100644
--- a/src/jinja2/async_utils.py
+++ b/src/jinja2/async_utils.py
@@ -2,7 +2,83 @@ import inspect
 import typing as t
 from functools import WRAPPER_ASSIGNMENTS
 from functools import wraps
+
 from .utils import _PassArg
 from .utils import pass_eval_context
-V = t.TypeVar('V')
+
+V = t.TypeVar("V")
+
+
+def async_variant(normal_func):  # type: ignore
+    def decorator(async_func):  # type: ignore
+        pass_arg = _PassArg.from_obj(normal_func)
+        need_eval_context = pass_arg is None
+
+        if pass_arg is _PassArg.environment:
+
+            def is_async(args: t.Any) -> bool:
+                return t.cast(bool, args[0].is_async)
+
+        else:
+
+            def is_async(args: t.Any) -> bool:
+                return t.cast(bool, args[0].environment.is_async)
+
+        # Take the doc and annotations from the sync function, but the
+        # name from the async function. Pallets-Sphinx-Themes
+        # build_function_directive expects __wrapped__ to point to the
+        # sync function.
+        async_func_attrs = ("__module__", "__name__", "__qualname__")
+        normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs))
+
+        @wraps(normal_func, assigned=normal_func_attrs)
+        @wraps(async_func, assigned=async_func_attrs, updated=())
+        def wrapper(*args, **kwargs):  # type: ignore
+            b = is_async(args)
+
+            if need_eval_context:
+                args = args[1:]
+
+            if b:
+                return async_func(*args, **kwargs)
+
+            return normal_func(*args, **kwargs)
+
+        if need_eval_context:
+            wrapper = pass_eval_context(wrapper)
+
+        wrapper.jinja_async_variant = True  # type: ignore[attr-defined]
+        return wrapper
+
+    return decorator
+
+
 _common_primitives = {int, float, bool, str, list, dict, tuple, type(None)}
+
+
+async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V":
+    # Avoid a costly call to isawaitable
+    if type(value) in _common_primitives:
+        return t.cast("V", value)
+
+    if inspect.isawaitable(value):
+        return await t.cast("t.Awaitable[V]", value)
+
+    return t.cast("V", value)
+
+
+async def auto_aiter(
+    iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+) -> "t.AsyncIterator[V]":
+    if hasattr(iterable, "__aiter__"):
+        async for item in t.cast("t.AsyncIterable[V]", iterable):
+            yield item
+    else:
+        for item in iterable:
+            yield item
+
+
+async def auto_to_list(
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+) -> t.List["V"]:
+    return [x async for x in auto_aiter(value)]
diff --git a/src/jinja2/bccache.py b/src/jinja2/bccache.py
index ae575a3..ada8b09 100644
--- a/src/jinja2/bccache.py
+++ b/src/jinja2/bccache.py
@@ -5,6 +5,7 @@ slows down your application too much.
 Situations where this is useful are often forking web applications that
 are initialized on the first request.
 """
+
 import errno
 import fnmatch
 import marshal
@@ -17,16 +18,29 @@ import typing as t
 from hashlib import sha1
 from io import BytesIO
 from types import CodeType
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
-    from .environment import Environment

+    from .environment import Environment

     class _MemcachedClient(te.Protocol):
-        pass
+        def get(self, key: str) -> bytes: ...
+
+        def set(
+            self, key: str, value: bytes, timeout: t.Optional[int] = None
+        ) -> None: ...
+
+
 bc_version = 5
-bc_magic = b'j2' + pickle.dumps(bc_version, 2) + pickle.dumps(sys.
-    version_info[0] << 24 | sys.version_info[1], 2)
+# Magic bytes to identify Jinja bytecode cache files. Contains the
+# Python major and minor version to avoid loading incompatible bytecode
+# if a project upgrades its Python version.
+bc_magic = (
+    b"j2"
+    + pickle.dumps(bc_version, 2)
+    + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
+)


 class Bucket:
@@ -38,32 +52,52 @@ class Bucket:
     cache subclasses don't have to care about cache invalidation.
     """

-    def __init__(self, environment: 'Environment', key: str, checksum: str
-        ) ->None:
+    def __init__(self, environment: "Environment", key: str, checksum: str) -> None:
         self.environment = environment
         self.key = key
         self.checksum = checksum
         self.reset()

-    def reset(self) ->None:
+    def reset(self) -> None:
         """Resets the bucket (unloads the bytecode)."""
-        pass
+        self.code: t.Optional[CodeType] = None

-    def load_bytecode(self, f: t.BinaryIO) ->None:
+    def load_bytecode(self, f: t.BinaryIO) -> None:
         """Loads bytecode from a file or file like object."""
-        pass
-
-    def write_bytecode(self, f: t.IO[bytes]) ->None:
+        # make sure the magic header is correct
+        magic = f.read(len(bc_magic))
+        if magic != bc_magic:
+            self.reset()
+            return
+        # the source code of the file changed, we need to reload
+        checksum = pickle.load(f)
+        if self.checksum != checksum:
+            self.reset()
+            return
+        # if marshal_load fails then we need to reload
+        try:
+            self.code = marshal.load(f)
+        except (EOFError, ValueError, TypeError):
+            self.reset()
+            return
+
+    def write_bytecode(self, f: t.IO[bytes]) -> None:
         """Dump the bytecode into the file or file like object passed."""
-        pass
+        if self.code is None:
+            raise TypeError("can't write empty bucket")
+        f.write(bc_magic)
+        pickle.dump(self.checksum, f, 2)
+        marshal.dump(self.code, f)

-    def bytecode_from_string(self, string: bytes) ->None:
+    def bytecode_from_string(self, string: bytes) -> None:
         """Load bytecode from bytes."""
-        pass
+        self.load_bytecode(BytesIO(string))

-    def bytecode_to_string(self) ->bytes:
+    def bytecode_to_string(self) -> bytes:
         """Return the bytecode as bytes."""
-        pass
+        out = BytesIO()
+        self.write_bytecode(out)
+        return out.getvalue()


 class BytecodeCache:
@@ -95,46 +129,60 @@ class BytecodeCache:
     Jinja.
     """

-    def load_bytecode(self, bucket: Bucket) ->None:
+    def load_bytecode(self, bucket: Bucket) -> None:
         """Subclasses have to override this method to load bytecode into a
         bucket.  If they are not able to find code in the cache for the
         bucket, it must not do anything.
         """
-        pass
+        raise NotImplementedError()

-    def dump_bytecode(self, bucket: Bucket) ->None:
+    def dump_bytecode(self, bucket: Bucket) -> None:
         """Subclasses have to override this method to write the bytecode
         from a bucket back to the cache.  If it unable to do so it must not
         fail silently but raise an exception.
         """
-        pass
+        raise NotImplementedError()

-    def clear(self) ->None:
+    def clear(self) -> None:
         """Clears the cache.  This method is not used by Jinja but should be
         implemented to allow applications to clear the bytecode cache used
         by a particular environment.
         """
-        pass

-    def get_cache_key(self, name: str, filename: t.Optional[t.Union[str]]=None
-        ) ->str:
+    def get_cache_key(
+        self, name: str, filename: t.Optional[t.Union[str]] = None
+    ) -> str:
         """Returns the unique hash key for this template name."""
-        pass
+        hash = sha1(name.encode("utf-8"))

-    def get_source_checksum(self, source: str) ->str:
-        """Returns a checksum for the source."""
-        pass
+        if filename is not None:
+            hash.update(f"|{filename}".encode())
+
+        return hash.hexdigest()

-    def get_bucket(self, environment: 'Environment', name: str, filename: t
-        .Optional[str], source: str) ->Bucket:
+    def get_source_checksum(self, source: str) -> str:
+        """Returns a checksum for the source."""
+        return sha1(source.encode("utf-8")).hexdigest()
+
+    def get_bucket(
+        self,
+        environment: "Environment",
+        name: str,
+        filename: t.Optional[str],
+        source: str,
+    ) -> Bucket:
         """Return a cache bucket for the given template.  All arguments are
         mandatory but filename may be `None`.
         """
-        pass
+        key = self.get_cache_key(name, filename)
+        checksum = self.get_source_checksum(source)
+        bucket = Bucket(environment, key, checksum)
+        self.load_bytecode(bucket)
+        return bucket

-    def set_bucket(self, bucket: Bucket) ->None:
+    def set_bucket(self, bucket: Bucket) -> None:
         """Put the bucket into the cache."""
-        pass
+        self.dump_bytecode(bucket)


 class FileSystemBytecodeCache(BytecodeCache):
@@ -155,13 +203,130 @@ class FileSystemBytecodeCache(BytecodeCache):
     This bytecode cache supports clearing of the cache using the clear method.
     """

-    def __init__(self, directory: t.Optional[str]=None, pattern: str=
-        '__jinja2_%s.cache') ->None:
+    def __init__(
+        self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache"
+    ) -> None:
         if directory is None:
             directory = self._get_default_cache_dir()
         self.directory = directory
         self.pattern = pattern

+    def _get_default_cache_dir(self) -> str:
+        def _unsafe_dir() -> "te.NoReturn":
+            raise RuntimeError(
+                "Cannot determine safe temp directory.  You "
+                "need to explicitly provide one."
+            )
+
+        tmpdir = tempfile.gettempdir()
+
+        # On windows the temporary directory is user-specific unless
+        # explicitly forced otherwise.  We can just use that.
+        if os.name == "nt":
+            return tmpdir
+        if not hasattr(os, "getuid"):
+            _unsafe_dir()
+
+        dirname = f"_jinja2-cache-{os.getuid()}"
+        actual_dir = os.path.join(tmpdir, dirname)
+
+        try:
+            os.mkdir(actual_dir, stat.S_IRWXU)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+        try:
+            os.chmod(actual_dir, stat.S_IRWXU)
+            actual_dir_stat = os.lstat(actual_dir)
+            if (
+                actual_dir_stat.st_uid != os.getuid()
+                or not stat.S_ISDIR(actual_dir_stat.st_mode)
+                or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+            ):
+                _unsafe_dir()
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+
+        actual_dir_stat = os.lstat(actual_dir)
+        if (
+            actual_dir_stat.st_uid != os.getuid()
+            or not stat.S_ISDIR(actual_dir_stat.st_mode)
+            or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+        ):
+            _unsafe_dir()
+
+        return actual_dir
+
+    def _get_cache_filename(self, bucket: Bucket) -> str:
+        return os.path.join(self.directory, self.pattern % (bucket.key,))
+
+    def load_bytecode(self, bucket: Bucket) -> None:
+        filename = self._get_cache_filename(bucket)
+
+        # Don't test for existence before opening the file, since the
+        # file could disappear after the test before the open.
+        try:
+            f = open(filename, "rb")
+        except (FileNotFoundError, IsADirectoryError, PermissionError):
+            # PermissionError can occur on Windows when an operation is
+            # in progress, such as calling clear().
+            return
+
+        with f:
+            bucket.load_bytecode(f)
+
+    def dump_bytecode(self, bucket: Bucket) -> None:
+        # Write to a temporary file, then rename to the real name after
+        # writing. This avoids another process reading the file before
+        # it is fully written.
+        name = self._get_cache_filename(bucket)
+        f = tempfile.NamedTemporaryFile(
+            mode="wb",
+            dir=os.path.dirname(name),
+            prefix=os.path.basename(name),
+            suffix=".tmp",
+            delete=False,
+        )
+
+        def remove_silent() -> None:
+            try:
+                os.remove(f.name)
+            except OSError:
+                # Another process may have called clear(). On Windows,
+                # another program may be holding the file open.
+                pass
+
+        try:
+            with f:
+                bucket.write_bytecode(f)
+        except BaseException:
+            remove_silent()
+            raise
+
+        try:
+            os.replace(f.name, name)
+        except OSError:
+            # Another process may have called clear(). On Windows,
+            # another program may be holding the file open.
+            remove_silent()
+        except BaseException:
+            remove_silent()
+            raise
+
+    def clear(self) -> None:
+        # imported lazily here because google app-engine doesn't support
+        # write access on the file system and the function does not exist
+        # normally.
+        from os import remove
+
+        files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",))
+        for filename in files:
+            try:
+                remove(os.path.join(self.directory, filename))
+            except OSError:
+                pass
+

 class MemcachedBytecodeCache(BytecodeCache):
     """This class implements a bytecode cache that uses a memcache cache for
@@ -208,10 +373,36 @@ class MemcachedBytecodeCache(BytecodeCache):
        `ignore_memcache_errors` parameter.
     """

-    def __init__(self, client: '_MemcachedClient', prefix: str=
-        'jinja2/bytecode/', timeout: t.Optional[int]=None,
-        ignore_memcache_errors: bool=True):
+    def __init__(
+        self,
+        client: "_MemcachedClient",
+        prefix: str = "jinja2/bytecode/",
+        timeout: t.Optional[int] = None,
+        ignore_memcache_errors: bool = True,
+    ):
         self.client = client
         self.prefix = prefix
         self.timeout = timeout
         self.ignore_memcache_errors = ignore_memcache_errors
+
+    def load_bytecode(self, bucket: Bucket) -> None:
+        try:
+            code = self.client.get(self.prefix + bucket.key)
+        except Exception:
+            if not self.ignore_memcache_errors:
+                raise
+        else:
+            bucket.bytecode_from_string(code)
+
+    def dump_bytecode(self, bucket: Bucket) -> None:
+        key = self.prefix + bucket.key
+        value = bucket.bytecode_to_string()
+
+        try:
+            if self.timeout is not None:
+                self.client.set(key, value, self.timeout)
+            else:
+                self.client.set(key, value)
+        except Exception:
+            if not self.ignore_memcache_errors:
+                raise
diff --git a/src/jinja2/compiler.py b/src/jinja2/compiler.py
index 32df45a..2740717 100644
--- a/src/jinja2/compiler.py
+++ b/src/jinja2/compiler.py
@@ -1,12 +1,15 @@
 """Compiles nodes from the parser into Python code."""
+
 import typing as t
 from contextlib import contextmanager
 from functools import update_wrapper
 from io import StringIO
 from itertools import chain
 from keyword import iskeyword as is_python_keyword
+
 from markupsafe import escape
 from markupsafe import Markup
+
 from . import nodes
 from .exceptions import TemplateAssertionError
 from .idtracking import Symbols
@@ -19,37 +22,140 @@ from .optimizer import Optimizer
 from .utils import _PassArg
 from .utils import concat
 from .visitor import NodeVisitor
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
+
     from .environment import Environment
-F = t.TypeVar('F', bound=t.Callable[..., t.Any])
-operators = {'eq': '==', 'ne': '!=', 'gt': '>', 'gteq': '>=', 'lt': '<',
-    'lteq': '<=', 'in': 'in', 'notin': 'not in'}

+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+operators = {
+    "eq": "==",
+    "ne": "!=",
+    "gt": ">",
+    "gteq": ">=",
+    "lt": "<",
+    "lteq": "<=",
+    "in": "in",
+    "notin": "not in",
+}
+
+
+def optimizeconst(f: F) -> F:
+    def new_func(
+        self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any
+    ) -> t.Any:
+        # Only optimize if the frame is not volatile
+        if self.optimizer is not None and not frame.eval_ctx.volatile:
+            new_node = self.optimizer.visit(node, frame.eval_ctx)
+
+            if new_node != node:
+                return self.visit(new_node, frame)
+
+        return f(self, node, frame, **kwargs)
+
+    return update_wrapper(t.cast(F, new_func), f)
+
+
+def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]:
+    @optimizeconst
+    def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None:
+        if (
+            self.environment.sandboxed and op in self.environment.intercepted_binops  # type: ignore
+        ):
+            self.write(f"environment.call_binop(context, {op!r}, ")
+            self.visit(node.left, frame)
+            self.write(", ")
+            self.visit(node.right, frame)
+        else:
+            self.write("(")
+            self.visit(node.left, frame)
+            self.write(f" {op} ")
+            self.visit(node.right, frame)
+
+        self.write(")")
+
+    return visitor
+
+
+def _make_unop(
+    op: str,
+) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]:
+    @optimizeconst
+    def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None:
+        if (
+            self.environment.sandboxed and op in self.environment.intercepted_unops  # type: ignore
+        ):
+            self.write(f"environment.call_unop(context, {op!r}, ")
+            self.visit(node.node, frame)
+        else:
+            self.write("(" + op)
+            self.visit(node.node, frame)
+
+        self.write(")")
+
+    return visitor

-def generate(node: nodes.Template, environment: 'Environment', name: t.
-    Optional[str], filename: t.Optional[str], stream: t.Optional[t.TextIO]=
-    None, defer_init: bool=False, optimized: bool=True) ->t.Optional[str]:
+
+def generate(
+    node: nodes.Template,
+    environment: "Environment",
+    name: t.Optional[str],
+    filename: t.Optional[str],
+    stream: t.Optional[t.TextIO] = None,
+    defer_init: bool = False,
+    optimized: bool = True,
+) -> t.Optional[str]:
     """Generate the python source for a node tree."""
-    pass
+    if not isinstance(node, nodes.Template):
+        raise TypeError("Can't compile non template nodes")
+
+    generator = environment.code_generator_class(
+        environment, name, filename, stream, defer_init, optimized
+    )
+    generator.visit(node)
+
+    if stream is None:
+        return generator.stream.getvalue()  # type: ignore

+    return None

-def has_safe_repr(value: t.Any) ->bool:
+
+def has_safe_repr(value: t.Any) -> bool:
     """Does the node have a safe representation?"""
-    pass
+    if value is None or value is NotImplemented or value is Ellipsis:
+        return True
+
+    if type(value) in {bool, int, float, complex, range, str, Markup}:
+        return True

+    if type(value) in {tuple, list, set, frozenset}:
+        return all(has_safe_repr(v) for v in value)

-def find_undeclared(nodes: t.Iterable[nodes.Node], names: t.Iterable[str]
-    ) ->t.Set[str]:
+    if type(value) is dict:  # noqa E721
+        return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items())
+
+    return False
+
+
+def find_undeclared(
+    nodes: t.Iterable[nodes.Node], names: t.Iterable[str]
+) -> t.Set[str]:
     """Check if the names passed are accessed undeclared.  The return value
     is a set of all the undeclared names from the sequence of names found.
     """
-    pass
+    visitor = UndeclaredNameVisitor(names)
+    try:
+        for node in nodes:
+            visitor.visit(node)
+    except VisitorExit:
+        pass
+    return visitor.undeclared


 class MacroRef:
-
-    def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) ->None:
+    def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None:
         self.node = node
         self.accesses_caller = False
         self.accesses_kwargs = False
@@ -59,35 +165,71 @@ class MacroRef:
 class Frame:
     """Holds compile time information for us."""

-    def __init__(self, eval_ctx: EvalContext, parent: t.Optional['Frame']=
-        None, level: t.Optional[int]=None) ->None:
+    def __init__(
+        self,
+        eval_ctx: EvalContext,
+        parent: t.Optional["Frame"] = None,
+        level: t.Optional[int] = None,
+    ) -> None:
         self.eval_ctx = eval_ctx
+
+        # the parent of this frame
         self.parent = parent
+
         if parent is None:
             self.symbols = Symbols(level=level)
+
+            # in some dynamic inheritance situations the compiler needs to add
+            # write tests around output statements.
             self.require_output_check = False
+
+            # inside some tags we are using a buffer rather than yield statements.
+            # this for example affects {% filter %} or {% macro %}.  If a frame
+            # is buffered this variable points to the name of the list used as
+            # buffer.
             self.buffer: t.Optional[str] = None
+
+            # the name of the block we're in, otherwise None.
             self.block: t.Optional[str] = None
+
         else:
             self.symbols = Symbols(parent.symbols, level=level)
             self.require_output_check = parent.require_output_check
             self.buffer = parent.buffer
             self.block = parent.block
+
+        # a toplevel frame is the root + soft frames such as if conditions.
         self.toplevel = False
+
+        # the root frame is basically just the outermost frame, so no if
+        # conditions.  This information is used to optimize inheritance
+        # situations.
         self.rootlevel = False
+
+        # variables set inside of loops and blocks should not affect outer frames,
+        # but they still needs to be kept track of as part of the active context.
         self.loop_frame = False
         self.block_frame = False
+
+        # track whether the frame is being used in an if-statement or conditional
+        # expression as it determines which errors should be raised during runtime
+        # or compile time.
         self.soft_frame = False

-    def copy(self) ->'Frame':
+    def copy(self) -> "Frame":
         """Create a copy of the current one."""
-        pass
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.symbols = self.symbols.copy()
+        return rv

-    def inner(self, isolated: bool=False) ->'Frame':
+    def inner(self, isolated: bool = False) -> "Frame":
         """Return an inner frame."""
-        pass
+        if isolated:
+            return Frame(self.eval_ctx, level=self.symbols.level + 1)
+        return Frame(self.eval_ctx, self)

-    def soft(self) ->'Frame':
+    def soft(self) -> "Frame":
         """Return a soft frame.  A soft frame may not be modified as
         standalone thing as it shares the resources with the frame it
         was created of, but it's not a rootlevel frame any longer.
@@ -95,7 +237,11 @@ class Frame:
         This is only used to implement if-statements and conditional
         expressions.
         """
-        pass
+        rv = self.copy()
+        rv.rootlevel = False
+        rv.soft_frame = True
+        return rv
+
     __copy__ = copy


@@ -106,13 +252,20 @@ class VisitorExit(RuntimeError):
 class DependencyFinderVisitor(NodeVisitor):
     """A visitor that collects filter and test calls."""

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self.filters: t.Set[str] = set()
         self.tests: t.Set[str] = set()

-    def visit_Block(self, node: nodes.Block) ->None:
+    def visit_Filter(self, node: nodes.Filter) -> None:
+        self.generic_visit(node)
+        self.filters.add(node.name)
+
+    def visit_Test(self, node: nodes.Test) -> None:
+        self.generic_visit(node)
+        self.tests.add(node.name)
+
+    def visit_Block(self, node: nodes.Block) -> None:
         """Stop visiting at blocks."""
-        pass


 class UndeclaredNameVisitor(NodeVisitor):
@@ -121,13 +274,20 @@ class UndeclaredNameVisitor(NodeVisitor):
     not stop at closure frames.
     """

-    def __init__(self, names: t.Iterable[str]) ->None:
+    def __init__(self, names: t.Iterable[str]) -> None:
         self.names = set(names)
         self.undeclared: t.Set[str] = set()

-    def visit_Block(self, node: nodes.Block) ->None:
+    def visit_Name(self, node: nodes.Name) -> None:
+        if node.ctx == "load" and node.name in self.names:
+            self.undeclared.add(node.name)
+            if self.undeclared == self.names:
+                raise VisitorExit()
+        else:
+            self.names.discard(node.name)
+
+    def visit_Block(self, node: nodes.Block) -> None:
         """Stop visiting a blocks."""
-        pass


 class CompilerExit(Exception):
@@ -138,10 +298,15 @@ class CompilerExit(Exception):


 class CodeGenerator(NodeVisitor):
-
-    def __init__(self, environment: 'Environment', name: t.Optional[str],
-        filename: t.Optional[str], stream: t.Optional[t.TextIO]=None,
-        defer_init: bool=False, optimized: bool=True) ->None:
+    def __init__(
+        self,
+        environment: "Environment",
+        name: t.Optional[str],
+        filename: t.Optional[str],
+        stream: t.Optional[t.TextIO] = None,
+        defer_init: bool = False,
+        optimized: bool = True,
+    ) -> None:
         if stream is None:
             stream = StringIO()
         self.environment = environment
@@ -151,96 +316,226 @@ class CodeGenerator(NodeVisitor):
         self.created_block_context = False
         self.defer_init = defer_init
         self.optimizer: t.Optional[Optimizer] = None
+
         if optimized:
             self.optimizer = Optimizer(environment)
+
+        # aliases for imports
         self.import_aliases: t.Dict[str, str] = {}
+
+        # a registry for all blocks.  Because blocks are moved out
+        # into the global python scope they are registered here
         self.blocks: t.Dict[str, nodes.Block] = {}
+
+        # the number of extends statements so far
         self.extends_so_far = 0
+
+        # some templates have a rootlevel extends.  In this case we
+        # can safely assume that we're a child template and do some
+        # more optimizations.
         self.has_known_extends = False
+
+        # the current line number
         self.code_lineno = 1
+
+        # registry of all filters and tests (global, not block local)
         self.tests: t.Dict[str, str] = {}
         self.filters: t.Dict[str, str] = {}
+
+        # the debug information
         self.debug_info: t.List[t.Tuple[int, int]] = []
         self._write_debug_info: t.Optional[int] = None
+
+        # the number of new lines before the next write()
         self._new_lines = 0
+
+        # the line number of the last written statement
         self._last_line = 0
+
+        # true if nothing was written so far.
         self._first_write = True
+
+        # used by the `temporary_identifier` method to get new
+        # unique, temporary identifier
         self._last_identifier = 0
+
+        # the current indentation
         self._indentation = 0
+
+        # Tracks toplevel assignments
         self._assign_stack: t.List[t.Set[str]] = []
+
+        # Tracks parameter definition blocks
         self._param_def_block: t.List[t.Set[str]] = []
-        self._context_reference_stack = ['context']

-    def fail(self, msg: str, lineno: int) ->'te.NoReturn':
+        # Tracks the current context.
+        self._context_reference_stack = ["context"]
+
+    @property
+    def optimized(self) -> bool:
+        return self.optimizer is not None
+
+    # -- Various compilation helpers
+
+    def fail(self, msg: str, lineno: int) -> "te.NoReturn":
         """Fail with a :exc:`TemplateAssertionError`."""
-        pass
+        raise TemplateAssertionError(msg, lineno, self.name, self.filename)

-    def temporary_identifier(self) ->str:
+    def temporary_identifier(self) -> str:
         """Get a new unique identifier."""
-        pass
+        self._last_identifier += 1
+        return f"t_{self._last_identifier}"

-    def buffer(self, frame: Frame) ->None:
+    def buffer(self, frame: Frame) -> None:
         """Enable buffering for the frame from that point onwards."""
-        pass
+        frame.buffer = self.temporary_identifier()
+        self.writeline(f"{frame.buffer} = []")

-    def return_buffer_contents(self, frame: Frame, force_unescaped: bool=False
-        ) ->None:
+    def return_buffer_contents(
+        self, frame: Frame, force_unescaped: bool = False
+    ) -> None:
         """Return the buffer contents of the frame."""
-        pass
-
-    def indent(self) ->None:
+        if not force_unescaped:
+            if frame.eval_ctx.volatile:
+                self.writeline("if context.eval_ctx.autoescape:")
+                self.indent()
+                self.writeline(f"return Markup(concat({frame.buffer}))")
+                self.outdent()
+                self.writeline("else:")
+                self.indent()
+                self.writeline(f"return concat({frame.buffer})")
+                self.outdent()
+                return
+            elif frame.eval_ctx.autoescape:
+                self.writeline(f"return Markup(concat({frame.buffer}))")
+                return
+        self.writeline(f"return concat({frame.buffer})")
+
+    def indent(self) -> None:
         """Indent by one."""
-        pass
+        self._indentation += 1

-    def outdent(self, step: int=1) ->None:
+    def outdent(self, step: int = 1) -> None:
         """Outdent by step."""
-        pass
+        self._indentation -= step

-    def start_write(self, frame: Frame, node: t.Optional[nodes.Node]=None
-        ) ->None:
+    def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None:
         """Yield or write into the frame buffer."""
-        pass
+        if frame.buffer is None:
+            self.writeline("yield ", node)
+        else:
+            self.writeline(f"{frame.buffer}.append(", node)

-    def end_write(self, frame: Frame) ->None:
+    def end_write(self, frame: Frame) -> None:
         """End the writing process started by `start_write`."""
-        pass
+        if frame.buffer is not None:
+            self.write(")")

-    def simple_write(self, s: str, frame: Frame, node: t.Optional[nodes.
-        Node]=None) ->None:
+    def simple_write(
+        self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None
+    ) -> None:
         """Simple shortcut for start_write + write + end_write."""
-        pass
+        self.start_write(frame, node)
+        self.write(s)
+        self.end_write(frame)

-    def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) ->None:
+    def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None:
         """Visit a list of nodes as block in a frame.  If the current frame
         is no buffer a dummy ``if 0: yield None`` is written automatically.
         """
-        pass
-
-    def write(self, x: str) ->None:
+        try:
+            self.writeline("pass")
+            for node in nodes:
+                self.visit(node, frame)
+        except CompilerExit:
+            pass
+
+    def write(self, x: str) -> None:
         """Write a string into the output stream."""
-        pass
-
-    def writeline(self, x: str, node: t.Optional[nodes.Node]=None, extra: int=0
-        ) ->None:
+        if self._new_lines:
+            if not self._first_write:
+                self.stream.write("\n" * self._new_lines)
+                self.code_lineno += self._new_lines
+                if self._write_debug_info is not None:
+                    self.debug_info.append((self._write_debug_info, self.code_lineno))
+                    self._write_debug_info = None
+            self._first_write = False
+            self.stream.write("    " * self._indentation)
+            self._new_lines = 0
+        self.stream.write(x)
+
+    def writeline(
+        self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0
+    ) -> None:
         """Combination of newline and write."""
-        pass
+        self.newline(node, extra)
+        self.write(x)

-    def newline(self, node: t.Optional[nodes.Node]=None, extra: int=0) ->None:
+    def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None:
         """Add one or more newlines before the next write."""
-        pass
-
-    def signature(self, node: t.Union[nodes.Call, nodes.Filter, nodes.Test],
-        frame: Frame, extra_kwargs: t.Optional[t.Mapping[str, t.Any]]=None
-        ) ->None:
+        self._new_lines = max(self._new_lines, 1 + extra)
+        if node is not None and node.lineno != self._last_line:
+            self._write_debug_info = node.lineno
+            self._last_line = node.lineno
+
+    def signature(
+        self,
+        node: t.Union[nodes.Call, nodes.Filter, nodes.Test],
+        frame: Frame,
+        extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+    ) -> None:
         """Writes a function call to the stream for the current node.
         A leading comma is added automatically.  The extra keyword
         arguments may not include python keywords otherwise a syntax
         error could occur.  The extra keyword arguments should be given
         as python dict.
         """
-        pass
-
-    def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) ->None:
+        # if any of the given keyword arguments is a python keyword
+        # we have to make sure that no invalid call is created.
+        kwarg_workaround = any(
+            is_python_keyword(t.cast(str, k))
+            for k in chain((x.key for x in node.kwargs), extra_kwargs or ())
+        )
+
+        for arg in node.args:
+            self.write(", ")
+            self.visit(arg, frame)
+
+        if not kwarg_workaround:
+            for kwarg in node.kwargs:
+                self.write(", ")
+                self.visit(kwarg, frame)
+            if extra_kwargs is not None:
+                for key, value in extra_kwargs.items():
+                    self.write(f", {key}={value}")
+        if node.dyn_args:
+            self.write(", *")
+            self.visit(node.dyn_args, frame)
+
+        if kwarg_workaround:
+            if node.dyn_kwargs is not None:
+                self.write(", **dict({")
+            else:
+                self.write(", **{")
+            for kwarg in node.kwargs:
+                self.write(f"{kwarg.key!r}: ")
+                self.visit(kwarg.value, frame)
+                self.write(", ")
+            if extra_kwargs is not None:
+                for key, value in extra_kwargs.items():
+                    self.write(f"{key!r}: {value}, ")
+            if node.dyn_kwargs is not None:
+                self.write("}, **")
+                self.visit(node.dyn_kwargs, frame)
+                self.write(")")
+            else:
+                self.write("}")
+
+        elif node.dyn_kwargs is not None:
+            self.write(", **")
+            self.visit(node.dyn_kwargs, frame)
+
+    def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None:
         """Find all filter and test names used in the template and
         assign them to variables in the compiled namespace. Checking
         that the names are registered with the environment is done when
@@ -251,96 +546,837 @@ class CodeGenerator(NodeVisitor):
             Filters and tests in If and CondExpr nodes are checked at
             runtime instead of compile time.
         """
-        pass
-
-    def macro_body(self, node: t.Union[nodes.Macro, nodes.CallBlock], frame:
-        Frame) ->t.Tuple[Frame, MacroRef]:
+        visitor = DependencyFinderVisitor()
+
+        for node in nodes:
+            visitor.visit(node)
+
+        for id_map, names, dependency in (
+            (self.filters, visitor.filters, "filters"),
+            (
+                self.tests,
+                visitor.tests,
+                "tests",
+            ),
+        ):
+            for name in sorted(names):
+                if name not in id_map:
+                    id_map[name] = self.temporary_identifier()
+
+                # add check during runtime that dependencies used inside of executed
+                # blocks are defined, as this step may be skipped during compile time
+                self.writeline("try:")
+                self.indent()
+                self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]")
+                self.outdent()
+                self.writeline("except KeyError:")
+                self.indent()
+                self.writeline("@internalcode")
+                self.writeline(f"def {id_map[name]}(*unused):")
+                self.indent()
+                self.writeline(
+                    f'raise TemplateRuntimeError("No {dependency[:-1]}'
+                    f' named {name!r} found.")'
+                )
+                self.outdent()
+                self.outdent()
+
+    def enter_frame(self, frame: Frame) -> None:
+        undefs = []
+        for target, (action, param) in frame.symbols.loads.items():
+            if action == VAR_LOAD_PARAMETER:
+                pass
+            elif action == VAR_LOAD_RESOLVE:
+                self.writeline(f"{target} = {self.get_resolve_func()}({param!r})")
+            elif action == VAR_LOAD_ALIAS:
+                self.writeline(f"{target} = {param}")
+            elif action == VAR_LOAD_UNDEFINED:
+                undefs.append(target)
+            else:
+                raise NotImplementedError("unknown load instruction")
+        if undefs:
+            self.writeline(f"{' = '.join(undefs)} = missing")
+
+    def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None:
+        if not with_python_scope:
+            undefs = []
+            for target in frame.symbols.loads:
+                undefs.append(target)
+            if undefs:
+                self.writeline(f"{' = '.join(undefs)} = missing")
+
+    def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str:
+        return async_value if self.environment.is_async else sync_value
+
+    def func(self, name: str) -> str:
+        return f"{self.choose_async()}def {name}"
+
+    def macro_body(
+        self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame
+    ) -> t.Tuple[Frame, MacroRef]:
         """Dump the function def of a macro or call block."""
-        pass
-
-    def macro_def(self, macro_ref: MacroRef, frame: Frame) ->None:
+        frame = frame.inner()
+        frame.symbols.analyze_node(node)
+        macro_ref = MacroRef(node)
+
+        explicit_caller = None
+        skip_special_params = set()
+        args = []
+
+        for idx, arg in enumerate(node.args):
+            if arg.name == "caller":
+                explicit_caller = idx
+            if arg.name in ("kwargs", "varargs"):
+                skip_special_params.add(arg.name)
+            args.append(frame.symbols.ref(arg.name))
+
+        undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))
+
+        if "caller" in undeclared:
+            # In older Jinja versions there was a bug that allowed caller
+            # to retain the special behavior even if it was mentioned in
+            # the argument list.  However thankfully this was only really
+            # working if it was the last argument.  So we are explicitly
+            # checking this now and error out if it is anywhere else in
+            # the argument list.
+            if explicit_caller is not None:
+                try:
+                    node.defaults[explicit_caller - len(node.args)]
+                except IndexError:
+                    self.fail(
+                        "When defining macros or call blocks the "
+                        'special "caller" argument must be omitted '
+                        "or be given a default.",
+                        node.lineno,
+                    )
+            else:
+                args.append(frame.symbols.declare_parameter("caller"))
+            macro_ref.accesses_caller = True
+        if "kwargs" in undeclared and "kwargs" not in skip_special_params:
+            args.append(frame.symbols.declare_parameter("kwargs"))
+            macro_ref.accesses_kwargs = True
+        if "varargs" in undeclared and "varargs" not in skip_special_params:
+            args.append(frame.symbols.declare_parameter("varargs"))
+            macro_ref.accesses_varargs = True
+
+        # macros are delayed, they never require output checks
+        frame.require_output_check = False
+        frame.symbols.analyze_node(node)
+        self.writeline(f"{self.func('macro')}({', '.join(args)}):", node)
+        self.indent()
+
+        self.buffer(frame)
+        self.enter_frame(frame)
+
+        self.push_parameter_definitions(frame)
+        for idx, arg in enumerate(node.args):
+            ref = frame.symbols.ref(arg.name)
+            self.writeline(f"if {ref} is missing:")
+            self.indent()
+            try:
+                default = node.defaults[idx - len(node.args)]
+            except IndexError:
+                self.writeline(
+                    f'{ref} = undefined("parameter {arg.name!r} was not provided",'
+                    f" name={arg.name!r})"
+                )
+            else:
+                self.writeline(f"{ref} = ")
+                self.visit(default, frame)
+            self.mark_parameter_stored(ref)
+            self.outdent()
+        self.pop_parameter_definitions()
+
+        self.blockvisit(node.body, frame)
+        self.return_buffer_contents(frame, force_unescaped=True)
+        self.leave_frame(frame, with_python_scope=True)
+        self.outdent()
+
+        return frame, macro_ref
+
+    def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None:
         """Dump the macro definition for the def created by macro_body."""
-        pass
-
-    def position(self, node: nodes.Node) ->str:
+        arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
+        name = getattr(macro_ref.node, "name", None)
+        if len(macro_ref.node.args) == 1:
+            arg_tuple += ","
+        self.write(
+            f"Macro(environment, macro, {name!r}, ({arg_tuple}),"
+            f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r},"
+            f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)"
+        )
+
+    def position(self, node: nodes.Node) -> str:
         """Return a human readable position for the node."""
-        pass
-
-    def write_commons(self) ->None:
+        rv = f"line {node.lineno}"
+        if self.name is not None:
+            rv = f"{rv} in {self.name!r}"
+        return rv
+
+    def dump_local_context(self, frame: Frame) -> str:
+        items_kv = ", ".join(
+            f"{name!r}: {target}"
+            for name, target in frame.symbols.dump_stores().items()
+        )
+        return f"{{{items_kv}}}"
+
+    def write_commons(self) -> None:
         """Writes a common preamble that is used by root and block functions.
         Primarily this sets up common local helpers and enforces a generator
         through a dead branch.
         """
-        pass
-
-    def push_parameter_definitions(self, frame: Frame) ->None:
+        self.writeline("resolve = context.resolve_or_missing")
+        self.writeline("undefined = environment.undefined")
+        self.writeline("concat = environment.concat")
+        # always use the standard Undefined class for the implicit else of
+        # conditional expressions
+        self.writeline("cond_expr_undefined = Undefined")
+        self.writeline("if 0: yield None")
+
+    def push_parameter_definitions(self, frame: Frame) -> None:
         """Pushes all parameter targets from the given frame into a local
         stack that permits tracking of yet to be assigned parameters.  In
         particular this enables the optimization from `visit_Name` to skip
         undefined expressions for parameters in macros as macros can reference
         otherwise unbound parameters.
         """
-        pass
+        self._param_def_block.append(frame.symbols.dump_param_targets())

-    def pop_parameter_definitions(self) ->None:
+    def pop_parameter_definitions(self) -> None:
         """Pops the current parameter definitions set."""
-        pass
+        self._param_def_block.pop()

-    def mark_parameter_stored(self, target: str) ->None:
+    def mark_parameter_stored(self, target: str) -> None:
         """Marks a parameter in the current parameter definitions as stored.
         This will skip the enforced undefined checks.
         """
-        pass
+        if self._param_def_block:
+            self._param_def_block[-1].discard(target)
+
+    def push_context_reference(self, target: str) -> None:
+        self._context_reference_stack.append(target)
+
+    def pop_context_reference(self) -> None:
+        self._context_reference_stack.pop()
+
+    def get_context_ref(self) -> str:
+        return self._context_reference_stack[-1]
+
+    def get_resolve_func(self) -> str:
+        target = self._context_reference_stack[-1]
+        if target == "context":
+            return "resolve"
+        return f"{target}.resolve"

-    def parameter_is_undeclared(self, target: str) ->bool:
+    def derive_context(self, frame: Frame) -> str:
+        return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})"
+
+    def parameter_is_undeclared(self, target: str) -> bool:
         """Checks if a given target is an undeclared parameter."""
-        pass
+        if not self._param_def_block:
+            return False
+        return target in self._param_def_block[-1]

-    def push_assign_tracking(self) ->None:
+    def push_assign_tracking(self) -> None:
         """Pushes a new layer for assignment tracking."""
-        pass
+        self._assign_stack.append(set())

-    def pop_assign_tracking(self, frame: Frame) ->None:
+    def pop_assign_tracking(self, frame: Frame) -> None:
         """Pops the topmost level for assignment tracking and updates the
         context variables if necessary.
         """
-        pass
-
-    def visit_Block(self, node: nodes.Block, frame: Frame) ->None:
+        vars = self._assign_stack.pop()
+        if (
+            not frame.block_frame
+            and not frame.loop_frame
+            and not frame.toplevel
+            or not vars
+        ):
+            return
+        public_names = [x for x in vars if x[:1] != "_"]
+        if len(vars) == 1:
+            name = next(iter(vars))
+            ref = frame.symbols.ref(name)
+            if frame.loop_frame:
+                self.writeline(f"_loop_vars[{name!r}] = {ref}")
+                return
+            if frame.block_frame:
+                self.writeline(f"_block_vars[{name!r}] = {ref}")
+                return
+            self.writeline(f"context.vars[{name!r}] = {ref}")
+        else:
+            if frame.loop_frame:
+                self.writeline("_loop_vars.update({")
+            elif frame.block_frame:
+                self.writeline("_block_vars.update({")
+            else:
+                self.writeline("context.vars.update({")
+            for idx, name in enumerate(vars):
+                if idx:
+                    self.write(", ")
+                ref = frame.symbols.ref(name)
+                self.write(f"{name!r}: {ref}")
+            self.write("})")
+        if not frame.block_frame and not frame.loop_frame and public_names:
+            if len(public_names) == 1:
+                self.writeline(f"context.exported_vars.add({public_names[0]!r})")
+            else:
+                names_str = ", ".join(map(repr, public_names))
+                self.writeline(f"context.exported_vars.update(({names_str}))")
+
+    # -- Statement Visitors
+
+    def visit_Template(
+        self, node: nodes.Template, frame: t.Optional[Frame] = None
+    ) -> None:
+        assert frame is None, "no root frame allowed"
+        eval_ctx = EvalContext(self.environment, self.name)
+
+        from .runtime import async_exported
+        from .runtime import exported
+
+        if self.environment.is_async:
+            exported_names = sorted(exported + async_exported)
+        else:
+            exported_names = sorted(exported)
+
+        self.writeline("from jinja2.runtime import " + ", ".join(exported_names))
+
+        # if we want a deferred initialization we cannot move the
+        # environment into a local name
+        envenv = "" if self.defer_init else ", environment=environment"
+
+        # do we have an extends tag at all?  If not, we can save some
+        # overhead by just not processing any inheritance code.
+        have_extends = node.find(nodes.Extends) is not None
+
+        # find all blocks
+        for block in node.find_all(nodes.Block):
+            if block.name in self.blocks:
+                self.fail(f"block {block.name!r} defined twice", block.lineno)
+            self.blocks[block.name] = block
+
+        # find all imports and import them
+        for import_ in node.find_all(nodes.ImportedName):
+            if import_.importname not in self.import_aliases:
+                imp = import_.importname
+                self.import_aliases[imp] = alias = self.temporary_identifier()
+                if "." in imp:
+                    module, obj = imp.rsplit(".", 1)
+                    self.writeline(f"from {module} import {obj} as {alias}")
+                else:
+                    self.writeline(f"import {imp} as {alias}")
+
+        # add the load name
+        self.writeline(f"name = {self.name!r}")
+
+        # generate the root render function.
+        self.writeline(
+            f"{self.func('root')}(context, missing=missing{envenv}):", extra=1
+        )
+        self.indent()
+        self.write_commons()
+
+        # process the root
+        frame = Frame(eval_ctx)
+        if "self" in find_undeclared(node.body, ("self",)):
+            ref = frame.symbols.declare_parameter("self")
+            self.writeline(f"{ref} = TemplateReference(context)")
+        frame.symbols.analyze_node(node)
+        frame.toplevel = frame.rootlevel = True
+        frame.require_output_check = have_extends and not self.has_known_extends
+        if have_extends:
+            self.writeline("parent_template = None")
+        self.enter_frame(frame)
+        self.pull_dependencies(node.body)
+        self.blockvisit(node.body, frame)
+        self.leave_frame(frame, with_python_scope=True)
+        self.outdent()
+
+        # make sure that the parent root is called.
+        if have_extends:
+            if not self.has_known_extends:
+                self.indent()
+                self.writeline("if parent_template is not None:")
+            self.indent()
+            if not self.environment.is_async:
+                self.writeline("yield from parent_template.root_render_func(context)")
+            else:
+                self.writeline(
+                    "async for event in parent_template.root_render_func(context):"
+                )
+                self.indent()
+                self.writeline("yield event")
+                self.outdent()
+            self.outdent(1 + (not self.has_known_extends))
+
+        # at this point we now have the blocks collected and can visit them too.
+        for name, block in self.blocks.items():
+            self.writeline(
+                f"{self.func('block_' + name)}(context, missing=missing{envenv}):",
+                block,
+                1,
+            )
+            self.indent()
+            self.write_commons()
+            # It's important that we do not make this frame a child of the
+            # toplevel template.  This would cause a variety of
+            # interesting issues with identifier tracking.
+            block_frame = Frame(eval_ctx)
+            block_frame.block_frame = True
+            undeclared = find_undeclared(block.body, ("self", "super"))
+            if "self" in undeclared:
+                ref = block_frame.symbols.declare_parameter("self")
+                self.writeline(f"{ref} = TemplateReference(context)")
+            if "super" in undeclared:
+                ref = block_frame.symbols.declare_parameter("super")
+                self.writeline(f"{ref} = context.super({name!r}, block_{name})")
+            block_frame.symbols.analyze_node(block)
+            block_frame.block = name
+            self.writeline("_block_vars = {}")
+            self.enter_frame(block_frame)
+            self.pull_dependencies(block.body)
+            self.blockvisit(block.body, block_frame)
+            self.leave_frame(block_frame, with_python_scope=True)
+            self.outdent()
+
+        blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks)
+        self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1)
+        debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info)
+        self.writeline(f"debug_info = {debug_kv_str!r}")
+
+    def visit_Block(self, node: nodes.Block, frame: Frame) -> None:
         """Call a block and register it for the template."""
-        pass
-
-    def visit_Extends(self, node: nodes.Extends, frame: Frame) ->None:
+        level = 0
+        if frame.toplevel:
+            # if we know that we are a child template, there is no need to
+            # check if we are one
+            if self.has_known_extends:
+                return
+            if self.extends_so_far > 0:
+                self.writeline("if parent_template is None:")
+                self.indent()
+                level += 1
+
+        if node.scoped:
+            context = self.derive_context(frame)
+        else:
+            context = self.get_context_ref()
+
+        if node.required:
+            self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node)
+            self.indent()
+            self.writeline(
+                f'raise TemplateRuntimeError("Required block {node.name!r} not found")',
+                node,
+            )
+            self.outdent()
+
+        if not self.environment.is_async and frame.buffer is None:
+            self.writeline(
+                f"yield from context.blocks[{node.name!r}][0]({context})", node
+            )
+        else:
+            self.writeline(
+                f"{self.choose_async()}for event in"
+                f" context.blocks[{node.name!r}][0]({context}):",
+                node,
+            )
+            self.indent()
+            self.simple_write("event", frame)
+            self.outdent()
+
+        self.outdent(level)
+
+    def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None:
         """Calls the extender."""
-        pass
-
-    def visit_Include(self, node: nodes.Include, frame: Frame) ->None:
+        if not frame.toplevel:
+            self.fail("cannot use extend from a non top-level scope", node.lineno)
+
+        # if the number of extends statements in general is zero so
+        # far, we don't have to add a check if something extended
+        # the template before this one.
+        if self.extends_so_far > 0:
+            # if we have a known extends we just add a template runtime
+            # error into the generated code.  We could catch that at compile
+            # time too, but i welcome it not to confuse users by throwing the
+            # same error at different times just "because we can".
+            if not self.has_known_extends:
+                self.writeline("if parent_template is not None:")
+                self.indent()
+            self.writeline('raise TemplateRuntimeError("extended multiple times")')
+
+            # if we have a known extends already we don't need that code here
+            # as we know that the template execution will end here.
+            if self.has_known_extends:
+                raise CompilerExit()
+            else:
+                self.outdent()
+
+        self.writeline("parent_template = environment.get_template(", node)
+        self.visit(node.template, frame)
+        self.write(f", {self.name!r})")
+        self.writeline("for name, parent_block in parent_template.blocks.items():")
+        self.indent()
+        self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
+        self.outdent()
+
+        # if this extends statement was in the root level we can take
+        # advantage of that information and simplify the generated code
+        # in the top level from this point onwards
+        if frame.rootlevel:
+            self.has_known_extends = True
+
+        # and now we have one more
+        self.extends_so_far += 1
+
+    def visit_Include(self, node: nodes.Include, frame: Frame) -> None:
         """Handles includes."""
-        pass
+        if node.ignore_missing:
+            self.writeline("try:")
+            self.indent()
+
+        func_name = "get_or_select_template"
+        if isinstance(node.template, nodes.Const):
+            if isinstance(node.template.value, str):
+                func_name = "get_template"
+            elif isinstance(node.template.value, (tuple, list)):
+                func_name = "select_template"
+        elif isinstance(node.template, (nodes.Tuple, nodes.List)):
+            func_name = "select_template"
+
+        self.writeline(f"template = environment.{func_name}(", node)
+        self.visit(node.template, frame)
+        self.write(f", {self.name!r})")
+        if node.ignore_missing:
+            self.outdent()
+            self.writeline("except TemplateNotFound:")
+            self.indent()
+            self.writeline("pass")
+            self.outdent()
+            self.writeline("else:")
+            self.indent()
+
+        skip_event_yield = False
+        if node.with_context:
+            self.writeline(
+                f"{self.choose_async()}for event in template.root_render_func("
+                "template.new_context(context.get_all(), True,"
+                f" {self.dump_local_context(frame)})):"
+            )
+        elif self.environment.is_async:
+            self.writeline(
+                "for event in (await template._get_default_module_async())"
+                "._body_stream:"
+            )
+        else:
+            self.writeline("yield from template._get_default_module()._body_stream")
+            skip_event_yield = True
+
+        if not skip_event_yield:
+            self.indent()
+            self.simple_write("event", frame)
+            self.outdent()
+
+        if node.ignore_missing:
+            self.outdent()
+
+    def _import_common(
+        self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame
+    ) -> None:
+        self.write(f"{self.choose_async('await ')}environment.get_template(")
+        self.visit(node.template, frame)
+        self.write(f", {self.name!r}).")
+
+        if node.with_context:
+            f_name = f"make_module{self.choose_async('_async')}"
+            self.write(
+                f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})"
+            )
+        else:
+            self.write(f"_get_default_module{self.choose_async('_async')}(context)")

-    def visit_Import(self, node: nodes.Import, frame: Frame) ->None:
+    def visit_Import(self, node: nodes.Import, frame: Frame) -> None:
         """Visit regular imports."""
-        pass
+        self.writeline(f"{frame.symbols.ref(node.target)} = ", node)
+        if frame.toplevel:
+            self.write(f"context.vars[{node.target!r}] = ")
+
+        self._import_common(node, frame)

-    def visit_FromImport(self, node: nodes.FromImport, frame: Frame) ->None:
+        if frame.toplevel and not node.target.startswith("_"):
+            self.writeline(f"context.exported_vars.discard({node.target!r})")
+
+    def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None:
         """Visit named imports."""
-        pass
+        self.newline(node)
+        self.write("included_template = ")
+        self._import_common(node, frame)
+        var_names = []
+        discarded_names = []
+        for name in node.names:
+            if isinstance(name, tuple):
+                name, alias = name
+            else:
+                alias = name
+            self.writeline(
+                f"{frame.symbols.ref(alias)} ="
+                f" getattr(included_template, {name!r}, missing)"
+            )
+            self.writeline(f"if {frame.symbols.ref(alias)} is missing:")
+            self.indent()
+            message = (
+                "the template {included_template.__name__!r}"
+                f" (imported on {self.position(node)})"
+                f" does not export the requested name {name!r}"
+            )
+            self.writeline(
+                f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})"
+            )
+            self.outdent()
+            if frame.toplevel:
+                var_names.append(alias)
+                if not alias.startswith("_"):
+                    discarded_names.append(alias)
+
+        if var_names:
+            if len(var_names) == 1:
+                name = var_names[0]
+                self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}")
+            else:
+                names_kv = ", ".join(
+                    f"{name!r}: {frame.symbols.ref(name)}" for name in var_names
+                )
+                self.writeline(f"context.vars.update({{{names_kv}}})")
+        if discarded_names:
+            if len(discarded_names) == 1:
+                self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})")
+            else:
+                names_str = ", ".join(map(repr, discarded_names))
+                self.writeline(
+                    f"context.exported_vars.difference_update(({names_str}))"
+                )
+
+    def visit_For(self, node: nodes.For, frame: Frame) -> None:
+        loop_frame = frame.inner()
+        loop_frame.loop_frame = True
+        test_frame = frame.inner()
+        else_frame = frame.inner()
+
+        # try to figure out if we have an extended loop.  An extended loop
+        # is necessary if the loop is in recursive mode, if the special loop
+        # variable is accessed in the body, or if the body is a scoped block.
+        extended_loop = (
+            node.recursive
+            or "loop"
+            in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",))
+            or any(block.scoped for block in node.find_all(nodes.Block))
+        )
+
+        loop_ref = None
+        if extended_loop:
+            loop_ref = loop_frame.symbols.declare_parameter("loop")
+
+        loop_frame.symbols.analyze_node(node, for_branch="body")
+        if node.else_:
+            else_frame.symbols.analyze_node(node, for_branch="else")
+
+        if node.test:
+            loop_filter_func = self.temporary_identifier()
+            test_frame.symbols.analyze_node(node, for_branch="test")
+            self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test)
+            self.indent()
+            self.enter_frame(test_frame)
+            self.writeline(self.choose_async("async for ", "for "))
+            self.visit(node.target, loop_frame)
+            self.write(" in ")
+            self.write(self.choose_async("auto_aiter(fiter)", "fiter"))
+            self.write(":")
+            self.indent()
+            self.writeline("if ", node.test)
+            self.visit(node.test, test_frame)
+            self.write(":")
+            self.indent()
+            self.writeline("yield ")
+            self.visit(node.target, loop_frame)
+            self.outdent(3)
+            self.leave_frame(test_frame, with_python_scope=True)
+
+        # if we don't have a recursive loop we have to find the shadowed
+        # variables at that point.  Because loops can be nested but the loop
+        # variable is a special one we have to enforce aliasing for it.
+        if node.recursive:
+            self.writeline(
+                f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node
+            )
+            self.indent()
+            self.buffer(loop_frame)
+
+            # Use the same buffer for the else frame
+            else_frame.buffer = loop_frame.buffer
+
+        # make sure the loop variable is a special one and raise a template
+        # assertion error if a loop tries to write to loop
+        if extended_loop:
+            self.writeline(f"{loop_ref} = missing")
+
+        for name in node.find_all(nodes.Name):
+            if name.ctx == "store" and name.name == "loop":
+                self.fail(
+                    "Can't assign to special loop variable in for-loop target",
+                    name.lineno,
+                )
+
+        if node.else_:
+            iteration_indicator = self.temporary_identifier()
+            self.writeline(f"{iteration_indicator} = 1")
+
+        self.writeline(self.choose_async("async for ", "for "), node)
+        self.visit(node.target, loop_frame)
+        if extended_loop:
+            self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(")
+        else:
+            self.write(" in ")

+        if node.test:
+            self.write(f"{loop_filter_func}(")
+        if node.recursive:
+            self.write("reciter")
+        else:
+            if self.environment.is_async and not extended_loop:
+                self.write("auto_aiter(")
+            self.visit(node.iter, frame)
+            if self.environment.is_async and not extended_loop:
+                self.write(")")
+        if node.test:
+            self.write(")")
+
+        if node.recursive:
+            self.write(", undefined, loop_render_func, depth):")
+        else:
+            self.write(", undefined):" if extended_loop else ":")
+
+        self.indent()
+        self.enter_frame(loop_frame)
+
+        self.writeline("_loop_vars = {}")
+        self.blockvisit(node.body, loop_frame)
+        if node.else_:
+            self.writeline(f"{iteration_indicator} = 0")
+        self.outdent()
+        self.leave_frame(
+            loop_frame, with_python_scope=node.recursive and not node.else_
+        )
+
+        if node.else_:
+            self.writeline(f"if {iteration_indicator}:")
+            self.indent()
+            self.enter_frame(else_frame)
+            self.blockvisit(node.else_, else_frame)
+            self.leave_frame(else_frame)
+            self.outdent()
+
+        # if the node was recursive we have to return the buffer contents
+        # and start the iteration code
+        if node.recursive:
+            self.return_buffer_contents(loop_frame)
+            self.outdent()
+            self.start_write(frame, node)
+            self.write(f"{self.choose_async('await ')}loop(")
+            if self.environment.is_async:
+                self.write("auto_aiter(")
+            self.visit(node.iter, frame)
+            if self.environment.is_async:
+                self.write(")")
+            self.write(", loop)")
+            self.end_write(frame)
+
+        # at the end of the iteration, clear any assignments made in the
+        # loop from the top level
+        if self._assign_stack:
+            self._assign_stack[-1].difference_update(loop_frame.symbols.stores)
+
+    def visit_If(self, node: nodes.If, frame: Frame) -> None:
+        if_frame = frame.soft()
+        self.writeline("if ", node)
+        self.visit(node.test, if_frame)
+        self.write(":")
+        self.indent()
+        self.blockvisit(node.body, if_frame)
+        self.outdent()
+        for elif_ in node.elif_:
+            self.writeline("elif ", elif_)
+            self.visit(elif_.test, if_frame)
+            self.write(":")
+            self.indent()
+            self.blockvisit(elif_.body, if_frame)
+            self.outdent()
+        if node.else_:
+            self.writeline("else:")
+            self.indent()
+            self.blockvisit(node.else_, if_frame)
+            self.outdent()
+
+    def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None:
+        macro_frame, macro_ref = self.macro_body(node, frame)
+        self.newline()
+        if frame.toplevel:
+            if not node.name.startswith("_"):
+                self.write(f"context.exported_vars.add({node.name!r})")
+            self.writeline(f"context.vars[{node.name!r}] = ")
+        self.write(f"{frame.symbols.ref(node.name)} = ")
+        self.macro_def(macro_ref, macro_frame)
+
+    def visit_CallBlock(self, node: nodes.CallBlock, frame: Frame) -> None:
+        call_frame, macro_ref = self.macro_body(node, frame)
+        self.writeline("caller = ")
+        self.macro_def(macro_ref, call_frame)
+        self.start_write(frame, node)
+        self.visit_Call(node.call, frame, forward_caller=True)
+        self.end_write(frame)
+
+    def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None:
+        filter_frame = frame.inner()
+        filter_frame.symbols.analyze_node(node)
+        self.enter_frame(filter_frame)
+        self.buffer(filter_frame)
+        self.blockvisit(node.body, filter_frame)
+        self.start_write(frame, node)
+        self.visit_Filter(node.filter, filter_frame)
+        self.end_write(frame)
+        self.leave_frame(filter_frame)
+
+    def visit_With(self, node: nodes.With, frame: Frame) -> None:
+        with_frame = frame.inner()
+        with_frame.symbols.analyze_node(node)
+        self.enter_frame(with_frame)
+        for target, expr in zip(node.targets, node.values):
+            self.newline()
+            self.visit(target, with_frame)
+            self.write(" = ")
+            self.visit(expr, frame)
+        self.blockvisit(node.body, with_frame)
+        self.leave_frame(with_frame)
+
+    def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None:
+        self.newline(node)
+        self.visit(node.node, frame)

     class _FinalizeInfo(t.NamedTuple):
         const: t.Optional[t.Callable[..., str]]
         src: t.Optional[str]

     @staticmethod
-    def _default_finalize(value: t.Any) ->t.Any:
+    def _default_finalize(value: t.Any) -> t.Any:
         """The default finalize function if the environment isn't
         configured with one. Or, if the environment has one, this is
         called on that function's output for constants.
         """
-        pass
+        return str(value)
+
     _finalize: t.Optional[_FinalizeInfo] = None

-    def _make_finalize(self) ->_FinalizeInfo:
+    def _make_finalize(self) -> _FinalizeInfo:
         """Build the finalize function to be used on constants and at
         runtime. Cached so it's only created once for all output nodes.

@@ -353,17 +1389,51 @@ class CodeGenerator(NodeVisitor):
             Source code to output around nodes to be evaluated at
             runtime.
         """
-        pass
+        if self._finalize is not None:
+            return self._finalize
+
+        finalize: t.Optional[t.Callable[..., t.Any]]
+        finalize = default = self._default_finalize
+        src = None
+
+        if self.environment.finalize:
+            src = "environment.finalize("
+            env_finalize = self.environment.finalize
+            pass_arg = {
+                _PassArg.context: "context",
+                _PassArg.eval_context: "context.eval_ctx",
+                _PassArg.environment: "environment",
+            }.get(
+                _PassArg.from_obj(env_finalize)  # type: ignore
+            )
+            finalize = None
+
+            if pass_arg is None:
+
+                def finalize(value: t.Any) -> t.Any:  # noqa: F811
+                    return default(env_finalize(value))
+
+            else:
+                src = f"{src}{pass_arg}, "

-    def _output_const_repr(self, group: t.Iterable[t.Any]) ->str:
+                if pass_arg == "environment":
+
+                    def finalize(value: t.Any) -> t.Any:  # noqa: F811
+                        return default(env_finalize(self.environment, value))
+
+        self._finalize = self._FinalizeInfo(finalize, src)
+        return self._finalize
+
+    def _output_const_repr(self, group: t.Iterable[t.Any]) -> str:
         """Given a group of constant values converted from ``Output``
         child nodes, produce a string to write to the template module
         source.
         """
-        pass
+        return repr(concat(group))

-    def _output_child_to_const(self, node: nodes.Expr, frame: Frame,
-        finalize: _FinalizeInfo) ->str:
+    def _output_child_to_const(
+        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
+    ) -> str:
         """Try to optimize a child of an ``Output`` node by trying to
         convert it to constant, finalized data at compile time.

@@ -371,30 +1441,520 @@ class CodeGenerator(NodeVisitor):
         will be evaluated at runtime. Any other exception will also be
         evaluated at runtime for easier debugging.
         """
-        pass
+        const = node.as_const(frame.eval_ctx)

-    def _output_child_pre(self, node: nodes.Expr, frame: Frame, finalize:
-        _FinalizeInfo) ->None:
+        if frame.eval_ctx.autoescape:
+            const = escape(const)
+
+        # Template data doesn't go through finalize.
+        if isinstance(node, nodes.TemplateData):
+            return str(const)
+
+        return finalize.const(const)  # type: ignore
+
+    def _output_child_pre(
+        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
+    ) -> None:
         """Output extra source code before visiting a child of an
         ``Output`` node.
         """
-        pass
+        if frame.eval_ctx.volatile:
+            self.write("(escape if context.eval_ctx.autoescape else str)(")
+        elif frame.eval_ctx.autoescape:
+            self.write("escape(")
+        else:
+            self.write("str(")
+
+        if finalize.src is not None:
+            self.write(finalize.src)

-    def _output_child_post(self, node: nodes.Expr, frame: Frame, finalize:
-        _FinalizeInfo) ->None:
+    def _output_child_post(
+        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
+    ) -> None:
         """Output extra source code after visiting a child of an
         ``Output`` node.
         """
-        pass
-    visit_Add = _make_binop('+')
-    visit_Sub = _make_binop('-')
-    visit_Mul = _make_binop('*')
-    visit_Div = _make_binop('/')
-    visit_FloorDiv = _make_binop('//')
-    visit_Pow = _make_binop('**')
-    visit_Mod = _make_binop('%')
-    visit_And = _make_binop('and')
-    visit_Or = _make_binop('or')
-    visit_Pos = _make_unop('+')
-    visit_Neg = _make_unop('-')
-    visit_Not = _make_unop('not ')
+        self.write(")")
+
+        if finalize.src is not None:
+            self.write(")")
+
+    def visit_Output(self, node: nodes.Output, frame: Frame) -> None:
+        # If an extends is active, don't render outside a block.
+        if frame.require_output_check:
+            # A top-level extends is known to exist at compile time.
+            if self.has_known_extends:
+                return
+
+            self.writeline("if parent_template is None:")
+            self.indent()
+
+        finalize = self._make_finalize()
+        body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = []
+
+        # Evaluate constants at compile time if possible. Each item in
+        # body will be either a list of static data or a node to be
+        # evaluated at runtime.
+        for child in node.nodes:
+            try:
+                if not (
+                    # If the finalize function requires runtime context,
+                    # constants can't be evaluated at compile time.
+                    finalize.const
+                    # Unless it's basic template data that won't be
+                    # finalized anyway.
+                    or isinstance(child, nodes.TemplateData)
+                ):
+                    raise nodes.Impossible()
+
+                const = self._output_child_to_const(child, frame, finalize)
+            except (nodes.Impossible, Exception):
+                # The node was not constant and needs to be evaluated at
+                # runtime. Or another error was raised, which is easier
+                # to debug at runtime.
+                body.append(child)
+                continue
+
+            if body and isinstance(body[-1], list):
+                body[-1].append(const)
+            else:
+                body.append([const])
+
+        if frame.buffer is not None:
+            if len(body) == 1:
+                self.writeline(f"{frame.buffer}.append(")
+            else:
+                self.writeline(f"{frame.buffer}.extend((")
+
+            self.indent()
+
+        for item in body:
+            if isinstance(item, list):
+                # A group of constant data to join and output.
+                val = self._output_const_repr(item)
+
+                if frame.buffer is None:
+                    self.writeline("yield " + val)
+                else:
+                    self.writeline(val + ",")
+            else:
+                if frame.buffer is None:
+                    self.writeline("yield ", item)
+                else:
+                    self.newline(item)
+
+                # A node to be evaluated at runtime.
+                self._output_child_pre(item, frame, finalize)
+                self.visit(item, frame)
+                self._output_child_post(item, frame, finalize)
+
+                if frame.buffer is not None:
+                    self.write(",")
+
+        if frame.buffer is not None:
+            self.outdent()
+            self.writeline(")" if len(body) == 1 else "))")
+
+        if frame.require_output_check:
+            self.outdent()
+
+    def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None:
+        self.push_assign_tracking()
+        self.newline(node)
+        self.visit(node.target, frame)
+        self.write(" = ")
+        self.visit(node.node, frame)
+        self.pop_assign_tracking(frame)
+
+    def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None:
+        self.push_assign_tracking()
+        block_frame = frame.inner()
+        # This is a special case.  Since a set block always captures we
+        # will disable output checks.  This way one can use set blocks
+        # toplevel even in extended templates.
+        block_frame.require_output_check = False
+        block_frame.symbols.analyze_node(node)
+        self.enter_frame(block_frame)
+        self.buffer(block_frame)
+        self.blockvisit(node.body, block_frame)
+        self.newline(node)
+        self.visit(node.target, frame)
+        self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
+        if node.filter is not None:
+            self.visit_Filter(node.filter, block_frame)
+        else:
+            self.write(f"concat({block_frame.buffer})")
+        self.write(")")
+        self.pop_assign_tracking(frame)
+        self.leave_frame(block_frame)
+
+    # -- Expression Visitors
+
+    def visit_Name(self, node: nodes.Name, frame: Frame) -> None:
+        if node.ctx == "store" and (
+            frame.toplevel or frame.loop_frame or frame.block_frame
+        ):
+            if self._assign_stack:
+                self._assign_stack[-1].add(node.name)
+        ref = frame.symbols.ref(node.name)
+
+        # If we are looking up a variable we might have to deal with the
+        # case where it's undefined.  We can skip that case if the load
+        # instruction indicates a parameter which are always defined.
+        if node.ctx == "load":
+            load = frame.symbols.find_load(ref)
+            if not (
+                load is not None
+                and load[0] == VAR_LOAD_PARAMETER
+                and not self.parameter_is_undeclared(ref)
+            ):
+                self.write(
+                    f"(undefined(name={node.name!r}) if {ref} is missing else {ref})"
+                )
+                return
+
+        self.write(ref)
+
+    def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None:
+        # NSRefs can only be used to store values; since they use the normal
+        # `foo.bar` notation they will be parsed as a normal attribute access
+        # when used anywhere but in a `set` context
+        ref = frame.symbols.ref(node.name)
+        self.writeline(f"if not isinstance({ref}, Namespace):")
+        self.indent()
+        self.writeline(
+            "raise TemplateRuntimeError"
+            '("cannot assign attribute on non-namespace object")'
+        )
+        self.outdent()
+        self.writeline(f"{ref}[{node.attr!r}]")
+
+    def visit_Const(self, node: nodes.Const, frame: Frame) -> None:
+        val = node.as_const(frame.eval_ctx)
+        if isinstance(val, float):
+            self.write(str(val))
+        else:
+            self.write(repr(val))
+
+    def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None:
+        try:
+            self.write(repr(node.as_const(frame.eval_ctx)))
+        except nodes.Impossible:
+            self.write(
+                f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})"
+            )
+
+    def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None:
+        self.write("(")
+        idx = -1
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(", ")
+            self.visit(item, frame)
+        self.write(",)" if idx == 0 else ")")
+
+    def visit_List(self, node: nodes.List, frame: Frame) -> None:
+        self.write("[")
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(", ")
+            self.visit(item, frame)
+        self.write("]")
+
+    def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None:
+        self.write("{")
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(", ")
+            self.visit(item.key, frame)
+            self.write(": ")
+            self.visit(item.value, frame)
+        self.write("}")
+
+    visit_Add = _make_binop("+")
+    visit_Sub = _make_binop("-")
+    visit_Mul = _make_binop("*")
+    visit_Div = _make_binop("/")
+    visit_FloorDiv = _make_binop("//")
+    visit_Pow = _make_binop("**")
+    visit_Mod = _make_binop("%")
+    visit_And = _make_binop("and")
+    visit_Or = _make_binop("or")
+    visit_Pos = _make_unop("+")
+    visit_Neg = _make_unop("-")
+    visit_Not = _make_unop("not ")
+
+    @optimizeconst
+    def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None:
+        if frame.eval_ctx.volatile:
+            func_name = "(markup_join if context.eval_ctx.volatile else str_join)"
+        elif frame.eval_ctx.autoescape:
+            func_name = "markup_join"
+        else:
+            func_name = "str_join"
+        self.write(f"{func_name}((")
+        for arg in node.nodes:
+            self.visit(arg, frame)
+            self.write(", ")
+        self.write("))")
+
+    @optimizeconst
+    def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None:
+        self.write("(")
+        self.visit(node.expr, frame)
+        for op in node.ops:
+            self.visit(op, frame)
+        self.write(")")
+
+    def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None:
+        self.write(f" {operators[node.op]} ")
+        self.visit(node.expr, frame)
+
+    @optimizeconst
+    def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None:
+        if self.environment.is_async:
+            self.write("(await auto_await(")
+
+        self.write("environment.getattr(")
+        self.visit(node.node, frame)
+        self.write(f", {node.attr!r})")
+
+        if self.environment.is_async:
+            self.write("))")
+
+    @optimizeconst
+    def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None:
+        # slices bypass the environment getitem method.
+        if isinstance(node.arg, nodes.Slice):
+            self.visit(node.node, frame)
+            self.write("[")
+            self.visit(node.arg, frame)
+            self.write("]")
+        else:
+            if self.environment.is_async:
+                self.write("(await auto_await(")
+
+            self.write("environment.getitem(")
+            self.visit(node.node, frame)
+            self.write(", ")
+            self.visit(node.arg, frame)
+            self.write(")")
+
+            if self.environment.is_async:
+                self.write("))")
+
+    def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None:
+        if node.start is not None:
+            self.visit(node.start, frame)
+        self.write(":")
+        if node.stop is not None:
+            self.visit(node.stop, frame)
+        if node.step is not None:
+            self.write(":")
+            self.visit(node.step, frame)
+
+    @contextmanager
+    def _filter_test_common(
+        self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool
+    ) -> t.Iterator[None]:
+        if self.environment.is_async:
+            self.write("(await auto_await(")
+
+        if is_filter:
+            self.write(f"{self.filters[node.name]}(")
+            func = self.environment.filters.get(node.name)
+        else:
+            self.write(f"{self.tests[node.name]}(")
+            func = self.environment.tests.get(node.name)
+
+        # When inside an If or CondExpr frame, allow the filter to be
+        # undefined at compile time and only raise an error if it's
+        # actually called at runtime. See pull_dependencies.
+        if func is None and not frame.soft_frame:
+            type_name = "filter" if is_filter else "test"
+            self.fail(f"No {type_name} named {node.name!r}.", node.lineno)
+
+        pass_arg = {
+            _PassArg.context: "context",
+            _PassArg.eval_context: "context.eval_ctx",
+            _PassArg.environment: "environment",
+        }.get(
+            _PassArg.from_obj(func)  # type: ignore
+        )
+
+        if pass_arg is not None:
+            self.write(f"{pass_arg}, ")
+
+        # Back to the visitor function to handle visiting the target of
+        # the filter or test.
+        yield
+
+        self.signature(node, frame)
+        self.write(")")
+
+        if self.environment.is_async:
+            self.write("))")
+
+    @optimizeconst
+    def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None:
+        with self._filter_test_common(node, frame, True):
+            # if the filter node is None we are inside a filter block
+            # and want to write to the current buffer
+            if node.node is not None:
+                self.visit(node.node, frame)
+            elif frame.eval_ctx.volatile:
+                self.write(
+                    f"(Markup(concat({frame.buffer}))"
+                    f" if context.eval_ctx.autoescape else concat({frame.buffer}))"
+                )
+            elif frame.eval_ctx.autoescape:
+                self.write(f"Markup(concat({frame.buffer}))")
+            else:
+                self.write(f"concat({frame.buffer})")
+
+    @optimizeconst
+    def visit_Test(self, node: nodes.Test, frame: Frame) -> None:
+        with self._filter_test_common(node, frame, False):
+            self.visit(node.node, frame)
+
+    @optimizeconst
+    def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None:
+        frame = frame.soft()
+
+        def write_expr2() -> None:
+            if node.expr2 is not None:
+                self.visit(node.expr2, frame)
+                return
+
+            self.write(
+                f'cond_expr_undefined("the inline if-expression on'
+                f" {self.position(node)} evaluated to false and no else"
+                f' section was defined.")'
+            )
+
+        self.write("(")
+        self.visit(node.expr1, frame)
+        self.write(" if ")
+        self.visit(node.test, frame)
+        self.write(" else ")
+        write_expr2()
+        self.write(")")
+
+    @optimizeconst
+    def visit_Call(
+        self, node: nodes.Call, frame: Frame, forward_caller: bool = False
+    ) -> None:
+        if self.environment.is_async:
+            self.write("(await auto_await(")
+        if self.environment.sandboxed:
+            self.write("environment.call(context, ")
+        else:
+            self.write("context.call(")
+        self.visit(node.node, frame)
+        extra_kwargs = {"caller": "caller"} if forward_caller else None
+        loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {}
+        block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {}
+        if extra_kwargs:
+            extra_kwargs.update(loop_kwargs, **block_kwargs)
+        elif loop_kwargs or block_kwargs:
+            extra_kwargs = dict(loop_kwargs, **block_kwargs)
+        self.signature(node, frame, extra_kwargs)
+        self.write(")")
+        if self.environment.is_async:
+            self.write("))")
+
+    def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None:
+        self.write(node.key + "=")
+        self.visit(node.value, frame)
+
+    # -- Unused nodes for extensions
+
+    def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None:
+        self.write("Markup(")
+        self.visit(node.expr, frame)
+        self.write(")")
+
+    def visit_MarkSafeIfAutoescape(
+        self, node: nodes.MarkSafeIfAutoescape, frame: Frame
+    ) -> None:
+        self.write("(Markup if context.eval_ctx.autoescape else identity)(")
+        self.visit(node.expr, frame)
+        self.write(")")
+
+    def visit_EnvironmentAttribute(
+        self, node: nodes.EnvironmentAttribute, frame: Frame
+    ) -> None:
+        self.write("environment." + node.name)
+
+    def visit_ExtensionAttribute(
+        self, node: nodes.ExtensionAttribute, frame: Frame
+    ) -> None:
+        self.write(f"environment.extensions[{node.identifier!r}].{node.name}")
+
+    def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None:
+        self.write(self.import_aliases[node.importname])
+
+    def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None:
+        self.write(node.name)
+
+    def visit_ContextReference(
+        self, node: nodes.ContextReference, frame: Frame
+    ) -> None:
+        self.write("context")
+
+    def visit_DerivedContextReference(
+        self, node: nodes.DerivedContextReference, frame: Frame
+    ) -> None:
+        self.write(self.derive_context(frame))
+
+    def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None:
+        self.writeline("continue", node)
+
+    def visit_Break(self, node: nodes.Break, frame: Frame) -> None:
+        self.writeline("break", node)
+
+    def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None:
+        scope_frame = frame.inner()
+        scope_frame.symbols.analyze_node(node)
+        self.enter_frame(scope_frame)
+        self.blockvisit(node.body, scope_frame)
+        self.leave_frame(scope_frame)
+
+    def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None:
+        ctx = self.temporary_identifier()
+        self.writeline(f"{ctx} = {self.derive_context(frame)}")
+        self.writeline(f"{ctx}.vars = ")
+        self.visit(node.context, frame)
+        self.push_context_reference(ctx)
+
+        scope_frame = frame.inner(isolated=True)
+        scope_frame.symbols.analyze_node(node)
+        self.enter_frame(scope_frame)
+        self.blockvisit(node.body, scope_frame)
+        self.leave_frame(scope_frame)
+        self.pop_context_reference()
+
+    def visit_EvalContextModifier(
+        self, node: nodes.EvalContextModifier, frame: Frame
+    ) -> None:
+        for keyword in node.options:
+            self.writeline(f"context.eval_ctx.{keyword.key} = ")
+            self.visit(keyword.value, frame)
+            try:
+                val = keyword.value.as_const(frame.eval_ctx)
+            except nodes.Impossible:
+                frame.eval_ctx.volatile = True
+            else:
+                setattr(frame.eval_ctx, keyword.key, val)
+
+    def visit_ScopedEvalContextModifier(
+        self, node: nodes.ScopedEvalContextModifier, frame: Frame
+    ) -> None:
+        old_ctx_name = self.temporary_identifier()
+        saved_ctx = frame.eval_ctx.save()
+        self.writeline(f"{old_ctx_name} = context.eval_ctx.save()")
+        self.visit_EvalContextModifier(node, frame)
+        for child in node.body:
+            self.visit(child, frame)
+        frame.eval_ctx.revert(saved_ctx)
+        self.writeline(f"context.eval_ctx.revert({old_ctx_name})")
diff --git a/src/jinja2/constants.py b/src/jinja2/constants.py
index e3262f1..41a1c23 100644
--- a/src/jinja2/constants.py
+++ b/src/jinja2/constants.py
@@ -1,4 +1,6 @@
-LOREM_IPSUM_WORDS = """a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
+#: list of lorem ipsum words used by the lipsum() helper function
+LOREM_IPSUM_WORDS = """\
+a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
 auctor augue bibendum blandit class commodo condimentum congue consectetuer
 consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
 diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
diff --git a/src/jinja2/debug.py b/src/jinja2/debug.py
index 412f2c2..7ed7e92 100644
--- a/src/jinja2/debug.py
+++ b/src/jinja2/debug.py
@@ -2,14 +2,16 @@ import sys
 import typing as t
 from types import CodeType
 from types import TracebackType
+
 from .exceptions import TemplateSyntaxError
 from .utils import internal_code
 from .utils import missing
+
 if t.TYPE_CHECKING:
     from .runtime import Context


-def rewrite_traceback_stack(source: t.Optional[str]=None) ->BaseException:
+def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException:
     """Rewrite the current exception to replace any tracebacks from
     within compiled template code with tracebacks that look like they
     came from the template source.
@@ -20,11 +22,60 @@ def rewrite_traceback_stack(source: t.Optional[str]=None) ->BaseException:
         known.
     :return: The original exception with the rewritten traceback.
     """
-    pass
+    _, exc_value, tb = sys.exc_info()
+    exc_value = t.cast(BaseException, exc_value)
+    tb = t.cast(TracebackType, tb)
+
+    if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
+        exc_value.translated = True
+        exc_value.source = source
+        # Remove the old traceback, otherwise the frames from the
+        # compiler still show up.
+        exc_value.with_traceback(None)
+        # Outside of runtime, so the frame isn't executing template
+        # code, but it still needs to point at the template.
+        tb = fake_traceback(
+            exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
+        )
+    else:
+        # Skip the frame for the render function.
+        tb = tb.tb_next
+
+    stack = []
+
+    # Build the stack of traceback objects, replacing any in template
+    # code with the source file and line information.
+    while tb is not None:
+        # Skip frames decorated with @internalcode. These are internal
+        # calls that aren't useful in template debugging output.
+        if tb.tb_frame.f_code in internal_code:
+            tb = tb.tb_next
+            continue
+
+        template = tb.tb_frame.f_globals.get("__jinja_template__")
+
+        if template is not None:
+            lineno = template.get_corresponding_lineno(tb.tb_lineno)
+            fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
+            stack.append(fake_tb)
+        else:
+            stack.append(tb)

+        tb = tb.tb_next

-def fake_traceback(exc_value: BaseException, tb: t.Optional[TracebackType],
-    filename: str, lineno: int) ->TracebackType:
+    tb_next = None
+
+    # Assign tb_next in reverse to avoid circular references.
+    for tb in reversed(stack):
+        tb.tb_next = tb_next
+        tb_next = tb
+
+    return exc_value.with_traceback(tb_next)
+
+
+def fake_traceback(  # type: ignore
+    exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int
+) -> TracebackType:
     """Produce a new traceback object that looks like it came from the
     template source instead of the compiled code. The filename, line
     number, and location name will point to the template, and the local
@@ -37,12 +88,104 @@ def fake_traceback(exc_value: BaseException, tb: t.Optional[TracebackType],
     :param filename: The template filename.
     :param lineno: The line number in the template source.
     """
-    pass
+    if tb is not None:
+        # Replace the real locals with the context that would be
+        # available at that point in the template.
+        locals = get_template_locals(tb.tb_frame.f_locals)
+        locals.pop("__jinja_exception__", None)
+    else:
+        locals = {}
+
+    globals = {
+        "__name__": filename,
+        "__file__": filename,
+        "__jinja_exception__": exc_value,
+    }
+    # Raise an exception at the correct line number.
+    code: CodeType = compile(
+        "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec"
+    )
+
+    # Build a new code object that points to the template file and
+    # replaces the location with a block name.
+    location = "template"

+    if tb is not None:
+        function = tb.tb_frame.f_code.co_name

-def get_template_locals(real_locals: t.Mapping[str, t.Any]) ->t.Dict[str, t.Any
-    ]:
+        if function == "root":
+            location = "top-level template code"
+        elif function.startswith("block_"):
+            location = f"block {function[6:]!r}"
+
+    if sys.version_info >= (3, 8):
+        code = code.replace(co_name=location)
+    else:
+        code = CodeType(
+            code.co_argcount,
+            code.co_kwonlyargcount,
+            code.co_nlocals,
+            code.co_stacksize,
+            code.co_flags,
+            code.co_code,
+            code.co_consts,
+            code.co_names,
+            code.co_varnames,
+            code.co_filename,
+            location,
+            code.co_firstlineno,
+            code.co_lnotab,
+            code.co_freevars,
+            code.co_cellvars,
+        )
+
+    # Execute the new code, which is guaranteed to raise, and return
+    # the new traceback without this frame.
+    try:
+        exec(code, globals, locals)
+    except BaseException:
+        return sys.exc_info()[2].tb_next  # type: ignore
+
+
+def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]:
     """Based on the runtime locals, get the context that would be
     available at that point in the template.
     """
-    pass
+    # Start with the current template context.
+    ctx: "t.Optional[Context]" = real_locals.get("context")
+
+    if ctx is not None:
+        data: t.Dict[str, t.Any] = ctx.get_all().copy()
+    else:
+        data = {}
+
+    # Might be in a derived context that only sets local variables
+    # rather than pushing a context. Local variables follow the scheme
+    # l_depth_name. Find the highest-depth local that has a value for
+    # each name.
+    local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {}
+
+    for name, value in real_locals.items():
+        if not name.startswith("l_") or value is missing:
+            # Not a template variable, or no longer relevant.
+            continue
+
+        try:
+            _, depth_str, name = name.split("_", 2)
+            depth = int(depth_str)
+        except ValueError:
+            continue
+
+        cur_depth = local_overrides.get(name, (-1,))[0]
+
+        if cur_depth < depth:
+            local_overrides[name] = (depth, value)
+
+    # Modify the context with any derived context.
+    for name, (_, value) in local_overrides.items():
+        if value is missing:
+            data.pop(name, None)
+        else:
+            data[name] = value
+
+    return data
diff --git a/src/jinja2/defaults.py b/src/jinja2/defaults.py
index 07ecd67..638cad3 100644
--- a/src/jinja2/defaults.py
+++ b/src/jinja2/defaults.py
@@ -1,28 +1,48 @@
 import typing as t
-from .filters import FILTERS as DEFAULT_FILTERS
-from .tests import TESTS as DEFAULT_TESTS
+
+from .filters import FILTERS as DEFAULT_FILTERS  # noqa: F401
+from .tests import TESTS as DEFAULT_TESTS  # noqa: F401
 from .utils import Cycler
 from .utils import generate_lorem_ipsum
 from .utils import Joiner
 from .utils import Namespace
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
-BLOCK_START_STRING = '{%'
-BLOCK_END_STRING = '%}'
-VARIABLE_START_STRING = '{{'
-VARIABLE_END_STRING = '}}'
-COMMENT_START_STRING = '{#'
-COMMENT_END_STRING = '#}'
+
+# defaults for the parser / lexer
+BLOCK_START_STRING = "{%"
+BLOCK_END_STRING = "%}"
+VARIABLE_START_STRING = "{{"
+VARIABLE_END_STRING = "}}"
+COMMENT_START_STRING = "{#"
+COMMENT_END_STRING = "#}"
 LINE_STATEMENT_PREFIX: t.Optional[str] = None
 LINE_COMMENT_PREFIX: t.Optional[str] = None
 TRIM_BLOCKS = False
 LSTRIP_BLOCKS = False
-NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = '\n'
+NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n"
 KEEP_TRAILING_NEWLINE = False
-DEFAULT_NAMESPACE = {'range': range, 'dict': dict, 'lipsum':
-    generate_lorem_ipsum, 'cycler': Cycler, 'joiner': Joiner, 'namespace':
-    Namespace}
-DEFAULT_POLICIES: t.Dict[str, t.Any] = {'compiler.ascii_str': True,
-    'urlize.rel': 'noopener', 'urlize.target': None, 'urlize.extra_schemes':
-    None, 'truncate.leeway': 5, 'json.dumps_function': None,
-    'json.dumps_kwargs': {'sort_keys': True}, 'ext.i18n.trimmed': False}
+
+# default filters, tests and namespace
+
+DEFAULT_NAMESPACE = {
+    "range": range,
+    "dict": dict,
+    "lipsum": generate_lorem_ipsum,
+    "cycler": Cycler,
+    "joiner": Joiner,
+    "namespace": Namespace,
+}
+
+# default policies
+DEFAULT_POLICIES: t.Dict[str, t.Any] = {
+    "compiler.ascii_str": True,
+    "urlize.rel": "noopener",
+    "urlize.target": None,
+    "urlize.extra_schemes": None,
+    "truncate.leeway": 5,
+    "json.dumps_function": None,
+    "json.dumps_kwargs": {"sort_keys": True},
+    "ext.i18n.trimmed": False,
+}
diff --git a/src/jinja2/environment.py b/src/jinja2/environment.py
index aae9f98..1d3be0b 100644
--- a/src/jinja2/environment.py
+++ b/src/jinja2/environment.py
@@ -1,6 +1,7 @@
 """Classes for managing templates and their runtime and compile time
 options.
 """
+
 import os
 import typing
 import typing as t
@@ -10,7 +11,9 @@ from functools import lru_cache
 from functools import partial
 from functools import reduce
 from types import CodeType
+
 from markupsafe import Markup
+
 from . import nodes
 from .compiler import CodeGenerator
 from .compiler import generate
@@ -18,10 +21,10 @@ from .defaults import BLOCK_END_STRING
 from .defaults import BLOCK_START_STRING
 from .defaults import COMMENT_END_STRING
 from .defaults import COMMENT_START_STRING
-from .defaults import DEFAULT_FILTERS
+from .defaults import DEFAULT_FILTERS  # type: ignore[attr-defined]
 from .defaults import DEFAULT_NAMESPACE
 from .defaults import DEFAULT_POLICIES
-from .defaults import DEFAULT_TESTS
+from .defaults import DEFAULT_TESTS  # type: ignore[attr-defined]
 from .defaults import KEEP_TRAILING_NEWLINE
 from .defaults import LINE_COMMENT_PREFIX
 from .defaults import LINE_STATEMENT_PREFIX
@@ -50,17 +53,20 @@ from .utils import import_string
 from .utils import internalcode
 from .utils import LRUCache
 from .utils import missing
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
+
     from .bccache import BytecodeCache
     from .ext import Extension
     from .loaders import BaseLoader
-_env_bound = t.TypeVar('_env_bound', bound='Environment')
+
+_env_bound = t.TypeVar("_env_bound", bound="Environment")


+# for direct template usage we have up to ten living environments
 @lru_cache(maxsize=10)
-def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any
-    ) ->_env_bound:
+def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound:
     """Return a new spontaneous environment. A spontaneous environment
     is used for templates created directly rather than through an
     existing environment.
@@ -68,36 +74,75 @@ def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any
     :param cls: Environment class to create.
     :param args: Positional arguments passed to environment.
     """
-    pass
+    env = cls(*args)
+    env.shared = True
+    return env


-def create_cache(size: int) ->t.Optional[t.MutableMapping[t.Tuple[
-    'weakref.ref[t.Any]', str], 'Template']]:
+def create_cache(
+    size: int,
+) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]:
     """Return the cache class for the given size."""
-    pass
+    if size == 0:
+        return None

+    if size < 0:
+        return {}

-def copy_cache(cache: t.Optional[t.MutableMapping[t.Any, t.Any]]) ->t.Optional[
-    t.MutableMapping[t.Tuple['weakref.ref[t.Any]', str], 'Template']]:
+    return LRUCache(size)  # type: ignore
+
+
+def copy_cache(
+    cache: t.Optional[t.MutableMapping[t.Any, t.Any]],
+) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]:
     """Create an empty copy of the given cache."""
-    pass
+    if cache is None:
+        return None
+
+    if type(cache) is dict:  # noqa: E721
+        return {}

+    return LRUCache(cache.capacity)  # type: ignore

-def load_extensions(environment: 'Environment', extensions: t.Sequence[t.
-    Union[str, t.Type['Extension']]]) ->t.Dict[str, 'Extension']:
+
+def load_extensions(
+    environment: "Environment",
+    extensions: t.Sequence[t.Union[str, t.Type["Extension"]]],
+) -> t.Dict[str, "Extension"]:
     """Load the extensions from the list and bind it to the environment.
     Returns a dict of instantiated extensions.
     """
-    pass
+    result = {}
+
+    for extension in extensions:
+        if isinstance(extension, str):
+            extension = t.cast(t.Type["Extension"], import_string(extension))
+
+        result[extension.identifier] = extension(environment)

+    return result

-def _environment_config_check(environment: 'Environment') ->'Environment':
+
+def _environment_config_check(environment: "Environment") -> "Environment":
     """Perform a sanity check on the environment."""
-    pass
+    assert issubclass(
+        environment.undefined, Undefined
+    ), "'undefined' must be a subclass of 'jinja2.Undefined'."
+    assert (
+        environment.block_start_string
+        != environment.variable_start_string
+        != environment.comment_start_string
+    ), "block, variable and comment start strings must be different."
+    assert environment.newline_sequence in {
+        "\r",
+        "\r\n",
+        "\n",
+    }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'."
+    return environment


 class Environment:
-    """The core component of Jinja is the `Environment`.  It contains
+    r"""The core component of Jinja is the `Environment`.  It contains
     important shared variables like configuration, filters, tests,
     globals and others.  Instances of this class may be modified if
     they are not shared and if no template was loaded so far.
@@ -145,8 +190,8 @@ class Environment:
             from the start of a line to a block.  Defaults to `False`.

         `newline_sequence`
-            The sequence that starts a newline.  Must be one of ``'\\r'``,
-            ``'\\n'`` or ``'\\r\\n'``.  The default is ``'\\n'`` which is a
+            The sequence that starts a newline.  Must be one of ``'\r'``,
+            ``'\n'`` or ``'\r\n'``.  The default is ``'\n'`` which is a
             useful default for Linux and OS X systems as well as web
             applications.

@@ -217,31 +262,72 @@ class Environment:
             If set to true this enables async template execution which
             allows using async functions and generators.
     """
+
+    #: if this environment is sandboxed.  Modifying this variable won't make
+    #: the environment sandboxed though.  For a real sandboxed environment
+    #: have a look at jinja2.sandbox.  This flag alone controls the code
+    #: generation by the compiler.
     sandboxed = False
+
+    #: True if the environment is just an overlay
     overlayed = False
-    linked_to: t.Optional['Environment'] = None
+
+    #: the environment this environment is linked to if it is an overlay
+    linked_to: t.Optional["Environment"] = None
+
+    #: shared environments have this set to `True`.  A shared environment
+    #: must not be modified
     shared = False
-    code_generator_class: t.Type['CodeGenerator'] = CodeGenerator
-    concat = ''.join
+
+    #: the class that is used for code generation.  See
+    #: :class:`~jinja2.compiler.CodeGenerator` for more information.
+    code_generator_class: t.Type["CodeGenerator"] = CodeGenerator
+
+    concat = "".join
+
+    #: the context class that is used for templates.  See
+    #: :class:`~jinja2.runtime.Context` for more information.
     context_class: t.Type[Context] = Context
-    template_class: t.Type['Template']
-
-    def __init__(self, block_start_string: str=BLOCK_START_STRING,
-        block_end_string: str=BLOCK_END_STRING, variable_start_string: str=
-        VARIABLE_START_STRING, variable_end_string: str=VARIABLE_END_STRING,
-        comment_start_string: str=COMMENT_START_STRING, comment_end_string:
-        str=COMMENT_END_STRING, line_statement_prefix: t.Optional[str]=
-        LINE_STATEMENT_PREFIX, line_comment_prefix: t.Optional[str]=
-        LINE_COMMENT_PREFIX, trim_blocks: bool=TRIM_BLOCKS, lstrip_blocks:
-        bool=LSTRIP_BLOCKS, newline_sequence:
-        "te.Literal['\\n', '\\r\\n', '\\r']"=NEWLINE_SEQUENCE,
-        keep_trailing_newline: bool=KEEP_TRAILING_NEWLINE, extensions: t.
-        Sequence[t.Union[str, t.Type['Extension']]]=(), optimized: bool=
-        True, undefined: t.Type[Undefined]=Undefined, finalize: t.Optional[
-        t.Callable[..., t.Any]]=None, autoescape: t.Union[bool, t.Callable[
-        [t.Optional[str]], bool]]=False, loader: t.Optional['BaseLoader']=
-        None, cache_size: int=400, auto_reload: bool=True, bytecode_cache:
-        t.Optional['BytecodeCache']=None, enable_async: bool=False):
+
+    template_class: t.Type["Template"]
+
+    def __init__(
+        self,
+        block_start_string: str = BLOCK_START_STRING,
+        block_end_string: str = BLOCK_END_STRING,
+        variable_start_string: str = VARIABLE_START_STRING,
+        variable_end_string: str = VARIABLE_END_STRING,
+        comment_start_string: str = COMMENT_START_STRING,
+        comment_end_string: str = COMMENT_END_STRING,
+        line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX,
+        line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX,
+        trim_blocks: bool = TRIM_BLOCKS,
+        lstrip_blocks: bool = LSTRIP_BLOCKS,
+        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE,
+        keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE,
+        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (),
+        optimized: bool = True,
+        undefined: t.Type[Undefined] = Undefined,
+        finalize: t.Optional[t.Callable[..., t.Any]] = None,
+        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False,
+        loader: t.Optional["BaseLoader"] = None,
+        cache_size: int = 400,
+        auto_reload: bool = True,
+        bytecode_cache: t.Optional["BytecodeCache"] = None,
+        enable_async: bool = False,
+    ):
+        # !!Important notice!!
+        #   The constructor accepts quite a few arguments that should be
+        #   passed by keyword rather than position.  However it's important to
+        #   not change the order of arguments because it's used at least
+        #   internally in those cases:
+        #       -   spontaneous environments (i18n extension and Template)
+        #       -   unittests
+        #   If parameter changes are required only add parameters at the end
+        #   and don't change the arguments (or the defaults!) of the arguments
+        #   existing already.
+
+        # lexer / parser information
         self.block_start_string = block_start_string
         self.block_end_string = block_end_string
         self.variable_start_string = variable_start_string
@@ -254,52 +340,74 @@ class Environment:
         self.lstrip_blocks = lstrip_blocks
         self.newline_sequence = newline_sequence
         self.keep_trailing_newline = keep_trailing_newline
+
+        # runtime information
         self.undefined: t.Type[Undefined] = undefined
         self.optimized = optimized
         self.finalize = finalize
         self.autoescape = autoescape
+
+        # defaults
         self.filters = DEFAULT_FILTERS.copy()
         self.tests = DEFAULT_TESTS.copy()
         self.globals = DEFAULT_NAMESPACE.copy()
+
+        # set the loader provided
         self.loader = loader
         self.cache = create_cache(cache_size)
         self.bytecode_cache = bytecode_cache
         self.auto_reload = auto_reload
+
+        # configurable policies
         self.policies = DEFAULT_POLICIES.copy()
+
+        # load extensions
         self.extensions = load_extensions(self, extensions)
+
         self.is_async = enable_async
         _environment_config_check(self)

-    def add_extension(self, extension: t.Union[str, t.Type['Extension']]
-        ) ->None:
+    def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None:
         """Adds an extension after the environment was created.

         .. versionadded:: 2.5
         """
-        pass
+        self.extensions.update(load_extensions(self, [extension]))

-    def extend(self, **attributes: t.Any) ->None:
+    def extend(self, **attributes: t.Any) -> None:
         """Add the items to the instance of the environment if they do not exist
         yet.  This is used by :ref:`extensions <writing-extensions>` to register
         callbacks and configuration values without breaking inheritance.
         """
-        pass
-
-    def overlay(self, block_start_string: str=missing, block_end_string:
-        str=missing, variable_start_string: str=missing,
-        variable_end_string: str=missing, comment_start_string: str=missing,
-        comment_end_string: str=missing, line_statement_prefix: t.Optional[
-        str]=missing, line_comment_prefix: t.Optional[str]=missing,
-        trim_blocks: bool=missing, lstrip_blocks: bool=missing,
-        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']"=missing,
-        keep_trailing_newline: bool=missing, extensions: t.Sequence[t.Union
-        [str, t.Type['Extension']]]=missing, optimized: bool=missing,
-        undefined: t.Type[Undefined]=missing, finalize: t.Optional[t.
-        Callable[..., t.Any]]=missing, autoescape: t.Union[bool, t.Callable
-        [[t.Optional[str]], bool]]=missing, loader: t.Optional['BaseLoader'
-        ]=missing, cache_size: int=missing, auto_reload: bool=missing,
-        bytecode_cache: t.Optional['BytecodeCache']=missing, enable_async:
-        bool=False) ->'Environment':
+        for key, value in attributes.items():
+            if not hasattr(self, key):
+                setattr(self, key, value)
+
+    def overlay(
+        self,
+        block_start_string: str = missing,
+        block_end_string: str = missing,
+        variable_start_string: str = missing,
+        variable_end_string: str = missing,
+        comment_start_string: str = missing,
+        comment_end_string: str = missing,
+        line_statement_prefix: t.Optional[str] = missing,
+        line_comment_prefix: t.Optional[str] = missing,
+        trim_blocks: bool = missing,
+        lstrip_blocks: bool = missing,
+        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing,
+        keep_trailing_newline: bool = missing,
+        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing,
+        optimized: bool = missing,
+        undefined: t.Type[Undefined] = missing,
+        finalize: t.Optional[t.Callable[..., t.Any]] = missing,
+        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing,
+        loader: t.Optional["BaseLoader"] = missing,
+        cache_size: int = missing,
+        auto_reload: bool = missing,
+        bytecode_cache: t.Optional["BytecodeCache"] = missing,
+        enable_async: bool = False,
+    ) -> "Environment":
         """Create a new overlay environment that shares all the data with the
         current environment except for cache and the overridden attributes.
         Extensions cannot be removed for an overlayed environment.  An overlayed
@@ -315,32 +423,138 @@ class Environment:
             Added the ``newline_sequence``,, ``keep_trailing_newline``,
             and ``enable_async`` parameters to match ``__init__``.
         """
-        pass
+        args = dict(locals())
+        del args["self"], args["cache_size"], args["extensions"], args["enable_async"]
+
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.overlayed = True
+        rv.linked_to = self
+
+        for key, value in args.items():
+            if value is not missing:
+                setattr(rv, key, value)
+
+        if cache_size is not missing:
+            rv.cache = create_cache(cache_size)
+        else:
+            rv.cache = copy_cache(self.cache)
+
+        rv.extensions = {}
+        for key, value in self.extensions.items():
+            rv.extensions[key] = value.bind(rv)
+        if extensions is not missing:
+            rv.extensions.update(load_extensions(rv, extensions))
+
+        if enable_async is not missing:
+            rv.is_async = enable_async
+
+        return _environment_config_check(rv)

     @property
-    def lexer(self) ->Lexer:
+    def lexer(self) -> Lexer:
         """The lexer for this environment."""
-        pass
+        return get_lexer(self)

-    def iter_extensions(self) ->t.Iterator['Extension']:
+    def iter_extensions(self) -> t.Iterator["Extension"]:
         """Iterates over the extensions by priority."""
-        pass
+        return iter(sorted(self.extensions.values(), key=lambda x: x.priority))

-    def getitem(self, obj: t.Any, argument: t.Union[str, t.Any]) ->t.Union[
-        t.Any, Undefined]:
+    def getitem(
+        self, obj: t.Any, argument: t.Union[str, t.Any]
+    ) -> t.Union[t.Any, Undefined]:
         """Get an item or attribute of an object but prefer the item."""
-        pass
-
-    def getattr(self, obj: t.Any, attribute: str) ->t.Any:
+        try:
+            return obj[argument]
+        except (AttributeError, TypeError, LookupError):
+            if isinstance(argument, str):
+                try:
+                    attr = str(argument)
+                except Exception:
+                    pass
+                else:
+                    try:
+                        return getattr(obj, attr)
+                    except AttributeError:
+                        pass
+            return self.undefined(obj=obj, name=argument)
+
+    def getattr(self, obj: t.Any, attribute: str) -> t.Any:
         """Get an item or attribute of an object but prefer the attribute.
         Unlike :meth:`getitem` the attribute *must* be a string.
         """
-        pass
-
-    def call_filter(self, name: str, value: t.Any, args: t.Optional[t.
-        Sequence[t.Any]]=None, kwargs: t.Optional[t.Mapping[str, t.Any]]=
-        None, context: t.Optional[Context]=None, eval_ctx: t.Optional[
-        EvalContext]=None) ->t.Any:
+        try:
+            return getattr(obj, attribute)
+        except AttributeError:
+            pass
+        try:
+            return obj[attribute]
+        except (TypeError, LookupError, AttributeError):
+            return self.undefined(obj=obj, name=attribute)
+
+    def _filter_test_common(
+        self,
+        name: t.Union[str, Undefined],
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]],
+        kwargs: t.Optional[t.Mapping[str, t.Any]],
+        context: t.Optional[Context],
+        eval_ctx: t.Optional[EvalContext],
+        is_filter: bool,
+    ) -> t.Any:
+        if is_filter:
+            env_map = self.filters
+            type_name = "filter"
+        else:
+            env_map = self.tests
+            type_name = "test"
+
+        func = env_map.get(name)  # type: ignore
+
+        if func is None:
+            msg = f"No {type_name} named {name!r}."
+
+            if isinstance(name, Undefined):
+                try:
+                    name._fail_with_undefined_error()
+                except Exception as e:
+                    msg = f"{msg} ({e}; did you forget to quote the callable name?)"
+
+            raise TemplateRuntimeError(msg)
+
+        args = [value, *(args if args is not None else ())]
+        kwargs = kwargs if kwargs is not None else {}
+        pass_arg = _PassArg.from_obj(func)
+
+        if pass_arg is _PassArg.context:
+            if context is None:
+                raise TemplateRuntimeError(
+                    f"Attempted to invoke a context {type_name} without context."
+                )
+
+            args.insert(0, context)
+        elif pass_arg is _PassArg.eval_context:
+            if eval_ctx is None:
+                if context is not None:
+                    eval_ctx = context.eval_ctx
+                else:
+                    eval_ctx = EvalContext(self)
+
+            args.insert(0, eval_ctx)
+        elif pass_arg is _PassArg.environment:
+            args.insert(0, self)
+
+        return func(*args, **kwargs)
+
+    def call_filter(
+        self,
+        name: str,
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]] = None,
+        kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+        context: t.Optional[Context] = None,
+        eval_ctx: t.Optional[EvalContext] = None,
+    ) -> t.Any:
         """Invoke a filter on a value the same way the compiler does.

         This might return a coroutine if the filter is running from an
@@ -349,12 +563,19 @@ class Environment:

         .. versionadded:: 2.7
         """
-        pass
-
-    def call_test(self, name: str, value: t.Any, args: t.Optional[t.
-        Sequence[t.Any]]=None, kwargs: t.Optional[t.Mapping[str, t.Any]]=
-        None, context: t.Optional[Context]=None, eval_ctx: t.Optional[
-        EvalContext]=None) ->t.Any:
+        return self._filter_test_common(
+            name, value, args, kwargs, context, eval_ctx, True
+        )
+
+    def call_test(
+        self,
+        name: str,
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]] = None,
+        kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+        context: t.Optional[Context] = None,
+        eval_ctx: t.Optional[EvalContext] = None,
+    ) -> t.Any:
         """Invoke a test on a value the same way the compiler does.

         This might return a coroutine if the test is running from an
@@ -367,11 +588,17 @@ class Environment:

         .. versionadded:: 2.7
         """
-        pass
+        return self._filter_test_common(
+            name, value, args, kwargs, context, eval_ctx, False
+        )

     @internalcode
-    def parse(self, source: str, name: t.Optional[str]=None, filename: t.
-        Optional[str]=None) ->nodes.Template:
+    def parse(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> nodes.Template:
         """Parse the sourcecode and return the abstract syntax tree.  This
         tree of nodes is used by the compiler to convert the template into
         executable source- or bytecode.  This is useful for debugging or to
@@ -380,15 +607,23 @@ class Environment:
         If you are :ref:`developing Jinja extensions <writing-extensions>`
         this gives you a good overview of the node tree generated.
         """
-        pass
-
-    def _parse(self, source: str, name: t.Optional[str], filename: t.
-        Optional[str]) ->nodes.Template:
+        try:
+            return self._parse(source, name, filename)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source)
+
+    def _parse(
+        self, source: str, name: t.Optional[str], filename: t.Optional[str]
+    ) -> nodes.Template:
         """Internal parsing function used by `parse` and `compile`."""
-        pass
-
-    def lex(self, source: str, name: t.Optional[str]=None, filename: t.
-        Optional[str]=None) ->t.Iterator[t.Tuple[int, str, str]]:
+        return Parser(self, source, name, filename).parse()
+
+    def lex(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> t.Iterator[t.Tuple[int, str, str]]:
         """Lex the given sourcecode and return a generator that yields
         tokens as tuples in the form ``(lineno, token_type, value)``.
         This can be useful for :ref:`extension development <writing-extensions>`
@@ -398,44 +633,107 @@ class Environment:
         of the extensions to be applied you have to filter source through
         the :meth:`preprocess` method.
         """
-        pass
-
-    def preprocess(self, source: str, name: t.Optional[str]=None, filename:
-        t.Optional[str]=None) ->str:
+        source = str(source)
+        try:
+            return self.lexer.tokeniter(source, name, filename)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source)
+
+    def preprocess(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> str:
         """Preprocesses the source with all extensions.  This is automatically
         called for all parsing and compiling methods but *not* for :meth:`lex`
         because there you usually only want the actual source tokenized.
         """
-        pass
-
-    def _tokenize(self, source: str, name: t.Optional[str], filename: t.
-        Optional[str]=None, state: t.Optional[str]=None) ->TokenStream:
+        return reduce(
+            lambda s, e: e.preprocess(s, name, filename),
+            self.iter_extensions(),
+            str(source),
+        )
+
+    def _tokenize(
+        self,
+        source: str,
+        name: t.Optional[str],
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> TokenStream:
         """Called by the parser to do the preprocessing and filtering
         for all the extensions.  Returns a :class:`~jinja2.lexer.TokenStream`.
         """
-        pass
+        source = self.preprocess(source, name, filename)
+        stream = self.lexer.tokenize(source, name, filename, state)

-    def _generate(self, source: nodes.Template, name: t.Optional[str],
-        filename: t.Optional[str], defer_init: bool=False) ->str:
+        for ext in self.iter_extensions():
+            stream = ext.filter_stream(stream)  # type: ignore
+
+            if not isinstance(stream, TokenStream):
+                stream = TokenStream(stream, name, filename)
+
+        return stream
+
+    def _generate(
+        self,
+        source: nodes.Template,
+        name: t.Optional[str],
+        filename: t.Optional[str],
+        defer_init: bool = False,
+    ) -> str:
         """Internal hook that can be overridden to hook a different generate
         method in.

         .. versionadded:: 2.5
         """
-        pass
-
-    def _compile(self, source: str, filename: str) ->CodeType:
+        return generate(  # type: ignore
+            source,
+            self,
+            name,
+            filename,
+            defer_init=defer_init,
+            optimized=self.optimized,
+        )
+
+    def _compile(self, source: str, filename: str) -> CodeType:
         """Internal hook that can be overridden to hook a different compile
         method in.

         .. versionadded:: 2.5
         """
-        pass
+        return compile(source, filename, "exec")
+
+    @typing.overload
+    def compile(  # type: ignore
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: "te.Literal[False]" = False,
+        defer_init: bool = False,
+    ) -> CodeType: ...
+
+    @typing.overload
+    def compile(
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: "te.Literal[True]" = ...,
+        defer_init: bool = False,
+    ) -> str: ...

     @internalcode
-    def compile(self, source: t.Union[str, nodes.Template], name: t.
-        Optional[str]=None, filename: t.Optional[str]=None, raw: bool=False,
-        defer_init: bool=False) ->t.Union[str, CodeType]:
+    def compile(
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: bool = False,
+        defer_init: bool = False,
+    ) -> t.Union[str, CodeType]:
         """Compile a node or template source code.  The `name` parameter is
         the load name of the template after it was joined using
         :meth:`join_path` if necessary, not the filename on the file system.
@@ -455,10 +753,23 @@ class Environment:
         .. versionadded:: 2.4
            `defer_init` parameter added.
         """
-        pass
-
-    def compile_expression(self, source: str, undefined_to_none: bool=True
-        ) ->'TemplateExpression':
+        source_hint = None
+        try:
+            if isinstance(source, str):
+                source_hint = source
+                source = self._parse(source, name, filename)
+            source = self._generate(source, name, filename, defer_init=defer_init)
+            if raw:
+                return source
+            if filename is None:
+                filename = "<template>"
+            return self._compile(source, filename)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source_hint)
+
+    def compile_expression(
+        self, source: str, undefined_to_none: bool = True
+    ) -> "TemplateExpression":
         """A handy helper method that returns a callable that accepts keyword
         arguments that appear as variables in the expression.  If called it
         returns the result of the expression.
@@ -486,13 +797,30 @@ class Environment:

         .. versionadded:: 2.1
         """
-        pass
-
-    def compile_templates(self, target: t.Union[str, 'os.PathLike[str]'],
-        extensions: t.Optional[t.Collection[str]]=None, filter_func: t.
-        Optional[t.Callable[[str], bool]]=None, zip: t.Optional[str]=
-        'deflated', log_function: t.Optional[t.Callable[[str], None]]=None,
-        ignore_errors: bool=True) ->None:
+        parser = Parser(self, source, state="variable")
+        try:
+            expr = parser.parse_expression()
+            if not parser.stream.eos:
+                raise TemplateSyntaxError(
+                    "chunk after expression", parser.stream.current.lineno, None, None
+                )
+            expr.set_environment(self)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source)
+
+        body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)]
+        template = self.from_string(nodes.Template(body, lineno=1))
+        return TemplateExpression(template, undefined_to_none)
+
+    def compile_templates(
+        self,
+        target: t.Union[str, "os.PathLike[str]"],
+        extensions: t.Optional[t.Collection[str]] = None,
+        filter_func: t.Optional[t.Callable[[str], bool]] = None,
+        zip: t.Optional[str] = "deflated",
+        log_function: t.Optional[t.Callable[[str], None]] = None,
+        ignore_errors: bool = True,
+    ) -> None:
         """Finds all the templates the loader can find, compiles them
         and stores them in `target`.  If `zip` is `None`, instead of in a
         zipfile, the templates will be stored in a directory.
@@ -510,10 +838,66 @@ class Environment:

         .. versionadded:: 2.4
         """
-        pass
-
-    def list_templates(self, extensions: t.Optional[t.Collection[str]]=None,
-        filter_func: t.Optional[t.Callable[[str], bool]]=None) ->t.List[str]:
+        from .loaders import ModuleLoader
+
+        if log_function is None:
+
+            def log_function(x: str) -> None:
+                pass
+
+        assert log_function is not None
+        assert self.loader is not None, "No loader configured."
+
+        def write_file(filename: str, data: str) -> None:
+            if zip:
+                info = ZipInfo(filename)
+                info.external_attr = 0o755 << 16
+                zip_file.writestr(info, data)
+            else:
+                with open(os.path.join(target, filename), "wb") as f:
+                    f.write(data.encode("utf8"))
+
+        if zip is not None:
+            from zipfile import ZIP_DEFLATED
+            from zipfile import ZIP_STORED
+            from zipfile import ZipFile
+            from zipfile import ZipInfo
+
+            zip_file = ZipFile(
+                target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip]
+            )
+            log_function(f"Compiling into Zip archive {target!r}")
+        else:
+            if not os.path.isdir(target):
+                os.makedirs(target)
+            log_function(f"Compiling into folder {target!r}")
+
+        try:
+            for name in self.list_templates(extensions, filter_func):
+                source, filename, _ = self.loader.get_source(self, name)
+                try:
+                    code = self.compile(source, name, filename, True, True)
+                except TemplateSyntaxError as e:
+                    if not ignore_errors:
+                        raise
+                    log_function(f'Could not compile "{name}": {e}')
+                    continue
+
+                filename = ModuleLoader.get_module_filename(name)
+
+                write_file(filename, code)
+                log_function(f'Compiled "{name}" as {filename}')
+        finally:
+            if zip:
+                zip_file.close()
+
+        log_function("Finished compiling templates")
+
+    def list_templates(
+        self,
+        extensions: t.Optional[t.Collection[str]] = None,
+        filter_func: t.Optional[t.Callable[[str], bool]] = None,
+    ) -> t.List[str]:
         """Returns a list of templates for this environment.  This requires
         that the loader supports the loader's
         :meth:`~BaseLoader.list_templates` method.
@@ -529,15 +913,32 @@ class Environment:

         .. versionadded:: 2.4
         """
-        pass
+        assert self.loader is not None, "No loader configured."
+        names = self.loader.list_templates()
+
+        if extensions is not None:
+            if filter_func is not None:
+                raise TypeError(
+                    "either extensions or filter_func can be passed, but not both"
+                )
+
+            def filter_func(x: str) -> bool:
+                return "." in x and x.rsplit(".", 1)[1] in extensions
+
+        if filter_func is not None:
+            names = [name for name in names if filter_func(name)]

-    def handle_exception(self, source: t.Optional[str]=None) ->'te.NoReturn':
+        return names
+
+    def handle_exception(self, source: t.Optional[str] = None) -> "te.NoReturn":
         """Exception handling helper.  This is used internally to either raise
         rewritten exceptions or return a rendered traceback for the template.
         """
-        pass
+        from .debug import rewrite_traceback_stack
+
+        raise rewrite_traceback_stack(source=source)

-    def join_path(self, template: str, parent: str) ->str:
+    def join_path(self, template: str, parent: str) -> str:
         """Join a template with the parent.  By default all the lookups are
         relative to the loader root so this method returns the `template`
         parameter unchanged, but if the paths should be relative to the
@@ -547,12 +948,40 @@ class Environment:
         Subclasses may override this method and implement template path
         joining here.
         """
-        pass
+        return template
+
+    @internalcode
+    def _load_template(
+        self, name: str, globals: t.Optional[t.MutableMapping[str, t.Any]]
+    ) -> "Template":
+        if self.loader is None:
+            raise TypeError("no loader for this environment specified")
+        cache_key = (weakref.ref(self.loader), name)
+        if self.cache is not None:
+            template = self.cache.get(cache_key)
+            if template is not None and (
+                not self.auto_reload or template.is_up_to_date
+            ):
+                # template.globals is a ChainMap, modifying it will only
+                # affect the template, not the environment globals.
+                if globals:
+                    template.globals.update(globals)
+
+                return template
+
+        template = self.loader.load(self, name, self.make_globals(globals))
+
+        if self.cache is not None:
+            self.cache[cache_key] = template
+        return template

     @internalcode
-    def get_template(self, name: t.Union[str, 'Template'], parent: t.
-        Optional[str]=None, globals: t.Optional[t.MutableMapping[str, t.Any
-        ]]=None) ->'Template':
+    def get_template(
+        self,
+        name: t.Union[str, "Template"],
+        parent: t.Optional[str] = None,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
         """Load a template by name with :attr:`loader` and return a
         :class:`Template`. If the template does not exist a
         :exc:`TemplateNotFound` exception is raised.
@@ -576,12 +1005,20 @@ class Environment:
             If ``name`` is a :class:`Template` object it is returned
             unchanged.
         """
-        pass
+        if isinstance(name, Template):
+            return name
+        if parent is not None:
+            name = self.join_path(name, parent)
+
+        return self._load_template(name, globals)

     @internalcode
-    def select_template(self, names: t.Iterable[t.Union[str, 'Template']],
-        parent: t.Optional[str]=None, globals: t.Optional[t.MutableMapping[
-        str, t.Any]]=None) ->'Template':
+    def select_template(
+        self,
+        names: t.Iterable[t.Union[str, "Template"]],
+        parent: t.Optional[str] = None,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
         """Like :meth:`get_template`, but tries loading multiple names.
         If none of the names can be loaded a :exc:`TemplatesNotFound`
         exception is raised.
@@ -610,23 +1047,51 @@ class Environment:

         .. versionadded:: 2.3
         """
-        pass
+        if isinstance(names, Undefined):
+            names._fail_with_undefined_error()
+
+        if not names:
+            raise TemplatesNotFound(
+                message="Tried to select from an empty list of templates."
+            )
+
+        for name in names:
+            if isinstance(name, Template):
+                return name
+            if parent is not None:
+                name = self.join_path(name, parent)
+            try:
+                return self._load_template(name, globals)
+            except (TemplateNotFound, UndefinedError):
+                pass
+        raise TemplatesNotFound(names)  # type: ignore

     @internalcode
-    def get_or_select_template(self, template_name_or_list: t.Union[str,
-        'Template', t.List[t.Union[str, 'Template']]], parent: t.Optional[
-        str]=None, globals: t.Optional[t.MutableMapping[str, t.Any]]=None
-        ) ->'Template':
+    def get_or_select_template(
+        self,
+        template_name_or_list: t.Union[
+            str, "Template", t.List[t.Union[str, "Template"]]
+        ],
+        parent: t.Optional[str] = None,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
         """Use :meth:`select_template` if an iterable of template names
         is given, or :meth:`get_template` if one name is given.

         .. versionadded:: 2.3
         """
-        pass
-
-    def from_string(self, source: t.Union[str, nodes.Template], globals: t.
-        Optional[t.MutableMapping[str, t.Any]]=None, template_class: t.
-        Optional[t.Type['Template']]=None) ->'Template':
+        if isinstance(template_name_or_list, (str, Undefined)):
+            return self.get_template(template_name_or_list, parent, globals)
+        elif isinstance(template_name_or_list, Template):
+            return template_name_or_list
+        return self.select_template(template_name_or_list, parent, globals)
+
+    def from_string(
+        self,
+        source: t.Union[str, nodes.Template],
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+        template_class: t.Optional[t.Type["Template"]] = None,
+    ) -> "Template":
         """Load a template from a source string without using
         :attr:`loader`.

@@ -638,10 +1103,13 @@ class Environment:
         :param template_class: Return an instance of this
             :class:`Template` class.
         """
-        pass
+        gs = self.make_globals(globals)
+        cls = template_class or self.template_class
+        return cls.from_code(self, self.compile(source), gs, None)

-    def make_globals(self, d: t.Optional[t.MutableMapping[str, t.Any]]
-        ) ->t.MutableMapping[str, t.Any]:
+    def make_globals(
+        self, d: t.Optional[t.MutableMapping[str, t.Any]]
+    ) -> t.MutableMapping[str, t.Any]:
         """Make the globals map for a template. Any given template
         globals overlay the environment :attr:`globals`.

@@ -656,7 +1124,10 @@ class Environment:
             Use :class:`collections.ChainMap` to always prevent mutating
             environment globals.
         """
-        pass
+        if d is None:
+            d = {}
+
+        return ChainMap(d, self.globals)


 class Template:
@@ -675,62 +1146,130 @@ class Template:
     A template object should be considered immutable. Modifications on
     the object are not supported.
     """
+
+    #: Type of environment to create when creating a template directly
+    #: rather than through an existing environment.
     environment_class: t.Type[Environment] = Environment
+
     environment: Environment
     globals: t.MutableMapping[str, t.Any]
     name: t.Optional[str]
     filename: t.Optional[str]
     blocks: t.Dict[str, t.Callable[[Context], t.Iterator[str]]]
     root_render_func: t.Callable[[Context], t.Iterator[str]]
-    _module: t.Optional['TemplateModule']
+    _module: t.Optional["TemplateModule"]
     _debug_info: str
     _uptodate: t.Optional[t.Callable[[], bool]]

-    def __new__(cls, source: t.Union[str, nodes.Template],
-        block_start_string: str=BLOCK_START_STRING, block_end_string: str=
-        BLOCK_END_STRING, variable_start_string: str=VARIABLE_START_STRING,
-        variable_end_string: str=VARIABLE_END_STRING, comment_start_string:
-        str=COMMENT_START_STRING, comment_end_string: str=
-        COMMENT_END_STRING, line_statement_prefix: t.Optional[str]=
-        LINE_STATEMENT_PREFIX, line_comment_prefix: t.Optional[str]=
-        LINE_COMMENT_PREFIX, trim_blocks: bool=TRIM_BLOCKS, lstrip_blocks:
-        bool=LSTRIP_BLOCKS, newline_sequence:
-        "te.Literal['\\n', '\\r\\n', '\\r']"=NEWLINE_SEQUENCE,
-        keep_trailing_newline: bool=KEEP_TRAILING_NEWLINE, extensions: t.
-        Sequence[t.Union[str, t.Type['Extension']]]=(), optimized: bool=
-        True, undefined: t.Type[Undefined]=Undefined, finalize: t.Optional[
-        t.Callable[..., t.Any]]=None, autoescape: t.Union[bool, t.Callable[
-        [t.Optional[str]], bool]]=False, enable_async: bool=False) ->t.Any:
-        env = get_spontaneous_environment(cls.environment_class,
-            block_start_string, block_end_string, variable_start_string,
-            variable_end_string, comment_start_string, comment_end_string,
-            line_statement_prefix, line_comment_prefix, trim_blocks,
-            lstrip_blocks, newline_sequence, keep_trailing_newline,
-            frozenset(extensions), optimized, undefined, finalize,
-            autoescape, None, 0, False, None, enable_async)
+    def __new__(
+        cls,
+        source: t.Union[str, nodes.Template],
+        block_start_string: str = BLOCK_START_STRING,
+        block_end_string: str = BLOCK_END_STRING,
+        variable_start_string: str = VARIABLE_START_STRING,
+        variable_end_string: str = VARIABLE_END_STRING,
+        comment_start_string: str = COMMENT_START_STRING,
+        comment_end_string: str = COMMENT_END_STRING,
+        line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX,
+        line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX,
+        trim_blocks: bool = TRIM_BLOCKS,
+        lstrip_blocks: bool = LSTRIP_BLOCKS,
+        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE,
+        keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE,
+        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (),
+        optimized: bool = True,
+        undefined: t.Type[Undefined] = Undefined,
+        finalize: t.Optional[t.Callable[..., t.Any]] = None,
+        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False,
+        enable_async: bool = False,
+    ) -> t.Any:  # it returns a `Template`, but this breaks the sphinx build...
+        env = get_spontaneous_environment(
+            cls.environment_class,  # type: ignore
+            block_start_string,
+            block_end_string,
+            variable_start_string,
+            variable_end_string,
+            comment_start_string,
+            comment_end_string,
+            line_statement_prefix,
+            line_comment_prefix,
+            trim_blocks,
+            lstrip_blocks,
+            newline_sequence,
+            keep_trailing_newline,
+            frozenset(extensions),
+            optimized,
+            undefined,  # type: ignore
+            finalize,
+            autoescape,
+            None,
+            0,
+            False,
+            None,
+            enable_async,
+        )
         return env.from_string(source, template_class=cls)

     @classmethod
-    def from_code(cls, environment: Environment, code: CodeType, globals: t
-        .MutableMapping[str, t.Any], uptodate: t.Optional[t.Callable[[],
-        bool]]=None) ->'Template':
+    def from_code(
+        cls,
+        environment: Environment,
+        code: CodeType,
+        globals: t.MutableMapping[str, t.Any],
+        uptodate: t.Optional[t.Callable[[], bool]] = None,
+    ) -> "Template":
         """Creates a template object from compiled code and the globals.  This
         is used by the loaders and environment to create a template object.
         """
-        pass
+        namespace = {"environment": environment, "__file__": code.co_filename}
+        exec(code, namespace)
+        rv = cls._from_namespace(environment, namespace, globals)
+        rv._uptodate = uptodate
+        return rv

     @classmethod
-    def from_module_dict(cls, environment: Environment, module_dict: t.
-        MutableMapping[str, t.Any], globals: t.MutableMapping[str, t.Any]
-        ) ->'Template':
+    def from_module_dict(
+        cls,
+        environment: Environment,
+        module_dict: t.MutableMapping[str, t.Any],
+        globals: t.MutableMapping[str, t.Any],
+    ) -> "Template":
         """Creates a template object from a module.  This is used by the
         module loader to create a template object.

         .. versionadded:: 2.4
         """
-        pass
+        return cls._from_namespace(environment, module_dict, globals)

-    def render(self, *args: t.Any, **kwargs: t.Any) ->str:
+    @classmethod
+    def _from_namespace(
+        cls,
+        environment: Environment,
+        namespace: t.MutableMapping[str, t.Any],
+        globals: t.MutableMapping[str, t.Any],
+    ) -> "Template":
+        t: "Template" = object.__new__(cls)
+        t.environment = environment
+        t.globals = globals
+        t.name = namespace["name"]
+        t.filename = namespace["__file__"]
+        t.blocks = namespace["blocks"]
+
+        # render function and module
+        t.root_render_func = namespace["root"]
+        t._module = None
+
+        # debug and loader helpers
+        t._debug_info = namespace["debug_info"]
+        t._uptodate = None
+
+        # store the reference
+        namespace["environment"] = environment
+        namespace["__jinja_template__"] = t
+
+        return t
+
+    def render(self, *args: t.Any, **kwargs: t.Any) -> str:
         """This method accepts the same arguments as the `dict` constructor:
         A dict, a dict subclass or some keyword arguments.  If no arguments
         are given the context will be empty.  These two calls do the same::
@@ -740,9 +1279,31 @@ class Template:

         This will return the rendered template as a string.
         """
-        pass
+        if self.environment.is_async:
+            import asyncio
+
+            close = False

-    async def render_async(self, *args: t.Any, **kwargs: t.Any) ->str:
+            try:
+                loop = asyncio.get_running_loop()
+            except RuntimeError:
+                loop = asyncio.new_event_loop()
+                close = True
+
+            try:
+                return loop.run_until_complete(self.render_async(*args, **kwargs))
+            finally:
+                if close:
+                    loop.close()
+
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            return self.environment.concat(self.root_render_func(ctx))  # type: ignore
+        except Exception:
+            self.environment.handle_exception()
+
+    async def render_async(self, *args: t.Any, **kwargs: t.Any) -> str:
         """This works similar to :meth:`render` but returns a coroutine
         that when awaited returns the entire rendered template string.  This
         requires the async feature to be enabled.
@@ -751,15 +1312,27 @@ class Template:

             await template.render_async(knights='that say nih; asynchronously')
         """
-        pass
+        if not self.environment.is_async:
+            raise RuntimeError(
+                "The environment was not created with async mode enabled."
+            )
+
+        ctx = self.new_context(dict(*args, **kwargs))

-    def stream(self, *args: t.Any, **kwargs: t.Any) ->'TemplateStream':
+        try:
+            return self.environment.concat(  # type: ignore
+                [n async for n in self.root_render_func(ctx)]  # type: ignore
+            )
+        except Exception:
+            return self.environment.handle_exception()
+
+    def stream(self, *args: t.Any, **kwargs: t.Any) -> "TemplateStream":
         """Works exactly like :meth:`generate` but returns a
         :class:`TemplateStream`.
         """
-        pass
+        return TemplateStream(self.generate(*args, **kwargs))

-    def generate(self, *args: t.Any, **kwargs: t.Any) ->t.Iterator[str]:
+    def generate(self, *args: t.Any, **kwargs: t.Any) -> t.Iterator[str]:
         """For very large templates it can be useful to not render the whole
         template at once but evaluate each statement after another and yield
         piece for piece.  This method basically does exactly that and returns
@@ -767,17 +1340,47 @@ class Template:

         It accepts the same arguments as :meth:`render`.
         """
-        pass
+        if self.environment.is_async:
+            import asyncio
+
+            async def to_list() -> t.List[str]:
+                return [x async for x in self.generate_async(*args, **kwargs)]
+
+            yield from asyncio.run(to_list())
+            return

-    async def generate_async(self, *args: t.Any, **kwargs: t.Any
-        ) ->t.AsyncIterator[str]:
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            yield from self.root_render_func(ctx)
+        except Exception:
+            yield self.environment.handle_exception()
+
+    async def generate_async(
+        self, *args: t.Any, **kwargs: t.Any
+    ) -> t.AsyncIterator[str]:
         """An async version of :meth:`generate`.  Works very similarly but
         returns an async iterator instead.
         """
-        pass
-
-    def new_context(self, vars: t.Optional[t.Dict[str, t.Any]]=None, shared:
-        bool=False, locals: t.Optional[t.Mapping[str, t.Any]]=None) ->Context:
+        if not self.environment.is_async:
+            raise RuntimeError(
+                "The environment was not created with async mode enabled."
+            )
+
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            async for event in self.root_render_func(ctx):  # type: ignore
+                yield event
+        except Exception:
+            yield self.environment.handle_exception()
+
+    def new_context(
+        self,
+        vars: t.Optional[t.Dict[str, t.Any]] = None,
+        shared: bool = False,
+        locals: t.Optional[t.Mapping[str, t.Any]] = None,
+    ) -> Context:
         """Create a new :class:`Context` for this template.  The vars
         provided will be passed to the template.  Per default the globals
         are added to the context.  If shared is set to `True` the data
@@ -785,32 +1388,45 @@ class Template:

         `locals` can be a dict of local variables for internal usage.
         """
-        pass
-
-    def make_module(self, vars: t.Optional[t.Dict[str, t.Any]]=None, shared:
-        bool=False, locals: t.Optional[t.Mapping[str, t.Any]]=None
-        ) ->'TemplateModule':
+        return new_context(
+            self.environment, self.name, self.blocks, vars, shared, self.globals, locals
+        )
+
+    def make_module(
+        self,
+        vars: t.Optional[t.Dict[str, t.Any]] = None,
+        shared: bool = False,
+        locals: t.Optional[t.Mapping[str, t.Any]] = None,
+    ) -> "TemplateModule":
         """This method works like the :attr:`module` attribute when called
         without arguments but it will evaluate the template on every call
         rather than caching it.  It's also possible to provide
         a dict which is then used as context.  The arguments are the same
         as for the :meth:`new_context` method.
         """
-        pass
-
-    async def make_module_async(self, vars: t.Optional[t.Dict[str, t.Any]]=
-        None, shared: bool=False, locals: t.Optional[t.Mapping[str, t.Any]]
-        =None) ->'TemplateModule':
+        ctx = self.new_context(vars, shared, locals)
+        return TemplateModule(self, ctx)
+
+    async def make_module_async(
+        self,
+        vars: t.Optional[t.Dict[str, t.Any]] = None,
+        shared: bool = False,
+        locals: t.Optional[t.Mapping[str, t.Any]] = None,
+    ) -> "TemplateModule":
         """As template module creation can invoke template code for
         asynchronous executions this method must be used instead of the
         normal :meth:`make_module` one.  Likewise the module attribute
         becomes unavailable in async mode.
         """
-        pass
+        ctx = self.new_context(vars, shared, locals)
+        return TemplateModule(
+            self,
+            ctx,
+            [x async for x in self.root_render_func(ctx)],  # type: ignore
+        )

     @internalcode
-    def _get_default_module(self, ctx: t.Optional[Context]=None
-        ) ->'TemplateModule':
+    def _get_default_module(self, ctx: t.Optional[Context] = None) -> "TemplateModule":
         """If a context is passed in, this means that the template was
         imported. Imported templates have access to the current
         template's globals by default, but they can only be accessed via
@@ -822,10 +1438,36 @@ class Template:
         cached because the template can be imported elsewhere, and it
         should have access to only the current template's globals.
         """
-        pass
+        if self.environment.is_async:
+            raise RuntimeError("Module is not available in async mode.")
+
+        if ctx is not None:
+            keys = ctx.globals_keys - self.globals.keys()
+
+            if keys:
+                return self.make_module({k: ctx.parent[k] for k in keys})
+
+        if self._module is None:
+            self._module = self.make_module()
+
+        return self._module
+
+    async def _get_default_module_async(
+        self, ctx: t.Optional[Context] = None
+    ) -> "TemplateModule":
+        if ctx is not None:
+            keys = ctx.globals_keys - self.globals.keys()
+
+            if keys:
+                return await self.make_module_async({k: ctx.parent[k] for k in keys})
+
+        if self._module is None:
+            self._module = await self.make_module_async()
+
+        return self._module

     @property
-    def module(self) ->'TemplateModule':
+    def module(self) -> "TemplateModule":
         """The template as module.  This is used for imports in the
         template runtime but is also useful if one wants to access
         exported template variables from the Python layer:
@@ -838,30 +1480,41 @@ class Template:

         This attribute is not available if async mode is enabled.
         """
-        pass
+        return self._get_default_module()

-    def get_corresponding_lineno(self, lineno: int) ->int:
+    def get_corresponding_lineno(self, lineno: int) -> int:
         """Return the source line number of a line number in the
         generated bytecode as they are not in sync.
         """
-        pass
+        for template_line, code_line in reversed(self.debug_info):
+            if code_line <= lineno:
+                return template_line
+        return 1

     @property
-    def is_up_to_date(self) ->bool:
+    def is_up_to_date(self) -> bool:
         """If this variable is `False` there is a newer version available."""
-        pass
+        if self._uptodate is None:
+            return True
+        return self._uptodate()

     @property
-    def debug_info(self) ->t.List[t.Tuple[int, int]]:
+    def debug_info(self) -> t.List[t.Tuple[int, int]]:
         """The debug info mapping."""
-        pass
+        if self._debug_info:
+            return [
+                tuple(map(int, x.split("=")))  # type: ignore
+                for x in self._debug_info.split("&")
+            ]
+
+        return []

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         if self.name is None:
-            name = f'memory:{id(self):x}'
+            name = f"memory:{id(self):x}"
         else:
             name = repr(self.name)
-        return f'<{type(self).__name__} {name}>'
+        return f"<{type(self).__name__} {name}>"


 class TemplateModule:
@@ -870,30 +1523,38 @@ class TemplateModule:
     converting it into a string renders the contents.
     """

-    def __init__(self, template: Template, context: Context, body_stream: t
-        .Optional[t.Iterable[str]]=None) ->None:
+    def __init__(
+        self,
+        template: Template,
+        context: Context,
+        body_stream: t.Optional[t.Iterable[str]] = None,
+    ) -> None:
         if body_stream is None:
             if context.environment.is_async:
                 raise RuntimeError(
-                    'Async mode requires a body stream to be passed to a template module. Use the async methods of the API you are using.'
-                    )
+                    "Async mode requires a body stream to be passed to"
+                    " a template module. Use the async methods of the"
+                    " API you are using."
+                )
+
             body_stream = list(template.root_render_func(context))
+
         self._body_stream = body_stream
         self.__dict__.update(context.get_exported())
         self.__name__ = template.name

-    def __html__(self) ->Markup:
+    def __html__(self) -> Markup:
         return Markup(concat(self._body_stream))

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return concat(self._body_stream)

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         if self.__name__ is None:
-            name = f'memory:{id(self):x}'
+            name = f"memory:{id(self):x}"
         else:
             name = repr(self.__name__)
-        return f'<{type(self).__name__} {name}>'
+        return f"<{type(self).__name__} {name}>"


 class TemplateExpression:
@@ -902,14 +1563,14 @@ class TemplateExpression:
     to the template with an expression it wraps.
     """

-    def __init__(self, template: Template, undefined_to_none: bool) ->None:
+    def __init__(self, template: Template, undefined_to_none: bool) -> None:
         self._template = template
         self._undefined_to_none = undefined_to_none

-    def __call__(self, *args: t.Any, **kwargs: t.Any) ->t.Optional[t.Any]:
+    def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Optional[t.Any]:
         context = self._template.new_context(dict(*args, **kwargs))
         consume(self._template.root_render_func(context))
-        rv = context.vars['result']
+        rv = context.vars["result"]
         if self._undefined_to_none and isinstance(rv, Undefined):
             rv = None
         return rv
@@ -926,12 +1587,16 @@ class TemplateStream:
     big templates to a client via WSGI which flushes after each iteration.
     """

-    def __init__(self, gen: t.Iterator[str]) ->None:
+    def __init__(self, gen: t.Iterator[str]) -> None:
         self._gen = gen
         self.disable_buffering()

-    def dump(self, fp: t.Union[str, t.IO[bytes]], encoding: t.Optional[str]
-        =None, errors: t.Optional[str]='strict') ->None:
+    def dump(
+        self,
+        fp: t.Union[str, t.IO[bytes]],
+        encoding: t.Optional[str] = None,
+        errors: t.Optional[str] = "strict",
+    ) -> None:
         """Dump the complete stream into a file or file-like object.
         Per default strings are written, if you want to encode
         before writing specify an `encoding`.
@@ -940,21 +1605,71 @@ class TemplateStream:

             Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
         """
-        pass
+        close = False

-    def disable_buffering(self) ->None:
-        """Disable the output buffering."""
-        pass
+        if isinstance(fp, str):
+            if encoding is None:
+                encoding = "utf-8"

-    def enable_buffering(self, size: int=5) ->None:
+            real_fp: t.IO[bytes] = open(fp, "wb")
+            close = True
+        else:
+            real_fp = fp
+
+        try:
+            if encoding is not None:
+                iterable = (x.encode(encoding, errors) for x in self)  # type: ignore
+            else:
+                iterable = self  # type: ignore
+
+            if hasattr(real_fp, "writelines"):
+                real_fp.writelines(iterable)
+            else:
+                for item in iterable:
+                    real_fp.write(item)
+        finally:
+            if close:
+                real_fp.close()
+
+    def disable_buffering(self) -> None:
+        """Disable the output buffering."""
+        self._next = partial(next, self._gen)
+        self.buffered = False
+
+    def _buffered_generator(self, size: int) -> t.Iterator[str]:
+        buf: t.List[str] = []
+        c_size = 0
+        push = buf.append
+
+        while True:
+            try:
+                while c_size < size:
+                    c = next(self._gen)
+                    push(c)
+                    if c:
+                        c_size += 1
+            except StopIteration:
+                if not c_size:
+                    return
+            yield concat(buf)
+            del buf[:]
+            c_size = 0
+
+    def enable_buffering(self, size: int = 5) -> None:
         """Enable buffering.  Buffer `size` items before yielding them."""
-        pass
+        if size <= 1:
+            raise ValueError("buffer size too small")
+
+        self.buffered = True
+        self._next = partial(next, self._buffered_generator(size))

-    def __iter__(self) ->'TemplateStream':
+    def __iter__(self) -> "TemplateStream":
         return self

-    def __next__(self) ->str:
-        return self._next()
+    def __next__(self) -> str:
+        return self._next()  # type: ignore


+# hook in default template class.  if anyone reads this comment: ignore that
+# it's possible to use custom templates ;-)
 Environment.template_class = Template
diff --git a/src/jinja2/exceptions.py b/src/jinja2/exceptions.py
index 39cc9cb..082ebe8 100644
--- a/src/jinja2/exceptions.py
+++ b/src/jinja2/exceptions.py
@@ -1,4 +1,5 @@
 import typing as t
+
 if t.TYPE_CHECKING:
     from .runtime import Undefined

@@ -6,9 +7,13 @@ if t.TYPE_CHECKING:
 class TemplateError(Exception):
     """Baseclass for all template errors."""

-    def __init__(self, message: t.Optional[str]=None) ->None:
+    def __init__(self, message: t.Optional[str] = None) -> None:
         super().__init__(message)

+    @property
+    def message(self) -> t.Optional[str]:
+        return self.args[0] if self.args else None
+

 class TemplateNotFound(IOError, LookupError, TemplateError):
     """Raised if a template does not exist.
@@ -17,21 +22,31 @@ class TemplateNotFound(IOError, LookupError, TemplateError):
         If the given name is :class:`Undefined` and no message was
         provided, an :exc:`UndefinedError` is raised.
     """
+
+    # Silence the Python warning about message being deprecated since
+    # it's not valid here.
     message: t.Optional[str] = None

-    def __init__(self, name: t.Optional[t.Union[str, 'Undefined']], message:
-        t.Optional[str]=None) ->None:
+    def __init__(
+        self,
+        name: t.Optional[t.Union[str, "Undefined"]],
+        message: t.Optional[str] = None,
+    ) -> None:
         IOError.__init__(self, name)
+
         if message is None:
             from .runtime import Undefined
+
             if isinstance(name, Undefined):
                 name._fail_with_undefined_error()
+
             message = name
+
         self.message = message
         self.name = name
         self.templates = [name]

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return str(self.message)


@@ -47,18 +62,25 @@ class TemplatesNotFound(TemplateNotFound):
     .. versionadded:: 2.2
     """

-    def __init__(self, names: t.Sequence[t.Union[str, 'Undefined']]=(),
-        message: t.Optional[str]=None) ->None:
+    def __init__(
+        self,
+        names: t.Sequence[t.Union[str, "Undefined"]] = (),
+        message: t.Optional[str] = None,
+    ) -> None:
         if message is None:
             from .runtime import Undefined
+
             parts = []
+
             for name in names:
                 if isinstance(name, Undefined):
                     parts.append(name._undefined_message)
                 else:
                     parts.append(name)
-            parts_str = ', '.join(map(str, parts))
-            message = f'none of the templates given were found: {parts_str}'
+
+            parts_str = ", ".join(map(str, parts))
+            message = f"none of the templates given were found: {parts_str}"
+
         super().__init__(names[-1] if names else None, message)
         self.templates = list(names)

@@ -66,35 +88,52 @@ class TemplatesNotFound(TemplateNotFound):
 class TemplateSyntaxError(TemplateError):
     """Raised to tell the user that there is a problem with the template."""

-    def __init__(self, message: str, lineno: int, name: t.Optional[str]=
-        None, filename: t.Optional[str]=None) ->None:
+    def __init__(
+        self,
+        message: str,
+        lineno: int,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> None:
         super().__init__(message)
         self.lineno = lineno
         self.name = name
         self.filename = filename
         self.source: t.Optional[str] = None
+
+        # this is set to True if the debug.translate_syntax_error
+        # function translated the syntax error into a new traceback
         self.translated = False

-    def __str__(self) ->str:
+    def __str__(self) -> str:
+        # for translated errors we only return the message
         if self.translated:
             return t.cast(str, self.message)
-        location = f'line {self.lineno}'
+
+        # otherwise attach some stuff
+        location = f"line {self.lineno}"
         name = self.filename or self.name
         if name:
             location = f'File "{name}", {location}'
-        lines = [t.cast(str, self.message), '  ' + location]
+        lines = [t.cast(str, self.message), "  " + location]
+
+        # if the source is set, add the line to the output
         if self.source is not None:
             try:
                 line = self.source.splitlines()[self.lineno - 1]
             except IndexError:
                 pass
             else:
-                lines.append('    ' + line.strip())
-        return '\n'.join(lines)
+                lines.append("    " + line.strip())
+
+        return "\n".join(lines)

-    def __reduce__(self):
-        return self.__class__, (self.message, self.lineno, self.name, self.
-            filename)
+    def __reduce__(self):  # type: ignore
+        # https://bugs.python.org/issue1692335 Exceptions that take
+        # multiple required arguments have problems with pickling.
+        # Without this, raises TypeError: __init__() missing 1 required
+        # positional argument: 'lineno'
+        return self.__class__, (self.message, self.lineno, self.name, self.filename)


 class TemplateAssertionError(TemplateSyntaxError):
diff --git a/src/jinja2/ext.py b/src/jinja2/ext.py
index 337f30c..8d0810c 100644
--- a/src/jinja2/ext.py
+++ b/src/jinja2/ext.py
@@ -1,35 +1,55 @@
 """Extension API for adding custom tags and behavior."""
+
 import pprint
 import re
 import typing as t
+
 from markupsafe import Markup
+
 from . import defaults
 from . import nodes
 from .environment import Environment
 from .exceptions import TemplateAssertionError
 from .exceptions import TemplateSyntaxError
-from .runtime import concat
+from .runtime import concat  # type: ignore
 from .runtime import Context
 from .runtime import Undefined
 from .utils import import_string
 from .utils import pass_context
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
+
     from .lexer import Token
     from .lexer import TokenStream
     from .parser import Parser

-
     class _TranslationsBasic(te.Protocol):
-        pass
+        def gettext(self, message: str) -> str: ...

+        def ngettext(self, singular: str, plural: str, n: int) -> str:
+            pass

     class _TranslationsContext(_TranslationsBasic):
-        pass
+        def pgettext(self, context: str, message: str) -> str: ...
+
+        def npgettext(
+            self, context: str, singular: str, plural: str, n: int
+        ) -> str: ...
+
     _SupportedTranslations = t.Union[_TranslationsBasic, _TranslationsContext]
-GETTEXT_FUNCTIONS: t.Tuple[str, ...] = ('_', 'gettext', 'ngettext',
-    'pgettext', 'npgettext')
-_ws_re = re.compile('\\s*\\n\\s*')
+
+
+# I18N functions available in Jinja templates. If the I18N library
+# provides ugettext, it will be assigned to gettext.
+GETTEXT_FUNCTIONS: t.Tuple[str, ...] = (
+    "_",
+    "gettext",
+    "ngettext",
+    "pgettext",
+    "npgettext",
+)
+_ws_re = re.compile(r"\s*\n\s*")


 class Extension:
@@ -50,48 +70,62 @@ class Extension:
     is a terrible name, ``fragment_cache_prefix`` on the other hand is a good
     name as it includes the name of the extension (fragment cache).
     """
+
     identifier: t.ClassVar[str]

-    def __init_subclass__(cls) ->None:
-        cls.identifier = f'{cls.__module__}.{cls.__name__}'
+    def __init_subclass__(cls) -> None:
+        cls.identifier = f"{cls.__module__}.{cls.__name__}"
+
+    #: if this extension parses this is the list of tags it's listening to.
     tags: t.Set[str] = set()
+
+    #: the priority of that extension.  This is especially useful for
+    #: extensions that preprocess values.  A lower value means higher
+    #: priority.
+    #:
+    #: .. versionadded:: 2.4
     priority = 100

-    def __init__(self, environment: Environment) ->None:
+    def __init__(self, environment: Environment) -> None:
         self.environment = environment

-    def bind(self, environment: Environment) ->'Extension':
+    def bind(self, environment: Environment) -> "Extension":
         """Create a copy of this extension bound to another environment."""
-        pass
-
-    def preprocess(self, source: str, name: t.Optional[str], filename: t.
-        Optional[str]=None) ->str:
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.environment = environment
+        return rv
+
+    def preprocess(
+        self, source: str, name: t.Optional[str], filename: t.Optional[str] = None
+    ) -> str:
         """This method is called before the actual lexing and can be used to
         preprocess the source.  The `filename` is optional.  The return value
         must be the preprocessed source.
         """
-        pass
+        return source

-    def filter_stream(self, stream: 'TokenStream') ->t.Union['TokenStream',
-        t.Iterable['Token']]:
+    def filter_stream(
+        self, stream: "TokenStream"
+    ) -> t.Union["TokenStream", t.Iterable["Token"]]:
         """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
         to filter tokens returned.  This method has to return an iterable of
         :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
         :class:`~jinja2.lexer.TokenStream`.
         """
-        pass
+        return stream

-    def parse(self, parser: 'Parser') ->t.Union[nodes.Node, t.List[nodes.Node]
-        ]:
+    def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
         """If any of the :attr:`tags` matched this method is called with the
         parser as first argument.  The token the parser stream is pointing at
         is the name token that matched.  This method has to return one or a
         list of multiple nodes.
         """
-        pass
+        raise NotImplementedError()

-    def attr(self, name: str, lineno: t.Optional[int]=None
-        ) ->nodes.ExtensionAttribute:
+    def attr(
+        self, name: str, lineno: t.Optional[int] = None
+    ) -> nodes.ExtensionAttribute:
         """Return an attribute node for the current extension.  This is useful
         to pass constants on extensions to generated template code.

@@ -99,59 +133,483 @@ class Extension:

             self.attr('_my_attribute', lineno=lineno)
         """
-        pass
-
-    def call_method(self, name: str, args: t.Optional[t.List[nodes.Expr]]=
-        None, kwargs: t.Optional[t.List[nodes.Keyword]]=None, dyn_args: t.
-        Optional[nodes.Expr]=None, dyn_kwargs: t.Optional[nodes.Expr]=None,
-        lineno: t.Optional[int]=None) ->nodes.Call:
+        return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
+
+    def call_method(
+        self,
+        name: str,
+        args: t.Optional[t.List[nodes.Expr]] = None,
+        kwargs: t.Optional[t.List[nodes.Keyword]] = None,
+        dyn_args: t.Optional[nodes.Expr] = None,
+        dyn_kwargs: t.Optional[nodes.Expr] = None,
+        lineno: t.Optional[int] = None,
+    ) -> nodes.Call:
         """Call a method of the extension.  This is a shortcut for
         :meth:`attr` + :class:`jinja2.nodes.Call`.
         """
-        pass
+        if args is None:
+            args = []
+        if kwargs is None:
+            kwargs = []
+        return nodes.Call(
+            self.attr(name, lineno=lineno),
+            args,
+            kwargs,
+            dyn_args,
+            dyn_kwargs,
+            lineno=lineno,
+        )
+
+
+@pass_context
+def _gettext_alias(
+    __context: Context, *args: t.Any, **kwargs: t.Any
+) -> t.Union[t.Any, Undefined]:
+    return __context.call(__context.resolve("gettext"), *args, **kwargs)
+
+
+def _make_new_gettext(func: t.Callable[[str], str]) -> t.Callable[..., str]:
+    @pass_context
+    def gettext(__context: Context, __string: str, **variables: t.Any) -> str:
+        rv = __context.call(func, __string)
+        if __context.eval_ctx.autoescape:
+            rv = Markup(rv)
+        # Always treat as a format string, even if there are no
+        # variables. This makes translation strings more consistent
+        # and predictable. This requires escaping "%" as "%%" in literal text.
+        return rv % variables  # type: ignore
+
+    return gettext
+
+
+def _make_new_ngettext(func: t.Callable[[str, str, int], str]) -> t.Callable[..., str]:
+    @pass_context
+    def ngettext(
+        __context: Context,
+        __singular: str,
+        __plural: str,
+        __num: int,
+        **variables: t.Any,
+    ) -> str:
+        variables.setdefault("num", __num)
+        rv = __context.call(func, __singular, __plural, __num)
+        if __context.eval_ctx.autoescape:
+            rv = Markup(rv)
+        # Always treat as a format string, see gettext comment above.
+        return rv % variables  # type: ignore
+
+    return ngettext
+
+
+def _make_new_pgettext(func: t.Callable[[str, str], str]) -> t.Callable[..., str]:
+    @pass_context
+    def pgettext(
+        __context: Context, __string_ctx: str, __string: str, **variables: t.Any
+    ) -> str:
+        variables.setdefault("context", __string_ctx)
+        rv = __context.call(func, __string_ctx, __string)
+
+        if __context.eval_ctx.autoescape:
+            rv = Markup(rv)
+
+        # Always treat as a format string, see gettext comment above.
+        return rv % variables  # type: ignore
+
+    return pgettext
+
+
+def _make_new_npgettext(
+    func: t.Callable[[str, str, str, int], str],
+) -> t.Callable[..., str]:
+    @pass_context
+    def npgettext(
+        __context: Context,
+        __string_ctx: str,
+        __singular: str,
+        __plural: str,
+        __num: int,
+        **variables: t.Any,
+    ) -> str:
+        variables.setdefault("context", __string_ctx)
+        variables.setdefault("num", __num)
+        rv = __context.call(func, __string_ctx, __singular, __plural, __num)
+
+        if __context.eval_ctx.autoescape:
+            rv = Markup(rv)
+
+        # Always treat as a format string, see gettext comment above.
+        return rv % variables  # type: ignore
+
+    return npgettext


 class InternationalizationExtension(Extension):
     """This extension adds gettext support to Jinja."""
-    tags = {'trans'}

-    def __init__(self, environment: Environment) ->None:
+    tags = {"trans"}
+
+    # TODO: the i18n extension is currently reevaluating values in a few
+    # situations.  Take this example:
+    #   {% trans count=something() %}{{ count }} foo{% pluralize
+    #     %}{{ count }} fooss{% endtrans %}
+    # something is called twice here.  One time for the gettext value and
+    # the other time for the n-parameter of the ngettext function.
+
+    def __init__(self, environment: Environment) -> None:
         super().__init__(environment)
-        environment.globals['_'] = _gettext_alias
-        environment.extend(install_gettext_translations=self._install,
+        environment.globals["_"] = _gettext_alias
+        environment.extend(
+            install_gettext_translations=self._install,
             install_null_translations=self._install_null,
             install_gettext_callables=self._install_callables,
             uninstall_gettext_translations=self._uninstall,
-            extract_translations=self._extract, newstyle_gettext=False)
-
-    def parse(self, parser: 'Parser') ->t.Union[nodes.Node, t.List[nodes.Node]
-        ]:
+            extract_translations=self._extract,
+            newstyle_gettext=False,
+        )
+
+    def _install(
+        self, translations: "_SupportedTranslations", newstyle: t.Optional[bool] = None
+    ) -> None:
+        # ugettext and ungettext are preferred in case the I18N library
+        # is providing compatibility with older Python versions.
+        gettext = getattr(translations, "ugettext", None)
+        if gettext is None:
+            gettext = translations.gettext
+        ngettext = getattr(translations, "ungettext", None)
+        if ngettext is None:
+            ngettext = translations.ngettext
+
+        pgettext = getattr(translations, "pgettext", None)
+        npgettext = getattr(translations, "npgettext", None)
+        self._install_callables(
+            gettext, ngettext, newstyle=newstyle, pgettext=pgettext, npgettext=npgettext
+        )
+
+    def _install_null(self, newstyle: t.Optional[bool] = None) -> None:
+        import gettext
+
+        translations = gettext.NullTranslations()
+
+        if hasattr(translations, "pgettext"):
+            # Python >= 3.8 provides pgettext on NullTranslations
+            pgettext = translations.pgettext
+        else:
+
+            def pgettext(c: str, s: str) -> str:  # type: ignore[misc]
+                return s
+
+        if hasattr(translations, "npgettext"):
+            npgettext = translations.npgettext
+        else:
+
+            def npgettext(c: str, s: str, p: str, n: int) -> str:  # type: ignore[misc]
+                return s if n == 1 else p
+
+        self._install_callables(
+            gettext=translations.gettext,
+            ngettext=translations.ngettext,
+            newstyle=newstyle,
+            pgettext=pgettext,
+            npgettext=npgettext,
+        )
+
+    def _install_callables(
+        self,
+        gettext: t.Callable[[str], str],
+        ngettext: t.Callable[[str, str, int], str],
+        newstyle: t.Optional[bool] = None,
+        pgettext: t.Optional[t.Callable[[str, str], str]] = None,
+        npgettext: t.Optional[t.Callable[[str, str, str, int], str]] = None,
+    ) -> None:
+        if newstyle is not None:
+            self.environment.newstyle_gettext = newstyle  # type: ignore
+        if self.environment.newstyle_gettext:  # type: ignore
+            gettext = _make_new_gettext(gettext)
+            ngettext = _make_new_ngettext(ngettext)
+
+            if pgettext is not None:
+                pgettext = _make_new_pgettext(pgettext)
+
+            if npgettext is not None:
+                npgettext = _make_new_npgettext(npgettext)
+
+        self.environment.globals.update(
+            gettext=gettext, ngettext=ngettext, pgettext=pgettext, npgettext=npgettext
+        )
+
+    def _uninstall(self, translations: "_SupportedTranslations") -> None:
+        for key in ("gettext", "ngettext", "pgettext", "npgettext"):
+            self.environment.globals.pop(key, None)
+
+    def _extract(
+        self,
+        source: t.Union[str, nodes.Template],
+        gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
+    ) -> t.Iterator[
+        t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
+    ]:
+        if isinstance(source, str):
+            source = self.environment.parse(source)
+        return extract_from_ast(source, gettext_functions)
+
+    def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
         """Parse a translatable tag."""
-        pass
-
-    def _parse_block(self, parser: 'Parser', allow_pluralize: bool) ->t.Tuple[
-        t.List[str], str]:
+        lineno = next(parser.stream).lineno
+
+        context = None
+        context_token = parser.stream.next_if("string")
+
+        if context_token is not None:
+            context = context_token.value
+
+        # find all the variables referenced.  Additionally a variable can be
+        # defined in the body of the trans block too, but this is checked at
+        # a later stage.
+        plural_expr: t.Optional[nodes.Expr] = None
+        plural_expr_assignment: t.Optional[nodes.Assign] = None
+        num_called_num = False
+        variables: t.Dict[str, nodes.Expr] = {}
+        trimmed = None
+        while parser.stream.current.type != "block_end":
+            if variables:
+                parser.stream.expect("comma")
+
+            # skip colon for python compatibility
+            if parser.stream.skip_if("colon"):
+                break
+
+            token = parser.stream.expect("name")
+            if token.value in variables:
+                parser.fail(
+                    f"translatable variable {token.value!r} defined twice.",
+                    token.lineno,
+                    exc=TemplateAssertionError,
+                )
+
+            # expressions
+            if parser.stream.current.type == "assign":
+                next(parser.stream)
+                variables[token.value] = var = parser.parse_expression()
+            elif trimmed is None and token.value in ("trimmed", "notrimmed"):
+                trimmed = token.value == "trimmed"
+                continue
+            else:
+                variables[token.value] = var = nodes.Name(token.value, "load")
+
+            if plural_expr is None:
+                if isinstance(var, nodes.Call):
+                    plural_expr = nodes.Name("_trans", "load")
+                    variables[token.value] = plural_expr
+                    plural_expr_assignment = nodes.Assign(
+                        nodes.Name("_trans", "store"), var
+                    )
+                else:
+                    plural_expr = var
+                num_called_num = token.value == "num"
+
+        parser.stream.expect("block_end")
+
+        plural = None
+        have_plural = False
+        referenced = set()
+
+        # now parse until endtrans or pluralize
+        singular_names, singular = self._parse_block(parser, True)
+        if singular_names:
+            referenced.update(singular_names)
+            if plural_expr is None:
+                plural_expr = nodes.Name(singular_names[0], "load")
+                num_called_num = singular_names[0] == "num"
+
+        # if we have a pluralize block, we parse that too
+        if parser.stream.current.test("name:pluralize"):
+            have_plural = True
+            next(parser.stream)
+            if parser.stream.current.type != "block_end":
+                token = parser.stream.expect("name")
+                if token.value not in variables:
+                    parser.fail(
+                        f"unknown variable {token.value!r} for pluralization",
+                        token.lineno,
+                        exc=TemplateAssertionError,
+                    )
+                plural_expr = variables[token.value]
+                num_called_num = token.value == "num"
+            parser.stream.expect("block_end")
+            plural_names, plural = self._parse_block(parser, False)
+            next(parser.stream)
+            referenced.update(plural_names)
+        else:
+            next(parser.stream)
+
+        # register free names as simple name expressions
+        for name in referenced:
+            if name not in variables:
+                variables[name] = nodes.Name(name, "load")
+
+        if not have_plural:
+            plural_expr = None
+        elif plural_expr is None:
+            parser.fail("pluralize without variables", lineno)
+
+        if trimmed is None:
+            trimmed = self.environment.policies["ext.i18n.trimmed"]
+        if trimmed:
+            singular = self._trim_whitespace(singular)
+            if plural:
+                plural = self._trim_whitespace(plural)
+
+        node = self._make_node(
+            singular,
+            plural,
+            context,
+            variables,
+            plural_expr,
+            bool(referenced),
+            num_called_num and have_plural,
+        )
+        node.set_lineno(lineno)
+        if plural_expr_assignment is not None:
+            return [plural_expr_assignment, node]
+        else:
+            return node
+
+    def _trim_whitespace(self, string: str, _ws_re: t.Pattern[str] = _ws_re) -> str:
+        return _ws_re.sub(" ", string.strip())
+
+    def _parse_block(
+        self, parser: "Parser", allow_pluralize: bool
+    ) -> t.Tuple[t.List[str], str]:
         """Parse until the next block tag with a given name."""
-        pass
-
-    def _make_node(self, singular: str, plural: t.Optional[str], context: t
-        .Optional[str], variables: t.Dict[str, nodes.Expr], plural_expr: t.
-        Optional[nodes.Expr], vars_referenced: bool, num_called_num: bool
-        ) ->nodes.Output:
+        referenced = []
+        buf = []
+
+        while True:
+            if parser.stream.current.type == "data":
+                buf.append(parser.stream.current.value.replace("%", "%%"))
+                next(parser.stream)
+            elif parser.stream.current.type == "variable_begin":
+                next(parser.stream)
+                name = parser.stream.expect("name").value
+                referenced.append(name)
+                buf.append(f"%({name})s")
+                parser.stream.expect("variable_end")
+            elif parser.stream.current.type == "block_begin":
+                next(parser.stream)
+                block_name = (
+                    parser.stream.current.value
+                    if parser.stream.current.type == "name"
+                    else None
+                )
+                if block_name == "endtrans":
+                    break
+                elif block_name == "pluralize":
+                    if allow_pluralize:
+                        break
+                    parser.fail(
+                        "a translatable section can have only one pluralize section"
+                    )
+                elif block_name == "trans":
+                    parser.fail(
+                        "trans blocks can't be nested; did you mean `endtrans`?"
+                    )
+                parser.fail(
+                    f"control structures in translatable sections are not allowed; "
+                    f"saw `{block_name}`"
+                )
+            elif parser.stream.eos:
+                parser.fail("unclosed translation block")
+            else:
+                raise RuntimeError("internal parser error")
+
+        return referenced, concat(buf)
+
+    def _make_node(
+        self,
+        singular: str,
+        plural: t.Optional[str],
+        context: t.Optional[str],
+        variables: t.Dict[str, nodes.Expr],
+        plural_expr: t.Optional[nodes.Expr],
+        vars_referenced: bool,
+        num_called_num: bool,
+    ) -> nodes.Output:
         """Generates a useful node from the data provided."""
-        pass
+        newstyle = self.environment.newstyle_gettext  # type: ignore
+        node: nodes.Expr
+
+        # If no variables are referenced and old-style gettext is used,
+        # undo the %% escaping since no %-formatting will take place.
+        if not vars_referenced and not newstyle:
+            singular = singular.replace("%%", "%")
+            if plural:
+                plural = plural.replace("%%", "%")
+
+        func_name = "gettext"
+        func_args: t.List[nodes.Expr] = [nodes.Const(singular)]
+
+        if context is not None:
+            func_args.insert(0, nodes.Const(context))
+            func_name = f"p{func_name}"
+
+        if plural_expr is not None:
+            func_name = f"n{func_name}"
+            func_args.extend((nodes.Const(plural), plural_expr))
+
+        node = nodes.Call(nodes.Name(func_name, "load"), func_args, [], None, None)
+
+        # in case newstyle gettext is used, the method is powerful
+        # enough to handle the variable expansion and autoescape
+        # handling itself
+        if newstyle:
+            for key, value in variables.items():
+                # the function adds that later anyways in case num was
+                # called num, so just skip it.
+                if num_called_num and key == "num":
+                    continue
+                node.kwargs.append(nodes.Keyword(key, value))
+
+        # otherwise do that here
+        else:
+            # mark the return value as safe if we are in an
+            # environment with autoescaping turned on
+            node = nodes.MarkSafeIfAutoescape(node)
+            if variables:
+                node = nodes.Mod(
+                    node,
+                    nodes.Dict(
+                        [
+                            nodes.Pair(nodes.Const(key), value)
+                            for key, value in variables.items()
+                        ]
+                    ),
+                )
+        return nodes.Output([node])


 class ExprStmtExtension(Extension):
     """Adds a `do` tag to Jinja that works like the print statement just
     that it doesn't print the return value.
     """
-    tags = {'do'}
+
+    tags = {"do"}
+
+    def parse(self, parser: "Parser") -> nodes.ExprStmt:
+        node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
+        node.node = parser.parse_tuple()
+        return node


 class LoopControlExtension(Extension):
     """Adds break and continue to the template engine."""
-    tags = {'break', 'continue'}
+
+    tags = {"break", "continue"}
+
+    def parse(self, parser: "Parser") -> t.Union[nodes.Break, nodes.Continue]:
+        token = next(parser.stream)
+        if token.value == "break":
+            return nodes.Break(lineno=token.lineno)
+        return nodes.Continue(lineno=token.lineno)


 class DebugExtension(Extension):
@@ -174,12 +632,33 @@ class DebugExtension(Extension):

     .. versionadded:: 2.11.0
     """
-    tags = {'debug'}

+    tags = {"debug"}

-def extract_from_ast(ast: nodes.Template, gettext_functions: t.Sequence[str
-    ]=GETTEXT_FUNCTIONS, babel_style: bool=True) ->t.Iterator[t.Tuple[int,
-    str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]]:
+    def parse(self, parser: "Parser") -> nodes.Output:
+        lineno = parser.stream.expect("name:debug").lineno
+        context = nodes.ContextReference()
+        result = self.call_method("_render", [context], lineno=lineno)
+        return nodes.Output([result], lineno=lineno)
+
+    def _render(self, context: Context) -> str:
+        result = {
+            "context": context.get_all(),
+            "filters": sorted(self.environment.filters.keys()),
+            "tests": sorted(self.environment.tests.keys()),
+        }
+
+        # Set the depth since the intent is to show the top few names.
+        return pprint.pformat(result, depth=3, compact=True)
+
+
+def extract_from_ast(
+    ast: nodes.Template,
+    gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
+    babel_style: bool = True,
+) -> t.Iterator[
+    t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
+]:
     """Extract localizable strings from the given template node.  Per
     default this function returns matches in babel style that means non string
     parameters as well as keyword arguments are returned as `None`.  This
@@ -214,7 +693,42 @@ def extract_from_ast(ast: nodes.Template, gettext_functions: t.Sequence[str
     to extract any comments.  For comment support you have to use the babel
     extraction interface or extract comments yourself.
     """
-    pass
+    out: t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]
+
+    for node in ast.find_all(nodes.Call):
+        if (
+            not isinstance(node.node, nodes.Name)
+            or node.node.name not in gettext_functions
+        ):
+            continue
+
+        strings: t.List[t.Optional[str]] = []
+
+        for arg in node.args:
+            if isinstance(arg, nodes.Const) and isinstance(arg.value, str):
+                strings.append(arg.value)
+            else:
+                strings.append(None)
+
+        for _ in node.kwargs:
+            strings.append(None)
+        if node.dyn_args is not None:
+            strings.append(None)
+        if node.dyn_kwargs is not None:
+            strings.append(None)
+
+        if not babel_style:
+            out = tuple(x for x in strings if x is not None)
+
+            if not out:
+                continue
+        else:
+            if len(strings) == 1:
+                out = strings[0]
+            else:
+                out = tuple(strings)
+
+        yield node.lineno, node.node.name, out


 class _CommentFinder:
@@ -224,18 +738,49 @@ class _CommentFinder:
     usable value.
     """

-    def __init__(self, tokens: t.Sequence[t.Tuple[int, str, str]],
-        comment_tags: t.Sequence[str]) ->None:
+    def __init__(
+        self, tokens: t.Sequence[t.Tuple[int, str, str]], comment_tags: t.Sequence[str]
+    ) -> None:
         self.tokens = tokens
         self.comment_tags = comment_tags
         self.offset = 0
         self.last_lineno = 0

-
-def babel_extract(fileobj: t.BinaryIO, keywords: t.Sequence[str],
-    comment_tags: t.Sequence[str], options: t.Dict[str, t.Any]) ->t.Iterator[t
-    .Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]
-    ], t.List[str]]]:
+    def find_backwards(self, offset: int) -> t.List[str]:
+        try:
+            for _, token_type, token_value in reversed(
+                self.tokens[self.offset : offset]
+            ):
+                if token_type in ("comment", "linecomment"):
+                    try:
+                        prefix, comment = token_value.split(None, 1)
+                    except ValueError:
+                        continue
+                    if prefix in self.comment_tags:
+                        return [comment.rstrip()]
+            return []
+        finally:
+            self.offset = offset
+
+    def find_comments(self, lineno: int) -> t.List[str]:
+        if not self.comment_tags or self.last_lineno > lineno:
+            return []
+        for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):
+            if token_lineno > lineno:
+                return self.find_backwards(self.offset + idx)
+        return self.find_backwards(len(self.tokens))
+
+
+def babel_extract(
+    fileobj: t.BinaryIO,
+    keywords: t.Sequence[str],
+    comment_tags: t.Sequence[str],
+    options: t.Dict[str, t.Any],
+) -> t.Iterator[
+    t.Tuple[
+        int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]], t.List[str]
+    ]
+]:
     """Babel extraction method for Jinja templates.

     .. versionchanged:: 2.3
@@ -263,9 +808,62 @@ def babel_extract(fileobj: t.BinaryIO, keywords: t.Sequence[str],
     :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
              (comments will be empty currently)
     """
-    pass
-
-
+    extensions: t.Dict[t.Type[Extension], None] = {}
+
+    for extension_name in options.get("extensions", "").split(","):
+        extension_name = extension_name.strip()
+
+        if not extension_name:
+            continue
+
+        extensions[import_string(extension_name)] = None
+
+    if InternationalizationExtension not in extensions:
+        extensions[InternationalizationExtension] = None
+
+    def getbool(options: t.Mapping[str, str], key: str, default: bool = False) -> bool:
+        return options.get(key, str(default)).lower() in {"1", "on", "yes", "true"}
+
+    silent = getbool(options, "silent", True)
+    environment = Environment(
+        options.get("block_start_string", defaults.BLOCK_START_STRING),
+        options.get("block_end_string", defaults.BLOCK_END_STRING),
+        options.get("variable_start_string", defaults.VARIABLE_START_STRING),
+        options.get("variable_end_string", defaults.VARIABLE_END_STRING),
+        options.get("comment_start_string", defaults.COMMENT_START_STRING),
+        options.get("comment_end_string", defaults.COMMENT_END_STRING),
+        options.get("line_statement_prefix") or defaults.LINE_STATEMENT_PREFIX,
+        options.get("line_comment_prefix") or defaults.LINE_COMMENT_PREFIX,
+        getbool(options, "trim_blocks", defaults.TRIM_BLOCKS),
+        getbool(options, "lstrip_blocks", defaults.LSTRIP_BLOCKS),
+        defaults.NEWLINE_SEQUENCE,
+        getbool(options, "keep_trailing_newline", defaults.KEEP_TRAILING_NEWLINE),
+        tuple(extensions),
+        cache_size=0,
+        auto_reload=False,
+    )
+
+    if getbool(options, "trimmed"):
+        environment.policies["ext.i18n.trimmed"] = True
+    if getbool(options, "newstyle_gettext"):
+        environment.newstyle_gettext = True  # type: ignore
+
+    source = fileobj.read().decode(options.get("encoding", "utf-8"))
+    try:
+        node = environment.parse(source)
+        tokens = list(environment.lex(environment.preprocess(source)))
+    except TemplateSyntaxError:
+        if not silent:
+            raise
+        # skip templates with syntax errors
+        return
+
+    finder = _CommentFinder(tokens, comment_tags)
+    for lineno, func, message in extract_from_ast(node, keywords):
+        yield lineno, func, message, finder.find_comments(lineno)
+
+
+#: nicer import names
 i18n = InternationalizationExtension
 do = ExprStmtExtension
 loopcontrols = LoopControlExtension
diff --git a/src/jinja2/filters.py b/src/jinja2/filters.py
index 9498dc3..acd1197 100644
--- a/src/jinja2/filters.py
+++ b/src/jinja2/filters.py
@@ -1,4 +1,5 @@
 """Built-in template filters used with the ``|`` operator."""
+
 import math
 import random
 import re
@@ -7,9 +8,11 @@ import typing as t
 from collections import abc
 from itertools import chain
 from itertools import groupby
+
 from markupsafe import escape
 from markupsafe import Markup
 from markupsafe import soft_str
+
 from .async_utils import async_variant
 from .async_utils import auto_aiter
 from .async_utils import auto_await
@@ -23,43 +26,67 @@ from .utils import pass_eval_context
 from .utils import pformat
 from .utils import url_quote
 from .utils import urlize
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
+
     from .environment import Environment
     from .nodes import EvalContext
     from .runtime import Context
-    from .sandbox import SandboxedEnvironment
-
+    from .sandbox import SandboxedEnvironment  # noqa: F401

     class HasHTML(te.Protocol):
-
-        def __html__(self) ->str:
+        def __html__(self) -> str:
             pass
-F = t.TypeVar('F', bound=t.Callable[..., t.Any])
-K = t.TypeVar('K')
-V = t.TypeVar('V')


-def ignore_case(value: V) ->V:
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+K = t.TypeVar("K")
+V = t.TypeVar("V")
+
+
+def ignore_case(value: V) -> V:
     """For use as a postprocessor for :func:`make_attrgetter`. Converts strings
     to lowercase and returns other types as-is."""
-    pass
+    if isinstance(value, str):
+        return t.cast(V, value.lower())
+
+    return value


-def make_attrgetter(environment: 'Environment', attribute: t.Optional[t.
-    Union[str, int]], postprocess: t.Optional[t.Callable[[t.Any], t.Any]]=
-    None, default: t.Optional[t.Any]=None) ->t.Callable[[t.Any], t.Any]:
+def make_attrgetter(
+    environment: "Environment",
+    attribute: t.Optional[t.Union[str, int]],
+    postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None,
+    default: t.Optional[t.Any] = None,
+) -> t.Callable[[t.Any], t.Any]:
     """Returns a callable that looks up the given attribute from a
     passed object with the rules of the environment.  Dots are allowed
     to access attributes of attributes.  Integer parts in paths are
     looked up as integers.
     """
-    pass
+    parts = _prepare_attribute_parts(attribute)

+    def attrgetter(item: t.Any) -> t.Any:
+        for part in parts:
+            item = environment.getitem(item, part)

-def make_multi_attrgetter(environment: 'Environment', attribute: t.Optional
-    [t.Union[str, int]], postprocess: t.Optional[t.Callable[[t.Any], t.Any]
-    ]=None) ->t.Callable[[t.Any], t.List[t.Any]]:
+            if default is not None and isinstance(item, Undefined):
+                item = default
+
+        if postprocess is not None:
+            item = postprocess(item)
+
+        return item
+
+    return attrgetter
+
+
+def make_multi_attrgetter(
+    environment: "Environment",
+    attribute: t.Optional[t.Union[str, int]],
+    postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None,
+) -> t.Callable[[t.Any], t.List[t.Any]]:
     """Returns a callable that looks up the given comma separated
     attributes from a passed object with the rules of the environment.
     Dots are allowed to access attributes of each attribute.  Integer
@@ -70,16 +97,55 @@ def make_multi_attrgetter(environment: 'Environment', attribute: t.Optional

     Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc.
     """
-    pass
+    if isinstance(attribute, str):
+        split: t.Sequence[t.Union[str, int, None]] = attribute.split(",")
+    else:
+        split = [attribute]
+
+    parts = [_prepare_attribute_parts(item) for item in split]
+
+    def attrgetter(item: t.Any) -> t.List[t.Any]:
+        items = [None] * len(parts)
+
+        for i, attribute_part in enumerate(parts):
+            item_i = item
+
+            for part in attribute_part:
+                item_i = environment.getitem(item_i, part)

+            if postprocess is not None:
+                item_i = postprocess(item_i)

-def do_forceescape(value: 't.Union[str, HasHTML]') ->Markup:
+            items[i] = item_i
+
+        return items
+
+    return attrgetter
+
+
+def _prepare_attribute_parts(
+    attr: t.Optional[t.Union[str, int]],
+) -> t.List[t.Union[str, int]]:
+    if attr is None:
+        return []
+
+    if isinstance(attr, str):
+        return [int(x) if x.isdigit() else x for x in attr.split(".")]
+
+    return [attr]
+
+
+def do_forceescape(value: "t.Union[str, HasHTML]") -> Markup:
     """Enforce HTML escaping.  This will probably double escape variables."""
-    pass
+    if hasattr(value, "__html__"):
+        value = t.cast("HasHTML", value).__html__()
+
+    return escape(str(value))


-def do_urlencode(value: t.Union[str, t.Mapping[str, t.Any], t.Iterable[t.
-    Tuple[str, t.Any]]]) ->str:
+def do_urlencode(
+    value: t.Union[str, t.Mapping[str, t.Any], t.Iterable[t.Tuple[str, t.Any]]],
+) -> str:
     """Quote data for use in a URL path or query using UTF-8.

     Basic wrapper around :func:`urllib.parse.quote` when given a
@@ -95,12 +161,23 @@ def do_urlencode(value: t.Union[str, t.Mapping[str, t.Any], t.Iterable[t.

     .. versionadded:: 2.7
     """
-    pass
+    if isinstance(value, str) or not isinstance(value, abc.Iterable):
+        return url_quote(value)
+
+    if isinstance(value, dict):
+        items: t.Iterable[t.Tuple[str, t.Any]] = value.items()
+    else:
+        items = value  # type: ignore
+
+    return "&".join(
+        f"{url_quote(k, for_qs=True)}={url_quote(v, for_qs=True)}" for k, v in items
+    )


 @pass_eval_context
-def do_replace(eval_ctx: 'EvalContext', s: str, old: str, new: str, count:
-    t.Optional[int]=None) ->str:
+def do_replace(
+    eval_ctx: "EvalContext", s: str, old: str, new: str, count: t.Optional[int] = None
+) -> str:
     """Return a copy of the value with all occurrences of a substring
     replaced with a new one. The first argument is the substring
     that should be replaced, the second is the replacement string.
@@ -115,21 +192,35 @@ def do_replace(eval_ctx: 'EvalContext', s: str, old: str, new: str, count:
         {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
             -> d'oh, d'oh, aaargh
     """
-    pass
+    if count is None:
+        count = -1
+
+    if not eval_ctx.autoescape:
+        return str(s).replace(str(old), str(new), count)
+
+    if (
+        hasattr(old, "__html__")
+        or hasattr(new, "__html__")
+        and not hasattr(s, "__html__")
+    ):
+        s = escape(s)
+    else:
+        s = soft_str(s)
+
+    return s.replace(soft_str(old), soft_str(new), count)


-def do_upper(s: str) ->str:
+def do_upper(s: str) -> str:
     """Convert a value to uppercase."""
-    pass
+    return soft_str(s).upper()


-def do_lower(s: str) ->str:
+def do_lower(s: str) -> str:
     """Convert a value to lowercase."""
-    pass
+    return soft_str(s).lower()


-def do_items(value: t.Union[t.Mapping[K, V], Undefined]) ->t.Iterator[t.
-    Tuple[K, V]]:
+def do_items(value: t.Union[t.Mapping[K, V], Undefined]) -> t.Iterator[t.Tuple[K, V]]:
     """Return an iterator over the ``(key, value)`` items of a mapping.

     ``x|items`` is the same as ``x.items()``, except if ``x`` is
@@ -150,15 +241,24 @@ def do_items(value: t.Union[t.Mapping[K, V], Undefined]) ->t.Iterator[t.

     .. versionadded:: 3.1
     """
-    pass
+    if isinstance(value, Undefined):
+        return

+    if not isinstance(value, abc.Mapping):
+        raise TypeError("Can only get item pairs from a mapping.")

-_attr_key_re = re.compile('[\\s/>=]', flags=re.ASCII)
+    yield from value.items()
+
+
+# Check for characters that would move the parser state from key to value.
+# https://html.spec.whatwg.org/#attribute-name-state
+_attr_key_re = re.compile(r"[\s/>=]", flags=re.ASCII)


 @pass_eval_context
-def do_xmlattr(eval_ctx: 'EvalContext', d: t.Mapping[str, t.Any], autospace:
-    bool=True) ->str:
+def do_xmlattr(
+    eval_ctx: "EvalContext", d: t.Mapping[str, t.Any], autospace: bool = True
+) -> str:
     """Create an SGML/XML attribute string based on the items in a dict.

     **Values** that are neither ``none`` nor ``undefined`` are automatically
@@ -195,29 +295,57 @@ def do_xmlattr(eval_ctx: 'EvalContext', d: t.Mapping[str, t.Any], autospace:
     .. versionchanged:: 3.1.3
         Keys with spaces are not allowed.
     """
-    pass
+    items = []
+
+    for key, value in d.items():
+        if value is None or isinstance(value, Undefined):
+            continue
+
+        if _attr_key_re.search(key) is not None:
+            raise ValueError(f"Invalid character in attribute name: {key!r}")

+        items.append(f'{escape(key)}="{escape(value)}"')

-def do_capitalize(s: str) ->str:
+    rv = " ".join(items)
+
+    if autospace and rv:
+        rv = " " + rv
+
+    if eval_ctx.autoescape:
+        rv = Markup(rv)
+
+    return rv
+
+
+def do_capitalize(s: str) -> str:
     """Capitalize a value. The first character will be uppercase, all others
     lowercase.
     """
-    pass
+    return soft_str(s).capitalize()


-_word_beginning_split_re = re.compile('([-\\s({\\[<]+)')
+_word_beginning_split_re = re.compile(r"([-\s({\[<]+)")


-def do_title(s: str) ->str:
+def do_title(s: str) -> str:
     """Return a titlecased version of the value. I.e. words will start with
     uppercase letters, all remaining characters are lowercase.
     """
-    pass
-
-
-def do_dictsort(value: t.Mapping[K, V], case_sensitive: bool=False, by:
-    'te.Literal["key", "value"]'='key', reverse: bool=False) ->t.List[t.
-    Tuple[K, V]]:
+    return "".join(
+        [
+            item[0].upper() + item[1:].lower()
+            for item in _word_beginning_split_re.split(soft_str(s))
+            if item
+        ]
+    )
+
+
+def do_dictsort(
+    value: t.Mapping[K, V],
+    case_sensitive: bool = False,
+    by: 'te.Literal["key", "value"]' = "key",
+    reverse: bool = False,
+) -> t.List[t.Tuple[K, V]]:
     """Sort a dict and yield (key, value) pairs. Python dicts may not
     be in the order you want to display them in, so sort them first.

@@ -235,13 +363,32 @@ def do_dictsort(value: t.Mapping[K, V], case_sensitive: bool=False, by:
         {% for key, value in mydict|dictsort(false, 'value') %}
             sort the dict by value, case insensitive
     """
-    pass
+    if by == "key":
+        pos = 0
+    elif by == "value":
+        pos = 1
+    else:
+        raise FilterArgumentError('You can only sort by either "key" or "value"')
+
+    def sort_func(item: t.Tuple[t.Any, t.Any]) -> t.Any:
+        value = item[pos]
+
+        if not case_sensitive:
+            value = ignore_case(value)
+
+        return value
+
+    return sorted(value.items(), key=sort_func, reverse=reverse)


 @pass_environment
-def do_sort(environment: 'Environment', value: 't.Iterable[V]', reverse:
-    bool=False, case_sensitive: bool=False, attribute: t.Optional[t.Union[
-    str, int]]=None) ->'t.List[V]':
+def do_sort(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    reverse: bool = False,
+    case_sensitive: bool = False,
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> "t.List[V]":
     """Sort an iterable using Python's :func:`sorted`.

     .. sourcecode:: jinja
@@ -284,13 +431,19 @@ def do_sort(environment: 'Environment', value: 't.Iterable[V]', reverse:
     .. versionchanged:: 2.6
        The ``attribute`` parameter was added.
     """
-    pass
+    key_func = make_multi_attrgetter(
+        environment, attribute, postprocess=ignore_case if not case_sensitive else None
+    )
+    return sorted(value, key=key_func, reverse=reverse)


 @pass_environment
-def do_unique(environment: 'Environment', value: 't.Iterable[V]',
-    case_sensitive: bool=False, attribute: t.Optional[t.Union[str, int]]=None
-    ) ->'t.Iterator[V]':
+def do_unique(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    case_sensitive: bool = False,
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> "t.Iterator[V]":
     """Returns a list of unique items from the given iterable.

     .. sourcecode:: jinja
@@ -304,13 +457,46 @@ def do_unique(environment: 'Environment', value: 't.Iterable[V]',
     :param case_sensitive: Treat upper and lower case strings as distinct.
     :param attribute: Filter objects with unique values for this attribute.
     """
-    pass
+    getter = make_attrgetter(
+        environment, attribute, postprocess=ignore_case if not case_sensitive else None
+    )
+    seen = set()
+
+    for item in value:
+        key = getter(item)
+
+        if key not in seen:
+            seen.add(key)
+            yield item
+
+
+def _min_or_max(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    func: "t.Callable[..., V]",
+    case_sensitive: bool,
+    attribute: t.Optional[t.Union[str, int]],
+) -> "t.Union[V, Undefined]":
+    it = iter(value)
+
+    try:
+        first = next(it)
+    except StopIteration:
+        return environment.undefined("No aggregated item, sequence was empty.")
+
+    key_func = make_attrgetter(
+        environment, attribute, postprocess=ignore_case if not case_sensitive else None
+    )
+    return func(chain([first], it), key=key_func)


 @pass_environment
-def do_min(environment: 'Environment', value: 't.Iterable[V]',
-    case_sensitive: bool=False, attribute: t.Optional[t.Union[str, int]]=None
-    ) ->'t.Union[V, Undefined]':
+def do_min(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    case_sensitive: bool = False,
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> "t.Union[V, Undefined]":
     """Return the smallest item from the sequence.

     .. sourcecode:: jinja
@@ -321,13 +507,16 @@ def do_min(environment: 'Environment', value: 't.Iterable[V]',
     :param case_sensitive: Treat upper and lower case strings as distinct.
     :param attribute: Get the object with the min value of this attribute.
     """
-    pass
+    return _min_or_max(environment, value, min, case_sensitive, attribute)


 @pass_environment
-def do_max(environment: 'Environment', value: 't.Iterable[V]',
-    case_sensitive: bool=False, attribute: t.Optional[t.Union[str, int]]=None
-    ) ->'t.Union[V, Undefined]':
+def do_max(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    case_sensitive: bool = False,
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> "t.Union[V, Undefined]":
     """Return the largest item from the sequence.

     .. sourcecode:: jinja
@@ -338,10 +527,14 @@ def do_max(environment: 'Environment', value: 't.Iterable[V]',
     :param case_sensitive: Treat upper and lower case strings as distinct.
     :param attribute: Get the object with the max value of this attribute.
     """
-    pass
+    return _min_or_max(environment, value, max, case_sensitive, attribute)


-def do_default(value: V, default_value: V='', boolean: bool=False) ->V:
+def do_default(
+    value: V,
+    default_value: V = "",  # type: ignore
+    boolean: bool = False,
+) -> V:
     """If the value is undefined it will return the passed default value,
     otherwise the value of the variable:

@@ -364,12 +557,19 @@ def do_default(value: V, default_value: V='', boolean: bool=False) ->V:
        on nested elements and attributes that may contain undefined values
        in the chain without getting an :exc:`~jinja2.UndefinedError`.
     """
-    pass
+    if isinstance(value, Undefined) or (boolean and not value):
+        return default_value
+
+    return value


 @pass_eval_context
-def sync_do_join(eval_ctx: 'EvalContext', value: t.Iterable[t.Any], d: str=
-    '', attribute: t.Optional[t.Union[str, int]]=None) ->str:
+def sync_do_join(
+    eval_ctx: "EvalContext",
+    value: t.Iterable[t.Any],
+    d: str = "",
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> str:
     """Return a string which is the concatenation of the strings in the
     sequence. The separator between elements is an empty string per
     default, you can define it with the optional parameter:
@@ -391,24 +591,76 @@ def sync_do_join(eval_ctx: 'EvalContext', value: t.Iterable[t.Any], d: str=
     .. versionadded:: 2.6
        The `attribute` parameter was added.
     """
-    pass
+    if attribute is not None:
+        value = map(make_attrgetter(eval_ctx.environment, attribute), value)

+    # no automatic escaping?  joining is a lot easier then
+    if not eval_ctx.autoescape:
+        return str(d).join(map(str, value))

-def do_center(value: str, width: int=80) ->str:
+    # if the delimiter doesn't have an html representation we check
+    # if any of the items has.  If yes we do a coercion to Markup
+    if not hasattr(d, "__html__"):
+        value = list(value)
+        do_escape = False
+
+        for idx, item in enumerate(value):
+            if hasattr(item, "__html__"):
+                do_escape = True
+            else:
+                value[idx] = str(item)
+
+        if do_escape:
+            d = escape(d)
+        else:
+            d = str(d)
+
+        return d.join(value)
+
+    # no html involved, so normal joining
+    return soft_str(d).join(map(soft_str, value))
+
+
+@async_variant(sync_do_join)  # type: ignore
+async def do_join(
+    eval_ctx: "EvalContext",
+    value: t.Union[t.AsyncIterable[t.Any], t.Iterable[t.Any]],
+    d: str = "",
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> str:
+    return sync_do_join(eval_ctx, await auto_to_list(value), d, attribute)
+
+
+def do_center(value: str, width: int = 80) -> str:
     """Centers the value in a field of a given width."""
-    pass
+    return soft_str(value).center(width)


 @pass_environment
-def sync_do_first(environment: 'Environment', seq: 't.Iterable[V]'
-    ) ->'t.Union[V, Undefined]':
+def sync_do_first(
+    environment: "Environment", seq: "t.Iterable[V]"
+) -> "t.Union[V, Undefined]":
     """Return the first item of a sequence."""
-    pass
+    try:
+        return next(iter(seq))
+    except StopIteration:
+        return environment.undefined("No first item, sequence was empty.")
+
+
+@async_variant(sync_do_first)  # type: ignore
+async def do_first(
+    environment: "Environment", seq: "t.Union[t.AsyncIterable[V], t.Iterable[V]]"
+) -> "t.Union[V, Undefined]":
+    try:
+        return await auto_aiter(seq).__anext__()
+    except StopAsyncIteration:
+        return environment.undefined("No first item, sequence was empty.")


 @pass_environment
-def do_last(environment: 'Environment', seq: 't.Reversible[V]'
-    ) ->'t.Union[V, Undefined]':
+def do_last(
+    environment: "Environment", seq: "t.Reversible[V]"
+) -> "t.Union[V, Undefined]":
     """Return the last item of a sequence.

     Note: Does not work with generators. You may want to explicitly
@@ -418,39 +670,75 @@ def do_last(environment: 'Environment', seq: 't.Reversible[V]'

         {{ data | selectattr('name', '==', 'Jinja') | list | last }}
     """
-    pass
+    try:
+        return next(iter(reversed(seq)))
+    except StopIteration:
+        return environment.undefined("No last item, sequence was empty.")
+
+
+# No async do_last, it may not be safe in async mode.


 @pass_context
-def do_random(context: 'Context', seq: 't.Sequence[V]'
-    ) ->'t.Union[V, Undefined]':
+def do_random(context: "Context", seq: "t.Sequence[V]") -> "t.Union[V, Undefined]":
     """Return a random item from the sequence."""
-    pass
+    try:
+        return random.choice(seq)
+    except IndexError:
+        return context.environment.undefined("No random item, sequence was empty.")


-def do_filesizeformat(value: t.Union[str, float, int], binary: bool=False
-    ) ->str:
+def do_filesizeformat(value: t.Union[str, float, int], binary: bool = False) -> str:
     """Format the value like a 'human-readable' file size (i.e. 13 kB,
     4.1 MB, 102 Bytes, etc).  Per default decimal prefixes are used (Mega,
     Giga, etc.), if the second parameter is set to `True` the binary
     prefixes are used (Mebi, Gibi).
     """
-    pass
-
-
-def do_pprint(value: t.Any) ->str:
+    bytes = float(value)
+    base = 1024 if binary else 1000
+    prefixes = [
+        ("KiB" if binary else "kB"),
+        ("MiB" if binary else "MB"),
+        ("GiB" if binary else "GB"),
+        ("TiB" if binary else "TB"),
+        ("PiB" if binary else "PB"),
+        ("EiB" if binary else "EB"),
+        ("ZiB" if binary else "ZB"),
+        ("YiB" if binary else "YB"),
+    ]
+
+    if bytes == 1:
+        return "1 Byte"
+    elif bytes < base:
+        return f"{int(bytes)} Bytes"
+    else:
+        for i, prefix in enumerate(prefixes):
+            unit = base ** (i + 2)
+
+            if bytes < unit:
+                return f"{base * bytes / unit:.1f} {prefix}"
+
+        return f"{base * bytes / unit:.1f} {prefix}"
+
+
+def do_pprint(value: t.Any) -> str:
     """Pretty print a variable. Useful for debugging."""
-    pass
+    return pformat(value)


-_uri_scheme_re = re.compile('^([\\w.+-]{2,}:(/){0,2})$')
+_uri_scheme_re = re.compile(r"^([\w.+-]{2,}:(/){0,2})$")


 @pass_eval_context
-def do_urlize(eval_ctx: 'EvalContext', value: str, trim_url_limit: t.
-    Optional[int]=None, nofollow: bool=False, target: t.Optional[str]=None,
-    rel: t.Optional[str]=None, extra_schemes: t.Optional[t.Iterable[str]]=None
-    ) ->str:
+def do_urlize(
+    eval_ctx: "EvalContext",
+    value: str,
+    trim_url_limit: t.Optional[int] = None,
+    nofollow: bool = False,
+    target: t.Optional[str] = None,
+    rel: t.Optional[str] = None,
+    extra_schemes: t.Optional[t.Iterable[str]] = None,
+) -> str:
     """Convert URLs in text into clickable links.

     This may not recognize links in some situations. Usually, a more
@@ -488,11 +776,42 @@ def do_urlize(eval_ctx: 'EvalContext', value: str, trim_url_limit: t.
     .. versionchanged:: 2.8
        The ``target`` parameter was added.
     """
-    pass
+    policies = eval_ctx.environment.policies
+    rel_parts = set((rel or "").split())
+
+    if nofollow:
+        rel_parts.add("nofollow")
+
+    rel_parts.update((policies["urlize.rel"] or "").split())
+    rel = " ".join(sorted(rel_parts)) or None
+
+    if target is None:
+        target = policies["urlize.target"]
+
+    if extra_schemes is None:
+        extra_schemes = policies["urlize.extra_schemes"] or ()

+    for scheme in extra_schemes:
+        if _uri_scheme_re.fullmatch(scheme) is None:
+            raise FilterArgumentError(f"{scheme!r} is not a valid URI scheme prefix.")

-def do_indent(s: str, width: t.Union[int, str]=4, first: bool=False, blank:
-    bool=False) ->str:
+    rv = urlize(
+        value,
+        trim_url_limit=trim_url_limit,
+        rel=rel,
+        target=target,
+        extra_schemes=extra_schemes,
+    )
+
+    if eval_ctx.autoescape:
+        rv = Markup(rv)
+
+    return rv
+
+
+def do_indent(
+    s: str, width: t.Union[int, str] = 4, first: bool = False, blank: bool = False
+) -> str:
     """Return a copy of the string with each line indented by 4 spaces. The
     first line and blank lines are not indented by default.

@@ -508,12 +827,45 @@ def do_indent(s: str, width: t.Union[int, str]=4, first: bool=False, blank:

         Rename the ``indentfirst`` argument to ``first``.
     """
-    pass
+    if isinstance(width, str):
+        indention = width
+    else:
+        indention = " " * width
+
+    newline = "\n"
+
+    if isinstance(s, Markup):
+        indention = Markup(indention)
+        newline = Markup(newline)
+
+    s += newline  # this quirk is necessary for the splitlines method
+
+    if blank:
+        rv = (newline + indention).join(s.splitlines())
+    else:
+        lines = s.splitlines()
+        rv = lines.pop(0)
+
+        if lines:
+            rv += newline + newline.join(
+                indention + line if line else line for line in lines
+            )
+
+    if first:
+        rv = indention + rv
+
+    return rv


 @pass_environment
-def do_truncate(env: 'Environment', s: str, length: int=255, killwords:
-    bool=False, end: str='...', leeway: t.Optional[int]=None) ->str:
+def do_truncate(
+    env: "Environment",
+    s: str,
+    length: int = 255,
+    killwords: bool = False,
+    end: str = "...",
+    leeway: t.Optional[int] = None,
+) -> str:
     """Return a truncated copy of the string. The length is specified
     with the first parameter which defaults to ``255``. If the second
     parameter is ``true`` the filter will cut the text at length. Otherwise
@@ -537,13 +889,31 @@ def do_truncate(env: 'Environment', s: str, length: int=255, killwords:
     The default leeway on newer Jinja versions is 5 and was 0 before but
     can be reconfigured globally.
     """
-    pass
+    if leeway is None:
+        leeway = env.policies["truncate.leeway"]
+
+    assert length >= len(end), f"expected length >= {len(end)}, got {length}"
+    assert leeway >= 0, f"expected leeway >= 0, got {leeway}"
+
+    if len(s) <= length + leeway:
+        return s
+
+    if killwords:
+        return s[: length - len(end)] + end
+
+    result = s[: length - len(end)].rsplit(" ", 1)[0]
+    return result + end


 @pass_environment
-def do_wordwrap(environment: 'Environment', s: str, width: int=79,
-    break_long_words: bool=True, wrapstring: t.Optional[str]=None,
-    break_on_hyphens: bool=True) ->str:
+def do_wordwrap(
+    environment: "Environment",
+    s: str,
+    width: int = 79,
+    break_long_words: bool = True,
+    wrapstring: t.Optional[str] = None,
+    break_on_hyphens: bool = True,
+) -> str:
     """Wrap a string to the given width. Existing newlines are treated
     as paragraphs to be wrapped separately.

@@ -565,18 +935,41 @@ def do_wordwrap(environment: 'Environment', s: str, width: int=79,
     .. versionchanged:: 2.7
         Added the ``wrapstring`` parameter.
     """
-    pass
-
-
-_word_re = re.compile('\\w+')
-
-
-def do_wordcount(s: str) ->int:
+    import textwrap
+
+    if wrapstring is None:
+        wrapstring = environment.newline_sequence
+
+    # textwrap.wrap doesn't consider existing newlines when wrapping.
+    # If the string has a newline before width, wrap will still insert
+    # a newline at width, resulting in a short line. Instead, split and
+    # wrap each paragraph individually.
+    return wrapstring.join(
+        [
+            wrapstring.join(
+                textwrap.wrap(
+                    line,
+                    width=width,
+                    expand_tabs=False,
+                    replace_whitespace=False,
+                    break_long_words=break_long_words,
+                    break_on_hyphens=break_on_hyphens,
+                )
+            )
+            for line in s.splitlines()
+        ]
+    )
+
+
+_word_re = re.compile(r"\w+")
+
+
+def do_wordcount(s: str) -> int:
     """Count the words in that string."""
-    pass
+    return len(_word_re.findall(soft_str(s)))


-def do_int(value: t.Any, default: int=0, base: int=10) ->int:
+def do_int(value: t.Any, default: int = 0, base: int = 10) -> int:
     """Convert the value into an integer. If the
     conversion doesn't work it will return ``0``. You can
     override this default using the first parameter. You
@@ -585,18 +978,31 @@ def do_int(value: t.Any, default: int=0, base: int=10) ->int:
     0b, 0o and 0x for bases 2, 8 and 16 respectively.
     The base is ignored for decimal numbers and non-string values.
     """
-    pass
+    try:
+        if isinstance(value, str):
+            return int(value, base)

+        return int(value)
+    except (TypeError, ValueError):
+        # this quirk is necessary so that "42.23"|int gives 42.
+        try:
+            return int(float(value))
+        except (TypeError, ValueError):
+            return default

-def do_float(value: t.Any, default: float=0.0) ->float:
+
+def do_float(value: t.Any, default: float = 0.0) -> float:
     """Convert the value into a floating point number. If the
     conversion doesn't work it will return ``0.0``. You can
     override this default using the first parameter.
     """
-    pass
+    try:
+        return float(value)
+    except (TypeError, ValueError):
+        return default


-def do_format(value: str, *args: t.Any, **kwargs: t.Any) ->str:
+def do_format(value: str, *args: t.Any, **kwargs: t.Any) -> str:
     """Apply the given values to a `printf-style`_ format string, like
     ``string % values``.

@@ -616,21 +1022,30 @@ def do_format(value: str, *args: t.Any, **kwargs: t.Any) ->str:
     .. _printf-style: https://docs.python.org/library/stdtypes.html
         #printf-style-string-formatting
     """
-    pass
+    if args and kwargs:
+        raise FilterArgumentError(
+            "can't handle positional and keyword arguments at the same time"
+        )
+
+    return soft_str(value) % (kwargs or args)


-def do_trim(value: str, chars: t.Optional[str]=None) ->str:
+def do_trim(value: str, chars: t.Optional[str] = None) -> str:
     """Strip leading and trailing characters, by default whitespace."""
-    pass
+    return soft_str(value).strip(chars)


-def do_striptags(value: 't.Union[str, HasHTML]') ->str:
+def do_striptags(value: "t.Union[str, HasHTML]") -> str:
     """Strip SGML/XML tags and replace adjacent whitespace by one space."""
-    pass
+    if hasattr(value, "__html__"):
+        value = t.cast("HasHTML", value).__html__()
+
+    return Markup(str(value)).striptags()


-def sync_do_slice(value: 't.Collection[V]', slices: int, fill_with:
-    't.Optional[V]'=None) ->'t.Iterator[t.List[V]]':
+def sync_do_slice(
+    value: "t.Collection[V]", slices: int, fill_with: "t.Optional[V]" = None
+) -> "t.Iterator[t.List[V]]":
     """Slice an iterator and return a list of lists containing
     those items. Useful if you want to create a div containing
     three ul tags that represent columns:
@@ -650,11 +1065,39 @@ def sync_do_slice(value: 't.Collection[V]', slices: int, fill_with:
     If you pass it a second argument it's used to fill missing
     values on the last iteration.
     """
-    pass
+    seq = list(value)
+    length = len(seq)
+    items_per_slice = length // slices
+    slices_with_extra = length % slices
+    offset = 0
+
+    for slice_number in range(slices):
+        start = offset + slice_number * items_per_slice
+
+        if slice_number < slices_with_extra:
+            offset += 1
+
+        end = offset + (slice_number + 1) * items_per_slice
+        tmp = seq[start:end]
+
+        if fill_with is not None and slice_number >= slices_with_extra:
+            tmp.append(fill_with)
+
+        yield tmp
+

+@async_variant(sync_do_slice)  # type: ignore
+async def do_slice(
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    slices: int,
+    fill_with: t.Optional[t.Any] = None,
+) -> "t.Iterator[t.List[V]]":
+    return sync_do_slice(await auto_to_list(value), slices, fill_with)

-def do_batch(value: 't.Iterable[V]', linecount: int, fill_with:
-    't.Optional[V]'=None) ->'t.Iterator[t.List[V]]':
+
+def do_batch(
+    value: "t.Iterable[V]", linecount: int, fill_with: "t.Optional[V]" = None
+) -> "t.Iterator[t.List[V]]":
     """
     A filter that batches items. It works pretty much like `slice`
     just the other way round. It returns a list of lists with the
@@ -673,11 +1116,27 @@ def do_batch(value: 't.Iterable[V]', linecount: int, fill_with:
         {%- endfor %}
         </table>
     """
-    pass
+    tmp: "t.List[V]" = []
+
+    for item in value:
+        if len(tmp) == linecount:
+            yield tmp
+            tmp = []
+
+        tmp.append(item)
+
+    if tmp:
+        if fill_with is not None and len(tmp) < linecount:
+            tmp += [fill_with] * (linecount - len(tmp))
+
+        yield tmp


-def do_round(value: float, precision: int=0, method:
-    'te.Literal["common", "ceil", "floor"]'='common') ->float:
+def do_round(
+    value: float,
+    precision: int = 0,
+    method: 'te.Literal["common", "ceil", "floor"]' = "common",
+) -> float:
     """Round the number to a given precision. The first
     parameter specifies the precision (default is ``0``), the
     second the rounding method:
@@ -703,24 +1162,37 @@ def do_round(value: float, precision: int=0, method:
         {{ 42.55|round|int }}
             -> 43
     """
-    pass
+    if method not in {"common", "ceil", "floor"}:
+        raise FilterArgumentError("method must be common, ceil or floor")
+
+    if method == "common":
+        return round(value, precision)
+
+    func = getattr(math, method)
+    return t.cast(float, func(value * (10**precision)) / (10**precision))


 class _GroupTuple(t.NamedTuple):
     grouper: t.Any
     list: t.List[t.Any]

-    def __repr__(self) ->str:
+    # Use the regular tuple repr to hide this subclass if users print
+    # out the value during debugging.
+    def __repr__(self) -> str:
         return tuple.__repr__(self)

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return tuple.__str__(self)


 @pass_environment
-def sync_do_groupby(environment: 'Environment', value: 't.Iterable[V]',
-    attribute: t.Union[str, int], default: t.Optional[t.Any]=None,
-    case_sensitive: bool=False) ->'t.List[_GroupTuple]':
+def sync_do_groupby(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    attribute: t.Union[str, int],
+    default: t.Optional[t.Any] = None,
+    case_sensitive: bool = False,
+) -> "t.List[_GroupTuple]":
     """Group a sequence of objects by an attribute using Python's
     :func:`itertools.groupby`. The attribute can use dot notation for
     nested access, like ``"address.city"``. Unlike Python's ``groupby``,
@@ -778,12 +1250,59 @@ def sync_do_groupby(environment: 'Environment', value: 't.Iterable[V]',
     .. versionchanged:: 2.6
         The attribute supports dot notation for nested access.
     """
-    pass
+    expr = make_attrgetter(
+        environment,
+        attribute,
+        postprocess=ignore_case if not case_sensitive else None,
+        default=default,
+    )
+    out = [
+        _GroupTuple(key, list(values))
+        for key, values in groupby(sorted(value, key=expr), expr)
+    ]
+
+    if not case_sensitive:
+        # Return the real key from the first value instead of the lowercase key.
+        output_expr = make_attrgetter(environment, attribute, default=default)
+        out = [_GroupTuple(output_expr(values[0]), values) for _, values in out]
+
+    return out
+
+
+@async_variant(sync_do_groupby)  # type: ignore
+async def do_groupby(
+    environment: "Environment",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    attribute: t.Union[str, int],
+    default: t.Optional[t.Any] = None,
+    case_sensitive: bool = False,
+) -> "t.List[_GroupTuple]":
+    expr = make_attrgetter(
+        environment,
+        attribute,
+        postprocess=ignore_case if not case_sensitive else None,
+        default=default,
+    )
+    out = [
+        _GroupTuple(key, await auto_to_list(values))
+        for key, values in groupby(sorted(await auto_to_list(value), key=expr), expr)
+    ]
+
+    if not case_sensitive:
+        # Return the real key from the first value instead of the lowercase key.
+        output_expr = make_attrgetter(environment, attribute, default=default)
+        out = [_GroupTuple(output_expr(values[0]), values) for _, values in out]
+
+    return out


 @pass_environment
-def sync_do_sum(environment: 'Environment', iterable: 't.Iterable[V]',
-    attribute: t.Optional[t.Union[str, int]]=None, start: V=0) ->V:
+def sync_do_sum(
+    environment: "Environment",
+    iterable: "t.Iterable[V]",
+    attribute: t.Optional[t.Union[str, int]] = None,
+    start: V = 0,  # type: ignore
+) -> V:
     """Returns the sum of a sequence of numbers plus the value of parameter
     'start' (which defaults to 0).  When the sequence is empty it returns
     start.
@@ -798,51 +1317,139 @@ def sync_do_sum(environment: 'Environment', iterable: 't.Iterable[V]',
        The ``attribute`` parameter was added to allow summing up over
        attributes.  Also the ``start`` parameter was moved on to the right.
     """
-    pass
+    if attribute is not None:
+        iterable = map(make_attrgetter(environment, attribute), iterable)
+
+    return sum(iterable, start)  # type: ignore[no-any-return, call-overload]
+

+@async_variant(sync_do_sum)  # type: ignore
+async def do_sum(
+    environment: "Environment",
+    iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    attribute: t.Optional[t.Union[str, int]] = None,
+    start: V = 0,  # type: ignore
+) -> V:
+    rv = start

-def sync_do_list(value: 't.Iterable[V]') ->'t.List[V]':
+    if attribute is not None:
+        func = make_attrgetter(environment, attribute)
+    else:
+
+        def func(x: V) -> V:
+            return x
+
+    async for item in auto_aiter(iterable):
+        rv += func(item)
+
+    return rv
+
+
+def sync_do_list(value: "t.Iterable[V]") -> "t.List[V]":
     """Convert the value into a list.  If it was a string the returned list
     will be a list of characters.
     """
-    pass
+    return list(value)


-def do_mark_safe(value: str) ->Markup:
+@async_variant(sync_do_list)  # type: ignore
+async def do_list(value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]") -> "t.List[V]":
+    return await auto_to_list(value)
+
+
+def do_mark_safe(value: str) -> Markup:
     """Mark the value as safe which means that in an environment with automatic
     escaping enabled this variable will not be escaped.
     """
-    pass
+    return Markup(value)


-def do_mark_unsafe(value: str) ->str:
+def do_mark_unsafe(value: str) -> str:
     """Mark a value as unsafe.  This is the reverse operation for :func:`safe`."""
-    pass
+    return str(value)
+
+
+@typing.overload
+def do_reverse(value: str) -> str: ...


-def do_reverse(value: t.Union[str, t.Iterable[V]]) ->t.Union[str, t.Iterable[V]
-    ]:
+@typing.overload
+def do_reverse(value: "t.Iterable[V]") -> "t.Iterable[V]": ...
+
+
+def do_reverse(value: t.Union[str, t.Iterable[V]]) -> t.Union[str, t.Iterable[V]]:
     """Reverse the object or return an iterator that iterates over it the other
     way round.
     """
-    pass
+    if isinstance(value, str):
+        return value[::-1]
+
+    try:
+        return reversed(value)  # type: ignore
+    except TypeError:
+        try:
+            rv = list(value)
+            rv.reverse()
+            return rv
+        except TypeError as e:
+            raise FilterArgumentError("argument must be iterable") from e


 @pass_environment
-def do_attr(environment: 'Environment', obj: t.Any, name: str) ->t.Union[
-    Undefined, t.Any]:
+def do_attr(
+    environment: "Environment", obj: t.Any, name: str
+) -> t.Union[Undefined, t.Any]:
     """Get an attribute of an object.  ``foo|attr("bar")`` works like
     ``foo.bar`` just that always an attribute is returned and items are not
     looked up.

     See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
     """
-    pass
+    try:
+        name = str(name)
+    except UnicodeError:
+        pass
+    else:
+        try:
+            value = getattr(obj, name)
+        except AttributeError:
+            pass
+        else:
+            if environment.sandboxed:
+                environment = t.cast("SandboxedEnvironment", environment)
+
+                if not environment.is_safe_attribute(obj, name, value):
+                    return environment.unsafe_undefined(obj, name)
+
+            return value
+
+    return environment.undefined(obj=obj, name=name)
+
+
+@typing.overload
+def sync_do_map(
+    context: "Context",
+    value: t.Iterable[t.Any],
+    name: str,
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> t.Iterable[t.Any]: ...
+
+
+@typing.overload
+def sync_do_map(
+    context: "Context",
+    value: t.Iterable[t.Any],
+    *,
+    attribute: str = ...,
+    default: t.Optional[t.Any] = None,
+) -> t.Iterable[t.Any]: ...


 @pass_context
-def sync_do_map(context: 'Context', value: t.Iterable[t.Any], *args: t.Any,
-    **kwargs: t.Any) ->t.Iterable[t.Any]:
+def sync_do_map(
+    context: "Context", value: t.Iterable[t.Any], *args: t.Any, **kwargs: t.Any
+) -> t.Iterable[t.Any]:
     """Applies a filter on a sequence of objects or looks up an attribute.
     This is useful when dealing with lists of objects but you are really
     only interested in a certain value of it.
@@ -882,12 +1489,51 @@ def sync_do_map(context: 'Context', value: t.Iterable[t.Any], *args: t.Any,

     .. versionadded:: 2.7
     """
-    pass
+    if value:
+        func = prepare_map(context, args, kwargs)
+
+        for item in value:
+            yield func(item)
+
+
+@typing.overload
+def do_map(
+    context: "Context",
+    value: t.Union[t.AsyncIterable[t.Any], t.Iterable[t.Any]],
+    name: str,
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> t.Iterable[t.Any]: ...
+
+
+@typing.overload
+def do_map(
+    context: "Context",
+    value: t.Union[t.AsyncIterable[t.Any], t.Iterable[t.Any]],
+    *,
+    attribute: str = ...,
+    default: t.Optional[t.Any] = None,
+) -> t.Iterable[t.Any]: ...
+
+
+@async_variant(sync_do_map)  # type: ignore
+async def do_map(
+    context: "Context",
+    value: t.Union[t.AsyncIterable[t.Any], t.Iterable[t.Any]],
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> t.AsyncIterable[t.Any]:
+    if value:
+        func = prepare_map(context, args, kwargs)
+
+        async for item in auto_aiter(value):
+            yield await auto_await(func(item))


 @pass_context
-def sync_do_select(context: 'Context', value: 't.Iterable[V]', *args: t.Any,
-    **kwargs: t.Any) ->'t.Iterator[V]':
+def sync_do_select(
+    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
+) -> "t.Iterator[V]":
     """Filters a sequence of objects by applying a test to each object,
     and only selecting the objects with the test succeeding.

@@ -912,12 +1558,23 @@ def sync_do_select(context: 'Context', value: 't.Iterable[V]', *args: t.Any,

     .. versionadded:: 2.7
     """
-    pass
+    return select_or_reject(context, value, args, kwargs, lambda x: x, False)
+
+
+@async_variant(sync_do_select)  # type: ignore
+async def do_select(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> "t.AsyncIterator[V]":
+    return async_select_or_reject(context, value, args, kwargs, lambda x: x, False)


 @pass_context
-def sync_do_reject(context: 'Context', value: 't.Iterable[V]', *args: t.Any,
-    **kwargs: t.Any) ->'t.Iterator[V]':
+def sync_do_reject(
+    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
+) -> "t.Iterator[V]":
     """Filters a sequence of objects by applying a test to each object,
     and rejecting the objects with the test succeeding.

@@ -937,12 +1594,23 @@ def sync_do_reject(context: 'Context', value: 't.Iterable[V]', *args: t.Any,

     .. versionadded:: 2.7
     """
-    pass
+    return select_or_reject(context, value, args, kwargs, lambda x: not x, False)
+
+
+@async_variant(sync_do_reject)  # type: ignore
+async def do_reject(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> "t.AsyncIterator[V]":
+    return async_select_or_reject(context, value, args, kwargs, lambda x: not x, False)


 @pass_context
-def sync_do_selectattr(context: 'Context', value: 't.Iterable[V]', *args: t
-    .Any, **kwargs: t.Any) ->'t.Iterator[V]':
+def sync_do_selectattr(
+    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
+) -> "t.Iterator[V]":
     """Filters a sequence of objects by applying a test to the specified
     attribute of each object, and only selecting the objects with the
     test succeeding.
@@ -966,12 +1634,23 @@ def sync_do_selectattr(context: 'Context', value: 't.Iterable[V]', *args: t

     .. versionadded:: 2.7
     """
-    pass
+    return select_or_reject(context, value, args, kwargs, lambda x: x, True)
+
+
+@async_variant(sync_do_selectattr)  # type: ignore
+async def do_selectattr(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> "t.AsyncIterator[V]":
+    return async_select_or_reject(context, value, args, kwargs, lambda x: x, True)


 @pass_context
-def sync_do_rejectattr(context: 'Context', value: 't.Iterable[V]', *args: t
-    .Any, **kwargs: t.Any) ->'t.Iterator[V]':
+def sync_do_rejectattr(
+    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
+) -> "t.Iterator[V]":
     """Filters a sequence of objects by applying a test to the specified
     attribute of each object, and rejecting the objects with the test
     succeeding.
@@ -993,12 +1672,23 @@ def sync_do_rejectattr(context: 'Context', value: 't.Iterable[V]', *args: t

     .. versionadded:: 2.7
     """
-    pass
+    return select_or_reject(context, value, args, kwargs, lambda x: not x, True)
+
+
+@async_variant(sync_do_rejectattr)  # type: ignore
+async def do_rejectattr(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> "t.AsyncIterator[V]":
+    return async_select_or_reject(context, value, args, kwargs, lambda x: not x, True)


 @pass_eval_context
-def do_tojson(eval_ctx: 'EvalContext', value: t.Any, indent: t.Optional[int
-    ]=None) ->Markup:
+def do_tojson(
+    eval_ctx: "EvalContext", value: t.Any, indent: t.Optional[int] = None
+) -> Markup:
     """Serialize an object to a string of JSON, and mark it safe to
     render in HTML. This filter is only for use in HTML documents.

@@ -1013,23 +1703,164 @@ def do_tojson(eval_ctx: 'EvalContext', value: t.Any, indent: t.Optional[int

     .. versionadded:: 2.9
     """
-    pass
-
-
-FILTERS = {'abs': abs, 'attr': do_attr, 'batch': do_batch, 'capitalize':
-    do_capitalize, 'center': do_center, 'count': len, 'd': do_default,
-    'default': do_default, 'dictsort': do_dictsort, 'e': escape, 'escape':
-    escape, 'filesizeformat': do_filesizeformat, 'first': do_first, 'float':
-    do_float, 'forceescape': do_forceescape, 'format': do_format, 'groupby':
-    do_groupby, 'indent': do_indent, 'int': do_int, 'join': do_join, 'last':
-    do_last, 'length': len, 'list': do_list, 'lower': do_lower, 'items':
-    do_items, 'map': do_map, 'min': do_min, 'max': do_max, 'pprint':
-    do_pprint, 'random': do_random, 'reject': do_reject, 'rejectattr':
-    do_rejectattr, 'replace': do_replace, 'reverse': do_reverse, 'round':
-    do_round, 'safe': do_mark_safe, 'select': do_select, 'selectattr':
-    do_selectattr, 'slice': do_slice, 'sort': do_sort, 'string': soft_str,
-    'striptags': do_striptags, 'sum': do_sum, 'title': do_title, 'trim':
-    do_trim, 'truncate': do_truncate, 'unique': do_unique, 'upper':
-    do_upper, 'urlencode': do_urlencode, 'urlize': do_urlize, 'wordcount':
-    do_wordcount, 'wordwrap': do_wordwrap, 'xmlattr': do_xmlattr, 'tojson':
-    do_tojson}
+    policies = eval_ctx.environment.policies
+    dumps = policies["json.dumps_function"]
+    kwargs = policies["json.dumps_kwargs"]
+
+    if indent is not None:
+        kwargs = kwargs.copy()
+        kwargs["indent"] = indent
+
+    return htmlsafe_json_dumps(value, dumps=dumps, **kwargs)
+
+
+def prepare_map(
+    context: "Context", args: t.Tuple[t.Any, ...], kwargs: t.Dict[str, t.Any]
+) -> t.Callable[[t.Any], t.Any]:
+    if not args and "attribute" in kwargs:
+        attribute = kwargs.pop("attribute")
+        default = kwargs.pop("default", None)
+
+        if kwargs:
+            raise FilterArgumentError(
+                f"Unexpected keyword argument {next(iter(kwargs))!r}"
+            )
+
+        func = make_attrgetter(context.environment, attribute, default=default)
+    else:
+        try:
+            name = args[0]
+            args = args[1:]
+        except LookupError:
+            raise FilterArgumentError("map requires a filter argument") from None
+
+        def func(item: t.Any) -> t.Any:
+            return context.environment.call_filter(
+                name, item, args, kwargs, context=context
+            )
+
+    return func
+
+
+def prepare_select_or_reject(
+    context: "Context",
+    args: t.Tuple[t.Any, ...],
+    kwargs: t.Dict[str, t.Any],
+    modfunc: t.Callable[[t.Any], t.Any],
+    lookup_attr: bool,
+) -> t.Callable[[t.Any], t.Any]:
+    if lookup_attr:
+        try:
+            attr = args[0]
+        except LookupError:
+            raise FilterArgumentError("Missing parameter for attribute name") from None
+
+        transfunc = make_attrgetter(context.environment, attr)
+        off = 1
+    else:
+        off = 0
+
+        def transfunc(x: V) -> V:
+            return x
+
+    try:
+        name = args[off]
+        args = args[1 + off :]
+
+        def func(item: t.Any) -> t.Any:
+            return context.environment.call_test(name, item, args, kwargs)
+
+    except LookupError:
+        func = bool  # type: ignore
+
+    return lambda item: modfunc(func(transfunc(item)))
+
+
+def select_or_reject(
+    context: "Context",
+    value: "t.Iterable[V]",
+    args: t.Tuple[t.Any, ...],
+    kwargs: t.Dict[str, t.Any],
+    modfunc: t.Callable[[t.Any], t.Any],
+    lookup_attr: bool,
+) -> "t.Iterator[V]":
+    if value:
+        func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr)
+
+        for item in value:
+            if func(item):
+                yield item
+
+
+async def async_select_or_reject(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    args: t.Tuple[t.Any, ...],
+    kwargs: t.Dict[str, t.Any],
+    modfunc: t.Callable[[t.Any], t.Any],
+    lookup_attr: bool,
+) -> "t.AsyncIterator[V]":
+    if value:
+        func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr)
+
+        async for item in auto_aiter(value):
+            if func(item):
+                yield item
+
+
+FILTERS = {
+    "abs": abs,
+    "attr": do_attr,
+    "batch": do_batch,
+    "capitalize": do_capitalize,
+    "center": do_center,
+    "count": len,
+    "d": do_default,
+    "default": do_default,
+    "dictsort": do_dictsort,
+    "e": escape,
+    "escape": escape,
+    "filesizeformat": do_filesizeformat,
+    "first": do_first,
+    "float": do_float,
+    "forceescape": do_forceescape,
+    "format": do_format,
+    "groupby": do_groupby,
+    "indent": do_indent,
+    "int": do_int,
+    "join": do_join,
+    "last": do_last,
+    "length": len,
+    "list": do_list,
+    "lower": do_lower,
+    "items": do_items,
+    "map": do_map,
+    "min": do_min,
+    "max": do_max,
+    "pprint": do_pprint,
+    "random": do_random,
+    "reject": do_reject,
+    "rejectattr": do_rejectattr,
+    "replace": do_replace,
+    "reverse": do_reverse,
+    "round": do_round,
+    "safe": do_mark_safe,
+    "select": do_select,
+    "selectattr": do_selectattr,
+    "slice": do_slice,
+    "sort": do_sort,
+    "string": soft_str,
+    "striptags": do_striptags,
+    "sum": do_sum,
+    "title": do_title,
+    "trim": do_trim,
+    "truncate": do_truncate,
+    "unique": do_unique,
+    "upper": do_upper,
+    "urlencode": do_urlencode,
+    "urlize": do_urlize,
+    "wordcount": do_wordcount,
+    "wordwrap": do_wordwrap,
+    "xmlattr": do_xmlattr,
+    "tojson": do_tojson,
+}
diff --git a/src/jinja2/idtracking.py b/src/jinja2/idtracking.py
index a1d69ca..995ebaa 100644
--- a/src/jinja2/idtracking.py
+++ b/src/jinja2/idtracking.py
@@ -1,32 +1,184 @@
 import typing as t
+
 from . import nodes
 from .visitor import NodeVisitor
-VAR_LOAD_PARAMETER = 'param'
-VAR_LOAD_RESOLVE = 'resolve'
-VAR_LOAD_ALIAS = 'alias'
-VAR_LOAD_UNDEFINED = 'undefined'

+VAR_LOAD_PARAMETER = "param"
+VAR_LOAD_RESOLVE = "resolve"
+VAR_LOAD_ALIAS = "alias"
+VAR_LOAD_UNDEFINED = "undefined"
+
+
+def find_symbols(
+    nodes: t.Iterable[nodes.Node], parent_symbols: t.Optional["Symbols"] = None
+) -> "Symbols":
+    sym = Symbols(parent=parent_symbols)
+    visitor = FrameSymbolVisitor(sym)
+    for node in nodes:
+        visitor.visit(node)
+    return sym

-class Symbols:

-    def __init__(self, parent: t.Optional['Symbols']=None, level: t.
-        Optional[int]=None) ->None:
+def symbols_for_node(
+    node: nodes.Node, parent_symbols: t.Optional["Symbols"] = None
+) -> "Symbols":
+    sym = Symbols(parent=parent_symbols)
+    sym.analyze_node(node)
+    return sym
+
+
+class Symbols:
+    def __init__(
+        self, parent: t.Optional["Symbols"] = None, level: t.Optional[int] = None
+    ) -> None:
         if level is None:
             if parent is None:
                 level = 0
             else:
                 level = parent.level + 1
+
         self.level: int = level
         self.parent = parent
         self.refs: t.Dict[str, str] = {}
         self.loads: t.Dict[str, t.Any] = {}
         self.stores: t.Set[str] = set()

+    def analyze_node(self, node: nodes.Node, **kwargs: t.Any) -> None:
+        visitor = RootVisitor(self)
+        visitor.visit(node, **kwargs)

-class RootVisitor(NodeVisitor):
+    def _define_ref(
+        self, name: str, load: t.Optional[t.Tuple[str, t.Optional[str]]] = None
+    ) -> str:
+        ident = f"l_{self.level}_{name}"
+        self.refs[name] = ident
+        if load is not None:
+            self.loads[ident] = load
+        return ident
+
+    def find_load(self, target: str) -> t.Optional[t.Any]:
+        if target in self.loads:
+            return self.loads[target]
+
+        if self.parent is not None:
+            return self.parent.find_load(target)
+
+        return None
+
+    def find_ref(self, name: str) -> t.Optional[str]:
+        if name in self.refs:
+            return self.refs[name]
+
+        if self.parent is not None:
+            return self.parent.find_ref(name)
+
+        return None
+
+    def ref(self, name: str) -> str:
+        rv = self.find_ref(name)
+        if rv is None:
+            raise AssertionError(
+                "Tried to resolve a name to a reference that was"
+                f" unknown to the frame ({name!r})"
+            )
+        return rv
+
+    def copy(self) -> "Symbols":
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.refs = self.refs.copy()
+        rv.loads = self.loads.copy()
+        rv.stores = self.stores.copy()
+        return rv
+
+    def store(self, name: str) -> None:
+        self.stores.add(name)
+
+        # If we have not see the name referenced yet, we need to figure
+        # out what to set it to.
+        if name not in self.refs:
+            # If there is a parent scope we check if the name has a
+            # reference there.  If it does it means we might have to alias
+            # to a variable there.
+            if self.parent is not None:
+                outer_ref = self.parent.find_ref(name)
+                if outer_ref is not None:
+                    self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
+                    return
+
+            # Otherwise we can just set it to undefined.
+            self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))
+
+    def declare_parameter(self, name: str) -> str:
+        self.stores.add(name)
+        return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))
+
+    def load(self, name: str) -> None:
+        if self.find_ref(name) is None:
+            self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))
+
+    def branch_update(self, branch_symbols: t.Sequence["Symbols"]) -> None:
+        stores: t.Dict[str, int] = {}
+        for branch in branch_symbols:
+            for target in branch.stores:
+                if target in self.stores:
+                    continue
+                stores[target] = stores.get(target, 0) + 1
+
+        for sym in branch_symbols:
+            self.refs.update(sym.refs)
+            self.loads.update(sym.loads)
+            self.stores.update(sym.stores)
+
+        for name, branch_count in stores.items():
+            if branch_count == len(branch_symbols):
+                continue
+
+            target = self.find_ref(name)  # type: ignore
+            assert target is not None, "should not happen"
+
+            if self.parent is not None:
+                outer_target = self.parent.find_ref(name)
+                if outer_target is not None:
+                    self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
+                    continue
+            self.loads[target] = (VAR_LOAD_RESOLVE, name)
+
+    def dump_stores(self) -> t.Dict[str, str]:
+        rv: t.Dict[str, str] = {}
+        node: t.Optional["Symbols"] = self
+
+        while node is not None:
+            for name in sorted(node.stores):
+                if name not in rv:
+                    rv[name] = self.find_ref(name)  # type: ignore
+
+            node = node.parent

-    def __init__(self, symbols: 'Symbols') ->None:
+        return rv
+
+    def dump_param_targets(self) -> t.Set[str]:
+        rv = set()
+        node: t.Optional["Symbols"] = self
+
+        while node is not None:
+            for target, (instr, _) in self.loads.items():
+                if instr == VAR_LOAD_PARAMETER:
+                    rv.add(target)
+
+            node = node.parent
+
+        return rv
+
+
+class RootVisitor(NodeVisitor):
+    def __init__(self, symbols: "Symbols") -> None:
         self.sym_visitor = FrameSymbolVisitor(symbols)
+
+    def _simple_visit(self, node: nodes.Node, **kwargs: t.Any) -> None:
+        for child in node.iter_child_nodes():
+            self.sym_visitor.visit(child)
+
     visit_Template = _simple_visit
     visit_Block = _simple_visit
     visit_Macro = _simple_visit
@@ -35,42 +187,132 @@ class RootVisitor(NodeVisitor):
     visit_If = _simple_visit
     visit_ScopedEvalContextModifier = _simple_visit

+    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
+        for child in node.body:
+            self.sym_visitor.visit(child)
+
+    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
+        for child in node.iter_child_nodes(exclude=("call",)):
+            self.sym_visitor.visit(child)
+
+    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
+        for child in node.body:
+            self.sym_visitor.visit(child)
+
+    def visit_For(
+        self, node: nodes.For, for_branch: str = "body", **kwargs: t.Any
+    ) -> None:
+        if for_branch == "body":
+            self.sym_visitor.visit(node.target, store_as_param=True)
+            branch = node.body
+        elif for_branch == "else":
+            branch = node.else_
+        elif for_branch == "test":
+            self.sym_visitor.visit(node.target, store_as_param=True)
+            if node.test is not None:
+                self.sym_visitor.visit(node.test)
+            return
+        else:
+            raise RuntimeError("Unknown for branch")
+
+        if branch:
+            for item in branch:
+                self.sym_visitor.visit(item)
+
+    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
+        for target in node.targets:
+            self.sym_visitor.visit(target)
+        for child in node.body:
+            self.sym_visitor.visit(child)
+
+    def generic_visit(self, node: nodes.Node, *args: t.Any, **kwargs: t.Any) -> None:
+        raise NotImplementedError(f"Cannot find symbols for {type(node).__name__!r}")
+

 class FrameSymbolVisitor(NodeVisitor):
     """A visitor for `Frame.inspect`."""

-    def __init__(self, symbols: 'Symbols') ->None:
+    def __init__(self, symbols: "Symbols") -> None:
         self.symbols = symbols

-    def visit_Name(self, node: nodes.Name, store_as_param: bool=False, **
-        kwargs: t.Any) ->None:
+    def visit_Name(
+        self, node: nodes.Name, store_as_param: bool = False, **kwargs: t.Any
+    ) -> None:
         """All assignments to names go through this function."""
-        pass
+        if store_as_param or node.ctx == "param":
+            self.symbols.declare_parameter(node.name)
+        elif node.ctx == "store":
+            self.symbols.store(node.name)
+        elif node.ctx == "load":
+            self.symbols.load(node.name)
+
+    def visit_NSRef(self, node: nodes.NSRef, **kwargs: t.Any) -> None:
+        self.symbols.load(node.name)
+
+    def visit_If(self, node: nodes.If, **kwargs: t.Any) -> None:
+        self.visit(node.test, **kwargs)
+        original_symbols = self.symbols
+
+        def inner_visit(nodes: t.Iterable[nodes.Node]) -> "Symbols":
+            self.symbols = rv = original_symbols.copy()
+
+            for subnode in nodes:
+                self.visit(subnode, **kwargs)
+
+            self.symbols = original_symbols
+            return rv
+
+        body_symbols = inner_visit(node.body)
+        elif_symbols = inner_visit(node.elif_)
+        else_symbols = inner_visit(node.else_ or ())
+        self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])
+
+    def visit_Macro(self, node: nodes.Macro, **kwargs: t.Any) -> None:
+        self.symbols.store(node.name)

-    def visit_Assign(self, node: nodes.Assign, **kwargs: t.Any) ->None:
+    def visit_Import(self, node: nodes.Import, **kwargs: t.Any) -> None:
+        self.generic_visit(node, **kwargs)
+        self.symbols.store(node.target)
+
+    def visit_FromImport(self, node: nodes.FromImport, **kwargs: t.Any) -> None:
+        self.generic_visit(node, **kwargs)
+
+        for name in node.names:
+            if isinstance(name, tuple):
+                self.symbols.store(name[1])
+            else:
+                self.symbols.store(name)
+
+    def visit_Assign(self, node: nodes.Assign, **kwargs: t.Any) -> None:
         """Visit assignments in the correct order."""
-        pass
+        self.visit(node.node, **kwargs)
+        self.visit(node.target, **kwargs)

-    def visit_For(self, node: nodes.For, **kwargs: t.Any) ->None:
+    def visit_For(self, node: nodes.For, **kwargs: t.Any) -> None:
         """Visiting stops at for blocks.  However the block sequence
         is visited as part of the outer scope.
         """
-        pass
+        self.visit(node.iter, **kwargs)
+
+    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
+        self.visit(node.call, **kwargs)
+
+    def visit_FilterBlock(self, node: nodes.FilterBlock, **kwargs: t.Any) -> None:
+        self.visit(node.filter, **kwargs)
+
+    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
+        for target in node.values:
+            self.visit(target)

-    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any
-        ) ->None:
+    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
         """Stop visiting at block assigns."""
-        pass
+        self.visit(node.target, **kwargs)

-    def visit_Scope(self, node: nodes.Scope, **kwargs: t.Any) ->None:
+    def visit_Scope(self, node: nodes.Scope, **kwargs: t.Any) -> None:
         """Stop visiting at scopes."""
-        pass

-    def visit_Block(self, node: nodes.Block, **kwargs: t.Any) ->None:
+    def visit_Block(self, node: nodes.Block, **kwargs: t.Any) -> None:
         """Stop visiting at blocks."""
-        pass

-    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any
-        ) ->None:
+    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
         """Do not visit into overlay scopes."""
-        pass
diff --git a/src/jinja2/lexer.py b/src/jinja2/lexer.py
index 2281b7e..62b0471 100644
--- a/src/jinja2/lexer.py
+++ b/src/jinja2/lexer.py
@@ -3,138 +3,252 @@ is used to do some preprocessing. It filters out invalid operators like
 the bitshift operators we don't allow in templates. It separates
 template code and python code in expressions.
 """
+
 import re
 import typing as t
 from ast import literal_eval
 from collections import deque
 from sys import intern
+
 from ._identifier import pattern as name_re
 from .exceptions import TemplateSyntaxError
 from .utils import LRUCache
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
+
     from .environment import Environment
-_lexer_cache: t.MutableMapping[t.Tuple, 'Lexer'] = LRUCache(50)
-whitespace_re = re.compile('\\s+')
-newline_re = re.compile('(\\r\\n|\\r|\\n)')
+
+# cache for the lexers. Exists in order to be able to have multiple
+# environments with the same lexer
+_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50)  # type: ignore
+
+# static regular expressions
+whitespace_re = re.compile(r"\s+")
+newline_re = re.compile(r"(\r\n|\r|\n)")
 string_re = re.compile(
-    '(\'([^\'\\\\]*(?:\\\\.[^\'\\\\]*)*)\'|"([^"\\\\]*(?:\\\\.[^"\\\\]*)*)")',
-    re.S)
+    r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
+)
 integer_re = re.compile(
-    """
+    r"""
     (
         0b(_?[0-1])+ # binary
     |
         0o(_?[0-7])+ # octal
     |
-        0x(_?[\\da-f])+ # hex
+        0x(_?[\da-f])+ # hex
     |
-        [1-9](_?\\d)* # decimal
+        [1-9](_?\d)* # decimal
     |
         0(_?0)* # decimal zero
     )
-    """
-    , re.IGNORECASE | re.VERBOSE)
+    """,
+    re.IGNORECASE | re.VERBOSE,
+)
 float_re = re.compile(
-    """
-    (?<!\\.)  # doesn't start with a .
-    (\\d+_)*\\d+  # digits, possibly _ separated
+    r"""
+    (?<!\.)  # doesn't start with a .
+    (\d+_)*\d+  # digits, possibly _ separated
     (
-        (\\.(\\d+_)*\\d+)?  # optional fractional part
-        e[+\\-]?(\\d+_)*\\d+  # exponent part
+        (\.(\d+_)*\d+)?  # optional fractional part
+        e[+\-]?(\d+_)*\d+  # exponent part
     |
-        \\.(\\d+_)*\\d+  # required fractional part
+        \.(\d+_)*\d+  # required fractional part
     )
-    """
-    , re.IGNORECASE | re.VERBOSE)
-TOKEN_ADD = intern('add')
-TOKEN_ASSIGN = intern('assign')
-TOKEN_COLON = intern('colon')
-TOKEN_COMMA = intern('comma')
-TOKEN_DIV = intern('div')
-TOKEN_DOT = intern('dot')
-TOKEN_EQ = intern('eq')
-TOKEN_FLOORDIV = intern('floordiv')
-TOKEN_GT = intern('gt')
-TOKEN_GTEQ = intern('gteq')
-TOKEN_LBRACE = intern('lbrace')
-TOKEN_LBRACKET = intern('lbracket')
-TOKEN_LPAREN = intern('lparen')
-TOKEN_LT = intern('lt')
-TOKEN_LTEQ = intern('lteq')
-TOKEN_MOD = intern('mod')
-TOKEN_MUL = intern('mul')
-TOKEN_NE = intern('ne')
-TOKEN_PIPE = intern('pipe')
-TOKEN_POW = intern('pow')
-TOKEN_RBRACE = intern('rbrace')
-TOKEN_RBRACKET = intern('rbracket')
-TOKEN_RPAREN = intern('rparen')
-TOKEN_SEMICOLON = intern('semicolon')
-TOKEN_SUB = intern('sub')
-TOKEN_TILDE = intern('tilde')
-TOKEN_WHITESPACE = intern('whitespace')
-TOKEN_FLOAT = intern('float')
-TOKEN_INTEGER = intern('integer')
-TOKEN_NAME = intern('name')
-TOKEN_STRING = intern('string')
-TOKEN_OPERATOR = intern('operator')
-TOKEN_BLOCK_BEGIN = intern('block_begin')
-TOKEN_BLOCK_END = intern('block_end')
-TOKEN_VARIABLE_BEGIN = intern('variable_begin')
-TOKEN_VARIABLE_END = intern('variable_end')
-TOKEN_RAW_BEGIN = intern('raw_begin')
-TOKEN_RAW_END = intern('raw_end')
-TOKEN_COMMENT_BEGIN = intern('comment_begin')
-TOKEN_COMMENT_END = intern('comment_end')
-TOKEN_COMMENT = intern('comment')
-TOKEN_LINESTATEMENT_BEGIN = intern('linestatement_begin')
-TOKEN_LINESTATEMENT_END = intern('linestatement_end')
-TOKEN_LINECOMMENT_BEGIN = intern('linecomment_begin')
-TOKEN_LINECOMMENT_END = intern('linecomment_end')
-TOKEN_LINECOMMENT = intern('linecomment')
-TOKEN_DATA = intern('data')
-TOKEN_INITIAL = intern('initial')
-TOKEN_EOF = intern('eof')
-operators = {'+': TOKEN_ADD, '-': TOKEN_SUB, '/': TOKEN_DIV, '//':
-    TOKEN_FLOORDIV, '*': TOKEN_MUL, '%': TOKEN_MOD, '**': TOKEN_POW, '~':
-    TOKEN_TILDE, '[': TOKEN_LBRACKET, ']': TOKEN_RBRACKET, '(':
-    TOKEN_LPAREN, ')': TOKEN_RPAREN, '{': TOKEN_LBRACE, '}': TOKEN_RBRACE,
-    '==': TOKEN_EQ, '!=': TOKEN_NE, '>': TOKEN_GT, '>=': TOKEN_GTEQ, '<':
-    TOKEN_LT, '<=': TOKEN_LTEQ, '=': TOKEN_ASSIGN, '.': TOKEN_DOT, ':':
-    TOKEN_COLON, '|': TOKEN_PIPE, ',': TOKEN_COMMA, ';': TOKEN_SEMICOLON}
+    """,
+    re.IGNORECASE | re.VERBOSE,
+)
+
+# internal the tokens and keep references to them
+TOKEN_ADD = intern("add")
+TOKEN_ASSIGN = intern("assign")
+TOKEN_COLON = intern("colon")
+TOKEN_COMMA = intern("comma")
+TOKEN_DIV = intern("div")
+TOKEN_DOT = intern("dot")
+TOKEN_EQ = intern("eq")
+TOKEN_FLOORDIV = intern("floordiv")
+TOKEN_GT = intern("gt")
+TOKEN_GTEQ = intern("gteq")
+TOKEN_LBRACE = intern("lbrace")
+TOKEN_LBRACKET = intern("lbracket")
+TOKEN_LPAREN = intern("lparen")
+TOKEN_LT = intern("lt")
+TOKEN_LTEQ = intern("lteq")
+TOKEN_MOD = intern("mod")
+TOKEN_MUL = intern("mul")
+TOKEN_NE = intern("ne")
+TOKEN_PIPE = intern("pipe")
+TOKEN_POW = intern("pow")
+TOKEN_RBRACE = intern("rbrace")
+TOKEN_RBRACKET = intern("rbracket")
+TOKEN_RPAREN = intern("rparen")
+TOKEN_SEMICOLON = intern("semicolon")
+TOKEN_SUB = intern("sub")
+TOKEN_TILDE = intern("tilde")
+TOKEN_WHITESPACE = intern("whitespace")
+TOKEN_FLOAT = intern("float")
+TOKEN_INTEGER = intern("integer")
+TOKEN_NAME = intern("name")
+TOKEN_STRING = intern("string")
+TOKEN_OPERATOR = intern("operator")
+TOKEN_BLOCK_BEGIN = intern("block_begin")
+TOKEN_BLOCK_END = intern("block_end")
+TOKEN_VARIABLE_BEGIN = intern("variable_begin")
+TOKEN_VARIABLE_END = intern("variable_end")
+TOKEN_RAW_BEGIN = intern("raw_begin")
+TOKEN_RAW_END = intern("raw_end")
+TOKEN_COMMENT_BEGIN = intern("comment_begin")
+TOKEN_COMMENT_END = intern("comment_end")
+TOKEN_COMMENT = intern("comment")
+TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin")
+TOKEN_LINESTATEMENT_END = intern("linestatement_end")
+TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin")
+TOKEN_LINECOMMENT_END = intern("linecomment_end")
+TOKEN_LINECOMMENT = intern("linecomment")
+TOKEN_DATA = intern("data")
+TOKEN_INITIAL = intern("initial")
+TOKEN_EOF = intern("eof")
+
+# bind operators to token types
+operators = {
+    "+": TOKEN_ADD,
+    "-": TOKEN_SUB,
+    "/": TOKEN_DIV,
+    "//": TOKEN_FLOORDIV,
+    "*": TOKEN_MUL,
+    "%": TOKEN_MOD,
+    "**": TOKEN_POW,
+    "~": TOKEN_TILDE,
+    "[": TOKEN_LBRACKET,
+    "]": TOKEN_RBRACKET,
+    "(": TOKEN_LPAREN,
+    ")": TOKEN_RPAREN,
+    "{": TOKEN_LBRACE,
+    "}": TOKEN_RBRACE,
+    "==": TOKEN_EQ,
+    "!=": TOKEN_NE,
+    ">": TOKEN_GT,
+    ">=": TOKEN_GTEQ,
+    "<": TOKEN_LT,
+    "<=": TOKEN_LTEQ,
+    "=": TOKEN_ASSIGN,
+    ".": TOKEN_DOT,
+    ":": TOKEN_COLON,
+    "|": TOKEN_PIPE,
+    ",": TOKEN_COMMA,
+    ";": TOKEN_SEMICOLON,
+}
+
 reverse_operators = {v: k for k, v in operators.items()}
-assert len(operators) == len(reverse_operators), 'operators dropped'
+assert len(operators) == len(reverse_operators), "operators dropped"
 operator_re = re.compile(
     f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})"
-    )
-ignored_tokens = frozenset([TOKEN_COMMENT_BEGIN, TOKEN_COMMENT,
-    TOKEN_COMMENT_END, TOKEN_WHITESPACE, TOKEN_LINECOMMENT_BEGIN,
-    TOKEN_LINECOMMENT_END, TOKEN_LINECOMMENT])
-ignore_if_empty = frozenset([TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT,
-    TOKEN_LINECOMMENT])
-
-
-def describe_token(token: 'Token') ->str:
+)
+
+ignored_tokens = frozenset(
+    [
+        TOKEN_COMMENT_BEGIN,
+        TOKEN_COMMENT,
+        TOKEN_COMMENT_END,
+        TOKEN_WHITESPACE,
+        TOKEN_LINECOMMENT_BEGIN,
+        TOKEN_LINECOMMENT_END,
+        TOKEN_LINECOMMENT,
+    ]
+)
+ignore_if_empty = frozenset(
+    [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
+)
+
+
+def _describe_token_type(token_type: str) -> str:
+    if token_type in reverse_operators:
+        return reverse_operators[token_type]
+
+    return {
+        TOKEN_COMMENT_BEGIN: "begin of comment",
+        TOKEN_COMMENT_END: "end of comment",
+        TOKEN_COMMENT: "comment",
+        TOKEN_LINECOMMENT: "comment",
+        TOKEN_BLOCK_BEGIN: "begin of statement block",
+        TOKEN_BLOCK_END: "end of statement block",
+        TOKEN_VARIABLE_BEGIN: "begin of print statement",
+        TOKEN_VARIABLE_END: "end of print statement",
+        TOKEN_LINESTATEMENT_BEGIN: "begin of line statement",
+        TOKEN_LINESTATEMENT_END: "end of line statement",
+        TOKEN_DATA: "template data / text",
+        TOKEN_EOF: "end of template",
+    }.get(token_type, token_type)
+
+
+def describe_token(token: "Token") -> str:
     """Returns a description of the token."""
-    pass
+    if token.type == TOKEN_NAME:
+        return token.value

+    return _describe_token_type(token.type)

-def describe_token_expr(expr: str) ->str:
+
+def describe_token_expr(expr: str) -> str:
     """Like `describe_token` but for token expressions."""
-    pass
+    if ":" in expr:
+        type, value = expr.split(":", 1)
+
+        if type == TOKEN_NAME:
+            return value
+    else:
+        type = expr
+
+    return _describe_token_type(type)


-def count_newlines(value: str) ->int:
+def count_newlines(value: str) -> int:
     """Count the number of newline characters in the string.  This is
     useful for extensions that filter a stream.
     """
-    pass
+    return len(newline_re.findall(value))


-def compile_rules(environment: 'Environment') ->t.List[t.Tuple[str, str]]:
+def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]:
     """Compiles all the rules from the environment into a list of rules."""
-    pass
+    e = re.escape
+    rules = [
+        (
+            len(environment.comment_start_string),
+            TOKEN_COMMENT_BEGIN,
+            e(environment.comment_start_string),
+        ),
+        (
+            len(environment.block_start_string),
+            TOKEN_BLOCK_BEGIN,
+            e(environment.block_start_string),
+        ),
+        (
+            len(environment.variable_start_string),
+            TOKEN_VARIABLE_BEGIN,
+            e(environment.variable_start_string),
+        ),
+    ]
+
+    if environment.line_statement_prefix is not None:
+        rules.append(
+            (
+                len(environment.line_statement_prefix),
+                TOKEN_LINESTATEMENT_BEGIN,
+                r"^[ \t\v]*" + e(environment.line_statement_prefix),
+            )
+        )
+    if environment.line_comment_prefix is not None:
+        rules.append(
+            (
+                len(environment.line_comment_prefix),
+                TOKEN_LINECOMMENT_BEGIN,
+                r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix),
+            )
+        )
+
+    return [x[1:] for x in sorted(rules, reverse=True)]


 class Failure:
@@ -142,12 +256,13 @@ class Failure:
     Used by the `Lexer` to specify known errors.
     """

-    def __init__(self, message: str, cls: t.Type[TemplateSyntaxError]=
-        TemplateSyntaxError) ->None:
+    def __init__(
+        self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError
+    ) -> None:
         self.message = message
         self.error_class = cls

-    def __call__(self, lineno: int, filename: str) ->'te.NoReturn':
+    def __call__(self, lineno: int, filename: str) -> "te.NoReturn":
         raise self.error_class(self.message, lineno, filename)


@@ -156,19 +271,27 @@ class Token(t.NamedTuple):
     type: str
     value: str

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return describe_token(self)

-    def test(self, expr: str) ->bool:
+    def test(self, expr: str) -> bool:
         """Test a token against a token expression.  This can either be a
         token type or ``'token_type:token_value'``.  This can only test
         against string values and types.
         """
-        pass
+        # here we do a regular string equality check as test_any is usually
+        # passed an iterable of not interned strings.
+        if self.type == expr:
+            return True
+
+        if ":" in expr:
+            return expr.split(":", 1) == [self.type, self.value]
+
+        return False

-    def test_any(self, *iterable: str) ->bool:
+    def test_any(self, *iterable: str) -> bool:
         """Test against multiple token expressions."""
-        pass
+        return any(self.test(expr) for expr in iterable)


 class TokenStreamIterator:
@@ -176,17 +299,19 @@ class TokenStreamIterator:
     until the eof token is reached.
     """

-    def __init__(self, stream: 'TokenStream') ->None:
+    def __init__(self, stream: "TokenStream") -> None:
         self.stream = stream

-    def __iter__(self) ->'TokenStreamIterator':
+    def __iter__(self) -> "TokenStreamIterator":
         return self

-    def __next__(self) ->Token:
+    def __next__(self) -> Token:
         token = self.stream.current
+
         if token.type is TOKEN_EOF:
             self.stream.close()
             raise StopIteration
+
         next(self.stream)
         return token

@@ -197,55 +322,68 @@ class TokenStream:
     one token ahead.  The current active token is stored as :attr:`current`.
     """

-    def __init__(self, generator: t.Iterable[Token], name: t.Optional[str],
-        filename: t.Optional[str]):
+    def __init__(
+        self,
+        generator: t.Iterable[Token],
+        name: t.Optional[str],
+        filename: t.Optional[str],
+    ):
         self._iter = iter(generator)
-        self._pushed: 'te.Deque[Token]' = deque()
+        self._pushed: "te.Deque[Token]" = deque()
         self.name = name
         self.filename = filename
         self.closed = False
-        self.current = Token(1, TOKEN_INITIAL, '')
+        self.current = Token(1, TOKEN_INITIAL, "")
         next(self)

-    def __iter__(self) ->TokenStreamIterator:
+    def __iter__(self) -> TokenStreamIterator:
         return TokenStreamIterator(self)

-    def __bool__(self) ->bool:
+    def __bool__(self) -> bool:
         return bool(self._pushed) or self.current.type is not TOKEN_EOF

     @property
-    def eos(self) ->bool:
+    def eos(self) -> bool:
         """Are we at the end of the stream?"""
-        pass
+        return not self

-    def push(self, token: Token) ->None:
+    def push(self, token: Token) -> None:
         """Push a token back to the stream."""
-        pass
+        self._pushed.append(token)

-    def look(self) ->Token:
+    def look(self) -> Token:
         """Look at the next token."""
-        pass
+        old_token = next(self)
+        result = self.current
+        self.push(result)
+        self.current = old_token
+        return result

-    def skip(self, n: int=1) ->None:
+    def skip(self, n: int = 1) -> None:
         """Got n tokens ahead."""
-        pass
+        for _ in range(n):
+            next(self)

-    def next_if(self, expr: str) ->t.Optional[Token]:
+    def next_if(self, expr: str) -> t.Optional[Token]:
         """Perform the token test and return the token if it matched.
         Otherwise the return value is `None`.
         """
-        pass
+        if self.current.test(expr):
+            return next(self)
+
+        return None

-    def skip_if(self, expr: str) ->bool:
+    def skip_if(self, expr: str) -> bool:
         """Like :meth:`next_if` but only returns `True` or `False`."""
-        pass
+        return self.next_if(expr) is not None

-    def __next__(self) ->Token:
+    def __next__(self) -> Token:
         """Go one token ahead and return the old one.

         Use the built-in :func:`next` instead of calling this directly.
         """
         rv = self.current
+
         if self._pushed:
             self.current = self._pushed.popleft()
         elif self.current.type is not TOKEN_EOF:
@@ -253,31 +391,74 @@ class TokenStream:
                 self.current = next(self._iter)
             except StopIteration:
                 self.close()
+
         return rv

-    def close(self) ->None:
+    def close(self) -> None:
         """Close the stream."""
-        pass
+        self.current = Token(self.current.lineno, TOKEN_EOF, "")
+        self._iter = iter(())
+        self.closed = True

-    def expect(self, expr: str) ->Token:
+    def expect(self, expr: str) -> Token:
         """Expect a given token type and return it.  This accepts the same
         argument as :meth:`jinja2.lexer.Token.test`.
         """
-        pass
+        if not self.current.test(expr):
+            expr = describe_token_expr(expr)
+
+            if self.current.type is TOKEN_EOF:
+                raise TemplateSyntaxError(
+                    f"unexpected end of template, expected {expr!r}.",
+                    self.current.lineno,
+                    self.name,
+                    self.filename,
+                )
+
+            raise TemplateSyntaxError(
+                f"expected token {expr!r}, got {describe_token(self.current)!r}",
+                self.current.lineno,
+                self.name,
+                self.filename,
+            )

+        return next(self)

-def get_lexer(environment: 'Environment') ->'Lexer':
+
+def get_lexer(environment: "Environment") -> "Lexer":
     """Return a lexer which is probably cached."""
-    pass
+    key = (
+        environment.block_start_string,
+        environment.block_end_string,
+        environment.variable_start_string,
+        environment.variable_end_string,
+        environment.comment_start_string,
+        environment.comment_end_string,
+        environment.line_statement_prefix,
+        environment.line_comment_prefix,
+        environment.trim_blocks,
+        environment.lstrip_blocks,
+        environment.newline_sequence,
+        environment.keep_trailing_newline,
+    )
+    lexer = _lexer_cache.get(key)
+
+    if lexer is None:
+        _lexer_cache[key] = lexer = Lexer(environment)

+    return lexer

-class OptionalLStrip(tuple):
+
+class OptionalLStrip(tuple):  # type: ignore[type-arg]
     """A special tuple for marking a point in the state that can have
     lstrip applied.
     """
+
     __slots__ = ()

-    def __new__(cls, *members, **kwargs):
+    # Even though it looks like a no-op, creating instances fails
+    # without this.
+    def __new__(cls, *members, **kwargs):  # type: ignore
         return super().__new__(cls, members)


@@ -295,73 +476,203 @@ class Lexer:
     Multiple environments can share the same lexer.
     """

-    def __init__(self, environment: 'Environment') ->None:
+    def __init__(self, environment: "Environment") -> None:
+        # shortcuts
         e = re.escape

-        def c(x: str) ->t.Pattern[str]:
+        def c(x: str) -> t.Pattern[str]:
             return re.compile(x, re.M | re.S)
-        tag_rules: t.List[_Rule] = [_Rule(whitespace_re, TOKEN_WHITESPACE,
-            None), _Rule(float_re, TOKEN_FLOAT, None), _Rule(integer_re,
-            TOKEN_INTEGER, None), _Rule(name_re, TOKEN_NAME, None), _Rule(
-            string_re, TOKEN_STRING, None), _Rule(operator_re,
-            TOKEN_OPERATOR, None)]
+
+        # lexing rules for tags
+        tag_rules: t.List[_Rule] = [
+            _Rule(whitespace_re, TOKEN_WHITESPACE, None),
+            _Rule(float_re, TOKEN_FLOAT, None),
+            _Rule(integer_re, TOKEN_INTEGER, None),
+            _Rule(name_re, TOKEN_NAME, None),
+            _Rule(string_re, TOKEN_STRING, None),
+            _Rule(operator_re, TOKEN_OPERATOR, None),
+        ]
+
+        # assemble the root lexing rule. because "|" is ungreedy
+        # we have to sort by length so that the lexer continues working
+        # as expected when we have parsing rules like <% for block and
+        # <%= for variables. (if someone wants asp like syntax)
+        # variables are just part of the rules if variable processing
+        # is required.
         root_tag_rules = compile_rules(environment)
+
         block_start_re = e(environment.block_start_string)
         block_end_re = e(environment.block_end_string)
         comment_end_re = e(environment.comment_end_string)
         variable_end_re = e(environment.variable_end_string)
-        block_suffix_re = '\\n?' if environment.trim_blocks else ''
+
+        # block suffix if trimming is enabled
+        block_suffix_re = "\\n?" if environment.trim_blocks else ""
+
         self.lstrip_blocks = environment.lstrip_blocks
+
         self.newline_sequence = environment.newline_sequence
         self.keep_trailing_newline = environment.keep_trailing_newline
+
         root_raw_re = (
-            f'(?P<raw_begin>{block_start_re}(\\-|\\+|)\\s*raw\\s*(?:\\-{block_end_re}\\s*|{block_end_re}))'
-            )
-        root_parts_re = '|'.join([root_raw_re] + [f'(?P<{n}>{r}(\\-|\\+|))' for
-            n, r in root_tag_rules])
-        self.rules: t.Dict[str, t.List[_Rule]] = {'root': [_Rule(c(
-            f'(.*?)(?:{root_parts_re})'), OptionalLStrip(TOKEN_DATA,
-            '#bygroup'), '#bygroup'), _Rule(c('.+'), TOKEN_DATA, None)],
-            TOKEN_COMMENT_BEGIN: [_Rule(c(
-            f'(.*?)((?:\\+{comment_end_re}|\\-{comment_end_re}\\s*|{comment_end_re}{block_suffix_re}))'
-            ), (TOKEN_COMMENT, TOKEN_COMMENT_END), '#pop'), _Rule(c('(.)'),
-            (Failure('Missing end of comment tag'),), None)],
-            TOKEN_BLOCK_BEGIN: [_Rule(c(
-            f'(?:\\+{block_end_re}|\\-{block_end_re}\\s*|{block_end_re}{block_suffix_re})'
-            ), TOKEN_BLOCK_END, '#pop')] + tag_rules, TOKEN_VARIABLE_BEGIN:
-            [_Rule(c(f'\\-{variable_end_re}\\s*|{variable_end_re}'),
-            TOKEN_VARIABLE_END, '#pop')] + tag_rules, TOKEN_RAW_BEGIN: [
-            _Rule(c(
-            f'(.*?)((?:{block_start_re}(\\-|\\+|))\\s*endraw\\s*(?:\\+{block_end_re}|\\-{block_end_re}\\s*|{block_end_re}{block_suffix_re}))'
-            ), OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END), '#pop'), _Rule(c(
-            '(.)'), (Failure('Missing end of raw directive'),), None)],
-            TOKEN_LINESTATEMENT_BEGIN: [_Rule(c('\\s*(\\n|$)'),
-            TOKEN_LINESTATEMENT_END, '#pop')] + tag_rules,
-            TOKEN_LINECOMMENT_BEGIN: [_Rule(c('(.*?)()(?=\\n|$)'), (
-            TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END), '#pop')]}
-
-    def _normalize_newlines(self, value: str) ->str:
+            rf"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*"
+            rf"(?:\-{block_end_re}\s*|{block_end_re}))"
+        )
+        root_parts_re = "|".join(
+            [root_raw_re] + [rf"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules]
+        )
+
+        # global lexing rules
+        self.rules: t.Dict[str, t.List[_Rule]] = {
+            "root": [
+                # directives
+                _Rule(
+                    c(rf"(.*?)(?:{root_parts_re})"),
+                    OptionalLStrip(TOKEN_DATA, "#bygroup"),  # type: ignore
+                    "#bygroup",
+                ),
+                # data
+                _Rule(c(".+"), TOKEN_DATA, None),
+            ],
+            # comments
+            TOKEN_COMMENT_BEGIN: [
+                _Rule(
+                    c(
+                        rf"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*"
+                        rf"|{comment_end_re}{block_suffix_re}))"
+                    ),
+                    (TOKEN_COMMENT, TOKEN_COMMENT_END),
+                    "#pop",
+                ),
+                _Rule(c(r"(.)"), (Failure("Missing end of comment tag"),), None),
+            ],
+            # blocks
+            TOKEN_BLOCK_BEGIN: [
+                _Rule(
+                    c(
+                        rf"(?:\+{block_end_re}|\-{block_end_re}\s*"
+                        rf"|{block_end_re}{block_suffix_re})"
+                    ),
+                    TOKEN_BLOCK_END,
+                    "#pop",
+                ),
+            ]
+            + tag_rules,
+            # variables
+            TOKEN_VARIABLE_BEGIN: [
+                _Rule(
+                    c(rf"\-{variable_end_re}\s*|{variable_end_re}"),
+                    TOKEN_VARIABLE_END,
+                    "#pop",
+                )
+            ]
+            + tag_rules,
+            # raw block
+            TOKEN_RAW_BEGIN: [
+                _Rule(
+                    c(
+                        rf"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*"
+                        rf"(?:\+{block_end_re}|\-{block_end_re}\s*"
+                        rf"|{block_end_re}{block_suffix_re}))"
+                    ),
+                    OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),  # type: ignore
+                    "#pop",
+                ),
+                _Rule(c(r"(.)"), (Failure("Missing end of raw directive"),), None),
+            ],
+            # line statements
+            TOKEN_LINESTATEMENT_BEGIN: [
+                _Rule(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
+            ]
+            + tag_rules,
+            # line comments
+            TOKEN_LINECOMMENT_BEGIN: [
+                _Rule(
+                    c(r"(.*?)()(?=\n|$)"),
+                    (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
+                    "#pop",
+                )
+            ],
+        }
+
+    def _normalize_newlines(self, value: str) -> str:
         """Replace all newlines with the configured sequence in strings
         and template data.
         """
-        pass
-
-    def tokenize(self, source: str, name: t.Optional[str]=None, filename: t
-        .Optional[str]=None, state: t.Optional[str]=None) ->TokenStream:
+        return newline_re.sub(self.newline_sequence, value)
+
+    def tokenize(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> TokenStream:
         """Calls tokeniter + tokenize and wraps it in a token stream."""
-        pass
-
-    def wrap(self, stream: t.Iterable[t.Tuple[int, str, str]], name: t.
-        Optional[str]=None, filename: t.Optional[str]=None) ->t.Iterator[Token
-        ]:
+        stream = self.tokeniter(source, name, filename, state)
+        return TokenStream(self.wrap(stream, name, filename), name, filename)
+
+    def wrap(
+        self,
+        stream: t.Iterable[t.Tuple[int, str, str]],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> t.Iterator[Token]:
         """This is called with the stream as returned by `tokenize` and wraps
         every token in a :class:`Token` and converts the value.
         """
-        pass
-
-    def tokeniter(self, source: str, name: t.Optional[str], filename: t.
-        Optional[str]=None, state: t.Optional[str]=None) ->t.Iterator[t.
-        Tuple[int, str, str]]:
+        for lineno, token, value_str in stream:
+            if token in ignored_tokens:
+                continue
+
+            value: t.Any = value_str
+
+            if token == TOKEN_LINESTATEMENT_BEGIN:
+                token = TOKEN_BLOCK_BEGIN
+            elif token == TOKEN_LINESTATEMENT_END:
+                token = TOKEN_BLOCK_END
+            # we are not interested in those tokens in the parser
+            elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
+                continue
+            elif token == TOKEN_DATA:
+                value = self._normalize_newlines(value_str)
+            elif token == "keyword":
+                token = value_str
+            elif token == TOKEN_NAME:
+                value = value_str
+
+                if not value.isidentifier():
+                    raise TemplateSyntaxError(
+                        "Invalid character in identifier", lineno, name, filename
+                    )
+            elif token == TOKEN_STRING:
+                # try to unescape string
+                try:
+                    value = (
+                        self._normalize_newlines(value_str[1:-1])
+                        .encode("ascii", "backslashreplace")
+                        .decode("unicode-escape")
+                    )
+                except Exception as e:
+                    msg = str(e).split(":")[-1].strip()
+                    raise TemplateSyntaxError(msg, lineno, name, filename) from e
+            elif token == TOKEN_INTEGER:
+                value = int(value_str.replace("_", ""), 0)
+            elif token == TOKEN_FLOAT:
+                # remove all "_" first to support more Python versions
+                value = literal_eval(value_str.replace("_", ""))
+            elif token == TOKEN_OPERATOR:
+                token = operators[value_str]
+
+            yield Token(lineno, token, value)
+
+    def tokeniter(
+        self,
+        source: str,
+        name: t.Optional[str],
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> t.Iterator[t.Tuple[int, str, str]]:
         """This method tokenizes the text and returns the tokens in a
         generator. Use this method if you just want to tokenize a template.

@@ -369,4 +680,189 @@ class Lexer:
             Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line
             breaks.
         """
-        pass
+        lines = newline_re.split(source)[::2]
+
+        if not self.keep_trailing_newline and lines[-1] == "":
+            del lines[-1]
+
+        source = "\n".join(lines)
+        pos = 0
+        lineno = 1
+        stack = ["root"]
+
+        if state is not None and state != "root":
+            assert state in ("variable", "block"), "invalid state"
+            stack.append(state + "_begin")
+
+        statetokens = self.rules[stack[-1]]
+        source_length = len(source)
+        balancing_stack: t.List[str] = []
+        newlines_stripped = 0
+        line_starting = True
+
+        while True:
+            # tokenizer loop
+            for regex, tokens, new_state in statetokens:
+                m = regex.match(source, pos)
+
+                # if no match we try again with the next rule
+                if m is None:
+                    continue
+
+                # we only match blocks and variables if braces / parentheses
+                # are balanced. continue parsing with the lower rule which
+                # is the operator rule. do this only if the end tags look
+                # like operators
+                if balancing_stack and tokens in (
+                    TOKEN_VARIABLE_END,
+                    TOKEN_BLOCK_END,
+                    TOKEN_LINESTATEMENT_END,
+                ):
+                    continue
+
+                # tuples support more options
+                if isinstance(tokens, tuple):
+                    groups: t.Sequence[str] = m.groups()
+
+                    if isinstance(tokens, OptionalLStrip):
+                        # Rule supports lstrip. Match will look like
+                        # text, block type, whitespace control, type, control, ...
+                        text = groups[0]
+                        # Skipping the text and first type, every other group is the
+                        # whitespace control for each type. One of the groups will be
+                        # -, +, or empty string instead of None.
+                        strip_sign = next(g for g in groups[2::2] if g is not None)
+
+                        if strip_sign == "-":
+                            # Strip all whitespace between the text and the tag.
+                            stripped = text.rstrip()
+                            newlines_stripped = text[len(stripped) :].count("\n")
+                            groups = [stripped, *groups[1:]]
+                        elif (
+                            # Not marked for preserving whitespace.
+                            strip_sign != "+"
+                            # lstrip is enabled.
+                            and self.lstrip_blocks
+                            # Not a variable expression.
+                            and not m.groupdict().get(TOKEN_VARIABLE_BEGIN)
+                        ):
+                            # The start of text between the last newline and the tag.
+                            l_pos = text.rfind("\n") + 1
+
+                            if l_pos > 0 or line_starting:
+                                # If there's only whitespace between the newline and the
+                                # tag, strip it.
+                                if whitespace_re.fullmatch(text, l_pos):
+                                    groups = [text[:l_pos], *groups[1:]]
+
+                    for idx, token in enumerate(tokens):
+                        # failure group
+                        if token.__class__ is Failure:
+                            raise token(lineno, filename)
+                        # bygroup is a bit more complex, in that case we
+                        # yield for the current token the first named
+                        # group that matched
+                        elif token == "#bygroup":
+                            for key, value in m.groupdict().items():
+                                if value is not None:
+                                    yield lineno, key, value
+                                    lineno += value.count("\n")
+                                    break
+                            else:
+                                raise RuntimeError(
+                                    f"{regex!r} wanted to resolve the token dynamically"
+                                    " but no group matched"
+                                )
+                        # normal group
+                        else:
+                            data = groups[idx]
+
+                            if data or token not in ignore_if_empty:
+                                yield lineno, token, data
+
+                            lineno += data.count("\n") + newlines_stripped
+                            newlines_stripped = 0
+
+                # strings as token just are yielded as it.
+                else:
+                    data = m.group()
+
+                    # update brace/parentheses balance
+                    if tokens == TOKEN_OPERATOR:
+                        if data == "{":
+                            balancing_stack.append("}")
+                        elif data == "(":
+                            balancing_stack.append(")")
+                        elif data == "[":
+                            balancing_stack.append("]")
+                        elif data in ("}", ")", "]"):
+                            if not balancing_stack:
+                                raise TemplateSyntaxError(
+                                    f"unexpected '{data}'", lineno, name, filename
+                                )
+
+                            expected_op = balancing_stack.pop()
+
+                            if expected_op != data:
+                                raise TemplateSyntaxError(
+                                    f"unexpected '{data}', expected '{expected_op}'",
+                                    lineno,
+                                    name,
+                                    filename,
+                                )
+
+                    # yield items
+                    if data or tokens not in ignore_if_empty:
+                        yield lineno, tokens, data
+
+                    lineno += data.count("\n")
+
+                line_starting = m.group()[-1:] == "\n"
+                # fetch new position into new variable so that we can check
+                # if there is a internal parsing error which would result
+                # in an infinite loop
+                pos2 = m.end()
+
+                # handle state changes
+                if new_state is not None:
+                    # remove the uppermost state
+                    if new_state == "#pop":
+                        stack.pop()
+                    # resolve the new state by group checking
+                    elif new_state == "#bygroup":
+                        for key, value in m.groupdict().items():
+                            if value is not None:
+                                stack.append(key)
+                                break
+                        else:
+                            raise RuntimeError(
+                                f"{regex!r} wanted to resolve the new state dynamically"
+                                f" but no group matched"
+                            )
+                    # direct state name given
+                    else:
+                        stack.append(new_state)
+
+                    statetokens = self.rules[stack[-1]]
+                # we are still at the same position and no stack change.
+                # this means a loop without break condition, avoid that and
+                # raise error
+                elif pos2 == pos:
+                    raise RuntimeError(
+                        f"{regex!r} yielded empty string without stack change"
+                    )
+
+                # publish new function and start again
+                pos = pos2
+                break
+            # if loop terminated without break we haven't found a single match
+            # either we are at the end of the file or we have a problem
+            else:
+                # end of text
+                if pos >= source_length:
+                    return
+
+                # something went wrong
+                raise TemplateSyntaxError(
+                    f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename
+                )
diff --git a/src/jinja2/loaders.py b/src/jinja2/loaders.py
index f336510..9eaf647 100644
--- a/src/jinja2/loaders.py
+++ b/src/jinja2/loaders.py
@@ -1,6 +1,7 @@
 """API and implementations for loading templates from different data
 sources.
 """
+
 import importlib.util
 import os
 import posixpath
@@ -12,18 +13,30 @@ from collections import abc
 from hashlib import sha1
 from importlib import import_module
 from types import ModuleType
+
 from .exceptions import TemplateNotFound
 from .utils import internalcode
+
 if t.TYPE_CHECKING:
     from .environment import Environment
     from .environment import Template


-def split_template_path(template: str) ->t.List[str]:
+def split_template_path(template: str) -> t.List[str]:
     """Split a path into segments and perform a sanity check.  If it detects
     '..' in the path it will raise a `TemplateNotFound` error.
     """
-    pass
+    pieces = []
+    for piece in template.split("/"):
+        if (
+            os.path.sep in piece
+            or (os.path.altsep and os.path.altsep in piece)
+            or piece == os.path.pardir
+        ):
+            raise TemplateNotFound(template)
+        elif piece and piece != ".":
+            pieces.append(piece)
+    return pieces


 class BaseLoader:
@@ -52,10 +65,16 @@ class BaseLoader:
                     source = f.read()
                 return source, path, lambda: mtime == getmtime(path)
     """
+
+    #: if set to `False` it indicates that the loader cannot provide access
+    #: to the source of templates.
+    #:
+    #: .. versionadded:: 2.4
     has_source_access = True

-    def get_source(self, environment: 'Environment', template: str) ->t.Tuple[
-        str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
         """Get the template source, filename and reload helper for a template.
         It's passed the environment and template name and has to return a
         tuple in the form ``(source, filename, uptodate)`` or raise a
@@ -73,24 +92,61 @@ class BaseLoader:
         old state somewhere (for example in a closure).  If it returns `False`
         the template will be reloaded.
         """
-        pass
+        if not self.has_source_access:
+            raise RuntimeError(
+                f"{type(self).__name__} cannot provide access to the source"
+            )
+        raise TemplateNotFound(template)

-    def list_templates(self) ->t.List[str]:
+    def list_templates(self) -> t.List[str]:
         """Iterates over all templates.  If the loader does not support that
         it should raise a :exc:`TypeError` which is the default behavior.
         """
-        pass
+        raise TypeError("this loader cannot iterate over all templates")

     @internalcode
-    def load(self, environment: 'Environment', name: str, globals: t.
-        Optional[t.MutableMapping[str, t.Any]]=None) ->'Template':
+    def load(
+        self,
+        environment: "Environment",
+        name: str,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
         """Loads a template.  This method looks up the template in the cache
         or loads one by calling :meth:`get_source`.  Subclasses should not
         override this method as loaders working on collections of other
         loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
         will not call this method but `get_source` directly.
         """
-        pass
+        code = None
+        if globals is None:
+            globals = {}
+
+        # first we try to get the source for this template together
+        # with the filename and the uptodate function.
+        source, filename, uptodate = self.get_source(environment, name)
+
+        # try to load the code from the bytecode cache if there is a
+        # bytecode cache configured.
+        bcc = environment.bytecode_cache
+        if bcc is not None:
+            bucket = bcc.get_bucket(environment, name, filename, source)
+            code = bucket.code
+
+        # if we don't have code so far (not cached, no longer up to
+        # date) etc. we compile the template
+        if code is None:
+            code = environment.compile(source, name, filename)
+
+        # if the bytecode cache is available and the bucket doesn't
+        # have a code so far, we give the bucket the new code and put
+        # it back to the bytecode cache.
+        if bcc is not None and bucket.code is None:
+            bucket.code = code
+            bcc.set_bucket(bucket)
+
+        return environment.template_class.from_code(
+            environment, code, globals, uptodate
+        )


 class FileSystemLoader(BaseLoader):
@@ -120,16 +176,67 @@ class FileSystemLoader(BaseLoader):
         Added the ``followlinks`` parameter.
     """

-    def __init__(self, searchpath: t.Union[str, 'os.PathLike[str]', t.
-        Sequence[t.Union[str, 'os.PathLike[str]']]], encoding: str='utf-8',
-        followlinks: bool=False) ->None:
-        if not isinstance(searchpath, abc.Iterable) or isinstance(searchpath,
-            str):
+    def __init__(
+        self,
+        searchpath: t.Union[
+            str, "os.PathLike[str]", t.Sequence[t.Union[str, "os.PathLike[str]"]]
+        ],
+        encoding: str = "utf-8",
+        followlinks: bool = False,
+    ) -> None:
+        if not isinstance(searchpath, abc.Iterable) or isinstance(searchpath, str):
             searchpath = [searchpath]
+
         self.searchpath = [os.fspath(p) for p in searchpath]
         self.encoding = encoding
         self.followlinks = followlinks

+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, str, t.Callable[[], bool]]:
+        pieces = split_template_path(template)
+
+        for searchpath in self.searchpath:
+            # Use posixpath even on Windows to avoid "drive:" or UNC
+            # segments breaking out of the search directory.
+            filename = posixpath.join(searchpath, *pieces)
+
+            if os.path.isfile(filename):
+                break
+        else:
+            raise TemplateNotFound(template)
+
+        with open(filename, encoding=self.encoding) as f:
+            contents = f.read()
+
+        mtime = os.path.getmtime(filename)
+
+        def uptodate() -> bool:
+            try:
+                return os.path.getmtime(filename) == mtime
+            except OSError:
+                return False
+
+        # Use normpath to convert Windows altsep to sep.
+        return contents, os.path.normpath(filename), uptodate
+
+    def list_templates(self) -> t.List[str]:
+        found = set()
+        for searchpath in self.searchpath:
+            walk_dir = os.walk(searchpath, followlinks=self.followlinks)
+            for dirpath, _, filenames in walk_dir:
+                for filename in filenames:
+                    template = (
+                        os.path.join(dirpath, filename)[len(searchpath) :]
+                        .strip(os.path.sep)
+                        .replace(os.path.sep, "/")
+                    )
+                    if template[:2] == "./":
+                        template = template[2:]
+                    if template not in found:
+                        found.add(template)
+        return sorted(found)
+

 class PackageLoader(BaseLoader):
     """Load templates from a directory in a Python package.
@@ -164,46 +271,138 @@ class PackageLoader(BaseLoader):
         Limited PEP 420 namespace package support.
     """

-    def __init__(self, package_name: str, package_path: 'str'='templates',
-        encoding: str='utf-8') ->None:
+    def __init__(
+        self,
+        package_name: str,
+        package_path: "str" = "templates",
+        encoding: str = "utf-8",
+    ) -> None:
         package_path = os.path.normpath(package_path).rstrip(os.path.sep)
+
+        # normpath preserves ".", which isn't valid in zip paths.
         if package_path == os.path.curdir:
-            package_path = ''
+            package_path = ""
         elif package_path[:2] == os.path.curdir + os.path.sep:
             package_path = package_path[2:]
+
         self.package_path = package_path
         self.package_name = package_name
         self.encoding = encoding
+
+        # Make sure the package exists. This also makes namespace
+        # packages work, otherwise get_loader returns None.
         import_module(package_name)
         spec = importlib.util.find_spec(package_name)
-        assert spec is not None, 'An import spec was not found for the package.'
+        assert spec is not None, "An import spec was not found for the package."
         loader = spec.loader
-        assert loader is not None, 'A loader was not found for the package.'
+        assert loader is not None, "A loader was not found for the package."
         self._loader = loader
         self._archive = None
         template_root = None
+
         if isinstance(loader, zipimport.zipimporter):
             self._archive = loader.archive
-            pkgdir = next(iter(spec.submodule_search_locations))
-            template_root = os.path.join(pkgdir, package_path).rstrip(os.
-                path.sep)
+            pkgdir = next(iter(spec.submodule_search_locations))  # type: ignore
+            template_root = os.path.join(pkgdir, package_path).rstrip(os.path.sep)
         else:
             roots: t.List[str] = []
+
+            # One element for regular packages, multiple for namespace
+            # packages, or None for single module file.
             if spec.submodule_search_locations:
                 roots.extend(spec.submodule_search_locations)
+            # A single module file, use the parent directory instead.
             elif spec.origin is not None:
                 roots.append(os.path.dirname(spec.origin))
+
             for root in roots:
                 root = os.path.join(root, package_path)
+
                 if os.path.isdir(root):
                     template_root = root
                     break
+
         if template_root is None:
             raise ValueError(
-                f'The {package_name!r} package was not installed in a way that PackageLoader understands.'
-                )
+                f"The {package_name!r} package was not installed in a"
+                " way that PackageLoader understands."
+            )
+
         self._template_root = template_root

+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, str, t.Optional[t.Callable[[], bool]]]:
+        # Use posixpath even on Windows to avoid "drive:" or UNC
+        # segments breaking out of the search directory. Use normpath to
+        # convert Windows altsep to sep.
+        p = os.path.normpath(
+            posixpath.join(self._template_root, *split_template_path(template))
+        )
+        up_to_date: t.Optional[t.Callable[[], bool]]
+
+        if self._archive is None:
+            # Package is a directory.
+            if not os.path.isfile(p):
+                raise TemplateNotFound(template)
+
+            with open(p, "rb") as f:
+                source = f.read()
+
+            mtime = os.path.getmtime(p)
+
+            def up_to_date() -> bool:
+                return os.path.isfile(p) and os.path.getmtime(p) == mtime
+
+        else:
+            # Package is a zip file.
+            try:
+                source = self._loader.get_data(p)  # type: ignore
+            except OSError as e:
+                raise TemplateNotFound(template) from e
+
+            # Could use the zip's mtime for all template mtimes, but
+            # would need to safely reload the module if it's out of
+            # date, so just report it as always current.
+            up_to_date = None
+
+        return source.decode(self.encoding), p, up_to_date
+
+    def list_templates(self) -> t.List[str]:
+        results: t.List[str] = []
+
+        if self._archive is None:
+            # Package is a directory.
+            offset = len(self._template_root)
+
+            for dirpath, _, filenames in os.walk(self._template_root):
+                dirpath = dirpath[offset:].lstrip(os.path.sep)
+                results.extend(
+                    os.path.join(dirpath, name).replace(os.path.sep, "/")
+                    for name in filenames
+                )
+        else:
+            if not hasattr(self._loader, "_files"):
+                raise TypeError(
+                    "This zip import does not have the required"
+                    " metadata to list templates."
+                )
+
+            # Package is a zip file.
+            prefix = (
+                self._template_root[len(self._archive) :].lstrip(os.path.sep)
+                + os.path.sep
+            )
+            offset = len(prefix)
+
+            for name in self._loader._files.keys():
+                # Find names under the templates directory that aren't directories.
+                if name.startswith(prefix) and name[-1] != os.path.sep:
+                    results.append(name[offset:].replace(os.path.sep, "/"))
+
+        results.sort()
+        return results
+

 class DictLoader(BaseLoader):
     """Loads a template from a Python dict mapping template names to
@@ -214,9 +413,20 @@ class DictLoader(BaseLoader):
     Because auto reloading is rarely useful this is disabled per default.
     """

-    def __init__(self, mapping: t.Mapping[str, str]) ->None:
+    def __init__(self, mapping: t.Mapping[str, str]) -> None:
         self.mapping = mapping

+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, None, t.Callable[[], bool]]:
+        if template in self.mapping:
+            source = self.mapping[template]
+            return source, None, lambda: source == self.mapping.get(template)
+        raise TemplateNotFound(template)
+
+    def list_templates(self) -> t.List[str]:
+        return sorted(self.mapping)
+

 class FunctionLoader(BaseLoader):
     """A loader that is passed a function which does the loading.  The
@@ -236,11 +446,32 @@ class FunctionLoader(BaseLoader):
     return value.
     """

-    def __init__(self, load_func: t.Callable[[str], t.Optional[t.Union[str,
-        t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]]]]
-        ) ->None:
+    def __init__(
+        self,
+        load_func: t.Callable[
+            [str],
+            t.Optional[
+                t.Union[
+                    str, t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]
+                ]
+            ],
+        ],
+    ) -> None:
         self.load_func = load_func

+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+        rv = self.load_func(template)
+
+        if rv is None:
+            raise TemplateNotFound(template)
+
+        if isinstance(rv, str):
+            return rv, None, None
+
+        return rv
+

 class PrefixLoader(BaseLoader):
     """A loader that is passed a dict of loaders where each loader is bound
@@ -257,11 +488,53 @@ class PrefixLoader(BaseLoader):
     by loading ``'app2/index.html'`` the file from the second.
     """

-    def __init__(self, mapping: t.Mapping[str, BaseLoader], delimiter: str='/'
-        ) ->None:
+    def __init__(
+        self, mapping: t.Mapping[str, BaseLoader], delimiter: str = "/"
+    ) -> None:
         self.mapping = mapping
         self.delimiter = delimiter

+    def get_loader(self, template: str) -> t.Tuple[BaseLoader, str]:
+        try:
+            prefix, name = template.split(self.delimiter, 1)
+            loader = self.mapping[prefix]
+        except (ValueError, KeyError) as e:
+            raise TemplateNotFound(template) from e
+        return loader, name
+
+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+        loader, name = self.get_loader(template)
+        try:
+            return loader.get_source(environment, name)
+        except TemplateNotFound as e:
+            # re-raise the exception with the correct filename here.
+            # (the one that includes the prefix)
+            raise TemplateNotFound(template) from e
+
+    @internalcode
+    def load(
+        self,
+        environment: "Environment",
+        name: str,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        loader, local_name = self.get_loader(name)
+        try:
+            return loader.load(environment, local_name, globals)
+        except TemplateNotFound as e:
+            # re-raise the exception with the correct filename here.
+            # (the one that includes the prefix)
+            raise TemplateNotFound(name) from e
+
+    def list_templates(self) -> t.List[str]:
+        result = []
+        for prefix, loader in self.mapping.items():
+            for template in loader.list_templates():
+                result.append(prefix + self.delimiter + template)
+        return result
+

 class ChoiceLoader(BaseLoader):
     """This loader works like the `PrefixLoader` just that no prefix is
@@ -277,9 +550,39 @@ class ChoiceLoader(BaseLoader):
     from a different location.
     """

-    def __init__(self, loaders: t.Sequence[BaseLoader]) ->None:
+    def __init__(self, loaders: t.Sequence[BaseLoader]) -> None:
         self.loaders = loaders

+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+        for loader in self.loaders:
+            try:
+                return loader.get_source(environment, template)
+            except TemplateNotFound:
+                pass
+        raise TemplateNotFound(template)
+
+    @internalcode
+    def load(
+        self,
+        environment: "Environment",
+        name: str,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        for loader in self.loaders:
+            try:
+                return loader.load(environment, name, globals)
+            except TemplateNotFound:
+                pass
+        raise TemplateNotFound(name)
+
+    def list_templates(self) -> t.List[str]:
+        found = set()
+        for loader in self.loaders:
+            found.update(loader.list_templates())
+        return sorted(found)
+

 class _TemplateModule(ModuleType):
     """Like a normal module but with support for weak references"""
@@ -297,16 +600,68 @@ class ModuleLoader(BaseLoader):

     Templates can be precompiled with :meth:`Environment.compile_templates`.
     """
+
     has_source_access = False

-    def __init__(self, path: t.Union[str, 'os.PathLike[str]', t.Sequence[t.
-        Union[str, 'os.PathLike[str]']]]) ->None:
-        package_name = f'_jinja2_module_templates_{id(self):x}'
+    def __init__(
+        self,
+        path: t.Union[
+            str, "os.PathLike[str]", t.Sequence[t.Union[str, "os.PathLike[str]"]]
+        ],
+    ) -> None:
+        package_name = f"_jinja2_module_templates_{id(self):x}"
+
+        # create a fake module that looks for the templates in the
+        # path given.
         mod = _TemplateModule(package_name)
+
         if not isinstance(path, abc.Iterable) or isinstance(path, str):
             path = [path]
+
         mod.__path__ = [os.fspath(p) for p in path]
-        sys.modules[package_name] = weakref.proxy(mod, lambda x: sys.
-            modules.pop(package_name, None))
+
+        sys.modules[package_name] = weakref.proxy(
+            mod, lambda x: sys.modules.pop(package_name, None)
+        )
+
+        # the only strong reference, the sys.modules entry is weak
+        # so that the garbage collector can remove it once the
+        # loader that created it goes out of business.
         self.module = mod
         self.package_name = package_name
+
+    @staticmethod
+    def get_template_key(name: str) -> str:
+        return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()
+
+    @staticmethod
+    def get_module_filename(name: str) -> str:
+        return ModuleLoader.get_template_key(name) + ".py"
+
+    @internalcode
+    def load(
+        self,
+        environment: "Environment",
+        name: str,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        key = self.get_template_key(name)
+        module = f"{self.package_name}.{key}"
+        mod = getattr(self.module, module, None)
+
+        if mod is None:
+            try:
+                mod = __import__(module, None, None, ["root"])
+            except ImportError as e:
+                raise TemplateNotFound(name) from e
+
+            # remove the entry from sys.modules, we only want the attribute
+            # on the module object we have stored on the loader.
+            sys.modules.pop(module, None)
+
+        if globals is None:
+            globals = {}
+
+        return environment.template_class.from_module_dict(
+            environment, mod.__dict__, globals
+        )
diff --git a/src/jinja2/meta.py b/src/jinja2/meta.py
index 37016c7..298499e 100644
--- a/src/jinja2/meta.py
+++ b/src/jinja2/meta.py
@@ -1,10 +1,13 @@
 """Functions that expose information about templates that might be
 interesting for introspection.
 """
+
 import typing as t
+
 from . import nodes
 from .compiler import CodeGenerator
 from .compiler import Frame
+
 if t.TYPE_CHECKING:
     from .environment import Environment

@@ -12,20 +15,23 @@ if t.TYPE_CHECKING:
 class TrackingCodeGenerator(CodeGenerator):
     """We abuse the code generator for introspection."""

-    def __init__(self, environment: 'Environment') ->None:
-        super().__init__(environment, '<introspection>', '<introspection>')
+    def __init__(self, environment: "Environment") -> None:
+        super().__init__(environment, "<introspection>", "<introspection>")
         self.undeclared_identifiers: t.Set[str] = set()

-    def write(self, x: str) ->None:
+    def write(self, x: str) -> None:
         """Don't write."""
-        pass

-    def enter_frame(self, frame: Frame) ->None:
+    def enter_frame(self, frame: Frame) -> None:
         """Remember all undeclared identifiers."""
-        pass
+        super().enter_frame(frame)
+
+        for _, (action, param) in frame.symbols.loads.items():
+            if action == "resolve" and param not in self.environment.globals:
+                self.undeclared_identifiers.add(param)


-def find_undeclared_variables(ast: nodes.Template) ->t.Set[str]:
+def find_undeclared_variables(ast: nodes.Template) -> t.Set[str]:
     """Returns a set of all variables in the AST that will be looked up from
     the context at runtime.  Because at compile time it's not known which
     variables will be used depending on the path the execution takes at
@@ -44,16 +50,16 @@ def find_undeclared_variables(ast: nodes.Template) ->t.Set[str]:
        :exc:`TemplateAssertionError` during compilation and as a matter of
        fact this function can currently raise that exception as well.
     """
-    pass
+    codegen = TrackingCodeGenerator(ast.environment)  # type: ignore
+    codegen.visit(ast)
+    return codegen.undeclared_identifiers


-_ref_types = nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include
-_RefType = t.Union[nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include
-    ]
+_ref_types = (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
+_RefType = t.Union[nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include]


-def find_referenced_templates(ast: nodes.Template) ->t.Iterator[t.Optional[str]
-    ]:
+def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]]:
     """Finds all the referenced templates from the AST.  This will return an
     iterator over all the hardcoded template extensions, inclusions and
     imports.  If dynamic inheritance or inclusion is used, `None` will be
@@ -68,4 +74,39 @@ def find_referenced_templates(ast: nodes.Template) ->t.Iterator[t.Optional[str]
     This function is useful for dependency tracking.  For example if you want
     to rebuild parts of the website after a layout template has changed.
     """
-    pass
+    template_name: t.Any
+
+    for node in ast.find_all(_ref_types):
+        template: nodes.Expr = node.template  # type: ignore
+
+        if not isinstance(template, nodes.Const):
+            # a tuple with some non consts in there
+            if isinstance(template, (nodes.Tuple, nodes.List)):
+                for template_name in template.items:
+                    # something const, only yield the strings and ignore
+                    # non-string consts that really just make no sense
+                    if isinstance(template_name, nodes.Const):
+                        if isinstance(template_name.value, str):
+                            yield template_name.value
+                    # something dynamic in there
+                    else:
+                        yield None
+            # something dynamic we don't know about here
+            else:
+                yield None
+            continue
+        # constant is a basestring, direct template name
+        if isinstance(template.value, str):
+            yield template.value
+        # a tuple or list (latter *should* not happen) made of consts,
+        # yield the consts that are strings.  We could warn here for
+        # non string values
+        elif isinstance(node, nodes.Include) and isinstance(
+            template.value, (tuple, list)
+        ):
+            for template_name in template.value:
+                if isinstance(template_name, str):
+                    yield template_name
+        # something else we don't care about, we could warn here
+        else:
+            yield None
diff --git a/src/jinja2/nativetypes.py b/src/jinja2/nativetypes.py
index 9eae726..71db8cc 100644
--- a/src/jinja2/nativetypes.py
+++ b/src/jinja2/nativetypes.py
@@ -4,6 +4,7 @@ from ast import parse
 from itertools import chain
 from itertools import islice
 from types import GeneratorType
+
 from . import nodes
 from .compiler import CodeGenerator
 from .compiler import Frame
@@ -12,7 +13,7 @@ from .environment import Environment
 from .environment import Template


-def native_concat(values: t.Iterable[t.Any]) ->t.Optional[t.Any]:
+def native_concat(values: t.Iterable[t.Any]) -> t.Optional[t.Any]:
     """Return a native Python type from the list of compiled nodes. If
     the result is a single node, its value is returned. Otherwise, the
     nodes are concatenated as strings. If the result can be parsed with
@@ -21,7 +22,29 @@ def native_concat(values: t.Iterable[t.Any]) ->t.Optional[t.Any]:

     :param values: Iterable of outputs to concatenate.
     """
-    pass
+    head = list(islice(values, 2))
+
+    if not head:
+        return None
+
+    if len(head) == 1:
+        raw = head[0]
+        if not isinstance(raw, str):
+            return raw
+    else:
+        if isinstance(values, GeneratorType):
+            values = chain(head, values)
+        raw = "".join([str(v) for v in values])
+
+    try:
+        return literal_eval(
+            # In Python 3.10+ ast.literal_eval removes leading spaces/tabs
+            # from the given string. For backwards compatibility we need to
+            # parse the string ourselves without removing leading spaces/tabs.
+            parse(raw, mode="eval")
+        )
+    except (ValueError, SyntaxError, MemoryError):
+        return raw


 class NativeCodeGenerator(CodeGenerator):
@@ -29,24 +52,79 @@ class NativeCodeGenerator(CodeGenerator):
     ``str()`` around output nodes.
     """

+    @staticmethod
+    def _default_finalize(value: t.Any) -> t.Any:
+        return value
+
+    def _output_const_repr(self, group: t.Iterable[t.Any]) -> str:
+        return repr("".join([str(v) for v in group]))
+
+    def _output_child_to_const(
+        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
+    ) -> t.Any:
+        const = node.as_const(frame.eval_ctx)
+
+        if not has_safe_repr(const):
+            raise nodes.Impossible()
+
+        if isinstance(node, nodes.TemplateData):
+            return const
+
+        return finalize.const(const)  # type: ignore
+
+    def _output_child_pre(
+        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
+    ) -> None:
+        if finalize.src is not None:
+            self.write(finalize.src)
+
+    def _output_child_post(
+        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
+    ) -> None:
+        if finalize.src is not None:
+            self.write(")")
+

 class NativeEnvironment(Environment):
     """An environment that renders templates to native Python types."""
+
     code_generator_class = NativeCodeGenerator
-    concat = staticmethod(native_concat)
+    concat = staticmethod(native_concat)  # type: ignore


 class NativeTemplate(Template):
     environment_class = NativeEnvironment

-    def render(self, *args: t.Any, **kwargs: t.Any) ->t.Any:
+    def render(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
         """Render the template to produce a native Python type. If the
         result is a single node, its value is returned. Otherwise, the
         nodes are concatenated as strings. If the result can be parsed
         with :func:`ast.literal_eval`, the parsed value is returned.
         Otherwise, the string is returned.
         """
-        pass
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            return self.environment_class.concat(  # type: ignore
+                self.root_render_func(ctx)
+            )
+        except Exception:
+            return self.environment.handle_exception()
+
+    async def render_async(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
+        if not self.environment.is_async:
+            raise RuntimeError(
+                "The environment was not created with async mode enabled."
+            )
+
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            return self.environment_class.concat(  # type: ignore
+                [n async for n in self.root_render_func(ctx)]  # type: ignore
+            )
+        except Exception:
+            return self.environment.handle_exception()


 NativeEnvironment.template_class = NativeTemplate
diff --git a/src/jinja2/nodes.py b/src/jinja2/nodes.py
index 4ec1d17..2f93b90 100644
--- a/src/jinja2/nodes.py
+++ b/src/jinja2/nodes.py
@@ -2,25 +2,49 @@
 some node tree helper functions used by the parser and compiler in order
 to normalize nodes.
 """
+
 import inspect
 import operator
 import typing as t
 from collections import deque
+
 from markupsafe import Markup
+
 from .utils import _PassArg
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
+
     from .environment import Environment
-_NodeBound = t.TypeVar('_NodeBound', bound='Node')
-_binop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {'*':
-    operator.mul, '/': operator.truediv, '//': operator.floordiv, '**':
-    operator.pow, '%': operator.mod, '+': operator.add, '-': operator.sub}
-_uaop_to_func: t.Dict[str, t.Callable[[t.Any], t.Any]] = {'not': operator.
-    not_, '+': operator.pos, '-': operator.neg}
-_cmpop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {'eq':
-    operator.eq, 'ne': operator.ne, 'gt': operator.gt, 'gteq': operator.ge,
-    'lt': operator.lt, 'lteq': operator.le, 'in': lambda a, b: a in b,
-    'notin': lambda a, b: a not in b}
+
+_NodeBound = t.TypeVar("_NodeBound", bound="Node")
+
+_binop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
+    "*": operator.mul,
+    "/": operator.truediv,
+    "//": operator.floordiv,
+    "**": operator.pow,
+    "%": operator.mod,
+    "+": operator.add,
+    "-": operator.sub,
+}
+
+_uaop_to_func: t.Dict[str, t.Callable[[t.Any], t.Any]] = {
+    "not": operator.not_,
+    "+": operator.pos,
+    "-": operator.neg,
+}
+
+_cmpop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
+    "eq": operator.eq,
+    "ne": operator.ne,
+    "gt": operator.gt,
+    "gteq": operator.ge,
+    "lt": operator.lt,
+    "lteq": operator.le,
+    "in": lambda a, b: a in b,
+    "notin": lambda a, b: a not in b,
+}


 class Impossible(Exception):
@@ -32,15 +56,15 @@ class NodeType(type):
     inheritance.  fields and attributes from the parent class are
     automatically forwarded to the child."""

-    def __new__(mcs, name, bases, d):
-        for attr in ('fields', 'attributes'):
+    def __new__(mcs, name, bases, d):  # type: ignore
+        for attr in "fields", "attributes":
             storage: t.List[t.Tuple[str, ...]] = []
             storage.extend(getattr(bases[0] if bases else object, attr, ()))
             storage.extend(d.get(attr, ()))
-            assert len(bases) <= 1, 'multiple inheritance not allowed'
-            assert len(storage) == len(set(storage)), 'layout conflict'
+            assert len(bases) <= 1, "multiple inheritance not allowed"
+            assert len(storage) == len(set(storage)), "layout conflict"
             d[attr] = tuple(storage)
-        d.setdefault('abstract', False)
+        d.setdefault("abstract", False)
         return type.__new__(mcs, name, bases, d)


@@ -49,8 +73,9 @@ class EvalContext:
     to it in extensions.
     """

-    def __init__(self, environment: 'Environment', template_name: t.
-        Optional[str]=None) ->None:
+    def __init__(
+        self, environment: "Environment", template_name: t.Optional[str] = None
+    ) -> None:
         self.environment = environment
         if callable(environment.autoescape):
             self.autoescape = environment.autoescape(template_name)
@@ -58,6 +83,24 @@ class EvalContext:
             self.autoescape = environment.autoescape
         self.volatile = False

+    def save(self) -> t.Mapping[str, t.Any]:
+        return self.__dict__.copy()
+
+    def revert(self, old: t.Mapping[str, t.Any]) -> None:
+        self.__dict__.clear()
+        self.__dict__.update(old)
+
+
+def get_eval_context(node: "Node", ctx: t.Optional[EvalContext]) -> EvalContext:
+    if ctx is None:
+        if node.environment is None:
+            raise RuntimeError(
+                "if no eval context is passed, the node must have an"
+                " attached environment."
+            )
+        return EvalContext(node.environment)
+    return ctx
+

 class Node(metaclass=NodeType):
     """Baseclass for all Jinja nodes.  There are a number of nodes available
@@ -75,96 +118,176 @@ class Node(metaclass=NodeType):
     The `environment` attribute is set at the end of the parsing process for
     all nodes automatically.
     """
+
     fields: t.Tuple[str, ...] = ()
-    attributes: t.Tuple[str, ...] = ('lineno', 'environment')
+    attributes: t.Tuple[str, ...] = ("lineno", "environment")
     abstract = True
+
     lineno: int
-    environment: t.Optional['Environment']
+    environment: t.Optional["Environment"]

-    def __init__(self, *fields: t.Any, **attributes: t.Any) ->None:
+    def __init__(self, *fields: t.Any, **attributes: t.Any) -> None:
         if self.abstract:
-            raise TypeError('abstract nodes are not instantiable')
+            raise TypeError("abstract nodes are not instantiable")
         if fields:
             if len(fields) != len(self.fields):
                 if not self.fields:
-                    raise TypeError(
-                        f'{type(self).__name__!r} takes 0 arguments')
+                    raise TypeError(f"{type(self).__name__!r} takes 0 arguments")
                 raise TypeError(
-                    f"{type(self).__name__!r} takes 0 or {len(self.fields)} argument{'s' if len(self.fields) != 1 else ''}"
-                    )
+                    f"{type(self).__name__!r} takes 0 or {len(self.fields)}"
+                    f" argument{'s' if len(self.fields) != 1 else ''}"
+                )
             for name, arg in zip(self.fields, fields):
                 setattr(self, name, arg)
         for attr in self.attributes:
             setattr(self, attr, attributes.pop(attr, None))
         if attributes:
-            raise TypeError(f'unknown attribute {next(iter(attributes))!r}')
+            raise TypeError(f"unknown attribute {next(iter(attributes))!r}")

-    def iter_fields(self, exclude: t.Optional[t.Container[str]]=None, only:
-        t.Optional[t.Container[str]]=None) ->t.Iterator[t.Tuple[str, t.Any]]:
+    def iter_fields(
+        self,
+        exclude: t.Optional[t.Container[str]] = None,
+        only: t.Optional[t.Container[str]] = None,
+    ) -> t.Iterator[t.Tuple[str, t.Any]]:
         """This method iterates over all fields that are defined and yields
         ``(key, value)`` tuples.  Per default all fields are returned, but
         it's possible to limit that to some fields by providing the `only`
         parameter or to exclude some using the `exclude` parameter.  Both
         should be sets or tuples of field names.
         """
-        pass
-
-    def iter_child_nodes(self, exclude: t.Optional[t.Container[str]]=None,
-        only: t.Optional[t.Container[str]]=None) ->t.Iterator['Node']:
+        for name in self.fields:
+            if (
+                (exclude is None and only is None)
+                or (exclude is not None and name not in exclude)
+                or (only is not None and name in only)
+            ):
+                try:
+                    yield name, getattr(self, name)
+                except AttributeError:
+                    pass
+
+    def iter_child_nodes(
+        self,
+        exclude: t.Optional[t.Container[str]] = None,
+        only: t.Optional[t.Container[str]] = None,
+    ) -> t.Iterator["Node"]:
         """Iterates over all direct child nodes of the node.  This iterates
         over all fields and yields the values of they are nodes.  If the value
         of a field is a list all the nodes in that list are returned.
         """
-        pass
-
-    def find(self, node_type: t.Type[_NodeBound]) ->t.Optional[_NodeBound]:
+        for _, item in self.iter_fields(exclude, only):
+            if isinstance(item, list):
+                for n in item:
+                    if isinstance(n, Node):
+                        yield n
+            elif isinstance(item, Node):
+                yield item
+
+    def find(self, node_type: t.Type[_NodeBound]) -> t.Optional[_NodeBound]:
         """Find the first node of a given type.  If no such node exists the
         return value is `None`.
         """
-        pass
+        for result in self.find_all(node_type):
+            return result
+
+        return None

-    def find_all(self, node_type: t.Union[t.Type[_NodeBound], t.Tuple[t.
-        Type[_NodeBound], ...]]) ->t.Iterator[_NodeBound]:
+    def find_all(
+        self, node_type: t.Union[t.Type[_NodeBound], t.Tuple[t.Type[_NodeBound], ...]]
+    ) -> t.Iterator[_NodeBound]:
         """Find all the nodes of a given type.  If the type is a tuple,
         the check is performed for any of the tuple items.
         """
-        pass
+        for child in self.iter_child_nodes():
+            if isinstance(child, node_type):
+                yield child  # type: ignore
+            yield from child.find_all(node_type)

-    def set_ctx(self, ctx: str) ->'Node':
+    def set_ctx(self, ctx: str) -> "Node":
         """Reset the context of a node and all child nodes.  Per default the
         parser will all generate nodes that have a 'load' context as it's the
         most common one.  This method is used in the parser to set assignment
         targets and other nodes to a store context.
         """
-        pass
-
-    def set_lineno(self, lineno: int, override: bool=False) ->'Node':
+        todo = deque([self])
+        while todo:
+            node = todo.popleft()
+            if "ctx" in node.fields:
+                node.ctx = ctx  # type: ignore
+            todo.extend(node.iter_child_nodes())
+        return self
+
+    def set_lineno(self, lineno: int, override: bool = False) -> "Node":
         """Set the line numbers of the node and children."""
-        pass
-
-    def set_environment(self, environment: 'Environment') ->'Node':
+        todo = deque([self])
+        while todo:
+            node = todo.popleft()
+            if "lineno" in node.attributes:
+                if node.lineno is None or override:
+                    node.lineno = lineno
+            todo.extend(node.iter_child_nodes())
+        return self
+
+    def set_environment(self, environment: "Environment") -> "Node":
         """Set the environment for all nodes."""
-        pass
-
-    def __eq__(self, other: t.Any) ->bool:
+        todo = deque([self])
+        while todo:
+            node = todo.popleft()
+            node.environment = environment
+            todo.extend(node.iter_child_nodes())
+        return self
+
+    def __eq__(self, other: t.Any) -> bool:
         if type(self) is not type(other):
             return NotImplemented
+
         return tuple(self.iter_fields()) == tuple(other.iter_fields())
+
     __hash__ = object.__hash__

-    def __repr__(self) ->str:
-        args_str = ', '.join(f'{a}={getattr(self, a, None)!r}' for a in
-            self.fields)
-        return f'{type(self).__name__}({args_str})'
+    def __repr__(self) -> str:
+        args_str = ", ".join(f"{a}={getattr(self, a, None)!r}" for a in self.fields)
+        return f"{type(self).__name__}({args_str})"
+
+    def dump(self) -> str:
+        def _dump(node: t.Union[Node, t.Any]) -> None:
+            if not isinstance(node, Node):
+                buf.append(repr(node))
+                return
+
+            buf.append(f"nodes.{type(node).__name__}(")
+            if not node.fields:
+                buf.append(")")
+                return
+            for idx, field in enumerate(node.fields):
+                if idx:
+                    buf.append(", ")
+                value = getattr(node, field)
+                if isinstance(value, list):
+                    buf.append("[")
+                    for idx, item in enumerate(value):
+                        if idx:
+                            buf.append(", ")
+                        _dump(item)
+                    buf.append("]")
+                else:
+                    _dump(value)
+            buf.append(")")
+
+        buf: t.List[str] = []
+        _dump(self)
+        return "".join(buf)


 class Stmt(Node):
     """Base node for all statements."""
+
     abstract = True


 class Helper(Node):
     """Nodes that exist in a specific context only."""
+
     abstract = True


@@ -172,7 +295,8 @@ class Template(Node):
     """Node that represents a template.  This must be the outermost node that
     is passed to the compiler.
     """
-    fields = 'body',
+
+    fields = ("body",)
     body: t.List[Node]


@@ -180,14 +304,16 @@ class Output(Stmt):
     """A node that holds multiple expressions which are then printed out.
     This is used both for the `print` statement and the regular template data.
     """
-    fields = 'nodes',
-    nodes: t.List['Expr']
+
+    fields = ("nodes",)
+    nodes: t.List["Expr"]


 class Extends(Stmt):
     """Represents an extends statement."""
-    fields = 'template',
-    template: 'Expr'
+
+    fields = ("template",)
+    template: "Expr"


 class For(Stmt):
@@ -198,7 +324,8 @@ class For(Stmt):

     For filtered nodes an expression can be stored as `test`, otherwise `None`.
     """
-    fields = 'target', 'iter', 'body', 'else_', 'test', 'recursive'
+
+    fields = ("target", "iter", "body", "else_", "test", "recursive")
     target: Node
     iter: Node
     body: t.List[Node]
@@ -209,10 +336,11 @@ class For(Stmt):

 class If(Stmt):
     """If `test` is true, `body` is rendered, else `else_`."""
-    fields = 'test', 'body', 'elif_', 'else_'
+
+    fields = ("test", "body", "elif_", "else_")
     test: Node
     body: t.List[Node]
-    elif_: t.List['If']
+    elif_: t.List["If"]
     else_: t.List[Node]


@@ -221,10 +349,11 @@ class Macro(Stmt):
     arguments and `defaults` a list of defaults if there are any.  `body` is
     a list of nodes for the macro body.
     """
-    fields = 'name', 'args', 'defaults', 'body'
+
+    fields = ("name", "args", "defaults", "body")
     name: str
-    args: t.List['Name']
-    defaults: t.List['Expr']
+    args: t.List["Name"]
+    defaults: t.List["Expr"]
     body: t.List[Node]


@@ -232,18 +361,20 @@ class CallBlock(Stmt):
     """Like a macro without a name but a call instead.  `call` is called with
     the unnamed macro as `caller` argument this node holds.
     """
-    fields = 'call', 'args', 'defaults', 'body'
-    call: 'Call'
-    args: t.List['Name']
-    defaults: t.List['Expr']
+
+    fields = ("call", "args", "defaults", "body")
+    call: "Call"
+    args: t.List["Name"]
+    defaults: t.List["Expr"]
     body: t.List[Node]


 class FilterBlock(Stmt):
     """Node for filter sections."""
-    fields = 'body', 'filter'
+
+    fields = ("body", "filter")
     body: t.List[Node]
-    filter: 'Filter'
+    filter: "Filter"


 class With(Stmt):
@@ -252,9 +383,10 @@ class With(Stmt):

     .. versionadded:: 2.9.3
     """
-    fields = 'targets', 'values', 'body'
-    targets: t.List['Expr']
-    values: t.List['Expr']
+
+    fields = ("targets", "values", "body")
+    targets: t.List["Expr"]
+    values: t.List["Expr"]
     body: t.List[Node]


@@ -264,7 +396,8 @@ class Block(Stmt):
     .. versionchanged:: 3.0.0
         the `required` field was added.
     """
-    fields = 'name', 'body', 'scoped', 'required'
+
+    fields = ("name", "body", "scoped", "required")
     name: str
     body: t.List[Node]
     scoped: bool
@@ -273,16 +406,18 @@ class Block(Stmt):

 class Include(Stmt):
     """A node that represents the include tag."""
-    fields = 'template', 'with_context', 'ignore_missing'
-    template: 'Expr'
+
+    fields = ("template", "with_context", "ignore_missing")
+    template: "Expr"
     with_context: bool
     ignore_missing: bool


 class Import(Stmt):
     """A node that represents the import tag."""
-    fields = 'template', 'target', 'with_context'
-    template: 'Expr'
+
+    fields = ("template", "target", "with_context")
+    template: "Expr"
     target: str
     with_context: bool

@@ -298,38 +433,43 @@ class FromImport(Stmt):

     The list of names may contain tuples if aliases are wanted.
     """
-    fields = 'template', 'names', 'with_context'
-    template: 'Expr'
+
+    fields = ("template", "names", "with_context")
+    template: "Expr"
     names: t.List[t.Union[str, t.Tuple[str, str]]]
     with_context: bool


 class ExprStmt(Stmt):
     """A statement that evaluates an expression and discards the result."""
-    fields = 'node',
+
+    fields = ("node",)
     node: Node


 class Assign(Stmt):
     """Assigns an expression to a target."""
-    fields = 'target', 'node'
-    target: 'Expr'
+
+    fields = ("target", "node")
+    target: "Expr"
     node: Node


 class AssignBlock(Stmt):
     """Assigns a block to a target."""
-    fields = 'target', 'filter', 'body'
-    target: 'Expr'
-    filter: t.Optional['Filter']
+
+    fields = ("target", "filter", "body")
+    target: "Expr"
+    filter: t.Optional["Filter"]
     body: t.List[Node]


 class Expr(Node):
     """Baseclass for all expressions."""
+
     abstract = True

-    def as_const(self, eval_ctx: t.Optional[EvalContext]=None) ->t.Any:
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
         """Return the value of the expression as constant or raise
         :exc:`Impossible` if this was not possible.

@@ -340,29 +480,61 @@ class Expr(Node):
         .. versionchanged:: 2.4
            the `eval_ctx` parameter was added.
         """
-        pass
+        raise Impossible()

-    def can_assign(self) ->bool:
+    def can_assign(self) -> bool:
         """Check if it's possible to assign something to this node."""
-        pass
+        return False


 class BinExpr(Expr):
     """Baseclass for all binary expressions."""
-    fields = 'left', 'right'
+
+    fields = ("left", "right")
     left: Expr
     right: Expr
     operator: str
     abstract = True

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        # intercepted operators cannot be folded at compile time
+        if (
+            eval_ctx.environment.sandboxed
+            and self.operator in eval_ctx.environment.intercepted_binops  # type: ignore
+        ):
+            raise Impossible()
+        f = _binop_to_func[self.operator]
+        try:
+            return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
+        except Exception as e:
+            raise Impossible() from e
+

 class UnaryExpr(Expr):
     """Baseclass for all unary expressions."""
-    fields = 'node',
+
+    fields = ("node",)
     node: Expr
     operator: str
     abstract = True

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        # intercepted operators cannot be folded at compile time
+        if (
+            eval_ctx.environment.sandboxed
+            and self.operator in eval_ctx.environment.intercepted_unops  # type: ignore
+        ):
+            raise Impossible()
+        f = _uaop_to_func[self.operator]
+        try:
+            return f(self.node.as_const(eval_ctx))
+        except Exception as e:
+            raise Impossible() from e
+

 class Name(Expr):
     """Looks up a name or stores a value in a name.
@@ -372,20 +544,33 @@ class Name(Expr):
     -   `load`: load that name
     -   `param`: like `store` but if the name was defined as function parameter.
     """
-    fields = 'name', 'ctx'
+
+    fields = ("name", "ctx")
     name: str
     ctx: str

+    def can_assign(self) -> bool:
+        return self.name not in {"true", "false", "none", "True", "False", "None"}
+

 class NSRef(Expr):
     """Reference to a namespace value assignment"""
-    fields = 'name', 'attr'
+
+    fields = ("name", "attr")
     name: str
     attr: str

+    def can_assign(self) -> bool:
+        # We don't need any special checks here; NSRef assignments have a
+        # runtime check to ensure the target is a namespace object which will
+        # have been checked already as it is created using a normal assignment
+        # which goes through a `Name` node.
+        return True
+

 class Literal(Expr):
     """Baseclass for literals."""
+
     abstract = True


@@ -395,75 +580,164 @@ class Const(Literal):
     complex values such as lists too.  Only constants with a safe
     representation (objects where ``eval(repr(x)) == x`` is true).
     """
-    fields = 'value',
+
+    fields = ("value",)
     value: t.Any

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        return self.value
+
     @classmethod
-    def from_untrusted(cls, value: t.Any, lineno: t.Optional[int]=None,
-        environment: 't.Optional[Environment]'=None) ->'Const':
+    def from_untrusted(
+        cls,
+        value: t.Any,
+        lineno: t.Optional[int] = None,
+        environment: "t.Optional[Environment]" = None,
+    ) -> "Const":
         """Return a const object if the value is representable as
         constant value in the generated code, otherwise it will raise
         an `Impossible` exception.
         """
-        pass
+        from .compiler import has_safe_repr
+
+        if not has_safe_repr(value):
+            raise Impossible()
+        return cls(value, lineno=lineno, environment=environment)


 class TemplateData(Literal):
     """A constant template string."""
-    fields = 'data',
+
+    fields = ("data",)
     data: str

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        if eval_ctx.volatile:
+            raise Impossible()
+        if eval_ctx.autoescape:
+            return Markup(self.data)
+        return self.data
+

 class Tuple(Literal):
     """For loop unpacking and some other things like multiple arguments
     for subscripts.  Like for :class:`Name` `ctx` specifies if the tuple
     is used for loading the names or storing.
     """
-    fields = 'items', 'ctx'
+
+    fields = ("items", "ctx")
     items: t.List[Expr]
     ctx: str

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[t.Any, ...]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return tuple(x.as_const(eval_ctx) for x in self.items)
+
+    def can_assign(self) -> bool:
+        for item in self.items:
+            if not item.can_assign():
+                return False
+        return True
+

 class List(Literal):
     """Any list literal such as ``[1, 2, 3]``"""
-    fields = 'items',
+
+    fields = ("items",)
     items: t.List[Expr]

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.List[t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return [x.as_const(eval_ctx) for x in self.items]
+

 class Dict(Literal):
     """Any dict literal such as ``{1: 2, 3: 4}``.  The items must be a list of
     :class:`Pair` nodes.
     """
-    fields = 'items',
-    items: t.List['Pair']
+
+    fields = ("items",)
+    items: t.List["Pair"]
+
+    def as_const(
+        self, eval_ctx: t.Optional[EvalContext] = None
+    ) -> t.Dict[t.Any, t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return dict(x.as_const(eval_ctx) for x in self.items)


 class Pair(Helper):
     """A key, value pair for dicts."""
-    fields = 'key', 'value'
+
+    fields = ("key", "value")
     key: Expr
     value: Expr

+    def as_const(
+        self, eval_ctx: t.Optional[EvalContext] = None
+    ) -> t.Tuple[t.Any, t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
+

 class Keyword(Helper):
     """A key, value pair for keyword arguments where key is a string."""
-    fields = 'key', 'value'
+
+    fields = ("key", "value")
     key: str
     value: Expr

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[str, t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return self.key, self.value.as_const(eval_ctx)
+

 class CondExpr(Expr):
     """A conditional expression (inline if expression).  (``{{
     foo if bar else baz }}``)
     """
-    fields = 'test', 'expr1', 'expr2'
+
+    fields = ("test", "expr1", "expr2")
     test: Expr
     expr1: Expr
     expr2: t.Optional[Expr]

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        if self.test.as_const(eval_ctx):
+            return self.expr1.as_const(eval_ctx)
+
+        # if we evaluate to an undefined object, we better do that at runtime
+        if self.expr2 is None:
+            raise Impossible()
+
+        return self.expr2.as_const(eval_ctx)
+
+
+def args_as_const(
+    node: t.Union["_FilterTestCommon", "Call"], eval_ctx: t.Optional[EvalContext]
+) -> t.Tuple[t.List[t.Any], t.Dict[t.Any, t.Any]]:
+    args = [x.as_const(eval_ctx) for x in node.args]
+    kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
+
+    if node.dyn_args is not None:
+        try:
+            args.extend(node.dyn_args.as_const(eval_ctx))
+        except Exception as e:
+            raise Impossible() from e
+
+    if node.dyn_kwargs is not None:
+        try:
+            kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
+        except Exception as e:
+            raise Impossible() from e
+
+    return args, kwargs
+

 class _FilterTestCommon(Expr):
-    fields = 'node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs'
+    fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
     node: Expr
     name: str
     args: t.List[Expr]
@@ -473,6 +747,42 @@ class _FilterTestCommon(Expr):
     abstract = True
     _is_filter = True

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        if eval_ctx.volatile:
+            raise Impossible()
+
+        if self._is_filter:
+            env_map = eval_ctx.environment.filters
+        else:
+            env_map = eval_ctx.environment.tests
+
+        func = env_map.get(self.name)
+        pass_arg = _PassArg.from_obj(func)  # type: ignore
+
+        if func is None or pass_arg is _PassArg.context:
+            raise Impossible()
+
+        if eval_ctx.environment.is_async and (
+            getattr(func, "jinja_async_variant", False) is True
+            or inspect.iscoroutinefunction(func)
+        ):
+            raise Impossible()
+
+        args, kwargs = args_as_const(self, eval_ctx)
+        args.insert(0, self.node.as_const(eval_ctx))
+
+        if pass_arg is _PassArg.eval_context:
+            args.insert(0, eval_ctx)
+        elif pass_arg is _PassArg.environment:
+            args.insert(0, eval_ctx.environment)
+
+        try:
+            return func(*args, **kwargs)
+        except Exception as e:
+            raise Impossible() from e
+

 class Filter(_FilterTestCommon):
     """Apply a filter to an expression. ``name`` is the name of the
@@ -481,7 +791,14 @@ class Filter(_FilterTestCommon):
     If ``node`` is ``None``, the filter is being used in a filter block
     and is applied to the content of the block.
     """
-    node: t.Optional[Expr]
+
+    node: t.Optional[Expr]  # type: ignore
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        if self.node is None:
+            raise Impossible()
+
+        return super().as_const(eval_ctx=eval_ctx)


 class Test(_FilterTestCommon):
@@ -493,6 +810,7 @@ class Test(_FilterTestCommon):
         check for volatile, async, and ``@pass_context`` etc.
         decorators.
     """
+
     _is_filter = False


@@ -503,7 +821,8 @@ class Call(Expr):
     node for dynamic positional (``*args``) or keyword (``**kwargs``)
     arguments.
     """
-    fields = 'node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs'
+
+    fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
     node: Expr
     args: t.List[Expr]
     kwargs: t.List[Keyword]
@@ -513,123 +832,209 @@ class Call(Expr):

 class Getitem(Expr):
     """Get an attribute or item from an expression and prefer the item."""
-    fields = 'node', 'arg', 'ctx'
+
+    fields = ("node", "arg", "ctx")
     node: Expr
     arg: Expr
     ctx: str

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        if self.ctx != "load":
+            raise Impossible()
+
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        try:
+            return eval_ctx.environment.getitem(
+                self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
+            )
+        except Exception as e:
+            raise Impossible() from e
+

 class Getattr(Expr):
     """Get an attribute or item from an expression that is a ascii-only
     bytestring and prefer the attribute.
     """
-    fields = 'node', 'attr', 'ctx'
+
+    fields = ("node", "attr", "ctx")
     node: Expr
     attr: str
     ctx: str

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        if self.ctx != "load":
+            raise Impossible()
+
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        try:
+            return eval_ctx.environment.getattr(self.node.as_const(eval_ctx), self.attr)
+        except Exception as e:
+            raise Impossible() from e
+

 class Slice(Expr):
     """Represents a slice object.  This must only be used as argument for
     :class:`Subscript`.
     """
-    fields = 'start', 'stop', 'step'
+
+    fields = ("start", "stop", "step")
     start: t.Optional[Expr]
     stop: t.Optional[Expr]
     step: t.Optional[Expr]

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> slice:
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        def const(obj: t.Optional[Expr]) -> t.Optional[t.Any]:
+            if obj is None:
+                return None
+            return obj.as_const(eval_ctx)
+
+        return slice(const(self.start), const(self.stop), const(self.step))
+

 class Concat(Expr):
     """Concatenates the list of expressions provided after converting
     them to strings.
     """
-    fields = 'nodes',
+
+    fields = ("nodes",)
     nodes: t.List[Expr]

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return "".join(str(x.as_const(eval_ctx)) for x in self.nodes)
+

 class Compare(Expr):
     """Compares an expression with some other expressions.  `ops` must be a
     list of :class:`Operand`\\s.
     """
-    fields = 'expr', 'ops'
+
+    fields = ("expr", "ops")
     expr: Expr
-    ops: t.List['Operand']
+    ops: t.List["Operand"]
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        result = value = self.expr.as_const(eval_ctx)
+
+        try:
+            for op in self.ops:
+                new_value = op.expr.as_const(eval_ctx)
+                result = _cmpop_to_func[op.op](value, new_value)
+
+                if not result:
+                    return False
+
+                value = new_value
+        except Exception as e:
+            raise Impossible() from e
+
+        return result


 class Operand(Helper):
     """Holds an operator and an expression."""
-    fields = 'op', 'expr'
+
+    fields = ("op", "expr")
     op: str
     expr: Expr


 class Mul(BinExpr):
     """Multiplies the left with the right node."""
-    operator = '*'
+
+    operator = "*"


 class Div(BinExpr):
     """Divides the left by the right node."""
-    operator = '/'
+
+    operator = "/"


 class FloorDiv(BinExpr):
     """Divides the left by the right node and converts the
     result into an integer by truncating.
     """
-    operator = '//'
+
+    operator = "//"


 class Add(BinExpr):
     """Add the left to the right node."""
-    operator = '+'
+
+    operator = "+"


 class Sub(BinExpr):
     """Subtract the right from the left node."""
-    operator = '-'
+
+    operator = "-"


 class Mod(BinExpr):
     """Left modulo right."""
-    operator = '%'
+
+    operator = "%"


 class Pow(BinExpr):
     """Left to the power of right."""
-    operator = '**'
+
+    operator = "**"


 class And(BinExpr):
     """Short circuited AND."""
-    operator = 'and'
+
+    operator = "and"
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)


 class Or(BinExpr):
     """Short circuited OR."""
-    operator = 'or'
+
+    operator = "or"
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)


 class Not(UnaryExpr):
     """Negate the expression."""
-    operator = 'not'
+
+    operator = "not"


 class Neg(UnaryExpr):
     """Make the expression negative."""
-    operator = '-'
+
+    operator = "-"


 class Pos(UnaryExpr):
     """Make the expression positive (noop for most expressions)"""
-    operator = '+'
+
+    operator = "+"
+
+
+# Helpers for extensions


 class EnvironmentAttribute(Expr):
     """Loads an attribute from the environment object.  This is useful for
     extensions that want to call a callback stored on the environment.
     """
-    fields = 'name',
+
+    fields = ("name",)
     name: str


@@ -640,7 +1045,8 @@ class ExtensionAttribute(Expr):
     This node is usually constructed by calling the
     :meth:`~jinja2.ext.Extension.attr` method on an extension.
     """
-    fields = 'identifier', 'name'
+
+    fields = ("identifier", "name")
     identifier: str
     name: str

@@ -651,7 +1057,8 @@ class ImportedName(Expr):
     function from the cgi module on evaluation.  Imports are optimized by the
     compiler so there is no need to assign them to local variables.
     """
-    fields = 'importname',
+
+    fields = ("importname",)
     importname: str


@@ -662,20 +1069,27 @@ class InternalName(Expr):
     a new identifier for you.  This identifier is not available from the
     template and is not treated specially by the compiler.
     """
-    fields = 'name',
+
+    fields = ("name",)
     name: str

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         raise TypeError(
-            "Can't create internal names.  Use the `free_identifier` method on a parser."
-            )
+            "Can't create internal names.  Use the "
+            "`free_identifier` method on a parser."
+        )


 class MarkSafe(Expr):
     """Mark the wrapped expression as safe (wrap it as `Markup`)."""
-    fields = 'expr',
+
+    fields = ("expr",)
     expr: Expr

+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> Markup:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return Markup(self.expr.as_const(eval_ctx))
+

 class MarkSafeIfAutoescape(Expr):
     """Mark the wrapped expression as safe (wrap it as `Markup`) but
@@ -683,9 +1097,21 @@ class MarkSafeIfAutoescape(Expr):

     .. versionadded:: 2.5
     """
-    fields = 'expr',
+
+    fields = ("expr",)
     expr: Expr

+    def as_const(
+        self, eval_ctx: t.Optional[EvalContext] = None
+    ) -> t.Union[Markup, t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        if eval_ctx.volatile:
+            raise Impossible()
+        expr = self.expr.as_const(eval_ctx)
+        if eval_ctx.autoescape:
+            return Markup(expr)
+        return expr
+

 class ContextReference(Expr):
     """Returns the current template context.  It can be used like a
@@ -724,7 +1150,8 @@ class Break(Stmt):

 class Scope(Stmt):
     """An artificial scope."""
-    fields = 'body',
+
+    fields = ("body",)
     body: t.List[Node]


@@ -741,7 +1168,8 @@ class OverlayScope(Stmt):

     .. versionadded:: 2.10
     """
-    fields = 'context', 'body'
+
+    fields = ("context", "body")
     context: Expr
     body: t.List[Node]

@@ -754,7 +1182,8 @@ class EvalContextModifier(Stmt):

         EvalContextModifier(options=[Keyword('autoescape', Const(True))])
     """
-    fields = 'options',
+
+    fields = ("options",)
     options: t.List[Keyword]


@@ -763,9 +1192,15 @@ class ScopedEvalContextModifier(EvalContextModifier):
     :class:`EvalContextModifier` but will only modify the
     :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
     """
-    fields = 'body',
+
+    fields = ("body",)
     body: t.List[Node]


-NodeType.__new__ = staticmethod(_failing_new)
+# make sure nobody creates custom nodes
+def _failing_new(*args: t.Any, **kwargs: t.Any) -> "te.NoReturn":
+    raise TypeError("can't create custom node types")
+
+
+NodeType.__new__ = staticmethod(_failing_new)  # type: ignore
 del _failing_new
diff --git a/src/jinja2/optimizer.py b/src/jinja2/optimizer.py
index 53d50e4..32d1c71 100644
--- a/src/jinja2/optimizer.py
+++ b/src/jinja2/optimizer.py
@@ -7,20 +7,42 @@ want. For example, loop unrolling doesn't work because unrolled loops
 would have a different scope. The solution would be a second syntax tree
 that stored the scoping rules.
 """
+
 import typing as t
+
 from . import nodes
 from .visitor import NodeTransformer
+
 if t.TYPE_CHECKING:
     from .environment import Environment


-def optimize(node: nodes.Node, environment: 'Environment') ->nodes.Node:
+def optimize(node: nodes.Node, environment: "Environment") -> nodes.Node:
     """The context hint can be used to perform an static optimization
     based on the context given."""
-    pass
+    optimizer = Optimizer(environment)
+    return t.cast(nodes.Node, optimizer.visit(node))


 class Optimizer(NodeTransformer):
-
-    def __init__(self, environment: 't.Optional[Environment]') ->None:
+    def __init__(self, environment: "t.Optional[Environment]") -> None:
         self.environment = environment
+
+    def generic_visit(
+        self, node: nodes.Node, *args: t.Any, **kwargs: t.Any
+    ) -> nodes.Node:
+        node = super().generic_visit(node, *args, **kwargs)
+
+        # Do constant folding. Some other nodes besides Expr have
+        # as_const, but folding them causes errors later on.
+        if isinstance(node, nodes.Expr):
+            try:
+                return nodes.Const.from_untrusted(
+                    node.as_const(args[0] if args else None),
+                    lineno=node.lineno,
+                    environment=self.environment,
+                )
+            except nodes.Impossible:
+                pass
+
+        return node
diff --git a/src/jinja2/parser.py b/src/jinja2/parser.py
index 05ce33d..0ec997f 100644
--- a/src/jinja2/parser.py
+++ b/src/jinja2/parser.py
@@ -1,22 +1,48 @@
 """Parse tokens from the lexer into nodes for the compiler."""
+
 import typing
 import typing as t
+
 from . import nodes
 from .exceptions import TemplateAssertionError
 from .exceptions import TemplateSyntaxError
 from .lexer import describe_token
 from .lexer import describe_token_expr
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
+
     from .environment import Environment
-_ImportInclude = t.TypeVar('_ImportInclude', nodes.Import, nodes.Include)
-_MacroCall = t.TypeVar('_MacroCall', nodes.Macro, nodes.CallBlock)
-_statement_keywords = frozenset(['for', 'if', 'block', 'extends', 'print',
-    'macro', 'include', 'from', 'import', 'set', 'with', 'autoescape'])
-_compare_operators = frozenset(['eq', 'ne', 'lt', 'lteq', 'gt', 'gteq'])
-_math_nodes: t.Dict[str, t.Type[nodes.Expr]] = {'add': nodes.Add, 'sub':
-    nodes.Sub, 'mul': nodes.Mul, 'div': nodes.Div, 'floordiv': nodes.
-    FloorDiv, 'mod': nodes.Mod}
+
+_ImportInclude = t.TypeVar("_ImportInclude", nodes.Import, nodes.Include)
+_MacroCall = t.TypeVar("_MacroCall", nodes.Macro, nodes.CallBlock)
+
+_statement_keywords = frozenset(
+    [
+        "for",
+        "if",
+        "block",
+        "extends",
+        "print",
+        "macro",
+        "include",
+        "from",
+        "import",
+        "set",
+        "with",
+        "autoescape",
+    ]
+)
+_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"])
+
+_math_nodes: t.Dict[str, t.Type[nodes.Expr]] = {
+    "add": nodes.Add,
+    "sub": nodes.Sub,
+    "mul": nodes.Mul,
+    "div": nodes.Div,
+    "floordiv": nodes.FloorDiv,
+    "mod": nodes.Mod,
+}


 class Parser:
@@ -24,16 +50,22 @@ class Parser:
     extensions and can be used to parse expressions or statements.
     """

-    def __init__(self, environment: 'Environment', source: str, name: t.
-        Optional[str]=None, filename: t.Optional[str]=None, state: t.
-        Optional[str]=None) ->None:
+    def __init__(
+        self,
+        environment: "Environment",
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> None:
         self.environment = environment
         self.stream = environment._tokenize(source, name, filename, state)
         self.name = name
         self.filename = filename
         self.closed = False
-        self.extensions: t.Dict[str, t.Callable[['Parser'], t.Union[nodes.
-            Node, t.List[nodes.Node]]]] = {}
+        self.extensions: t.Dict[
+            str, t.Callable[["Parser"], t.Union[nodes.Node, t.List[nodes.Node]]]
+        ] = {}
         for extension in environment.iter_extensions():
             for tag in extension.tags:
                 self.extensions[tag] = extension.parse
@@ -41,43 +73,129 @@ class Parser:
         self._tag_stack: t.List[str] = []
         self._end_token_stack: t.List[t.Tuple[str, ...]] = []

-    def fail(self, msg: str, lineno: t.Optional[int]=None, exc: t.Type[
-        TemplateSyntaxError]=TemplateSyntaxError) ->'te.NoReturn':
+    def fail(
+        self,
+        msg: str,
+        lineno: t.Optional[int] = None,
+        exc: t.Type[TemplateSyntaxError] = TemplateSyntaxError,
+    ) -> "te.NoReturn":
         """Convenience method that raises `exc` with the message, passed
         line number or last line number as well as the current name and
         filename.
         """
-        pass
+        if lineno is None:
+            lineno = self.stream.current.lineno
+        raise exc(msg, lineno, self.name, self.filename)
+
+    def _fail_ut_eof(
+        self,
+        name: t.Optional[str],
+        end_token_stack: t.List[t.Tuple[str, ...]],
+        lineno: t.Optional[int],
+    ) -> "te.NoReturn":
+        expected: t.Set[str] = set()
+        for exprs in end_token_stack:
+            expected.update(map(describe_token_expr, exprs))
+        if end_token_stack:
+            currently_looking: t.Optional[str] = " or ".join(
+                map(repr, map(describe_token_expr, end_token_stack[-1]))
+            )
+        else:
+            currently_looking = None
+
+        if name is None:
+            message = ["Unexpected end of template."]
+        else:
+            message = [f"Encountered unknown tag {name!r}."]
+
+        if currently_looking:
+            if name is not None and name in expected:
+                message.append(
+                    "You probably made a nesting mistake. Jinja is expecting this tag,"
+                    f" but currently looking for {currently_looking}."
+                )
+            else:
+                message.append(
+                    f"Jinja was looking for the following tags: {currently_looking}."
+                )
+
+        if self._tag_stack:
+            message.append(
+                "The innermost block that needs to be closed is"
+                f" {self._tag_stack[-1]!r}."
+            )

-    def fail_unknown_tag(self, name: str, lineno: t.Optional[int]=None
-        ) ->'te.NoReturn':
+        self.fail(" ".join(message), lineno)
+
+    def fail_unknown_tag(
+        self, name: str, lineno: t.Optional[int] = None
+    ) -> "te.NoReturn":
         """Called if the parser encounters an unknown tag.  Tries to fail
         with a human readable error message that could help to identify
         the problem.
         """
-        pass
+        self._fail_ut_eof(name, self._end_token_stack, lineno)

-    def fail_eof(self, end_tokens: t.Optional[t.Tuple[str, ...]]=None,
-        lineno: t.Optional[int]=None) ->'te.NoReturn':
+    def fail_eof(
+        self,
+        end_tokens: t.Optional[t.Tuple[str, ...]] = None,
+        lineno: t.Optional[int] = None,
+    ) -> "te.NoReturn":
         """Like fail_unknown_tag but for end of template situations."""
-        pass
+        stack = list(self._end_token_stack)
+        if end_tokens is not None:
+            stack.append(end_tokens)
+        self._fail_ut_eof(None, stack, lineno)

-    def is_tuple_end(self, extra_end_rules: t.Optional[t.Tuple[str, ...]]=None
-        ) ->bool:
+    def is_tuple_end(
+        self, extra_end_rules: t.Optional[t.Tuple[str, ...]] = None
+    ) -> bool:
         """Are we at the end of a tuple?"""
-        pass
+        if self.stream.current.type in ("variable_end", "block_end", "rparen"):
+            return True
+        elif extra_end_rules is not None:
+            return self.stream.current.test_any(extra_end_rules)  # type: ignore
+        return False

-    def free_identifier(self, lineno: t.Optional[int]=None
-        ) ->nodes.InternalName:
+    def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName:
         """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
-        pass
+        self._last_identifier += 1
+        rv = object.__new__(nodes.InternalName)
+        nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno)
+        return rv

-    def parse_statement(self) ->t.Union[nodes.Node, t.List[nodes.Node]]:
+    def parse_statement(self) -> t.Union[nodes.Node, t.List[nodes.Node]]:
         """Parse a single statement."""
-        pass
+        token = self.stream.current
+        if token.type != "name":
+            self.fail("tag name expected", token.lineno)
+        self._tag_stack.append(token.value)
+        pop_tag = True
+        try:
+            if token.value in _statement_keywords:
+                f = getattr(self, f"parse_{self.stream.current.value}")
+                return f()  # type: ignore
+            if token.value == "call":
+                return self.parse_call_block()
+            if token.value == "filter":
+                return self.parse_filter_block()
+            ext = self.extensions.get(token.value)
+            if ext is not None:
+                return ext(self)
+
+            # did not work out, remove the token we pushed by accident
+            # from the stack so that the unknown tag fail function can
+            # produce a proper error message.
+            self._tag_stack.pop()
+            pop_tag = False
+            self.fail_unknown_tag(token.value, token.lineno)
+        finally:
+            if pop_tag:
+                self._tag_stack.pop()

-    def parse_statements(self, end_tokens: t.Tuple[str, ...], drop_needle:
-        bool=False) ->t.List[nodes.Node]:
+    def parse_statements(
+        self, end_tokens: t.Tuple[str, ...], drop_needle: bool = False
+    ) -> t.List[nodes.Node]:
         """Parse multiple statements into a list until one of the end tokens
         is reached.  This is used to parse the body of statements as it also
         parses template data if appropriate.  The parser checks first if the
@@ -87,24 +205,278 @@ class Parser:
         the call is the matched end token.  If this is not wanted `drop_needle`
         can be set to `True` and the end token is removed.
         """
-        pass
+        # the first token may be a colon for python compatibility
+        self.stream.skip_if("colon")
+
+        # in the future it would be possible to add whole code sections
+        # by adding some sort of end of statement token and parsing those here.
+        self.stream.expect("block_end")
+        result = self.subparse(end_tokens)

-    def parse_set(self) ->t.Union[nodes.Assign, nodes.AssignBlock]:
+        # we reached the end of the template too early, the subparser
+        # does not check for this, so we do that now
+        if self.stream.current.type == "eof":
+            self.fail_eof(end_tokens)
+
+        if drop_needle:
+            next(self.stream)
+        return result
+
+    def parse_set(self) -> t.Union[nodes.Assign, nodes.AssignBlock]:
         """Parse an assign statement."""
-        pass
+        lineno = next(self.stream).lineno
+        target = self.parse_assign_target(with_namespace=True)
+        if self.stream.skip_if("assign"):
+            expr = self.parse_tuple()
+            return nodes.Assign(target, expr, lineno=lineno)
+        filter_node = self.parse_filter(None)
+        body = self.parse_statements(("name:endset",), drop_needle=True)
+        return nodes.AssignBlock(target, filter_node, body, lineno=lineno)

-    def parse_for(self) ->nodes.For:
+    def parse_for(self) -> nodes.For:
         """Parse a for loop."""
-        pass
+        lineno = self.stream.expect("name:for").lineno
+        target = self.parse_assign_target(extra_end_rules=("name:in",))
+        self.stream.expect("name:in")
+        iter = self.parse_tuple(
+            with_condexpr=False, extra_end_rules=("name:recursive",)
+        )
+        test = None
+        if self.stream.skip_if("name:if"):
+            test = self.parse_expression()
+        recursive = self.stream.skip_if("name:recursive")
+        body = self.parse_statements(("name:endfor", "name:else"))
+        if next(self.stream).value == "endfor":
+            else_ = []
+        else:
+            else_ = self.parse_statements(("name:endfor",), drop_needle=True)
+        return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno)

-    def parse_if(self) ->nodes.If:
+    def parse_if(self) -> nodes.If:
         """Parse an if construct."""
-        pass
+        node = result = nodes.If(lineno=self.stream.expect("name:if").lineno)
+        while True:
+            node.test = self.parse_tuple(with_condexpr=False)
+            node.body = self.parse_statements(("name:elif", "name:else", "name:endif"))
+            node.elif_ = []
+            node.else_ = []
+            token = next(self.stream)
+            if token.test("name:elif"):
+                node = nodes.If(lineno=self.stream.current.lineno)
+                result.elif_.append(node)
+                continue
+            elif token.test("name:else"):
+                result.else_ = self.parse_statements(("name:endif",), drop_needle=True)
+            break
+        return result
+
+    def parse_with(self) -> nodes.With:
+        node = nodes.With(lineno=next(self.stream).lineno)
+        targets: t.List[nodes.Expr] = []
+        values: t.List[nodes.Expr] = []
+        while self.stream.current.type != "block_end":
+            if targets:
+                self.stream.expect("comma")
+            target = self.parse_assign_target()
+            target.set_ctx("param")
+            targets.append(target)
+            self.stream.expect("assign")
+            values.append(self.parse_expression())
+        node.targets = targets
+        node.values = values
+        node.body = self.parse_statements(("name:endwith",), drop_needle=True)
+        return node
+
+    def parse_autoescape(self) -> nodes.Scope:
+        node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
+        node.options = [nodes.Keyword("autoescape", self.parse_expression())]
+        node.body = self.parse_statements(("name:endautoescape",), drop_needle=True)
+        return nodes.Scope([node])
+
+    def parse_block(self) -> nodes.Block:
+        node = nodes.Block(lineno=next(self.stream).lineno)
+        node.name = self.stream.expect("name").value
+        node.scoped = self.stream.skip_if("name:scoped")
+        node.required = self.stream.skip_if("name:required")
+
+        # common problem people encounter when switching from django
+        # to jinja.  we do not support hyphens in block names, so let's
+        # raise a nicer error message in that case.
+        if self.stream.current.type == "sub":
+            self.fail(
+                "Block names in Jinja have to be valid Python identifiers and may not"
+                " contain hyphens, use an underscore instead."
+            )
+
+        node.body = self.parse_statements(("name:endblock",), drop_needle=True)
+
+        # enforce that required blocks only contain whitespace or comments
+        # by asserting that the body, if not empty, is just TemplateData nodes
+        # with whitespace data
+        if node.required:
+            for body_node in node.body:
+                if not isinstance(body_node, nodes.Output) or any(
+                    not isinstance(output_node, nodes.TemplateData)
+                    or not output_node.data.isspace()
+                    for output_node in body_node.nodes
+                ):
+                    self.fail("Required blocks can only contain comments or whitespace")

-    def parse_assign_target(self, with_tuple: bool=True, name_only: bool=
-        False, extra_end_rules: t.Optional[t.Tuple[str, ...]]=None,
-        with_namespace: bool=False) ->t.Union[nodes.NSRef, nodes.Name,
-        nodes.Tuple]:
+        self.stream.skip_if("name:" + node.name)
+        return node
+
+    def parse_extends(self) -> nodes.Extends:
+        node = nodes.Extends(lineno=next(self.stream).lineno)
+        node.template = self.parse_expression()
+        return node
+
+    def parse_import_context(
+        self, node: _ImportInclude, default: bool
+    ) -> _ImportInclude:
+        if self.stream.current.test_any(
+            "name:with", "name:without"
+        ) and self.stream.look().test("name:context"):
+            node.with_context = next(self.stream).value == "with"
+            self.stream.skip()
+        else:
+            node.with_context = default
+        return node
+
+    def parse_include(self) -> nodes.Include:
+        node = nodes.Include(lineno=next(self.stream).lineno)
+        node.template = self.parse_expression()
+        if self.stream.current.test("name:ignore") and self.stream.look().test(
+            "name:missing"
+        ):
+            node.ignore_missing = True
+            self.stream.skip(2)
+        else:
+            node.ignore_missing = False
+        return self.parse_import_context(node, True)
+
+    def parse_import(self) -> nodes.Import:
+        node = nodes.Import(lineno=next(self.stream).lineno)
+        node.template = self.parse_expression()
+        self.stream.expect("name:as")
+        node.target = self.parse_assign_target(name_only=True).name
+        return self.parse_import_context(node, False)
+
+    def parse_from(self) -> nodes.FromImport:
+        node = nodes.FromImport(lineno=next(self.stream).lineno)
+        node.template = self.parse_expression()
+        self.stream.expect("name:import")
+        node.names = []
+
+        def parse_context() -> bool:
+            if self.stream.current.value in {
+                "with",
+                "without",
+            } and self.stream.look().test("name:context"):
+                node.with_context = next(self.stream).value == "with"
+                self.stream.skip()
+                return True
+            return False
+
+        while True:
+            if node.names:
+                self.stream.expect("comma")
+            if self.stream.current.type == "name":
+                if parse_context():
+                    break
+                target = self.parse_assign_target(name_only=True)
+                if target.name.startswith("_"):
+                    self.fail(
+                        "names starting with an underline can not be imported",
+                        target.lineno,
+                        exc=TemplateAssertionError,
+                    )
+                if self.stream.skip_if("name:as"):
+                    alias = self.parse_assign_target(name_only=True)
+                    node.names.append((target.name, alias.name))
+                else:
+                    node.names.append(target.name)
+                if parse_context() or self.stream.current.type != "comma":
+                    break
+            else:
+                self.stream.expect("name")
+        if not hasattr(node, "with_context"):
+            node.with_context = False
+        return node
+
+    def parse_signature(self, node: _MacroCall) -> None:
+        args = node.args = []
+        defaults = node.defaults = []
+        self.stream.expect("lparen")
+        while self.stream.current.type != "rparen":
+            if args:
+                self.stream.expect("comma")
+            arg = self.parse_assign_target(name_only=True)
+            arg.set_ctx("param")
+            if self.stream.skip_if("assign"):
+                defaults.append(self.parse_expression())
+            elif defaults:
+                self.fail("non-default argument follows default argument")
+            args.append(arg)
+        self.stream.expect("rparen")
+
+    def parse_call_block(self) -> nodes.CallBlock:
+        node = nodes.CallBlock(lineno=next(self.stream).lineno)
+        if self.stream.current.type == "lparen":
+            self.parse_signature(node)
+        else:
+            node.args = []
+            node.defaults = []
+
+        call_node = self.parse_expression()
+        if not isinstance(call_node, nodes.Call):
+            self.fail("expected call", node.lineno)
+        node.call = call_node
+        node.body = self.parse_statements(("name:endcall",), drop_needle=True)
+        return node
+
+    def parse_filter_block(self) -> nodes.FilterBlock:
+        node = nodes.FilterBlock(lineno=next(self.stream).lineno)
+        node.filter = self.parse_filter(None, start_inline=True)  # type: ignore
+        node.body = self.parse_statements(("name:endfilter",), drop_needle=True)
+        return node
+
+    def parse_macro(self) -> nodes.Macro:
+        node = nodes.Macro(lineno=next(self.stream).lineno)
+        node.name = self.parse_assign_target(name_only=True).name
+        self.parse_signature(node)
+        node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
+        return node
+
+    def parse_print(self) -> nodes.Output:
+        node = nodes.Output(lineno=next(self.stream).lineno)
+        node.nodes = []
+        while self.stream.current.type != "block_end":
+            if node.nodes:
+                self.stream.expect("comma")
+            node.nodes.append(self.parse_expression())
+        return node
+
+    @typing.overload
+    def parse_assign_target(
+        self, with_tuple: bool = ..., name_only: "te.Literal[True]" = ...
+    ) -> nodes.Name: ...
+
+    @typing.overload
+    def parse_assign_target(
+        self,
+        with_tuple: bool = True,
+        name_only: bool = False,
+        extra_end_rules: t.Optional[t.Tuple[str, ...]] = None,
+        with_namespace: bool = False,
+    ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]: ...
+
+    def parse_assign_target(
+        self,
+        with_tuple: bool = True,
+        name_only: bool = False,
+        extra_end_rules: t.Optional[t.Tuple[str, ...]] = None,
+        with_namespace: bool = False,
+    ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]:
         """Parse an assignment target.  As Jinja allows assignments to
         tuples, this function can parse all allowed assignment targets.  Per
         default assignments to tuples are parsed, that can be disable however
@@ -113,18 +485,205 @@ class Parser:
         parameter is forwarded to the tuple parsing function.  If
         `with_namespace` is enabled, a namespace assignment may be parsed.
         """
-        pass
+        target: nodes.Expr
+
+        if with_namespace and self.stream.look().type == "dot":
+            token = self.stream.expect("name")
+            next(self.stream)  # dot
+            attr = self.stream.expect("name")
+            target = nodes.NSRef(token.value, attr.value, lineno=token.lineno)
+        elif name_only:
+            token = self.stream.expect("name")
+            target = nodes.Name(token.value, "store", lineno=token.lineno)
+        else:
+            if with_tuple:
+                target = self.parse_tuple(
+                    simplified=True, extra_end_rules=extra_end_rules
+                )
+            else:
+                target = self.parse_primary()

-    def parse_expression(self, with_condexpr: bool=True) ->nodes.Expr:
+            target.set_ctx("store")
+
+        if not target.can_assign():
+            self.fail(
+                f"can't assign to {type(target).__name__.lower()!r}", target.lineno
+            )
+
+        return target  # type: ignore
+
+    def parse_expression(self, with_condexpr: bool = True) -> nodes.Expr:
         """Parse an expression.  Per default all expressions are parsed, if
         the optional `with_condexpr` parameter is set to `False` conditional
         expressions are not parsed.
         """
-        pass
+        if with_condexpr:
+            return self.parse_condexpr()
+        return self.parse_or()
+
+    def parse_condexpr(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        expr1 = self.parse_or()
+        expr3: t.Optional[nodes.Expr]
+
+        while self.stream.skip_if("name:if"):
+            expr2 = self.parse_or()
+            if self.stream.skip_if("name:else"):
+                expr3 = self.parse_condexpr()
+            else:
+                expr3 = None
+            expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return expr1

-    def parse_tuple(self, simplified: bool=False, with_condexpr: bool=True,
-        extra_end_rules: t.Optional[t.Tuple[str, ...]]=None,
-        explicit_parentheses: bool=False) ->t.Union[nodes.Tuple, nodes.Expr]:
+    def parse_or(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_and()
+        while self.stream.skip_if("name:or"):
+            right = self.parse_and()
+            left = nodes.Or(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_and(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_not()
+        while self.stream.skip_if("name:and"):
+            right = self.parse_not()
+            left = nodes.And(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_not(self) -> nodes.Expr:
+        if self.stream.current.test("name:not"):
+            lineno = next(self.stream).lineno
+            return nodes.Not(self.parse_not(), lineno=lineno)
+        return self.parse_compare()
+
+    def parse_compare(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        expr = self.parse_math1()
+        ops = []
+        while True:
+            token_type = self.stream.current.type
+            if token_type in _compare_operators:
+                next(self.stream)
+                ops.append(nodes.Operand(token_type, self.parse_math1()))
+            elif self.stream.skip_if("name:in"):
+                ops.append(nodes.Operand("in", self.parse_math1()))
+            elif self.stream.current.test("name:not") and self.stream.look().test(
+                "name:in"
+            ):
+                self.stream.skip(2)
+                ops.append(nodes.Operand("notin", self.parse_math1()))
+            else:
+                break
+            lineno = self.stream.current.lineno
+        if not ops:
+            return expr
+        return nodes.Compare(expr, ops, lineno=lineno)
+
+    def parse_math1(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_concat()
+        while self.stream.current.type in ("add", "sub"):
+            cls = _math_nodes[self.stream.current.type]
+            next(self.stream)
+            right = self.parse_concat()
+            left = cls(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_concat(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        args = [self.parse_math2()]
+        while self.stream.current.type == "tilde":
+            next(self.stream)
+            args.append(self.parse_math2())
+        if len(args) == 1:
+            return args[0]
+        return nodes.Concat(args, lineno=lineno)
+
+    def parse_math2(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_pow()
+        while self.stream.current.type in ("mul", "div", "floordiv", "mod"):
+            cls = _math_nodes[self.stream.current.type]
+            next(self.stream)
+            right = self.parse_pow()
+            left = cls(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_pow(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_unary()
+        while self.stream.current.type == "pow":
+            next(self.stream)
+            right = self.parse_unary()
+            left = nodes.Pow(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_unary(self, with_filter: bool = True) -> nodes.Expr:
+        token_type = self.stream.current.type
+        lineno = self.stream.current.lineno
+        node: nodes.Expr
+
+        if token_type == "sub":
+            next(self.stream)
+            node = nodes.Neg(self.parse_unary(False), lineno=lineno)
+        elif token_type == "add":
+            next(self.stream)
+            node = nodes.Pos(self.parse_unary(False), lineno=lineno)
+        else:
+            node = self.parse_primary()
+        node = self.parse_postfix(node)
+        if with_filter:
+            node = self.parse_filter_expr(node)
+        return node
+
+    def parse_primary(self) -> nodes.Expr:
+        token = self.stream.current
+        node: nodes.Expr
+        if token.type == "name":
+            if token.value in ("true", "false", "True", "False"):
+                node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno)
+            elif token.value in ("none", "None"):
+                node = nodes.Const(None, lineno=token.lineno)
+            else:
+                node = nodes.Name(token.value, "load", lineno=token.lineno)
+            next(self.stream)
+        elif token.type == "string":
+            next(self.stream)
+            buf = [token.value]
+            lineno = token.lineno
+            while self.stream.current.type == "string":
+                buf.append(self.stream.current.value)
+                next(self.stream)
+            node = nodes.Const("".join(buf), lineno=lineno)
+        elif token.type in ("integer", "float"):
+            next(self.stream)
+            node = nodes.Const(token.value, lineno=token.lineno)
+        elif token.type == "lparen":
+            next(self.stream)
+            node = self.parse_tuple(explicit_parentheses=True)
+            self.stream.expect("rparen")
+        elif token.type == "lbracket":
+            node = self.parse_list()
+        elif token.type == "lbrace":
+            node = self.parse_dict()
+        else:
+            self.fail(f"unexpected {describe_token(token)!r}", token.lineno)
+        return node
+
+    def parse_tuple(
+        self,
+        simplified: bool = False,
+        with_condexpr: bool = True,
+        extra_end_rules: t.Optional[t.Tuple[str, ...]] = None,
+        explicit_parentheses: bool = False,
+    ) -> t.Union[nodes.Tuple, nodes.Expr]:
         """Works like `parse_expression` but if multiple expressions are
         delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
         This method could also return a regular expression instead of a tuple
@@ -143,8 +702,340 @@ class Parser:
         expression in parentheses.  This is used to figure out if an empty
         tuple is a valid expression or not.
         """
-        pass
+        lineno = self.stream.current.lineno
+        if simplified:
+            parse = self.parse_primary
+        elif with_condexpr:
+            parse = self.parse_expression
+        else:
+
+            def parse() -> nodes.Expr:
+                return self.parse_expression(with_condexpr=False)
+
+        args: t.List[nodes.Expr] = []
+        is_tuple = False
+
+        while True:
+            if args:
+                self.stream.expect("comma")
+            if self.is_tuple_end(extra_end_rules):
+                break
+            args.append(parse())
+            if self.stream.current.type == "comma":
+                is_tuple = True
+            else:
+                break
+            lineno = self.stream.current.lineno
+
+        if not is_tuple:
+            if args:
+                return args[0]
+
+            # if we don't have explicit parentheses, an empty tuple is
+            # not a valid expression.  This would mean nothing (literally
+            # nothing) in the spot of an expression would be an empty
+            # tuple.
+            if not explicit_parentheses:
+                self.fail(
+                    "Expected an expression,"
+                    f" got {describe_token(self.stream.current)!r}"
+                )
+
+        return nodes.Tuple(args, "load", lineno=lineno)
+
+    def parse_list(self) -> nodes.List:
+        token = self.stream.expect("lbracket")
+        items: t.List[nodes.Expr] = []
+        while self.stream.current.type != "rbracket":
+            if items:
+                self.stream.expect("comma")
+            if self.stream.current.type == "rbracket":
+                break
+            items.append(self.parse_expression())
+        self.stream.expect("rbracket")
+        return nodes.List(items, lineno=token.lineno)
+
+    def parse_dict(self) -> nodes.Dict:
+        token = self.stream.expect("lbrace")
+        items: t.List[nodes.Pair] = []
+        while self.stream.current.type != "rbrace":
+            if items:
+                self.stream.expect("comma")
+            if self.stream.current.type == "rbrace":
+                break
+            key = self.parse_expression()
+            self.stream.expect("colon")
+            value = self.parse_expression()
+            items.append(nodes.Pair(key, value, lineno=key.lineno))
+        self.stream.expect("rbrace")
+        return nodes.Dict(items, lineno=token.lineno)
+
+    def parse_postfix(self, node: nodes.Expr) -> nodes.Expr:
+        while True:
+            token_type = self.stream.current.type
+            if token_type == "dot" or token_type == "lbracket":
+                node = self.parse_subscript(node)
+            # calls are valid both after postfix expressions (getattr
+            # and getitem) as well as filters and tests
+            elif token_type == "lparen":
+                node = self.parse_call(node)
+            else:
+                break
+        return node
+
+    def parse_filter_expr(self, node: nodes.Expr) -> nodes.Expr:
+        while True:
+            token_type = self.stream.current.type
+            if token_type == "pipe":
+                node = self.parse_filter(node)  # type: ignore
+            elif token_type == "name" and self.stream.current.value == "is":
+                node = self.parse_test(node)
+            # calls are valid both after postfix expressions (getattr
+            # and getitem) as well as filters and tests
+            elif token_type == "lparen":
+                node = self.parse_call(node)
+            else:
+                break
+        return node
+
+    def parse_subscript(
+        self, node: nodes.Expr
+    ) -> t.Union[nodes.Getattr, nodes.Getitem]:
+        token = next(self.stream)
+        arg: nodes.Expr
+
+        if token.type == "dot":
+            attr_token = self.stream.current
+            next(self.stream)
+            if attr_token.type == "name":
+                return nodes.Getattr(
+                    node, attr_token.value, "load", lineno=token.lineno
+                )
+            elif attr_token.type != "integer":
+                self.fail("expected name or number", attr_token.lineno)
+            arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
+            return nodes.Getitem(node, arg, "load", lineno=token.lineno)
+        if token.type == "lbracket":
+            args: t.List[nodes.Expr] = []
+            while self.stream.current.type != "rbracket":
+                if args:
+                    self.stream.expect("comma")
+                args.append(self.parse_subscribed())
+            self.stream.expect("rbracket")
+            if len(args) == 1:
+                arg = args[0]
+            else:
+                arg = nodes.Tuple(args, "load", lineno=token.lineno)
+            return nodes.Getitem(node, arg, "load", lineno=token.lineno)
+        self.fail("expected subscript expression", token.lineno)
+
+    def parse_subscribed(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        args: t.List[t.Optional[nodes.Expr]]
+
+        if self.stream.current.type == "colon":
+            next(self.stream)
+            args = [None]
+        else:
+            node = self.parse_expression()
+            if self.stream.current.type != "colon":
+                return node
+            next(self.stream)
+            args = [node]
+
+        if self.stream.current.type == "colon":
+            args.append(None)
+        elif self.stream.current.type not in ("rbracket", "comma"):
+            args.append(self.parse_expression())
+        else:
+            args.append(None)
+
+        if self.stream.current.type == "colon":
+            next(self.stream)
+            if self.stream.current.type not in ("rbracket", "comma"):
+                args.append(self.parse_expression())
+            else:
+                args.append(None)
+        else:
+            args.append(None)
+
+        return nodes.Slice(lineno=lineno, *args)  # noqa: B026
+
+    def parse_call_args(
+        self,
+    ) -> t.Tuple[
+        t.List[nodes.Expr],
+        t.List[nodes.Keyword],
+        t.Optional[nodes.Expr],
+        t.Optional[nodes.Expr],
+    ]:
+        token = self.stream.expect("lparen")
+        args = []
+        kwargs = []
+        dyn_args = None
+        dyn_kwargs = None
+        require_comma = False
+
+        def ensure(expr: bool) -> None:
+            if not expr:
+                self.fail("invalid syntax for function call expression", token.lineno)
+
+        while self.stream.current.type != "rparen":
+            if require_comma:
+                self.stream.expect("comma")
+
+                # support for trailing comma
+                if self.stream.current.type == "rparen":
+                    break
+
+            if self.stream.current.type == "mul":
+                ensure(dyn_args is None and dyn_kwargs is None)
+                next(self.stream)
+                dyn_args = self.parse_expression()
+            elif self.stream.current.type == "pow":
+                ensure(dyn_kwargs is None)
+                next(self.stream)
+                dyn_kwargs = self.parse_expression()
+            else:
+                if (
+                    self.stream.current.type == "name"
+                    and self.stream.look().type == "assign"
+                ):
+                    # Parsing a kwarg
+                    ensure(dyn_kwargs is None)
+                    key = self.stream.current.value
+                    self.stream.skip(2)
+                    value = self.parse_expression()
+                    kwargs.append(nodes.Keyword(key, value, lineno=value.lineno))
+                else:
+                    # Parsing an arg
+                    ensure(dyn_args is None and dyn_kwargs is None and not kwargs)
+                    args.append(self.parse_expression())
+
+            require_comma = True
+
+        self.stream.expect("rparen")
+        return args, kwargs, dyn_args, dyn_kwargs
+
+    def parse_call(self, node: nodes.Expr) -> nodes.Call:
+        # The lparen will be expected in parse_call_args, but the lineno
+        # needs to be recorded before the stream is advanced.
+        token = self.stream.current
+        args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args()
+        return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)
+
+    def parse_filter(
+        self, node: t.Optional[nodes.Expr], start_inline: bool = False
+    ) -> t.Optional[nodes.Expr]:
+        while self.stream.current.type == "pipe" or start_inline:
+            if not start_inline:
+                next(self.stream)
+            token = self.stream.expect("name")
+            name = token.value
+            while self.stream.current.type == "dot":
+                next(self.stream)
+                name += "." + self.stream.expect("name").value
+            if self.stream.current.type == "lparen":
+                args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args()
+            else:
+                args = []
+                kwargs = []
+                dyn_args = dyn_kwargs = None
+            node = nodes.Filter(
+                node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
+            )
+            start_inline = False
+        return node
+
+    def parse_test(self, node: nodes.Expr) -> nodes.Expr:
+        token = next(self.stream)
+        if self.stream.current.test("name:not"):
+            next(self.stream)
+            negated = True
+        else:
+            negated = False
+        name = self.stream.expect("name").value
+        while self.stream.current.type == "dot":
+            next(self.stream)
+            name += "." + self.stream.expect("name").value
+        dyn_args = dyn_kwargs = None
+        kwargs: t.List[nodes.Keyword] = []
+        if self.stream.current.type == "lparen":
+            args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args()
+        elif self.stream.current.type in {
+            "name",
+            "string",
+            "integer",
+            "float",
+            "lparen",
+            "lbracket",
+            "lbrace",
+        } and not self.stream.current.test_any("name:else", "name:or", "name:and"):
+            if self.stream.current.test("name:is"):
+                self.fail("You cannot chain multiple tests with is")
+            arg_node = self.parse_primary()
+            arg_node = self.parse_postfix(arg_node)
+            args = [arg_node]
+        else:
+            args = []
+        node = nodes.Test(
+            node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
+        )
+        if negated:
+            node = nodes.Not(node, lineno=token.lineno)
+        return node
+
+    def subparse(
+        self, end_tokens: t.Optional[t.Tuple[str, ...]] = None
+    ) -> t.List[nodes.Node]:
+        body: t.List[nodes.Node] = []
+        data_buffer: t.List[nodes.Node] = []
+        add_data = data_buffer.append
+
+        if end_tokens is not None:
+            self._end_token_stack.append(end_tokens)
+
+        def flush_data() -> None:
+            if data_buffer:
+                lineno = data_buffer[0].lineno
+                body.append(nodes.Output(data_buffer[:], lineno=lineno))
+                del data_buffer[:]
+
+        try:
+            while self.stream:
+                token = self.stream.current
+                if token.type == "data":
+                    if token.value:
+                        add_data(nodes.TemplateData(token.value, lineno=token.lineno))
+                    next(self.stream)
+                elif token.type == "variable_begin":
+                    next(self.stream)
+                    add_data(self.parse_tuple(with_condexpr=True))
+                    self.stream.expect("variable_end")
+                elif token.type == "block_begin":
+                    flush_data()
+                    next(self.stream)
+                    if end_tokens is not None and self.stream.current.test_any(
+                        *end_tokens
+                    ):
+                        return body
+                    rv = self.parse_statement()
+                    if isinstance(rv, list):
+                        body.extend(rv)
+                    else:
+                        body.append(rv)
+                    self.stream.expect("block_end")
+                else:
+                    raise AssertionError("internal parsing error")
+
+            flush_data()
+        finally:
+            if end_tokens is not None:
+                self._end_token_stack.pop()
+        return body

-    def parse(self) ->nodes.Template:
+    def parse(self) -> nodes.Template:
         """Parse the whole template into a `Template` node."""
-        pass
+        result = nodes.Template(self.subparse(), lineno=1)
+        result.set_environment(self.environment)
+        return result
diff --git a/src/jinja2/runtime.py b/src/jinja2/runtime.py
index c88211d..4325c8d 100644
--- a/src/jinja2/runtime.py
+++ b/src/jinja2/runtime.py
@@ -1,82 +1,144 @@
 """The runtime functions and state used by compiled templates."""
+
 import functools
 import sys
 import typing as t
 from collections import abc
 from itertools import chain
-from markupsafe import escape
+
+from markupsafe import escape  # noqa: F401
 from markupsafe import Markup
 from markupsafe import soft_str
+
 from .async_utils import auto_aiter
-from .async_utils import auto_await
-from .exceptions import TemplateNotFound
-from .exceptions import TemplateRuntimeError
+from .async_utils import auto_await  # noqa: F401
+from .exceptions import TemplateNotFound  # noqa: F401
+from .exceptions import TemplateRuntimeError  # noqa: F401
 from .exceptions import UndefinedError
 from .nodes import EvalContext
 from .utils import _PassArg
 from .utils import concat
 from .utils import internalcode
 from .utils import missing
-from .utils import Namespace
+from .utils import Namespace  # noqa: F401
 from .utils import object_type_repr
 from .utils import pass_eval_context
-V = t.TypeVar('V')
-F = t.TypeVar('F', bound=t.Callable[..., t.Any])
+
+V = t.TypeVar("V")
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
 if t.TYPE_CHECKING:
     import logging
+
     import typing_extensions as te
-    from .environment import Environment

+    from .environment import Environment

     class LoopRenderFunc(te.Protocol):
-
-        def __call__(self, reciter: t.Iterable[V], loop_render_func:
-            'LoopRenderFunc', depth: int=0) ->str:
-            ...
-exported = ['LoopContext', 'TemplateReference', 'Macro', 'Markup',
-    'TemplateRuntimeError', 'missing', 'escape', 'markup_join', 'str_join',
-    'identity', 'TemplateNotFound', 'Namespace', 'Undefined', 'internalcode']
-async_exported = ['AsyncLoopContext', 'auto_aiter', 'auto_await']
-
-
-def identity(x: V) ->V:
+        def __call__(
+            self,
+            reciter: t.Iterable[V],
+            loop_render_func: "LoopRenderFunc",
+            depth: int = 0,
+        ) -> str: ...
+
+
+# these variables are exported to the template runtime
+exported = [
+    "LoopContext",
+    "TemplateReference",
+    "Macro",
+    "Markup",
+    "TemplateRuntimeError",
+    "missing",
+    "escape",
+    "markup_join",
+    "str_join",
+    "identity",
+    "TemplateNotFound",
+    "Namespace",
+    "Undefined",
+    "internalcode",
+]
+async_exported = [
+    "AsyncLoopContext",
+    "auto_aiter",
+    "auto_await",
+]
+
+
+def identity(x: V) -> V:
     """Returns its argument. Useful for certain things in the
     environment.
     """
-    pass
+    return x


-def markup_join(seq: t.Iterable[t.Any]) ->str:
+def markup_join(seq: t.Iterable[t.Any]) -> str:
     """Concatenation that escapes if necessary and converts to string."""
-    pass
+    buf = []
+    iterator = map(soft_str, seq)
+    for arg in iterator:
+        buf.append(arg)
+        if hasattr(arg, "__html__"):
+            return Markup("").join(chain(buf, iterator))
+    return concat(buf)


-def str_join(seq: t.Iterable[t.Any]) ->str:
+def str_join(seq: t.Iterable[t.Any]) -> str:
     """Simple args to string conversion and concatenation."""
-    pass
-
-
-def new_context(environment: 'Environment', template_name: t.Optional[str],
-    blocks: t.Dict[str, t.Callable[['Context'], t.Iterator[str]]], vars: t.
-    Optional[t.Dict[str, t.Any]]=None, shared: bool=False, globals: t.
-    Optional[t.MutableMapping[str, t.Any]]=None, locals: t.Optional[t.
-    Mapping[str, t.Any]]=None) ->'Context':
+    return concat(map(str, seq))
+
+
+def new_context(
+    environment: "Environment",
+    template_name: t.Optional[str],
+    blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]],
+    vars: t.Optional[t.Dict[str, t.Any]] = None,
+    shared: bool = False,
+    globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    locals: t.Optional[t.Mapping[str, t.Any]] = None,
+) -> "Context":
     """Internal helper for context creation."""
-    pass
+    if vars is None:
+        vars = {}
+    if shared:
+        parent = vars
+    else:
+        parent = dict(globals or (), **vars)
+    if locals:
+        # if the parent is shared a copy should be created because
+        # we don't want to modify the dict passed
+        if shared:
+            parent = dict(parent)
+        for key, value in locals.items():
+            if value is not missing:
+                parent[key] = value
+    return environment.context_class(
+        environment, parent, template_name, blocks, globals=globals
+    )


 class TemplateReference:
     """The `self` in templates."""

-    def __init__(self, context: 'Context') ->None:
+    def __init__(self, context: "Context") -> None:
         self.__context = context

-    def __getitem__(self, name: str) ->t.Any:
+    def __getitem__(self, name: str) -> t.Any:
         blocks = self.__context.blocks[name]
         return BlockReference(name, self.__context, blocks, 0)

-    def __repr__(self) ->str:
-        return f'<{type(self).__name__} {self.__context.name!r}>'
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self.__context.name!r}>"
+
+
+def _dict_method_all(dict_method: F) -> F:
+    @functools.wraps(dict_method)
+    def f_all(self: "Context") -> t.Any:
+        return dict_method(self.get_all())
+
+    return t.cast(F, f_all)


 @abc.Mapping.register
@@ -100,34 +162,54 @@ class Context:
     :class:`Undefined` object for missing variables.
     """

-    def __init__(self, environment: 'Environment', parent: t.Dict[str, t.
-        Any], name: t.Optional[str], blocks: t.Dict[str, t.Callable[[
-        'Context'], t.Iterator[str]]], globals: t.Optional[t.MutableMapping
-        [str, t.Any]]=None):
+    def __init__(
+        self,
+        environment: "Environment",
+        parent: t.Dict[str, t.Any],
+        name: t.Optional[str],
+        blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]],
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ):
         self.parent = parent
         self.vars: t.Dict[str, t.Any] = {}
-        self.environment: 'Environment' = environment
+        self.environment: "Environment" = environment
         self.eval_ctx = EvalContext(self.environment, name)
         self.exported_vars: t.Set[str] = set()
         self.name = name
         self.globals_keys = set() if globals is None else set(globals)
+
+        # create the initial mapping of blocks.  Whenever template inheritance
+        # takes place the runtime will update this mapping with the new blocks
+        # from the template.
         self.blocks = {k: [v] for k, v in blocks.items()}

-    def super(self, name: str, current: t.Callable[['Context'], t.Iterator[
-        str]]) ->t.Union['BlockReference', 'Undefined']:
+    def super(
+        self, name: str, current: t.Callable[["Context"], t.Iterator[str]]
+    ) -> t.Union["BlockReference", "Undefined"]:
         """Render a parent block."""
-        pass
-
-    def get(self, key: str, default: t.Any=None) ->t.Any:
+        try:
+            blocks = self.blocks[name]
+            index = blocks.index(current) + 1
+            blocks[index]
+        except LookupError:
+            return self.environment.undefined(
+                f"there is no parent block called {name!r}.", name="super"
+            )
+        return BlockReference(name, self, blocks, index)
+
+    def get(self, key: str, default: t.Any = None) -> t.Any:
         """Look up a variable by name, or return a default if the key is
         not found.

         :param key: The variable name to look up.
         :param default: The value to return if the key is not found.
         """
-        pass
+        try:
+            return self[key]
+        except KeyError:
+            return default

-    def resolve(self, key: str) ->t.Union[t.Any, 'Undefined']:
+    def resolve(self, key: str) -> t.Union[t.Any, "Undefined"]:
         """Look up a variable by name, or return an :class:`Undefined`
         object if the key is not found.

@@ -137,9 +219,14 @@ class Context:

         :param key: The variable name to look up.
         """
-        pass
+        rv = self.resolve_or_missing(key)
+
+        if rv is missing:
+            return self.environment.undefined(name=key)
+
+        return rv

-    def resolve_or_missing(self, key: str) ->t.Any:
+    def resolve_or_missing(self, key: str) -> t.Any:
         """Look up a variable by name, or return a ``missing`` sentinel
         if the key is not found.

@@ -149,77 +236,156 @@ class Context:

         :param key: The variable name to look up.
         """
-        pass
+        if key in self.vars:
+            return self.vars[key]
+
+        if key in self.parent:
+            return self.parent[key]

-    def get_exported(self) ->t.Dict[str, t.Any]:
+        return missing
+
+    def get_exported(self) -> t.Dict[str, t.Any]:
         """Get a new dict with the exported variables."""
-        pass
+        return {k: self.vars[k] for k in self.exported_vars}

-    def get_all(self) ->t.Dict[str, t.Any]:
+    def get_all(self) -> t.Dict[str, t.Any]:
         """Return the complete context as dict including the exported
         variables.  For optimizations reasons this might not return an
         actual copy so be careful with using it.
         """
-        pass
+        if not self.vars:
+            return self.parent
+        if not self.parent:
+            return self.vars
+        return dict(self.parent, **self.vars)

     @internalcode
-    def call(__self, __obj: t.Callable[..., t.Any], *args: t.Any, **kwargs:
-        t.Any) ->t.Union[t.Any, 'Undefined']:
+    def call(
+        __self,
+        __obj: t.Callable[..., t.Any],
+        *args: t.Any,
+        **kwargs: t.Any,  # noqa: B902
+    ) -> t.Union[t.Any, "Undefined"]:
         """Call the callable with the arguments and keyword arguments
         provided but inject the active context or environment as first
         argument if the callable has :func:`pass_context` or
         :func:`pass_environment`.
         """
-        pass
-
-    def derived(self, locals: t.Optional[t.Dict[str, t.Any]]=None) ->'Context':
+        if __debug__:
+            __traceback_hide__ = True  # noqa
+
+        # Allow callable classes to take a context
+        if (
+            hasattr(__obj, "__call__")  # noqa: B004
+            and _PassArg.from_obj(__obj.__call__) is not None
+        ):
+            __obj = __obj.__call__
+
+        pass_arg = _PassArg.from_obj(__obj)
+
+        if pass_arg is _PassArg.context:
+            # the active context should have access to variables set in
+            # loops and blocks without mutating the context itself
+            if kwargs.get("_loop_vars"):
+                __self = __self.derived(kwargs["_loop_vars"])
+            if kwargs.get("_block_vars"):
+                __self = __self.derived(kwargs["_block_vars"])
+            args = (__self,) + args
+        elif pass_arg is _PassArg.eval_context:
+            args = (__self.eval_ctx,) + args
+        elif pass_arg is _PassArg.environment:
+            args = (__self.environment,) + args
+
+        kwargs.pop("_block_vars", None)
+        kwargs.pop("_loop_vars", None)
+
+        try:
+            return __obj(*args, **kwargs)
+        except StopIteration:
+            return __self.environment.undefined(
+                "value was undefined because a callable raised a"
+                " StopIteration exception"
+            )
+
+    def derived(self, locals: t.Optional[t.Dict[str, t.Any]] = None) -> "Context":
         """Internal helper function to create a derived context.  This is
         used in situations where the system needs a new context in the same
         template that is independent.
         """
-        pass
+        context = new_context(
+            self.environment, self.name, {}, self.get_all(), True, None, locals
+        )
+        context.eval_ctx = self.eval_ctx
+        context.blocks.update((k, list(v)) for k, v in self.blocks.items())
+        return context
+
     keys = _dict_method_all(dict.keys)
     values = _dict_method_all(dict.values)
     items = _dict_method_all(dict.items)

-    def __contains__(self, name: str) ->bool:
+    def __contains__(self, name: str) -> bool:
         return name in self.vars or name in self.parent

-    def __getitem__(self, key: str) ->t.Any:
+    def __getitem__(self, key: str) -> t.Any:
         """Look up a variable by name with ``[]`` syntax, or raise a
         ``KeyError`` if the key is not found.
         """
         item = self.resolve_or_missing(key)
+
         if item is missing:
             raise KeyError(key)
+
         return item

-    def __repr__(self) ->str:
-        return f'<{type(self).__name__} {self.get_all()!r} of {self.name!r}>'
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self.get_all()!r} of {self.name!r}>"


 class BlockReference:
     """One block on a template reference."""

-    def __init__(self, name: str, context: 'Context', stack: t.List[t.
-        Callable[['Context'], t.Iterator[str]]], depth: int) ->None:
+    def __init__(
+        self,
+        name: str,
+        context: "Context",
+        stack: t.List[t.Callable[["Context"], t.Iterator[str]]],
+        depth: int,
+    ) -> None:
         self.name = name
         self._context = context
         self._stack = stack
         self._depth = depth

     @property
-    def super(self) ->t.Union['BlockReference', 'Undefined']:
+    def super(self) -> t.Union["BlockReference", "Undefined"]:
         """Super the block."""
-        pass
+        if self._depth + 1 >= len(self._stack):
+            return self._context.environment.undefined(
+                f"there is no parent block called {self.name!r}.", name="super"
+            )
+        return BlockReference(self.name, self._context, self._stack, self._depth + 1)

     @internalcode
-    def __call__(self) ->str:
+    async def _async_call(self) -> str:
+        rv = concat(
+            [x async for x in self._stack[self._depth](self._context)]  # type: ignore
+        )
+
+        if self._context.eval_ctx.autoescape:
+            return Markup(rv)
+
+        return rv
+
+    @internalcode
+    def __call__(self) -> str:
         if self._context.environment.is_async:
-            return self._async_call()
+            return self._async_call()  # type: ignore
+
         rv = concat(self._stack[self._depth](self._context))
+
         if self._context.eval_ctx.autoescape:
             return Markup(rv)
+
         return rv


@@ -227,16 +393,23 @@ class LoopContext:
     """A wrapper iterable for dynamic ``for`` loops, with information
     about the loop and iteration.
     """
+
+    #: Current iteration of the loop, starting at 0.
     index0 = -1
+
     _length: t.Optional[int] = None
     _after: t.Any = missing
     _current: t.Any = missing
     _before: t.Any = missing
     _last_changed_value: t.Any = missing

-    def __init__(self, iterable: t.Iterable[V], undefined: t.Type[
-        'Undefined'], recurse: t.Optional['LoopRenderFunc']=None, depth0: int=0
-        ) ->None:
+    def __init__(
+        self,
+        iterable: t.Iterable[V],
+        undefined: t.Type["Undefined"],
+        recurse: t.Optional["LoopRenderFunc"] = None,
+        depth0: int = 0,
+    ) -> None:
         """
         :param iterable: Iterable to wrap.
         :param undefined: :class:`Undefined` class to use for next and
@@ -249,78 +422,100 @@ class LoopContext:
         self._iterator = self._to_iterator(iterable)
         self._undefined = undefined
         self._recurse = recurse
+        #: How many levels deep a recursive loop currently is, starting at 0.
         self.depth0 = depth0

+    @staticmethod
+    def _to_iterator(iterable: t.Iterable[V]) -> t.Iterator[V]:
+        return iter(iterable)
+
     @property
-    def length(self) ->int:
+    def length(self) -> int:
         """Length of the iterable.

         If the iterable is a generator or otherwise does not have a
         size, it is eagerly evaluated to get a size.
         """
-        pass
+        if self._length is not None:
+            return self._length
+
+        try:
+            self._length = len(self._iterable)  # type: ignore
+        except TypeError:
+            iterable = list(self._iterator)
+            self._iterator = self._to_iterator(iterable)
+            self._length = len(iterable) + self.index + (self._after is not missing)

-    def __len__(self) ->int:
+        return self._length
+
+    def __len__(self) -> int:
         return self.length

     @property
-    def depth(self) ->int:
+    def depth(self) -> int:
         """How many levels deep a recursive loop currently is, starting at 1."""
-        pass
+        return self.depth0 + 1

     @property
-    def index(self) ->int:
+    def index(self) -> int:
         """Current iteration of the loop, starting at 1."""
-        pass
+        return self.index0 + 1

     @property
-    def revindex0(self) ->int:
+    def revindex0(self) -> int:
         """Number of iterations from the end of the loop, ending at 0.

         Requires calculating :attr:`length`.
         """
-        pass
+        return self.length - self.index

     @property
-    def revindex(self) ->int:
+    def revindex(self) -> int:
         """Number of iterations from the end of the loop, ending at 1.

         Requires calculating :attr:`length`.
         """
-        pass
+        return self.length - self.index0

     @property
-    def first(self) ->bool:
+    def first(self) -> bool:
         """Whether this is the first iteration of the loop."""
-        pass
+        return self.index0 == 0

-    def _peek_next(self) ->t.Any:
+    def _peek_next(self) -> t.Any:
         """Return the next element in the iterable, or :data:`missing`
         if the iterable is exhausted. Only peeks one item ahead, caching
         the result in :attr:`_last` for use in subsequent checks. The
         cache is reset when :meth:`__next__` is called.
         """
-        pass
+        if self._after is not missing:
+            return self._after
+
+        self._after = next(self._iterator, missing)
+        return self._after

     @property
-    def last(self) ->bool:
+    def last(self) -> bool:
         """Whether this is the last iteration of the loop.

         Causes the iterable to advance early. See
         :func:`itertools.groupby` for issues this can cause.
         The :func:`groupby` filter avoids that issue.
         """
-        pass
+        return self._peek_next() is missing

     @property
-    def previtem(self) ->t.Union[t.Any, 'Undefined']:
+    def previtem(self) -> t.Union[t.Any, "Undefined"]:
         """The item in the previous iteration. Undefined during the
         first iteration.
         """
-        pass
+        if self.first:
+            return self._undefined("there is no previous item")
+
+        return self._before

     @property
-    def nextitem(self) ->t.Union[t.Any, 'Undefined']:
+    def nextitem(self) -> t.Union[t.Any, "Undefined"]:
         """The item in the next iteration. Undefined during the last
         iteration.

@@ -328,40 +523,53 @@ class LoopContext:
         :func:`itertools.groupby` for issues this can cause.
         The :func:`jinja-filters.groupby` filter avoids that issue.
         """
-        pass
+        rv = self._peek_next()
+
+        if rv is missing:
+            return self._undefined("there is no next item")
+
+        return rv

-    def cycle(self, *args: V) ->V:
+    def cycle(self, *args: V) -> V:
         """Return a value from the given args, cycling through based on
         the current :attr:`index0`.

         :param args: One or more values to cycle through.
         """
-        pass
+        if not args:
+            raise TypeError("no items for cycling given")

-    def changed(self, *value: t.Any) ->bool:
+        return args[self.index0 % len(args)]
+
+    def changed(self, *value: t.Any) -> bool:
         """Return ``True`` if previously called with a different value
         (including when called for the first time).

         :param value: One or more values to compare to the last call.
         """
-        pass
+        if self._last_changed_value != value:
+            self._last_changed_value = value
+            return True
+
+        return False

-    def __iter__(self) ->'LoopContext':
+    def __iter__(self) -> "LoopContext":
         return self

-    def __next__(self) ->t.Tuple[t.Any, 'LoopContext']:
+    def __next__(self) -> t.Tuple[t.Any, "LoopContext"]:
         if self._after is not missing:
             rv = self._after
             self._after = missing
         else:
             rv = next(self._iterator)
+
         self.index0 += 1
         self._before = self._current
         self._current = rv
         return rv, self

     @internalcode
-    def __call__(self, iterable: t.Iterable[V]) ->str:
+    def __call__(self, iterable: t.Iterable[V]) -> str:
         """When iterating over nested data, render the body of the loop
         recursively with the given inner iterable data.

@@ -370,25 +578,79 @@ class LoopContext:
         if self._recurse is None:
             raise TypeError(
                 "The loop must have the 'recursive' marker to be called recursively."
-                )
+            )
+
         return self._recurse(iterable, self._recurse, depth=self.depth)

-    def __repr__(self) ->str:
-        return f'<{type(self).__name__} {self.index}/{self.length}>'
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self.index}/{self.length}>"


 class AsyncLoopContext(LoopContext):
-    _iterator: t.AsyncIterator[t.Any]
+    _iterator: t.AsyncIterator[t.Any]  # type: ignore
+
+    @staticmethod
+    def _to_iterator(  # type: ignore
+        iterable: t.Union[t.Iterable[V], t.AsyncIterable[V]],
+    ) -> t.AsyncIterator[V]:
+        return auto_aiter(iterable)
+
+    @property
+    async def length(self) -> int:  # type: ignore
+        if self._length is not None:
+            return self._length
+
+        try:
+            self._length = len(self._iterable)  # type: ignore
+        except TypeError:
+            iterable = [x async for x in self._iterator]
+            self._iterator = self._to_iterator(iterable)
+            self._length = len(iterable) + self.index + (self._after is not missing)
+
+        return self._length

-    def __aiter__(self) ->'AsyncLoopContext':
+    @property
+    async def revindex0(self) -> int:  # type: ignore
+        return await self.length - self.index
+
+    @property
+    async def revindex(self) -> int:  # type: ignore
+        return await self.length - self.index0
+
+    async def _peek_next(self) -> t.Any:
+        if self._after is not missing:
+            return self._after
+
+        try:
+            self._after = await self._iterator.__anext__()
+        except StopAsyncIteration:
+            self._after = missing
+
+        return self._after
+
+    @property
+    async def last(self) -> bool:  # type: ignore
+        return await self._peek_next() is missing
+
+    @property
+    async def nextitem(self) -> t.Union[t.Any, "Undefined"]:
+        rv = await self._peek_next()
+
+        if rv is missing:
+            return self._undefined("there is no next item")
+
+        return rv
+
+    def __aiter__(self) -> "AsyncLoopContext":
         return self

-    async def __anext__(self) ->t.Tuple[t.Any, 'AsyncLoopContext']:
+    async def __anext__(self) -> t.Tuple[t.Any, "AsyncLoopContext"]:
         if self._after is not missing:
             rv = self._after
             self._after = missing
         else:
             rv = await self._iterator.__anext__()
+
         self.index0 += 1
         self._before = self._current
         self._current = rv
@@ -398,10 +660,17 @@ class AsyncLoopContext(LoopContext):
 class Macro:
     """Wraps a macro function."""

-    def __init__(self, environment: 'Environment', func: t.Callable[...,
-        str], name: str, arguments: t.List[str], catch_kwargs: bool,
-        catch_varargs: bool, caller: bool, default_autoescape: t.Optional[
-        bool]=None):
+    def __init__(
+        self,
+        environment: "Environment",
+        func: t.Callable[..., str],
+        name: str,
+        arguments: t.List[str],
+        catch_kwargs: bool,
+        catch_varargs: bool,
+        caller: bool,
+        default_autoescape: t.Optional[bool] = None,
+    ):
         self._environment = environment
         self._func = func
         self._argument_count = len(arguments)
@@ -410,63 +679,116 @@ class Macro:
         self.catch_kwargs = catch_kwargs
         self.catch_varargs = catch_varargs
         self.caller = caller
-        self.explicit_caller = 'caller' in arguments
+        self.explicit_caller = "caller" in arguments
+
         if default_autoescape is None:
             if callable(environment.autoescape):
                 default_autoescape = environment.autoescape(None)
             else:
                 default_autoescape = environment.autoescape
+
         self._default_autoescape = default_autoescape

     @internalcode
     @pass_eval_context
-    def __call__(self, *args: t.Any, **kwargs: t.Any) ->str:
+    def __call__(self, *args: t.Any, **kwargs: t.Any) -> str:
+        # This requires a bit of explanation,  In the past we used to
+        # decide largely based on compile-time information if a macro is
+        # safe or unsafe.  While there was a volatile mode it was largely
+        # unused for deciding on escaping.  This turns out to be
+        # problematic for macros because whether a macro is safe depends not
+        # on the escape mode when it was defined, but rather when it was used.
+        #
+        # Because however we export macros from the module system and
+        # there are historic callers that do not pass an eval context (and
+        # will continue to not pass one), we need to perform an instance
+        # check here.
+        #
+        # This is considered safe because an eval context is not a valid
+        # argument to callables otherwise anyway.  Worst case here is
+        # that if no eval context is passed we fall back to the compile
+        # time autoescape flag.
         if args and isinstance(args[0], EvalContext):
             autoescape = args[0].autoescape
             args = args[1:]
         else:
             autoescape = self._default_autoescape
-        arguments = list(args[:self._argument_count])
+
+        # try to consume the positional arguments
+        arguments = list(args[: self._argument_count])
         off = len(arguments)
+
+        # For information why this is necessary refer to the handling
+        # of caller in the `macro_body` handler in the compiler.
         found_caller = False
+
+        # if the number of arguments consumed is not the number of
+        # arguments expected we start filling in keyword arguments
+        # and defaults.
         if off != self._argument_count:
-            for name in self.arguments[len(arguments):]:
+            for name in self.arguments[len(arguments) :]:
                 try:
                     value = kwargs.pop(name)
                 except KeyError:
                     value = missing
-                if name == 'caller':
+                if name == "caller":
                     found_caller = True
                 arguments.append(value)
         else:
             found_caller = self.explicit_caller
+
+        # it's important that the order of these arguments does not change
+        # if not also changed in the compiler's `function_scoping` method.
+        # the order is caller, keyword arguments, positional arguments!
         if self.caller and not found_caller:
-            caller = kwargs.pop('caller', None)
+            caller = kwargs.pop("caller", None)
             if caller is None:
-                caller = self._environment.undefined('No caller defined',
-                    name='caller')
+                caller = self._environment.undefined("No caller defined", name="caller")
             arguments.append(caller)
+
         if self.catch_kwargs:
             arguments.append(kwargs)
         elif kwargs:
-            if 'caller' in kwargs:
+            if "caller" in kwargs:
                 raise TypeError(
-                    f'macro {self.name!r} was invoked with two values for the special caller argument. This is most likely a bug.'
-                    )
-            raise TypeError(
-                f'macro {self.name!r} takes no keyword argument {next(iter(kwargs))!r}'
+                    f"macro {self.name!r} was invoked with two values for the special"
+                    " caller argument. This is most likely a bug."
                 )
+            raise TypeError(
+                f"macro {self.name!r} takes no keyword argument {next(iter(kwargs))!r}"
+            )
         if self.catch_varargs:
-            arguments.append(args[self._argument_count:])
+            arguments.append(args[self._argument_count :])
         elif len(args) > self._argument_count:
             raise TypeError(
-                f'macro {self.name!r} takes not more than {len(self.arguments)} argument(s)'
-                )
+                f"macro {self.name!r} takes not more than"
+                f" {len(self.arguments)} argument(s)"
+            )
+
         return self._invoke(arguments, autoescape)

-    def __repr__(self) ->str:
-        name = 'anonymous' if self.name is None else repr(self.name)
-        return f'<{type(self).__name__} {name}>'
+    async def _async_invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str:
+        rv = await self._func(*arguments)  # type: ignore
+
+        if autoescape:
+            return Markup(rv)
+
+        return rv  # type: ignore
+
+    def _invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str:
+        if self._environment.is_async:
+            return self._async_invoke(arguments, autoescape)  # type: ignore
+
+        rv = self._func(*arguments)
+
+        if autoescape:
+            rv = Markup(rv)
+
+        return rv
+
+    def __repr__(self) -> str:
+        name = "anonymous" if self.name is None else repr(self.name)
+        return f"<{type(self).__name__} {name}>"


 class Undefined:
@@ -483,37 +805,64 @@ class Undefined:
       ...
     jinja2.exceptions.UndefinedError: 'foo' is undefined
     """
-    __slots__ = ('_undefined_hint', '_undefined_obj', '_undefined_name',
-        '_undefined_exception')

-    def __init__(self, hint: t.Optional[str]=None, obj: t.Any=missing, name:
-        t.Optional[str]=None, exc: t.Type[TemplateRuntimeError]=UndefinedError
-        ) ->None:
+    __slots__ = (
+        "_undefined_hint",
+        "_undefined_obj",
+        "_undefined_name",
+        "_undefined_exception",
+    )
+
+    def __init__(
+        self,
+        hint: t.Optional[str] = None,
+        obj: t.Any = missing,
+        name: t.Optional[str] = None,
+        exc: t.Type[TemplateRuntimeError] = UndefinedError,
+    ) -> None:
         self._undefined_hint = hint
         self._undefined_obj = obj
         self._undefined_name = name
         self._undefined_exception = exc

     @property
-    def _undefined_message(self) ->str:
+    def _undefined_message(self) -> str:
         """Build a message about the undefined value based on how it was
         accessed.
         """
-        pass
+        if self._undefined_hint:
+            return self._undefined_hint
+
+        if self._undefined_obj is missing:
+            return f"{self._undefined_name!r} is undefined"
+
+        if not isinstance(self._undefined_name, str):
+            return (
+                f"{object_type_repr(self._undefined_obj)} has no"
+                f" element {self._undefined_name!r}"
+            )
+
+        return (
+            f"{object_type_repr(self._undefined_obj)!r} has no"
+            f" attribute {self._undefined_name!r}"
+        )

     @internalcode
-    def _fail_with_undefined_error(self, *args: t.Any, **kwargs: t.Any
-        ) ->'te.NoReturn':
+    def _fail_with_undefined_error(
+        self, *args: t.Any, **kwargs: t.Any
+    ) -> "te.NoReturn":
         """Raise an :exc:`UndefinedError` when operations are performed
         on the undefined value.
         """
-        pass
+        raise self._undefined_exception(self._undefined_message)

     @internalcode
-    def __getattr__(self, name: str) ->t.Any:
-        if name[:2] == '__':
+    def __getattr__(self, name: str) -> t.Any:
+        if name[:2] == "__":
             raise AttributeError(name)
+
         return self._fail_with_undefined_error()
+
     __add__ = __radd__ = __sub__ = __rsub__ = _fail_with_undefined_error
     __mul__ = __rmul__ = __div__ = __rdiv__ = _fail_with_undefined_error
     __truediv__ = __rtruediv__ = _fail_with_undefined_error
@@ -525,37 +874,38 @@ class Undefined:
     __int__ = __float__ = __complex__ = _fail_with_undefined_error
     __pow__ = __rpow__ = _fail_with_undefined_error

-    def __eq__(self, other: t.Any) ->bool:
+    def __eq__(self, other: t.Any) -> bool:
         return type(self) is type(other)

-    def __ne__(self, other: t.Any) ->bool:
+    def __ne__(self, other: t.Any) -> bool:
         return not self.__eq__(other)

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return id(type(self))

-    def __str__(self) ->str:
-        return ''
+    def __str__(self) -> str:
+        return ""

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return 0

-    def __iter__(self) ->t.Iterator[t.Any]:
+    def __iter__(self) -> t.Iterator[t.Any]:
         yield from ()

-    async def __aiter__(self) ->t.AsyncIterator[t.Any]:
+    async def __aiter__(self) -> t.AsyncIterator[t.Any]:
         for _ in ():
             yield

-    def __bool__(self) ->bool:
+    def __bool__(self) -> bool:
         return False

-    def __repr__(self) ->str:
-        return 'Undefined'
+    def __repr__(self) -> str:
+        return "Undefined"


-def make_logging_undefined(logger: t.Optional['logging.Logger']=None, base:
-    t.Type[Undefined]=Undefined) ->t.Type[Undefined]:
+def make_logging_undefined(
+    logger: t.Optional["logging.Logger"] = None, base: t.Type[Undefined] = Undefined
+) -> t.Type[Undefined]:
     """Given a logger object this returns a new undefined class that will
     log certain failures.  It will log iterations and printing.  If no
     logger is given a default logger is created.
@@ -575,7 +925,40 @@ def make_logging_undefined(logger: t.Optional['logging.Logger']=None, base:
     :param base: the base class to add logging functionality to.  This
                  defaults to :class:`Undefined`.
     """
-    pass
+    if logger is None:
+        import logging
+
+        logger = logging.getLogger(__name__)
+        logger.addHandler(logging.StreamHandler(sys.stderr))
+
+    def _log_message(undef: Undefined) -> None:
+        logger.warning("Template variable warning: %s", undef._undefined_message)
+
+    class LoggingUndefined(base):  # type: ignore
+        __slots__ = ()
+
+        def _fail_with_undefined_error(  # type: ignore
+            self, *args: t.Any, **kwargs: t.Any
+        ) -> "te.NoReturn":
+            try:
+                super()._fail_with_undefined_error(*args, **kwargs)
+            except self._undefined_exception as e:
+                logger.error("Template variable error: %s", e)  # type: ignore
+                raise e
+
+        def __str__(self) -> str:
+            _log_message(self)
+            return super().__str__()  # type: ignore
+
+        def __iter__(self) -> t.Iterator[t.Any]:
+            _log_message(self)
+            return super().__iter__()  # type: ignore
+
+        def __bool__(self) -> bool:
+            _log_message(self)
+            return super().__bool__()  # type: ignore
+
+    return LoggingUndefined


 class ChainableUndefined(Undefined):
@@ -593,14 +976,16 @@ class ChainableUndefined(Undefined):

     .. versionadded:: 2.11.0
     """
+
     __slots__ = ()

-    def __html__(self) ->str:
+    def __html__(self) -> str:
         return str(self)

-    def __getattr__(self, _: str) ->'ChainableUndefined':
+    def __getattr__(self, _: str) -> "ChainableUndefined":
         return self
-    __getitem__ = __getattr__
+
+    __getitem__ = __getattr__  # type: ignore


 class DebugUndefined(Undefined):
@@ -616,18 +1001,23 @@ class DebugUndefined(Undefined):
       ...
     jinja2.exceptions.UndefinedError: 'foo' is undefined
     """
+
     __slots__ = ()

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         if self._undefined_hint:
-            message = f'undefined value printed: {self._undefined_hint}'
+            message = f"undefined value printed: {self._undefined_hint}"
+
         elif self._undefined_obj is missing:
-            message = self._undefined_name
+            message = self._undefined_name  # type: ignore
+
         else:
             message = (
-                f'no such element: {object_type_repr(self._undefined_obj)}[{self._undefined_name!r}]'
-                )
-        return f'{{{{ {message} }}}}'
+                f"no such element: {object_type_repr(self._undefined_obj)}"
+                f"[{self._undefined_name!r}]"
+            )
+
+        return f"{{{{ {message} }}}}"


 class StrictUndefined(Undefined):
@@ -649,12 +1039,18 @@ class StrictUndefined(Undefined):
       ...
     jinja2.exceptions.UndefinedError: 'foo' is undefined
     """
+
     __slots__ = ()
     __iter__ = __str__ = __len__ = Undefined._fail_with_undefined_error
-    __eq__ = __ne__ = __bool__ = __hash__ = (Undefined.
-        _fail_with_undefined_error)
+    __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error
     __contains__ = Undefined._fail_with_undefined_error


-del (Undefined.__slots__, ChainableUndefined.__slots__, DebugUndefined.
-    __slots__, StrictUndefined.__slots__)
+# Remove slots attributes, after the metaclass is applied they are
+# unneeded and contain wrong data for subclasses.
+del (
+    Undefined.__slots__,
+    ChainableUndefined.__slots__,
+    DebugUndefined.__slots__,
+    StrictUndefined.__slots__,
+)
diff --git a/src/jinja2/sandbox.py b/src/jinja2/sandbox.py
index b73a983..0b4fc12 100644
--- a/src/jinja2/sandbox.py
+++ b/src/jinja2/sandbox.py
@@ -1,44 +1,116 @@
 """A sandbox layer that ensures unsafe operations cannot be performed.
 Useful when the template itself comes from an untrusted source.
 """
+
 import operator
 import types
 import typing as t
 from collections import abc
 from collections import deque
 from string import Formatter
-from _string import formatter_field_name_split
+
+from _string import formatter_field_name_split  # type: ignore
 from markupsafe import EscapeFormatter
 from markupsafe import Markup
+
 from .environment import Environment
 from .exceptions import SecurityError
 from .runtime import Context
 from .runtime import Undefined
-F = t.TypeVar('F', bound=t.Callable[..., t.Any])
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+#: maximum number of items a range may produce
 MAX_RANGE = 100000
+
+#: Unsafe function attributes.
 UNSAFE_FUNCTION_ATTRIBUTES: t.Set[str] = set()
+
+#: Unsafe method attributes. Function attributes are unsafe for methods too.
 UNSAFE_METHOD_ATTRIBUTES: t.Set[str] = set()
-UNSAFE_GENERATOR_ATTRIBUTES = {'gi_frame', 'gi_code'}
-UNSAFE_COROUTINE_ATTRIBUTES = {'cr_frame', 'cr_code'}
-UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {'ag_code', 'ag_frame'}
-_mutable_spec: t.Tuple[t.Tuple[t.Type[t.Any], t.FrozenSet[str]], ...] = ((
-    abc.MutableSet, frozenset(['add', 'clear', 'difference_update',
-    'discard', 'pop', 'remove', 'symmetric_difference_update', 'update'])),
-    (abc.MutableMapping, frozenset(['clear', 'pop', 'popitem', 'setdefault',
-    'update'])), (abc.MutableSequence, frozenset(['append', 'reverse',
-    'insert', 'sort', 'extend', 'remove'])), (deque, frozenset(['append',
-    'appendleft', 'clear', 'extend', 'extendleft', 'pop', 'popleft',
-    'remove', 'rotate'])))
-
-
-def safe_range(*args: int) ->range:
+
+#: unsafe generator attributes.
+UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"}
+
+#: unsafe attributes on coroutines
+UNSAFE_COROUTINE_ATTRIBUTES = {"cr_frame", "cr_code"}
+
+#: unsafe attributes on async generators
+UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"}
+
+_mutable_spec: t.Tuple[t.Tuple[t.Type[t.Any], t.FrozenSet[str]], ...] = (
+    (
+        abc.MutableSet,
+        frozenset(
+            [
+                "add",
+                "clear",
+                "difference_update",
+                "discard",
+                "pop",
+                "remove",
+                "symmetric_difference_update",
+                "update",
+            ]
+        ),
+    ),
+    (
+        abc.MutableMapping,
+        frozenset(["clear", "pop", "popitem", "setdefault", "update"]),
+    ),
+    (
+        abc.MutableSequence,
+        frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]),
+    ),
+    (
+        deque,
+        frozenset(
+            [
+                "append",
+                "appendleft",
+                "clear",
+                "extend",
+                "extendleft",
+                "pop",
+                "popleft",
+                "remove",
+                "rotate",
+            ]
+        ),
+    ),
+)
+
+
+def inspect_format_method(callable: t.Callable[..., t.Any]) -> t.Optional[str]:
+    if not isinstance(
+        callable, (types.MethodType, types.BuiltinMethodType)
+    ) or callable.__name__ not in ("format", "format_map"):
+        return None
+
+    obj = callable.__self__
+
+    if isinstance(obj, str):
+        return obj
+
+    return None
+
+
+def safe_range(*args: int) -> range:
     """A range that can't generate ranges with a length of more than
     MAX_RANGE items.
     """
-    pass
+    rng = range(*args)
+
+    if len(rng) > MAX_RANGE:
+        raise OverflowError(
+            "Range too big. The sandbox blocks ranges larger than"
+            f" MAX_RANGE ({MAX_RANGE})."
+        )
+
+    return rng


-def unsafe(f: F) ->F:
+def unsafe(f: F) -> F:
     """Marks a function or method as unsafe.

     .. code-block: python
@@ -47,10 +119,11 @@ def unsafe(f: F) ->F:
         def delete(self):
             pass
     """
-    pass
+    f.unsafe_callable = True  # type: ignore
+    return f


-def is_internal_attribute(obj: t.Any, attr: str) ->bool:
+def is_internal_attribute(obj: t.Any, attr: str) -> bool:
     """Test if the attribute given is an internal python attribute.  For
     example this function returns `True` for the `func_code` attribute of
     python objects.  This is useful if the environment method
@@ -62,10 +135,32 @@ def is_internal_attribute(obj: t.Any, attr: str) ->bool:
     >>> is_internal_attribute(str, "upper")
     False
     """
-    pass
-
-
-def modifies_known_mutable(obj: t.Any, attr: str) ->bool:
+    if isinstance(obj, types.FunctionType):
+        if attr in UNSAFE_FUNCTION_ATTRIBUTES:
+            return True
+    elif isinstance(obj, types.MethodType):
+        if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES:
+            return True
+    elif isinstance(obj, type):
+        if attr == "mro":
+            return True
+    elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)):
+        return True
+    elif isinstance(obj, types.GeneratorType):
+        if attr in UNSAFE_GENERATOR_ATTRIBUTES:
+            return True
+    elif hasattr(types, "CoroutineType") and isinstance(obj, types.CoroutineType):
+        if attr in UNSAFE_COROUTINE_ATTRIBUTES:
+            return True
+    elif hasattr(types, "AsyncGeneratorType") and isinstance(
+        obj, types.AsyncGeneratorType
+    ):
+        if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES:
+            return True
+    return attr.startswith("__")
+
+
+def modifies_known_mutable(obj: t.Any, attr: str) -> bool:
     """This function checks if an attribute on a builtin mutable object
     (list, dict, set or deque) or the corresponding ABCs would modify it
     if called.
@@ -84,7 +179,10 @@ def modifies_known_mutable(obj: t.Any, attr: str) ->bool:
     >>> modifies_known_mutable("foo", "upper")
     False
     """
-    pass
+    for typespec, unsafe in _mutable_spec:
+        if isinstance(obj, typespec):
+            return attr in unsafe
+    return False


 class SandboxedEnvironment(Environment):
@@ -97,86 +195,203 @@ class SandboxedEnvironment(Environment):
     raised.  However also other exceptions may occur during the rendering so
     the caller has to ensure that all exceptions are caught.
     """
+
     sandboxed = True
-    default_binop_table: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {'+':
-        operator.add, '-': operator.sub, '*': operator.mul, '/': operator.
-        truediv, '//': operator.floordiv, '**': operator.pow, '%': operator.mod
-        }
-    default_unop_table: t.Dict[str, t.Callable[[t.Any], t.Any]] = {'+':
-        operator.pos, '-': operator.neg}
+
+    #: default callback table for the binary operators.  A copy of this is
+    #: available on each instance of a sandboxed environment as
+    #: :attr:`binop_table`
+    default_binop_table: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
+        "+": operator.add,
+        "-": operator.sub,
+        "*": operator.mul,
+        "/": operator.truediv,
+        "//": operator.floordiv,
+        "**": operator.pow,
+        "%": operator.mod,
+    }
+
+    #: default callback table for the unary operators.  A copy of this is
+    #: available on each instance of a sandboxed environment as
+    #: :attr:`unop_table`
+    default_unop_table: t.Dict[str, t.Callable[[t.Any], t.Any]] = {
+        "+": operator.pos,
+        "-": operator.neg,
+    }
+
+    #: a set of binary operators that should be intercepted.  Each operator
+    #: that is added to this set (empty by default) is delegated to the
+    #: :meth:`call_binop` method that will perform the operator.  The default
+    #: operator callback is specified by :attr:`binop_table`.
+    #:
+    #: The following binary operators are interceptable:
+    #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**``
+    #:
+    #: The default operation form the operator table corresponds to the
+    #: builtin function.  Intercepted calls are always slower than the native
+    #: operator call, so make sure only to intercept the ones you are
+    #: interested in.
+    #:
+    #: .. versionadded:: 2.6
     intercepted_binops: t.FrozenSet[str] = frozenset()
+
+    #: a set of unary operators that should be intercepted.  Each operator
+    #: that is added to this set (empty by default) is delegated to the
+    #: :meth:`call_unop` method that will perform the operator.  The default
+    #: operator callback is specified by :attr:`unop_table`.
+    #:
+    #: The following unary operators are interceptable: ``+``, ``-``
+    #:
+    #: The default operation form the operator table corresponds to the
+    #: builtin function.  Intercepted calls are always slower than the native
+    #: operator call, so make sure only to intercept the ones you are
+    #: interested in.
+    #:
+    #: .. versionadded:: 2.6
     intercepted_unops: t.FrozenSet[str] = frozenset()

-    def __init__(self, *args: t.Any, **kwargs: t.Any) ->None:
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
         super().__init__(*args, **kwargs)
-        self.globals['range'] = safe_range
+        self.globals["range"] = safe_range
         self.binop_table = self.default_binop_table.copy()
         self.unop_table = self.default_unop_table.copy()

-    def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) ->bool:
+    def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool:
         """The sandboxed environment will call this method to check if the
         attribute of an object is safe to access.  Per default all attributes
         starting with an underscore are considered private as well as the
         special attributes of internal python objects as returned by the
         :func:`is_internal_attribute` function.
         """
-        pass
+        return not (attr.startswith("_") or is_internal_attribute(obj, attr))

-    def is_safe_callable(self, obj: t.Any) ->bool:
+    def is_safe_callable(self, obj: t.Any) -> bool:
         """Check if an object is safely callable. By default callables
         are considered safe unless decorated with :func:`unsafe`.

         This also recognizes the Django convention of setting
         ``func.alters_data = True``.
         """
-        pass
+        return not (
+            getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False)
+        )

-    def call_binop(self, context: Context, operator: str, left: t.Any,
-        right: t.Any) ->t.Any:
+    def call_binop(
+        self, context: Context, operator: str, left: t.Any, right: t.Any
+    ) -> t.Any:
         """For intercepted binary operator calls (:meth:`intercepted_binops`)
         this function is executed instead of the builtin operator.  This can
         be used to fine tune the behavior of certain operators.

         .. versionadded:: 2.6
         """
-        pass
+        return self.binop_table[operator](left, right)

-    def call_unop(self, context: Context, operator: str, arg: t.Any) ->t.Any:
+    def call_unop(self, context: Context, operator: str, arg: t.Any) -> t.Any:
         """For intercepted unary operator calls (:meth:`intercepted_unops`)
         this function is executed instead of the builtin operator.  This can
         be used to fine tune the behavior of certain operators.

         .. versionadded:: 2.6
         """
-        pass
+        return self.unop_table[operator](arg)

-    def getitem(self, obj: t.Any, argument: t.Union[str, t.Any]) ->t.Union[
-        t.Any, Undefined]:
+    def getitem(
+        self, obj: t.Any, argument: t.Union[str, t.Any]
+    ) -> t.Union[t.Any, Undefined]:
         """Subscribe an object from sandboxed code."""
-        pass
-
-    def getattr(self, obj: t.Any, attribute: str) ->t.Union[t.Any, Undefined]:
+        try:
+            return obj[argument]
+        except (TypeError, LookupError):
+            if isinstance(argument, str):
+                try:
+                    attr = str(argument)
+                except Exception:
+                    pass
+                else:
+                    try:
+                        value = getattr(obj, attr)
+                    except AttributeError:
+                        pass
+                    else:
+                        if self.is_safe_attribute(obj, argument, value):
+                            return value
+                        return self.unsafe_undefined(obj, argument)
+        return self.undefined(obj=obj, name=argument)
+
+    def getattr(self, obj: t.Any, attribute: str) -> t.Union[t.Any, Undefined]:
         """Subscribe an object from sandboxed code and prefer the
         attribute.  The attribute passed *must* be a bytestring.
         """
-        pass
-
-    def unsafe_undefined(self, obj: t.Any, attribute: str) ->Undefined:
+        try:
+            value = getattr(obj, attribute)
+        except AttributeError:
+            try:
+                return obj[attribute]
+            except (TypeError, LookupError):
+                pass
+        else:
+            if self.is_safe_attribute(obj, attribute, value):
+                return value
+            return self.unsafe_undefined(obj, attribute)
+        return self.undefined(obj=obj, name=attribute)
+
+    def unsafe_undefined(self, obj: t.Any, attribute: str) -> Undefined:
         """Return an undefined object for unsafe attributes."""
-        pass
-
-    def format_string(self, s: str, args: t.Tuple[t.Any, ...], kwargs: t.
-        Dict[str, t.Any], format_func: t.Optional[t.Callable[..., t.Any]]=None
-        ) ->str:
+        return self.undefined(
+            f"access to attribute {attribute!r} of"
+            f" {type(obj).__name__!r} object is unsafe.",
+            name=attribute,
+            obj=obj,
+            exc=SecurityError,
+        )
+
+    def format_string(
+        self,
+        s: str,
+        args: t.Tuple[t.Any, ...],
+        kwargs: t.Dict[str, t.Any],
+        format_func: t.Optional[t.Callable[..., t.Any]] = None,
+    ) -> str:
         """If a format call is detected, then this is routed through this
         method so that our safety sandbox can be used for it.
         """
-        pass
-
-    def call(__self, __context: Context, __obj: t.Any, *args: t.Any, **
-        kwargs: t.Any) ->t.Any:
+        formatter: SandboxedFormatter
+        if isinstance(s, Markup):
+            formatter = SandboxedEscapeFormatter(self, escape=s.escape)
+        else:
+            formatter = SandboxedFormatter(self)
+
+        if format_func is not None and format_func.__name__ == "format_map":
+            if len(args) != 1 or kwargs:
+                raise TypeError(
+                    "format_map() takes exactly one argument"
+                    f" {len(args) + (kwargs is not None)} given"
+                )
+
+            kwargs = args[0]
+            args = ()
+
+        rv = formatter.vformat(s, args, kwargs)
+        return type(s)(rv)
+
+    def call(
+        __self,  # noqa: B902
+        __context: Context,
+        __obj: t.Any,
+        *args: t.Any,
+        **kwargs: t.Any,
+    ) -> t.Any:
         """Call an object from sandboxed code."""
-        pass
+        fmt = inspect_format_method(__obj)
+        if fmt is not None:
+            return __self.format_string(fmt, args, kwargs, __obj)
+
+        # the double prefixes are to avoid double keyword argument
+        # errors when proxying the call.
+        if not __self.is_safe_callable(__obj):
+            raise SecurityError(f"{__obj!r} is not safely callable")
+        return __context.call(__obj, *args, **kwargs)


 class ImmutableSandboxedEnvironment(SandboxedEnvironment):
@@ -185,13 +400,30 @@ class ImmutableSandboxedEnvironment(SandboxedEnvironment):
     `dict` by using the :func:`modifies_known_mutable` function.
     """

+    def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool:
+        if not super().is_safe_attribute(obj, attr, value):
+            return False

-class SandboxedFormatter(Formatter):
+        return not modifies_known_mutable(obj, attr)

-    def __init__(self, env: Environment, **kwargs: t.Any) ->None:
+
+class SandboxedFormatter(Formatter):
+    def __init__(self, env: Environment, **kwargs: t.Any) -> None:
         self._env = env
         super().__init__(**kwargs)

+    def get_field(
+        self, field_name: str, args: t.Sequence[t.Any], kwargs: t.Mapping[str, t.Any]
+    ) -> t.Tuple[t.Any, str]:
+        first, rest = formatter_field_name_split(field_name)
+        obj = self.get_value(first, args, kwargs)
+        for is_attr, i in rest:
+            if is_attr:
+                obj = self._env.getattr(obj, i)
+            else:
+                obj = self._env.getitem(obj, i)
+        return obj, first
+

 class SandboxedEscapeFormatter(SandboxedFormatter, EscapeFormatter):
     pass
diff --git a/src/jinja2/tests.py b/src/jinja2/tests.py
index 2823a4b..1a59e37 100644
--- a/src/jinja2/tests.py
+++ b/src/jinja2/tests.py
@@ -1,30 +1,33 @@
 """Built-in template tests used with the ``is`` operator."""
+
 import operator
 import typing as t
 from collections import abc
 from numbers import Number
+
 from .runtime import Undefined
 from .utils import pass_environment
+
 if t.TYPE_CHECKING:
     from .environment import Environment


-def test_odd(value: int) ->bool:
+def test_odd(value: int) -> bool:
     """Return true if the variable is odd."""
-    pass
+    return value % 2 == 1


-def test_even(value: int) ->bool:
+def test_even(value: int) -> bool:
     """Return true if the variable is even."""
-    pass
+    return value % 2 == 0


-def test_divisibleby(value: int, num: int) ->bool:
+def test_divisibleby(value: int, num: int) -> bool:
     """Check if a variable is divisible by a number."""
-    pass
+    return value % num == 0


-def test_defined(value: t.Any) ->bool:
+def test_defined(value: t.Any) -> bool:
     """Return true if the variable is defined:

     .. sourcecode:: jinja
@@ -38,16 +41,16 @@ def test_defined(value: t.Any) ->bool:
     See the :func:`default` filter for a simple way to set undefined
     variables.
     """
-    pass
+    return not isinstance(value, Undefined)


-def test_undefined(value: t.Any) ->bool:
+def test_undefined(value: t.Any) -> bool:
     """Like :func:`defined` but the other way round."""
-    pass
+    return isinstance(value, Undefined)


 @pass_environment
-def test_filter(env: 'Environment', value: str) ->bool:
+def test_filter(env: "Environment", value: str) -> bool:
     """Check if a filter exists by name. Useful if a filter may be
     optionally available.

@@ -61,11 +64,11 @@ def test_filter(env: 'Environment', value: str) ->bool:

     .. versionadded:: 3.0
     """
-    pass
+    return value in env.filters


 @pass_environment
-def test_test(env: 'Environment', value: str) ->bool:
+def test_test(env: "Environment", value: str) -> bool:
     """Check if a test exists by name. Useful if a test may be
     optionally available.

@@ -83,90 +86,98 @@ def test_test(env: 'Environment', value: str) ->bool:

     .. versionadded:: 3.0
     """
-    pass
+    return value in env.tests


-def test_none(value: t.Any) ->bool:
+def test_none(value: t.Any) -> bool:
     """Return true if the variable is none."""
-    pass
+    return value is None


-def test_boolean(value: t.Any) ->bool:
+def test_boolean(value: t.Any) -> bool:
     """Return true if the object is a boolean value.

     .. versionadded:: 2.11
     """
-    pass
+    return value is True or value is False


-def test_false(value: t.Any) ->bool:
+def test_false(value: t.Any) -> bool:
     """Return true if the object is False.

     .. versionadded:: 2.11
     """
-    pass
+    return value is False


-def test_true(value: t.Any) ->bool:
+def test_true(value: t.Any) -> bool:
     """Return true if the object is True.

     .. versionadded:: 2.11
     """
-    pass
+    return value is True


-def test_integer(value: t.Any) ->bool:
+# NOTE: The existing 'number' test matches booleans and floats
+def test_integer(value: t.Any) -> bool:
     """Return true if the object is an integer.

     .. versionadded:: 2.11
     """
-    pass
+    return isinstance(value, int) and value is not True and value is not False


-def test_float(value: t.Any) ->bool:
+# NOTE: The existing 'number' test matches booleans and integers
+def test_float(value: t.Any) -> bool:
     """Return true if the object is a float.

     .. versionadded:: 2.11
     """
-    pass
+    return isinstance(value, float)


-def test_lower(value: str) ->bool:
+def test_lower(value: str) -> bool:
     """Return true if the variable is lowercased."""
-    pass
+    return str(value).islower()


-def test_upper(value: str) ->bool:
+def test_upper(value: str) -> bool:
     """Return true if the variable is uppercased."""
-    pass
+    return str(value).isupper()


-def test_string(value: t.Any) ->bool:
+def test_string(value: t.Any) -> bool:
     """Return true if the object is a string."""
-    pass
+    return isinstance(value, str)


-def test_mapping(value: t.Any) ->bool:
+def test_mapping(value: t.Any) -> bool:
     """Return true if the object is a mapping (dict etc.).

     .. versionadded:: 2.6
     """
-    pass
+    return isinstance(value, abc.Mapping)


-def test_number(value: t.Any) ->bool:
+def test_number(value: t.Any) -> bool:
     """Return true if the variable is a number."""
-    pass
+    return isinstance(value, Number)


-def test_sequence(value: t.Any) ->bool:
+def test_sequence(value: t.Any) -> bool:
     """Return true if the variable is a sequence. Sequences are variables
     that are iterable.
     """
-    pass
+    try:
+        len(value)
+        value.__getitem__  # noqa B018
+    except Exception:
+        return False

+    return True

-def test_sameas(value: t.Any, other: t.Any) ->bool:
+
+def test_sameas(value: t.Any, other: t.Any) -> bool:
     """Check if an object points to the same memory address than another
     object:

@@ -176,37 +187,70 @@ def test_sameas(value: t.Any, other: t.Any) ->bool:
             the foo attribute really is the `False` singleton
         {% endif %}
     """
-    pass
+    return value is other


-def test_iterable(value: t.Any) ->bool:
+def test_iterable(value: t.Any) -> bool:
     """Check if it's possible to iterate over an object."""
-    pass
+    try:
+        iter(value)
+    except TypeError:
+        return False
+
+    return True


-def test_escaped(value: t.Any) ->bool:
+def test_escaped(value: t.Any) -> bool:
     """Check if the value is escaped."""
-    pass
+    return hasattr(value, "__html__")


-def test_in(value: t.Any, seq: t.Container[t.Any]) ->bool:
+def test_in(value: t.Any, seq: t.Container[t.Any]) -> bool:
     """Check if value is in seq.

     .. versionadded:: 2.10
     """
-    pass
-
-
-TESTS = {'odd': test_odd, 'even': test_even, 'divisibleby':
-    test_divisibleby, 'defined': test_defined, 'undefined': test_undefined,
-    'filter': test_filter, 'test': test_test, 'none': test_none, 'boolean':
-    test_boolean, 'false': test_false, 'true': test_true, 'integer':
-    test_integer, 'float': test_float, 'lower': test_lower, 'upper':
-    test_upper, 'string': test_string, 'mapping': test_mapping, 'number':
-    test_number, 'sequence': test_sequence, 'iterable': test_iterable,
-    'callable': callable, 'sameas': test_sameas, 'escaped': test_escaped,
-    'in': test_in, '==': operator.eq, 'eq': operator.eq, 'equalto':
-    operator.eq, '!=': operator.ne, 'ne': operator.ne, '>': operator.gt,
-    'gt': operator.gt, 'greaterthan': operator.gt, 'ge': operator.ge, '>=':
-    operator.ge, '<': operator.lt, 'lt': operator.lt, 'lessthan': operator.
-    lt, '<=': operator.le, 'le': operator.le}
+    return value in seq
+
+
+TESTS = {
+    "odd": test_odd,
+    "even": test_even,
+    "divisibleby": test_divisibleby,
+    "defined": test_defined,
+    "undefined": test_undefined,
+    "filter": test_filter,
+    "test": test_test,
+    "none": test_none,
+    "boolean": test_boolean,
+    "false": test_false,
+    "true": test_true,
+    "integer": test_integer,
+    "float": test_float,
+    "lower": test_lower,
+    "upper": test_upper,
+    "string": test_string,
+    "mapping": test_mapping,
+    "number": test_number,
+    "sequence": test_sequence,
+    "iterable": test_iterable,
+    "callable": callable,
+    "sameas": test_sameas,
+    "escaped": test_escaped,
+    "in": test_in,
+    "==": operator.eq,
+    "eq": operator.eq,
+    "equalto": operator.eq,
+    "!=": operator.ne,
+    "ne": operator.ne,
+    ">": operator.gt,
+    "gt": operator.gt,
+    "greaterthan": operator.gt,
+    "ge": operator.ge,
+    ">=": operator.ge,
+    "<": operator.lt,
+    "lt": operator.lt,
+    "lessthan": operator.lt,
+    "<=": operator.le,
+    "le": operator.le,
+}
diff --git a/src/jinja2/utils.py b/src/jinja2/utils.py
index 7563812..7fb7693 100644
--- a/src/jinja2/utils.py
+++ b/src/jinja2/utils.py
@@ -10,16 +10,23 @@ from random import randrange
 from threading import Lock
 from types import CodeType
 from urllib.parse import quote_from_bytes
+
 import markupsafe
+
 if t.TYPE_CHECKING:
     import typing_extensions as te
-F = t.TypeVar('F', bound=t.Callable[..., t.Any])
-missing: t.Any = type('MissingType', (), {'__repr__': lambda x: 'missing'})()
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+# special singleton representing missing values for the runtime
+missing: t.Any = type("MissingType", (), {"__repr__": lambda x: "missing"})()
+
 internal_code: t.MutableSet[CodeType] = set()
-concat = ''.join
+
+concat = "".join


-def pass_context(f: F) ->F:
+def pass_context(f: F) -> F:
     """Pass the :class:`~jinja2.runtime.Context` as the first argument
     to the decorated function when called while rendering a template.

@@ -32,10 +39,11 @@ def pass_context(f: F) ->F:
     .. versionadded:: 3.0.0
         Replaces ``contextfunction`` and ``contextfilter``.
     """
-    pass
+    f.jinja_pass_arg = _PassArg.context  # type: ignore
+    return f


-def pass_eval_context(f: F) ->F:
+def pass_eval_context(f: F) -> F:
     """Pass the :class:`~jinja2.nodes.EvalContext` as the first argument
     to the decorated function when called while rendering a template.
     See :ref:`eval-context`.
@@ -48,10 +56,11 @@ def pass_eval_context(f: F) ->F:
     .. versionadded:: 3.0.0
         Replaces ``evalcontextfunction`` and ``evalcontextfilter``.
     """
-    pass
+    f.jinja_pass_arg = _PassArg.eval_context  # type: ignore
+    return f


-def pass_environment(f: F) ->F:
+def pass_environment(f: F) -> F:
     """Pass the :class:`~jinja2.Environment` as the first argument to
     the decorated function when called while rendering a template.

@@ -60,7 +69,8 @@ def pass_environment(f: F) ->F:
     .. versionadded:: 3.0.0
         Replaces ``environmentfunction`` and ``environmentfilter``.
     """
-    pass
+    f.jinja_pass_arg = _PassArg.environment  # type: ignore
+    return f


 class _PassArg(enum.Enum):
@@ -68,13 +78,21 @@ class _PassArg(enum.Enum):
     eval_context = enum.auto()
     environment = enum.auto()

+    @classmethod
+    def from_obj(cls, obj: F) -> t.Optional["_PassArg"]:
+        if hasattr(obj, "jinja_pass_arg"):
+            return obj.jinja_pass_arg  # type: ignore
+
+        return None
+

-def internalcode(f: F) ->F:
+def internalcode(f: F) -> F:
     """Marks the function as internally used"""
-    pass
+    internal_code.add(f.__code__)
+    return f


-def is_undefined(obj: t.Any) ->bool:
+def is_undefined(obj: t.Any) -> bool:
     """Check if the object passed is undefined.  This does nothing more than
     performing an instance check against :class:`Undefined` but looks nicer.
     This can be used for custom filters or tests that want to react to
@@ -86,24 +104,31 @@ def is_undefined(obj: t.Any) ->bool:
                 return default
             return var
     """
-    pass
+    from .runtime import Undefined

+    return isinstance(obj, Undefined)

-def consume(iterable: t.Iterable[t.Any]) ->None:
+
+def consume(iterable: t.Iterable[t.Any]) -> None:
     """Consumes an iterable without doing anything with it."""
-    pass
+    for _ in iterable:
+        pass


-def clear_caches() ->None:
+def clear_caches() -> None:
     """Jinja keeps internal caches for environments and lexers.  These are
     used so that Jinja doesn't have to recreate environments and lexers all
     the time.  Normally you don't have to care about that but if you are
     measuring memory consumption you may want to clean the caches.
     """
-    pass
+    from .environment import get_spontaneous_environment
+    from .lexer import _lexer_cache

+    get_spontaneous_environment.cache_clear()
+    _lexer_cache.clear()

-def import_string(import_name: str, silent: bool=False) ->t.Any:
+
+def import_string(import_name: str, silent: bool = False) -> t.Any:
     """Imports an object based on a string.  This is useful if you want to
     use import paths as endpoints or something similar.  An import path can
     be specified either in dotted notation (``xml.sax.saxutils.escape``)
@@ -114,62 +139,92 @@ def import_string(import_name: str, silent: bool=False) ->t.Any:

     :return: imported object
     """
-    pass
-
-
-def open_if_exists(filename: str, mode: str='rb') ->t.Optional[t.IO[t.Any]]:
+    try:
+        if ":" in import_name:
+            module, obj = import_name.split(":", 1)
+        elif "." in import_name:
+            module, _, obj = import_name.rpartition(".")
+        else:
+            return __import__(import_name)
+        return getattr(__import__(module, None, None, [obj]), obj)
+    except (ImportError, AttributeError):
+        if not silent:
+            raise
+
+
+def open_if_exists(filename: str, mode: str = "rb") -> t.Optional[t.IO[t.Any]]:
     """Returns a file descriptor for the filename if that file exists,
     otherwise ``None``.
     """
-    pass
+    if not os.path.isfile(filename):
+        return None
+
+    return open(filename, mode)


-def object_type_repr(obj: t.Any) ->str:
+def object_type_repr(obj: t.Any) -> str:
     """Returns the name of the object's type.  For some recognized
     singletons the name of the object is returned instead. (For
     example for `None` and `Ellipsis`).
     """
-    pass
+    if obj is None:
+        return "None"
+    elif obj is Ellipsis:
+        return "Ellipsis"
+
+    cls = type(obj)

+    if cls.__module__ == "builtins":
+        return f"{cls.__name__} object"

-def pformat(obj: t.Any) ->str:
+    return f"{cls.__module__}.{cls.__name__} object"
+
+
+def pformat(obj: t.Any) -> str:
     """Format an object using :func:`pprint.pformat`."""
-    pass
+    from pprint import pformat
+
+    return pformat(obj)


 _http_re = re.compile(
-    """
+    r"""
     ^
     (
-        (https?://|www\\.)  # scheme or www
-        (([\\w%-]+\\.)+)?  # subdomain
+        (https?://|www\.)  # scheme or www
+        (([\w%-]+\.)+)?  # subdomain
         (
             [a-z]{2,63}  # basic tld
         |
-            xn--[\\w%]{2,59}  # idna tld
+            xn--[\w%]{2,59}  # idna tld
         )
     |
-        ([\\w%-]{2,63}\\.)+  # basic domain
+        ([\w%-]{2,63}\.)+  # basic domain
         (com|net|int|edu|gov|org|info|mil)  # basic tld
     |
         (https?://)  # scheme
         (
-            (([\\d]{1,3})(\\.[\\d]{1,3}){3})  # IPv4
+            (([\d]{1,3})(\.[\d]{1,3}){3})  # IPv4
         |
-            (\\[([\\da-f]{0,4}:){2}([\\da-f]{0,4}:?){1,6}])  # IPv6
+            (\[([\da-f]{0,4}:){2}([\da-f]{0,4}:?){1,6}])  # IPv6
         )
     )
-    (?::[\\d]{1,5})?  # port
-    (?:[/?#]\\S*)?  # path, query, and fragment
+    (?::[\d]{1,5})?  # port
+    (?:[/?#]\S*)?  # path, query, and fragment
     $
-    """
-    , re.IGNORECASE | re.VERBOSE)
-_email_re = re.compile('^\\S+@\\w[\\w.-]*\\.\\w+$')
-
-
-def urlize(text: str, trim_url_limit: t.Optional[int]=None, rel: t.Optional
-    [str]=None, target: t.Optional[str]=None, extra_schemes: t.Optional[t.
-    Iterable[str]]=None) ->str:
+    """,
+    re.IGNORECASE | re.VERBOSE,
+)
+_email_re = re.compile(r"^\S+@\w[\w.-]*\.\w+$")
+
+
+def urlize(
+    text: str,
+    trim_url_limit: t.Optional[int] = None,
+    rel: t.Optional[str] = None,
+    target: t.Optional[str] = None,
+    extra_schemes: t.Optional[t.Iterable[str]] = None,
+) -> str:
     """Convert URLs in text into clickable links.

     This may not recognize links in some situations. Usually, a more
@@ -201,16 +256,145 @@ def urlize(text: str, trim_url_limit: t.Optional[int]=None, rel: t.Optional
         or without the ``mailto:`` scheme. Validate IP addresses. Ignore
         parentheses and brackets in more cases.
     """
-    pass
-
-
-def generate_lorem_ipsum(n: int=5, html: bool=True, min: int=20, max: int=100
-    ) ->str:
+    if trim_url_limit is not None:
+
+        def trim_url(x: str) -> str:
+            if len(x) > trim_url_limit:
+                return f"{x[:trim_url_limit]}..."
+
+            return x
+
+    else:
+
+        def trim_url(x: str) -> str:
+            return x
+
+    words = re.split(r"(\s+)", str(markupsafe.escape(text)))
+    rel_attr = f' rel="{markupsafe.escape(rel)}"' if rel else ""
+    target_attr = f' target="{markupsafe.escape(target)}"' if target else ""
+
+    for i, word in enumerate(words):
+        head, middle, tail = "", word, ""
+        match = re.match(r"^([(<]|&lt;)+", middle)
+
+        if match:
+            head = match.group()
+            middle = middle[match.end() :]
+
+        # Unlike lead, which is anchored to the start of the string,
+        # need to check that the string ends with any of the characters
+        # before trying to match all of them, to avoid backtracking.
+        if middle.endswith((")", ">", ".", ",", "\n", "&gt;")):
+            match = re.search(r"([)>.,\n]|&gt;)+$", middle)
+
+            if match:
+                tail = match.group()
+                middle = middle[: match.start()]
+
+        # Prefer balancing parentheses in URLs instead of ignoring a
+        # trailing character.
+        for start_char, end_char in ("(", ")"), ("<", ">"), ("&lt;", "&gt;"):
+            start_count = middle.count(start_char)
+
+            if start_count <= middle.count(end_char):
+                # Balanced, or lighter on the left
+                continue
+
+            # Move as many as possible from the tail to balance
+            for _ in range(min(start_count, tail.count(end_char))):
+                end_index = tail.index(end_char) + len(end_char)
+                # Move anything in the tail before the end char too
+                middle += tail[:end_index]
+                tail = tail[end_index:]
+
+        if _http_re.match(middle):
+            if middle.startswith("https://") or middle.startswith("http://"):
+                middle = (
+                    f'<a href="{middle}"{rel_attr}{target_attr}>{trim_url(middle)}</a>'
+                )
+            else:
+                middle = (
+                    f'<a href="https://{middle}"{rel_attr}{target_attr}>'
+                    f"{trim_url(middle)}</a>"
+                )
+
+        elif middle.startswith("mailto:") and _email_re.match(middle[7:]):
+            middle = f'<a href="{middle}">{middle[7:]}</a>'
+
+        elif (
+            "@" in middle
+            and not middle.startswith("www.")
+            and ":" not in middle
+            and _email_re.match(middle)
+        ):
+            middle = f'<a href="mailto:{middle}">{middle}</a>'
+
+        elif extra_schemes is not None:
+            for scheme in extra_schemes:
+                if middle != scheme and middle.startswith(scheme):
+                    middle = f'<a href="{middle}"{rel_attr}{target_attr}>{middle}</a>'
+
+        words[i] = f"{head}{middle}{tail}"
+
+    return "".join(words)
+
+
+def generate_lorem_ipsum(
+    n: int = 5, html: bool = True, min: int = 20, max: int = 100
+) -> str:
     """Generate some lorem ipsum for the template."""
-    pass
+    from .constants import LOREM_IPSUM_WORDS
+
+    words = LOREM_IPSUM_WORDS.split()
+    result = []
+
+    for _ in range(n):
+        next_capitalized = True
+        last_comma = last_fullstop = 0
+        word = None
+        last = None
+        p = []
+
+        # each paragraph contains out of 20 to 100 words.
+        for idx, _ in enumerate(range(randrange(min, max))):
+            while True:
+                word = choice(words)
+                if word != last:
+                    last = word
+                    break
+            if next_capitalized:
+                word = word.capitalize()
+                next_capitalized = False
+            # add commas
+            if idx - randrange(3, 8) > last_comma:
+                last_comma = idx
+                last_fullstop += 2
+                word += ","
+            # add end of sentences
+            if idx - randrange(10, 20) > last_fullstop:
+                last_comma = last_fullstop = idx
+                word += "."
+                next_capitalized = True
+            p.append(word)
+
+        # ensure that the paragraph ends with a dot.
+        p_str = " ".join(p)
+
+        if p_str.endswith(","):
+            p_str = p_str[:-1] + "."
+        elif not p_str.endswith("."):
+            p_str += "."
+
+        result.append(p_str)
+
+    if not html:
+        return "\n\n".join(result)
+    return markupsafe.Markup(
+        "\n".join(f"<p>{markupsafe.escape(x)}</p>" for x in result)
+    )


-def url_quote(obj: t.Any, charset: str='utf-8', for_qs: bool=False) ->str:
+def url_quote(obj: t.Any, charset: str = "utf-8", for_qs: bool = False) -> str:
     """Quote a string for use in a URL using the given charset.

     :param obj: String or bytes to quote. Other types are converted to
@@ -218,60 +402,99 @@ def url_quote(obj: t.Any, charset: str='utf-8', for_qs: bool=False) ->str:
     :param charset: Encode text to bytes using this charset.
     :param for_qs: Quote "/" and use "+" for spaces.
     """
-    pass
+    if not isinstance(obj, bytes):
+        if not isinstance(obj, str):
+            obj = str(obj)
+
+        obj = obj.encode(charset)
+
+    safe = b"" if for_qs else b"/"
+    rv = quote_from_bytes(obj, safe)
+
+    if for_qs:
+        rv = rv.replace("%20", "+")
+
+    return rv


 @abc.MutableMapping.register
 class LRUCache:
     """A simple LRU Cache implementation."""

-    def __init__(self, capacity: int) ->None:
+    # this is fast for small capacities (something below 1000) but doesn't
+    # scale.  But as long as it's only used as storage for templates this
+    # won't do any harm.
+
+    def __init__(self, capacity: int) -> None:
         self.capacity = capacity
         self._mapping: t.Dict[t.Any, t.Any] = {}
-        self._queue: 'te.Deque[t.Any]' = deque()
+        self._queue: "te.Deque[t.Any]" = deque()
         self._postinit()

-    def __getstate__(self) ->t.Mapping[str, t.Any]:
-        return {'capacity': self.capacity, '_mapping': self._mapping,
-            '_queue': self._queue}
-
-    def __setstate__(self, d: t.Mapping[str, t.Any]) ->None:
+    def _postinit(self) -> None:
+        # alias all queue methods for faster lookup
+        self._popleft = self._queue.popleft
+        self._pop = self._queue.pop
+        self._remove = self._queue.remove
+        self._wlock = Lock()
+        self._append = self._queue.append
+
+    def __getstate__(self) -> t.Mapping[str, t.Any]:
+        return {
+            "capacity": self.capacity,
+            "_mapping": self._mapping,
+            "_queue": self._queue,
+        }
+
+    def __setstate__(self, d: t.Mapping[str, t.Any]) -> None:
         self.__dict__.update(d)
         self._postinit()

-    def __getnewargs__(self) ->t.Tuple[t.Any, ...]:
-        return self.capacity,
+    def __getnewargs__(self) -> t.Tuple[t.Any, ...]:
+        return (self.capacity,)

-    def copy(self) ->'LRUCache':
+    def copy(self) -> "LRUCache":
         """Return a shallow copy of the instance."""
-        pass
+        rv = self.__class__(self.capacity)
+        rv._mapping.update(self._mapping)
+        rv._queue.extend(self._queue)
+        return rv

-    def get(self, key: t.Any, default: t.Any=None) ->t.Any:
+    def get(self, key: t.Any, default: t.Any = None) -> t.Any:
         """Return an item from the cache dict or `default`"""
-        pass
+        try:
+            return self[key]
+        except KeyError:
+            return default

-    def setdefault(self, key: t.Any, default: t.Any=None) ->t.Any:
+    def setdefault(self, key: t.Any, default: t.Any = None) -> t.Any:
         """Set `default` if the key is not in the cache otherwise
         leave unchanged. Return the value of this key.
         """
-        pass
+        try:
+            return self[key]
+        except KeyError:
+            self[key] = default
+            return default

-    def clear(self) ->None:
+    def clear(self) -> None:
         """Clear the cache."""
-        pass
+        with self._wlock:
+            self._mapping.clear()
+            self._queue.clear()

-    def __contains__(self, key: t.Any) ->bool:
+    def __contains__(self, key: t.Any) -> bool:
         """Check if a key exists in this cache."""
         return key in self._mapping

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         """Return the current size of the cache."""
         return len(self._mapping)

-    def __repr__(self) ->str:
-        return f'<{type(self).__name__} {self._mapping!r}>'
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self._mapping!r}>"

-    def __getitem__(self, key: t.Any) ->t.Any:
+    def __getitem__(self, key: t.Any) -> t.Any:
         """Get an item from the cache. Moves the item up so that it has the
         highest priority then.

@@ -279,15 +502,21 @@ class LRUCache:
         """
         with self._wlock:
             rv = self._mapping[key]
+
             if self._queue[-1] != key:
                 try:
                     self._remove(key)
                 except ValueError:
+                    # if something removed the key from the container
+                    # when we read, ignore the ValueError that we would
+                    # get otherwise.
                     pass
+
                 self._append(key)
+
             return rv

-    def __setitem__(self, key: t.Any, value: t.Any) ->None:
+    def __setitem__(self, key: t.Any, value: t.Any) -> None:
         """Sets the value for an item. Moves the item up so that it
         has the highest priority then.
         """
@@ -296,46 +525,54 @@ class LRUCache:
                 self._remove(key)
             elif len(self._mapping) == self.capacity:
                 del self._mapping[self._popleft()]
+
             self._append(key)
             self._mapping[key] = value

-    def __delitem__(self, key: t.Any) ->None:
+    def __delitem__(self, key: t.Any) -> None:
         """Remove an item from the cache dict.
         Raise a `KeyError` if it does not exist.
         """
         with self._wlock:
             del self._mapping[key]
+
             try:
                 self._remove(key)
             except ValueError:
                 pass

-    def items(self) ->t.Iterable[t.Tuple[t.Any, t.Any]]:
+    def items(self) -> t.Iterable[t.Tuple[t.Any, t.Any]]:
         """Return a list of items."""
-        pass
+        result = [(key, self._mapping[key]) for key in list(self._queue)]
+        result.reverse()
+        return result

-    def values(self) ->t.Iterable[t.Any]:
+    def values(self) -> t.Iterable[t.Any]:
         """Return a list of all values."""
-        pass
+        return [x[1] for x in self.items()]

-    def keys(self) ->t.Iterable[t.Any]:
+    def keys(self) -> t.Iterable[t.Any]:
         """Return a list of all keys ordered by most recent usage."""
-        pass
+        return list(self)

-    def __iter__(self) ->t.Iterator[t.Any]:
+    def __iter__(self) -> t.Iterator[t.Any]:
         return reversed(tuple(self._queue))

-    def __reversed__(self) ->t.Iterator[t.Any]:
+    def __reversed__(self) -> t.Iterator[t.Any]:
         """Iterate over the keys in the cache dict, oldest items
         coming first.
         """
         return iter(tuple(self._queue))
+
     __copy__ = copy


-def select_autoescape(enabled_extensions: t.Collection[str]=('html', 'htm',
-    'xml'), disabled_extensions: t.Collection[str]=(), default_for_string:
-    bool=True, default: bool=False) ->t.Callable[[t.Optional[str]], bool]:
+def select_autoescape(
+    enabled_extensions: t.Collection[str] = ("html", "htm", "xml"),
+    disabled_extensions: t.Collection[str] = (),
+    default_for_string: bool = True,
+    default: bool = False,
+) -> t.Callable[[t.Optional[str]], bool]:
     """Intelligently sets the initial value of autoescaping based on the
     filename of the template.  This is the recommended way to configure
     autoescaping if you do not want to write a custom function yourself.
@@ -370,11 +607,25 @@ def select_autoescape(enabled_extensions: t.Collection[str]=('html', 'htm',

     .. versionadded:: 2.9
     """
-    pass
+    enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions)
+    disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions)
+
+    def autoescape(template_name: t.Optional[str]) -> bool:
+        if template_name is None:
+            return default_for_string
+        template_name = template_name.lower()
+        if template_name.endswith(enabled_patterns):
+            return True
+        if template_name.endswith(disabled_patterns):
+            return False
+        return default

+    return autoescape

-def htmlsafe_json_dumps(obj: t.Any, dumps: t.Optional[t.Callable[..., str]]
-    =None, **kwargs: t.Any) ->markupsafe.Markup:
+
+def htmlsafe_json_dumps(
+    obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any
+) -> markupsafe.Markup:
     """Serialize an object to a string of JSON with :func:`json.dumps`,
     then replace HTML-unsafe characters with Unicode escapes and mark
     the result safe with :class:`~markupsafe.Markup`.
@@ -400,7 +651,16 @@ def htmlsafe_json_dumps(obj: t.Any, dumps: t.Optional[t.Callable[..., str]]

     .. versionadded:: 2.9
     """
-    pass
+    if dumps is None:
+        dumps = json.dumps
+
+    return markupsafe.Markup(
+        dumps(obj, **kwargs)
+        .replace("<", "\\u003c")
+        .replace(">", "\\u003e")
+        .replace("&", "\\u0026")
+        .replace("'", "\\u0027")
+    )


 class Cycler:
@@ -429,42 +689,45 @@ class Cycler:
     .. versionadded:: 2.1
     """

-    def __init__(self, *items: t.Any) ->None:
+    def __init__(self, *items: t.Any) -> None:
         if not items:
-            raise RuntimeError('at least one item has to be provided')
+            raise RuntimeError("at least one item has to be provided")
         self.items = items
         self.pos = 0

-    def reset(self) ->None:
+    def reset(self) -> None:
         """Resets the current item to the first item."""
-        pass
+        self.pos = 0

     @property
-    def current(self) ->t.Any:
+    def current(self) -> t.Any:
         """Return the current item. Equivalent to the item that will be
         returned next time :meth:`next` is called.
         """
-        pass
+        return self.items[self.pos]

-    def next(self) ->t.Any:
+    def next(self) -> t.Any:
         """Return the current item, then advance :attr:`current` to the
         next item.
         """
-        pass
+        rv = self.current
+        self.pos = (self.pos + 1) % len(self.items)
+        return rv
+
     __next__ = next


 class Joiner:
     """A joining helper for templates."""

-    def __init__(self, sep: str=', ') ->None:
+    def __init__(self, sep: str = ", ") -> None:
         self.sep = sep
         self.used = False

-    def __call__(self) ->str:
+    def __call__(self) -> str:
         if not self.used:
             self.used = True
-            return ''
+            return ""
         return self.sep


@@ -472,20 +735,21 @@ class Namespace:
     """A namespace object that can hold arbitrary attributes.  It may be
     initialized from a dictionary or with keyword arguments."""

-    def __init__(*args: t.Any, **kwargs: t.Any) ->None:
+    def __init__(*args: t.Any, **kwargs: t.Any) -> None:  # noqa: B902
         self, args = args[0], args[1:]
         self.__attrs = dict(*args, **kwargs)

-    def __getattribute__(self, name: str) ->t.Any:
-        if name in {'_Namespace__attrs', '__class__'}:
+    def __getattribute__(self, name: str) -> t.Any:
+        # __class__ is needed for the awaitable check in async mode
+        if name in {"_Namespace__attrs", "__class__"}:
             return object.__getattribute__(self, name)
         try:
             return self.__attrs[name]
         except KeyError:
             raise AttributeError(name) from None

-    def __setitem__(self, name: str, value: t.Any) ->None:
+    def __setitem__(self, name: str, value: t.Any) -> None:
         self.__attrs[name] = value

-    def __repr__(self) ->str:
-        return f'<Namespace {self.__attrs!r}>'
+    def __repr__(self) -> str:
+        return f"<Namespace {self.__attrs!r}>"
diff --git a/src/jinja2/visitor.py b/src/jinja2/visitor.py
index ebb34c6..7b8e180 100644
--- a/src/jinja2/visitor.py
+++ b/src/jinja2/visitor.py
@@ -1,16 +1,16 @@
 """API for traversing the AST nodes. Implemented by the compiler and
 meta introspection.
 """
+
 import typing as t
+
 from .nodes import Node
+
 if t.TYPE_CHECKING:
     import typing_extensions as te

-
     class VisitCallable(te.Protocol):
-
-        def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) ->t.Any:
-            ...
+        def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: ...


 class NodeVisitor:
@@ -25,20 +25,26 @@ class NodeVisitor:
     (return value `None`) the `generic_visit` visitor is used instead.
     """

-    def get_visitor(self, node: Node) ->'t.Optional[VisitCallable]':
+    def get_visitor(self, node: Node) -> "t.Optional[VisitCallable]":
         """Return the visitor function for this node or `None` if no visitor
         exists for this node.  In that case the generic visit function is
         used instead.
         """
-        pass
+        return getattr(self, f"visit_{type(node).__name__}", None)

-    def visit(self, node: Node, *args: t.Any, **kwargs: t.Any) ->t.Any:
+    def visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:
         """Visit a node."""
-        pass
+        f = self.get_visitor(node)

-    def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) ->t.Any:
+        if f is not None:
+            return f(node, *args, **kwargs)
+
+        return self.generic_visit(node, *args, **kwargs)
+
+    def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:
         """Called if no explicit visitor function exists for a node."""
-        pass
+        for child_node in node.iter_child_nodes():
+            self.visit(child_node, *args, **kwargs)


 class NodeTransformer(NodeVisitor):
@@ -52,9 +58,35 @@ class NodeTransformer(NodeVisitor):
     replacement takes place.
     """

-    def visit_list(self, node: Node, *args: t.Any, **kwargs: t.Any) ->t.List[
-        Node]:
+    def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> Node:
+        for field, old_value in node.iter_fields():
+            if isinstance(old_value, list):
+                new_values = []
+                for value in old_value:
+                    if isinstance(value, Node):
+                        value = self.visit(value, *args, **kwargs)
+                        if value is None:
+                            continue
+                        elif not isinstance(value, Node):
+                            new_values.extend(value)
+                            continue
+                    new_values.append(value)
+                old_value[:] = new_values
+            elif isinstance(old_value, Node):
+                new_node = self.visit(old_value, *args, **kwargs)
+                if new_node is None:
+                    delattr(node, field)
+                else:
+                    setattr(node, field, new_node)
+        return node
+
+    def visit_list(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.List[Node]:
         """As transformers may return lists in some places this method
         can be used to enforce a list as return value.
         """
-        pass
+        rv = self.visit(node, *args, **kwargs)
+
+        if not isinstance(rv, list):
+            return [rv]
+
+        return rv